]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-cfg.c
errors.c (internal_error): Commentary typo fix.
[thirdparty/gcc.git] / gcc / tree-cfg.c
CommitLineData
6de9cd9a 1/* Control flow functions for trees.
66647d44 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
56e84019 3 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
9dcd6f09 10the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
6de9cd9a
DN
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
42759f1e 45#include "cfglayout.h"
9af0df6b 46#include "tree-ssa-propagate.h"
6946b3f7 47#include "value-prof.h"
4437b50d 48#include "pointer-set.h"
917948d3 49#include "tree-inline.h"
6de9cd9a
DN
50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.
   Keyed by LOCUS; DISCRIMINATOR is the last value handed out for it.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
/* Build an empty CFG for function FN: just the fixed ENTRY and EXIT
   blocks, linked to each other, with no other blocks or edges.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks into the block array.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* Chain ENTRY and EXIT together so the block list is well formed.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
156
/* Build an empty CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
6de9cd9a
DN
162
163/*---------------------------------------------------------------------------
164 Create basic blocks
165---------------------------------------------------------------------------*/
166
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     needs to live for the duration of edge creation.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	gimple_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}
231
/* Gate-less execute hook of the CFG construction pass: build the
   flowgraph from the current function's flat gimple body, then drop
   the flat body (the statements now live in basic blocks).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}
246
/* Pass descriptor for CFG construction ("cfg").  Requires lowered
   gimple with EH (PROP_gimple_leh) and provides PROP_cfg.  */

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func			/* todo_flags_finish */
 }
};
266
726a989a
RB
267
268/* Return true if T is a computed goto. */
269
270static bool
271computed_goto_p (gimple t)
272{
273 return (gimple_code (t) == GIMPLE_GOTO
274 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
275}
276
277
6531d1be 278/* Search the CFG for any computed gotos. If found, factor them to a
6de9cd9a 279 common computed goto site. Also record the location of that site so
6531d1be 280 that we can un-factor the gotos after we have converted back to
6de9cd9a
DN
281 normal form. */
282
283static void
284factor_computed_gotos (void)
285{
286 basic_block bb;
287 tree factored_label_decl = NULL;
288 tree var = NULL;
726a989a
RB
289 gimple factored_computed_goto_label = NULL;
290 gimple factored_computed_goto = NULL;
6de9cd9a
DN
291
292 /* We know there are one or more computed gotos in this function.
293 Examine the last statement in each basic block to see if the block
294 ends with a computed goto. */
6531d1be 295
6de9cd9a
DN
296 FOR_EACH_BB (bb)
297 {
726a989a
RB
298 gimple_stmt_iterator gsi = gsi_last_bb (bb);
299 gimple last;
6de9cd9a 300
726a989a 301 if (gsi_end_p (gsi))
6de9cd9a 302 continue;
726a989a
RB
303
304 last = gsi_stmt (gsi);
6de9cd9a
DN
305
306 /* Ignore the computed goto we create when we factor the original
307 computed gotos. */
308 if (last == factored_computed_goto)
309 continue;
310
311 /* If the last statement is a computed goto, factor it. */
312 if (computed_goto_p (last))
313 {
726a989a 314 gimple assignment;
6de9cd9a
DN
315
316 /* The first time we find a computed goto we need to create
317 the factored goto block and the variable each original
318 computed goto will use for their goto destination. */
726a989a 319 if (!factored_computed_goto)
6de9cd9a
DN
320 {
321 basic_block new_bb = create_empty_bb (bb);
726a989a 322 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
6de9cd9a
DN
323
324 /* Create the destination of the factored goto. Each original
325 computed goto will put its desired destination into this
326 variable and jump to the label we create immediately
327 below. */
328 var = create_tmp_var (ptr_type_node, "gotovar");
329
330 /* Build a label for the new block which will contain the
331 factored computed goto. */
332 factored_label_decl = create_artificial_label ();
333 factored_computed_goto_label
726a989a
RB
334 = gimple_build_label (factored_label_decl);
335 gsi_insert_after (&new_gsi, factored_computed_goto_label,
336 GSI_NEW_STMT);
6de9cd9a
DN
337
338 /* Build our new computed goto. */
726a989a
RB
339 factored_computed_goto = gimple_build_goto (var);
340 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
6de9cd9a
DN
341 }
342
343 /* Copy the original computed goto's destination into VAR. */
726a989a
RB
344 assignment = gimple_build_assign (var, gimple_goto_dest (last));
345 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
6de9cd9a
DN
346
347 /* And re-vector the computed goto to the new destination. */
726a989a 348 gimple_goto_set_dest (last, factored_label_decl);
6de9cd9a
DN
349 }
350 }
351}
352
353
/* Build a flowgraph for the sequence of stmts SEQ: walk the statements
   in order, splitting SEQ into basic blocks at block starters and after
   block terminators.  Sets found_computed_goto as a side effect.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    seq = gsi_split_seq_before (&i);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex and vector temporaries may stay in registers.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
421
422
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-null, is an existing gimple_seq for the block; E must be null
   (edge argument required by the cfg-hooks signature but unused).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed (by an extra 25%).  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
460
461
462/*---------------------------------------------------------------------------
463 Edge creation
464---------------------------------------------------------------------------*/
465
fca01525
KH
/* Fold COND_EXPR_COND of each COND_EXPR: if a GIMPLE_COND's condition
   folds to constant true or false, rewrite it in place so dead edges
   can be removed later.  Overflow warnings are deferred across the
   fold so we only warn when the fold actually changed the cond.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
			      gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  /* Emit deferred warnings only if the cond became constant.  */
	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
503
6de9cd9a
DN
/* Join all the blocks in the flowgraph: walk every basic block and add
   the outgoing edges implied by its last statement, tracking nested
   OMP regions as we go so OMP control constructs get wired up.  */

static void
make_edges (void)
{
  basic_block bb;
  /* Innermost OMP region enclosing the current block, or NULL.  */
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		{
		  make_eh_edges (last);
		}
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      /* Open a new OMP region nested in the current one.  */
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      /* Edges out of the switch block are made when the
		 enclosing GIMPLE_OMP_CONTINUE is processed below.  */
	      fallthru = false;
	      break;


	    case GIMPLE_OMP_ATOMIC_LOAD:
	    case GIMPLE_OMP_ATOMIC_STORE:
	       fallthru = true;
	       break;


	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
681
6c52e687
CC
682/* Trivial hash function for a location_t. ITEM is a pointer to
683 a hash table entry that maps a location_t to a discriminator. */
684
685static unsigned int
686locus_map_hash (const void *item)
687{
688 return ((const struct locus_discrim_map *) item)->locus;
689}
690
691/* Equality function for the locus-to-discriminator map. VA and VB
692 point to the two hash table entries to compare. */
693
694static int
695locus_map_eq (const void *va, const void *vb)
696{
697 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
698 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
699 return a->locus == b->locus;
700}
701
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  /* Stack-allocated probe key; only LOCUS matters for the lookup.  */
  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
			      (hashval_t) locus, INSERT);
  gcc_assert (slot);
  /* First time we see LOCUS: create a heap entry for the table.  */
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  /* Hand out the next value; first returned discriminator is 1.  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
729
730/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
731
732static bool
733same_line_p (location_t locus1, location_t locus2)
734{
735 expanded_location from, to;
736
737 if (locus1 == locus2)
738 return true;
739
740 from = expand_location (locus1);
741 to = expand_location (locus2);
742
743 if (from.line != to.line)
744 return false;
745 if (from.file == to.file)
746 return true;
747 return (from.file != NULL
748 && to.file != NULL
749 && strcmp (from.file, to.file) == 0);
750}
751
/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple to_stmt;

  /* Nothing to do for an unknown location or if BB already has a
     discriminator.  */
  if (locus == 0 || bb->discriminator != 0)
    return;

  to_stmt = first_non_label_stmt (bb);
  if (to_stmt && same_line_p (locus, gimple_location (to_stmt)))
    bb->discriminator = next_discriminator_for_locus (locus);
}
6de9cd9a 767
/* Create the edges for a GIMPLE_COND starting at block BB: a TRUE edge
   to the then-block and a FALSE edge to the else-block, propagating the
   goto locus/block onto each edge.  The cond's label operands are
   cleared afterwards since the edges now carry that information.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  /* NOTE(review): only the FALSE edge is NULL-checked here; the TRUE
     edge result is used unconditionally — presumably make_edge cannot
     fail for the first edge between these blocks.  Confirm upstream.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
	e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
811
92b6dff3 812
d6be0d7f
JL
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
		       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  /* Walk the CASE_LABEL_EXPR chain, unlinking each element.  */
  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  /* Returning false keeps pointer_map_traverse iterating.  */
  return false;
}
835
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  /* Recording must not already be in progress.  */
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}
844
845/* Return nonzero if we are recording information for case labels. */
846
847static bool
848recording_case_labels_p (void)
849{
850 return (edge_to_cases != NULL);
851}
852
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  /* Break the TREE_CHAIN links before destroying the map.  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}
862
d6be0d7f
JL
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* The loop above is guaranteed to have populated E's slot.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
6de9cd9a 904
/* Create the edges for a GIMPLE_SWITCH starting at block BB: one edge
   per case label, each destination getting a discriminator if it
   shares the switch's source line.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}
926
927
/* Return the basic block holding label DEST in function IFUN, or NULL
   if the label is not mapped.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigned it a UID; re-read it.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
952
4f6c2131
EB
953/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
954 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
955
956void
957make_abnormal_goto_edges (basic_block bb, bool for_call)
958{
959 basic_block target_bb;
726a989a 960 gimple_stmt_iterator gsi;
4f6c2131
EB
961
962 FOR_EACH_BB (target_bb)
726a989a 963 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
4f6c2131 964 {
726a989a
RB
965 gimple label_stmt = gsi_stmt (gsi);
966 tree target;
4f6c2131 967
726a989a 968 if (gimple_code (label_stmt) != GIMPLE_LABEL)
4f6c2131
EB
969 break;
970
726a989a 971 target = gimple_label_label (label_stmt);
4f6c2131
EB
972
973 /* Make an edge to every label block that has been marked as a
974 potential target for a computed goto or a non-local goto. */
975 if ((FORCED_LABEL (target) && !for_call)
976 || (DECL_NONLOCAL (target) && for_call))
977 {
978 make_edge (bb, target_bb, EDGE_ABNORMAL);
979 break;
980 }
981 }
982}
983
6de9cd9a
DN
984/* Create edges for a goto statement at block BB. */
985
986static void
987make_goto_expr_edges (basic_block bb)
988{
726a989a
RB
989 gimple_stmt_iterator last = gsi_last_bb (bb);
990 gimple goto_t = gsi_stmt (last);
6de9cd9a 991
4f6c2131
EB
992 /* A simple GOTO creates normal edges. */
993 if (simple_goto_p (goto_t))
6de9cd9a 994 {
726a989a 995 tree dest = gimple_goto_dest (goto_t);
6c52e687
CC
996 basic_block label_bb = label_to_block (dest);
997 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
726a989a 998 e->goto_locus = gimple_location (goto_t);
6c52e687 999 assign_discriminator (e->goto_locus, label_bb);
cc2a64dd
JJ
1000 if (e->goto_locus)
1001 e->goto_block = gimple_block (goto_t);
726a989a 1002 gsi_remove (&last, true);
4f6c2131 1003 return;
6de9cd9a
DN
1004 }
1005
4f6c2131
EB
1006 /* A computed GOTO creates abnormal edges. */
1007 make_abnormal_goto_edges (bb, false);
6de9cd9a
DN
1008}
1009
1010
1011/*---------------------------------------------------------------------------
1012 Flowgraph analysis
1013---------------------------------------------------------------------------*/
1014
f698d217
SB
1015/* Cleanup useless labels in basic blocks. This is something we wish
1016 to do early because it allows us to group case labels before creating
1017 the edges for the CFG, and it speeds up block statement iterators in
1018 all passes later on.
8b11009b
ZD
1019 We rerun this pass after CFG is created, to get rid of the labels that
1020 are no longer referenced. After then we do not run it any more, since
1021 (almost) no new labels should be created. */
f698d217
SB
1022
1023/* A map from basic block index to the leading label of that block. */
8b11009b
ZD
1024static struct label_record
1025{
1026 /* The label. */
1027 tree label;
1028
1029 /* True if the label is referenced from somewhere. */
1030 bool used;
1031} *label_for_bb;
f698d217
SB
1032
1033/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
1034static void
7e5487a2 1035update_eh_label (struct eh_region_d *region)
f698d217
SB
1036{
1037 tree old_label = get_eh_region_tree_label (region);
1038 if (old_label)
1039 {
165b54c3
SB
1040 tree new_label;
1041 basic_block bb = label_to_block (old_label);
1042
1043 /* ??? After optimizing, there may be EH regions with labels
1044 that have already been removed from the function body, so
1045 there is no basic block for them. */
1046 if (! bb)
1047 return;
1048
8b11009b
ZD
1049 new_label = label_for_bb[bb->index].label;
1050 label_for_bb[bb->index].used = true;
f698d217
SB
1051 set_eh_region_tree_label (region, new_label);
1052 }
1053}
1054
726a989a 1055
242229bb 1056/* Given LABEL return the first label in the same basic block. */
726a989a 1057
242229bb
JH
1058static tree
1059main_block_label (tree label)
1060{
1061 basic_block bb = label_to_block (label);
8b11009b 1062 tree main_label = label_for_bb[bb->index].label;
242229bb
JH
1063
1064 /* label_to_block possibly inserted undefined label into the chain. */
8b11009b
ZD
1065 if (!main_label)
1066 {
1067 label_for_bb[bb->index].label = label;
1068 main_label = label;
1069 }
1070
1071 label_for_bb[bb->index].used = true;
1072 return main_label;
242229bb
JH
1073}
1074
b986ebf3 1075/* Cleanup redundant labels. This is a three-step process:
f698d217
SB
1076 1) Find the leading label for each block.
1077 2) Redirect all references to labels to the leading labels.
1078 3) Cleanup all useless labels. */
6de9cd9a 1079
165b54c3 1080void
6de9cd9a
DN
1081cleanup_dead_labels (void)
1082{
1083 basic_block bb;
8b11009b 1084 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
1085
1086 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 1087 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
1088 FOR_EACH_BB (bb)
1089 {
726a989a 1090 gimple_stmt_iterator i;
6de9cd9a 1091
726a989a 1092 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
6de9cd9a 1093 {
726a989a
RB
1094 tree label;
1095 gimple stmt = gsi_stmt (i);
6de9cd9a 1096
726a989a 1097 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1098 break;
1099
726a989a 1100 label = gimple_label_label (stmt);
6de9cd9a
DN
1101
1102 /* If we have not yet seen a label for the current block,
1103 remember this one and see if there are more labels. */
8b11009b 1104 if (!label_for_bb[bb->index].label)
6de9cd9a 1105 {
8b11009b 1106 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1107 continue;
1108 }
1109
1110 /* If we did see a label for the current block already, but it
1111 is an artificially created label, replace it if the current
1112 label is a user defined label. */
8b11009b
ZD
1113 if (!DECL_ARTIFICIAL (label)
1114 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 1115 {
8b11009b 1116 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1117 break;
1118 }
1119 }
1120 }
1121
f698d217
SB
1122 /* Now redirect all jumps/branches to the selected label.
1123 First do so for each block ending in a control statement. */
6de9cd9a
DN
1124 FOR_EACH_BB (bb)
1125 {
726a989a 1126 gimple stmt = last_stmt (bb);
6de9cd9a
DN
1127 if (!stmt)
1128 continue;
1129
726a989a 1130 switch (gimple_code (stmt))
6de9cd9a 1131 {
726a989a 1132 case GIMPLE_COND:
6de9cd9a 1133 {
726a989a
RB
1134 tree true_label = gimple_cond_true_label (stmt);
1135 tree false_label = gimple_cond_false_label (stmt);
6de9cd9a 1136
726a989a
RB
1137 if (true_label)
1138 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1139 if (false_label)
1140 gimple_cond_set_false_label (stmt, main_block_label (false_label));
6de9cd9a
DN
1141 break;
1142 }
6531d1be 1143
726a989a 1144 case GIMPLE_SWITCH:
6de9cd9a 1145 {
726a989a 1146 size_t i, n = gimple_switch_num_labels (stmt);
6531d1be 1147
6de9cd9a
DN
1148 /* Replace all destination labels. */
1149 for (i = 0; i < n; ++i)
92b6dff3 1150 {
726a989a
RB
1151 tree case_label = gimple_switch_label (stmt, i);
1152 tree label = main_block_label (CASE_LABEL (case_label));
1153 CASE_LABEL (case_label) = label;
92b6dff3 1154 }
6de9cd9a
DN
1155 break;
1156 }
1157
726a989a 1158 /* We have to handle gotos until they're removed, and we don't
f667741c 1159 remove them until after we've created the CFG edges. */
726a989a
RB
1160 case GIMPLE_GOTO:
1161 if (!computed_goto_p (stmt))
242229bb 1162 {
726a989a
RB
1163 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1164 gimple_goto_set_dest (stmt, new_dest);
242229bb
JH
1165 break;
1166 }
f667741c 1167
6de9cd9a
DN
1168 default:
1169 break;
1170 }
1171 }
1172
f698d217
SB
1173 for_each_eh_region (update_eh_label);
1174
6de9cd9a 1175 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1176 can be the target of non-local gotos and labels which have their
1177 address taken are preserved. */
6de9cd9a
DN
1178 FOR_EACH_BB (bb)
1179 {
726a989a 1180 gimple_stmt_iterator i;
8b11009b 1181 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1182
8b11009b 1183 if (!label_for_this_bb)
6de9cd9a
DN
1184 continue;
1185
8b11009b
ZD
1186 /* If the main label of the block is unused, we may still remove it. */
1187 if (!label_for_bb[bb->index].used)
1188 label_for_this_bb = NULL;
1189
726a989a 1190 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
6de9cd9a 1191 {
726a989a
RB
1192 tree label;
1193 gimple stmt = gsi_stmt (i);
6de9cd9a 1194
726a989a 1195 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1196 break;
1197
726a989a 1198 label = gimple_label_label (stmt);
6de9cd9a
DN
1199
1200 if (label == label_for_this_bb
726a989a 1201 || !DECL_ARTIFICIAL (label)
cea0f4f1
AP
1202 || DECL_NONLOCAL (label)
1203 || FORCED_LABEL (label))
726a989a 1204 gsi_next (&i);
6de9cd9a 1205 else
726a989a 1206 gsi_remove (&i, true);
6de9cd9a
DN
1207 }
1208 }
1209
1210 free (label_for_bb);
1211}
1212
f667741c
SB
1213/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1214 and scan the sorted vector of cases. Combine the ones jumping to the
1215 same label.
1216 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1217
165b54c3 1218void
f667741c
SB
1219group_case_labels (void)
1220{
1221 basic_block bb;
1222
1223 FOR_EACH_BB (bb)
1224 {
726a989a
RB
1225 gimple stmt = last_stmt (bb);
1226 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
f667741c 1227 {
726a989a 1228 int old_size = gimple_switch_num_labels (stmt);
f667741c 1229 int i, j, new_size = old_size;
b7814a18
RG
1230 tree default_case = NULL_TREE;
1231 tree default_label = NULL_TREE;
726a989a 1232 bool has_default;
29c4d22b 1233
726a989a 1234 /* The default label is always the first case in a switch
b7814a18 1235 statement after gimplification if it was not optimized
726a989a
RB
1236 away */
1237 if (!CASE_LOW (gimple_switch_default_label (stmt))
1238 && !CASE_HIGH (gimple_switch_default_label (stmt)))
b7814a18 1239 {
726a989a 1240 default_case = gimple_switch_default_label (stmt);
b7814a18 1241 default_label = CASE_LABEL (default_case);
726a989a 1242 has_default = true;
b7814a18 1243 }
726a989a
RB
1244 else
1245 has_default = false;
f667741c 1246
b7814a18 1247 /* Look for possible opportunities to merge cases. */
726a989a
RB
1248 if (has_default)
1249 i = 1;
1250 else
1251 i = 0;
b7814a18 1252 while (i < old_size)
f667741c 1253 {
ed9cef22 1254 tree base_case, base_label, base_high;
726a989a 1255 base_case = gimple_switch_label (stmt, i);
f667741c 1256
1e128c5f 1257 gcc_assert (base_case);
f667741c 1258 base_label = CASE_LABEL (base_case);
31e9eea2
SB
1259
1260 /* Discard cases that have the same destination as the
1261 default case. */
1262 if (base_label == default_label)
1263 {
726a989a 1264 gimple_switch_set_label (stmt, i, NULL_TREE);
31e9eea2 1265 i++;
29c4d22b 1266 new_size--;
31e9eea2
SB
1267 continue;
1268 }
1269
726a989a
RB
1270 base_high = CASE_HIGH (base_case)
1271 ? CASE_HIGH (base_case)
1272 : CASE_LOW (base_case);
d717e500 1273 i++;
726a989a 1274
f667741c
SB
1275 /* Try to merge case labels. Break out when we reach the end
1276 of the label vector or when we cannot merge the next case
1277 label with the current one. */
b7814a18 1278 while (i < old_size)
f667741c 1279 {
726a989a 1280 tree merge_case = gimple_switch_label (stmt, i);
f667741c
SB
1281 tree merge_label = CASE_LABEL (merge_case);
1282 tree t = int_const_binop (PLUS_EXPR, base_high,
1283 integer_one_node, 1);
1284
1285 /* Merge the cases if they jump to the same place,
1286 and their ranges are consecutive. */
1287 if (merge_label == base_label
1288 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1289 {
1290 base_high = CASE_HIGH (merge_case) ?
1291 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1292 CASE_HIGH (base_case) = base_high;
726a989a 1293 gimple_switch_set_label (stmt, i, NULL_TREE);
f667741c 1294 new_size--;
d717e500 1295 i++;
f667741c
SB
1296 }
1297 else
1298 break;
1299 }
1300 }
1301
1302 /* Compress the case labels in the label vector, and adjust the
1303 length of the vector. */
1304 for (i = 0, j = 0; i < new_size; i++)
1305 {
726a989a 1306 while (! gimple_switch_label (stmt, j))
f667741c 1307 j++;
726a989a
RB
1308 gimple_switch_set_label (stmt, i,
1309 gimple_switch_label (stmt, j++));
f667741c 1310 }
726a989a
RB
1311
1312 gcc_assert (new_size <= old_size);
1313 gimple_switch_set_num_labels (stmt, new_size);
f667741c
SB
1314 }
1315 }
1316}
6de9cd9a
DN
1317
1318/* Checks whether we can merge block B into block A. */
1319
1320static bool
726a989a 1321gimple_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1322{
726a989a
RB
1323 gimple stmt;
1324 gimple_stmt_iterator gsi;
1325 gimple_seq phis;
6de9cd9a 1326
c5cbcccf 1327 if (!single_succ_p (a))
6de9cd9a
DN
1328 return false;
1329
496a4ef5 1330 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
6de9cd9a
DN
1331 return false;
1332
c5cbcccf 1333 if (single_succ (a) != b)
6de9cd9a
DN
1334 return false;
1335
c5cbcccf 1336 if (!single_pred_p (b))
6de9cd9a
DN
1337 return false;
1338
26e75214
KH
1339 if (b == EXIT_BLOCK_PTR)
1340 return false;
6531d1be 1341
6de9cd9a
DN
1342 /* If A ends by a statement causing exceptions or something similar, we
1343 cannot merge the blocks. */
726a989a 1344 stmt = last_stmt (a);
6de9cd9a
DN
1345 if (stmt && stmt_ends_bb_p (stmt))
1346 return false;
1347
1348 /* Do not allow a block with only a non-local label to be merged. */
726a989a
RB
1349 if (stmt
1350 && gimple_code (stmt) == GIMPLE_LABEL
1351 && DECL_NONLOCAL (gimple_label_label (stmt)))
6de9cd9a
DN
1352 return false;
1353
38965eb2 1354 /* It must be possible to eliminate all phi nodes in B. If ssa form
8f8bb1d2
ZD
1355 is not up-to-date, we cannot eliminate any phis; however, if only
1356 some symbols as whole are marked for renaming, this is not a problem,
1357 as phi nodes for those symbols are irrelevant in updating anyway. */
726a989a
RB
1358 phis = phi_nodes (b);
1359 if (!gimple_seq_empty_p (phis))
38965eb2 1360 {
726a989a
RB
1361 gimple_stmt_iterator i;
1362
8f8bb1d2 1363 if (name_mappings_registered_p ())
38965eb2
ZD
1364 return false;
1365
726a989a
RB
1366 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1367 {
1368 gimple phi = gsi_stmt (i);
1369
1370 if (!is_gimple_reg (gimple_phi_result (phi))
1371 && !may_propagate_copy (gimple_phi_result (phi),
1372 gimple_phi_arg_def (phi, 0)))
1373 return false;
1374 }
38965eb2 1375 }
6de9cd9a
DN
1376
1377 /* Do not remove user labels. */
726a989a 1378 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 1379 {
726a989a
RB
1380 stmt = gsi_stmt (gsi);
1381 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 1382 break;
726a989a 1383 if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
6de9cd9a
DN
1384 return false;
1385 }
1386
2b271002
ZD
1387 /* Protect the loop latches. */
1388 if (current_loops
1389 && b->loop_father->latch == b)
1390 return false;
1391
6de9cd9a
DN
1392 return true;
1393}
1394
38965eb2
ZD
1395/* Replaces all uses of NAME by VAL. */
1396
684aaf29 1397void
38965eb2
ZD
1398replace_uses_by (tree name, tree val)
1399{
1400 imm_use_iterator imm_iter;
1401 use_operand_p use;
726a989a 1402 gimple stmt;
38965eb2 1403 edge e;
38965eb2 1404
6c00f606 1405 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1406 {
6c00f606
AM
1407 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1408 {
1409 replace_exp (use, val);
38965eb2 1410
726a989a 1411 if (gimple_code (stmt) == GIMPLE_PHI)
38965eb2 1412 {
726a989a 1413 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
6c00f606
AM
1414 if (e->flags & EDGE_ABNORMAL)
1415 {
1416 /* This can only occur for virtual operands, since
1417 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1418 would prevent replacement. */
1419 gcc_assert (!is_gimple_reg (name));
1420 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1421 }
38965eb2
ZD
1422 }
1423 }
cfaab3a9 1424
726a989a 1425 if (gimple_code (stmt) != GIMPLE_PHI)
6c00f606 1426 {
726a989a 1427 size_t i;
9af0df6b 1428
6c00f606 1429 fold_stmt_inplace (stmt);
672987e8 1430 if (cfgcleanup_altered_bbs)
726a989a 1431 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
cfaab3a9 1432
cff4e50d 1433 /* FIXME. This should go in update_stmt. */
726a989a
RB
1434 for (i = 0; i < gimple_num_ops (stmt); i++)
1435 {
1436 tree op = gimple_op (stmt, i);
1437 /* Operands may be empty here. For example, the labels
1438 of a GIMPLE_COND are nulled out following the creation
1439 of the corresponding CFG edges. */
1440 if (op && TREE_CODE (op) == ADDR_EXPR)
1441 recompute_tree_invariant_for_addr_expr (op);
1442 }
9af0df6b 1443
6c00f606 1444 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cff4e50d 1445 update_stmt (stmt);
6c00f606 1446 }
38965eb2 1447 }
6531d1be 1448
40b448ef 1449 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1450
1451 /* Also update the trees stored in loop structures. */
1452 if (current_loops)
1453 {
1454 struct loop *loop;
42fd6772 1455 loop_iterator li;
d5ab5675 1456
42fd6772 1457 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1458 {
42fd6772 1459 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1460 }
1461 }
38965eb2 1462}
6de9cd9a
DN
1463
1464/* Merge block B into block A. */
1465
1466static void
726a989a 1467gimple_merge_blocks (basic_block a, basic_block b)
6de9cd9a 1468{
726a989a
RB
1469 gimple_stmt_iterator last, gsi, psi;
1470 gimple_seq phis = phi_nodes (b);
6de9cd9a
DN
1471
1472 if (dump_file)
1473 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1474
c4f548b8
DN
1475 /* Remove all single-valued PHI nodes from block B of the form
1476 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
726a989a
RB
1477 gsi = gsi_last_bb (a);
1478 for (psi = gsi_start (phis); !gsi_end_p (psi); )
38965eb2 1479 {
726a989a
RB
1480 gimple phi = gsi_stmt (psi);
1481 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1482 gimple copy;
1483 bool may_replace_uses = !is_gimple_reg (def)
1484 || may_propagate_copy (def, use);
d7f0e25c 1485
7c8eb293
ZD
1486 /* In case we maintain loop closed ssa form, do not propagate arguments
1487 of loop exit phi nodes. */
d7f0e25c 1488 if (current_loops
f87000d0 1489 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1490 && is_gimple_reg (def)
1491 && TREE_CODE (use) == SSA_NAME
1492 && a->loop_father != b->loop_father)
1493 may_replace_uses = false;
1494
1495 if (!may_replace_uses)
38965eb2
ZD
1496 {
1497 gcc_assert (is_gimple_reg (def));
1498
128a79fb 1499 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1500 with ordering of phi nodes. This is because A is the single
1501 predecessor of B, therefore results of the phi nodes cannot
1502 appear as arguments of the phi nodes. */
726a989a
RB
1503 copy = gimple_build_assign (def, use);
1504 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1505 remove_phi_node (&psi, false);
38965eb2
ZD
1506 }
1507 else
611021e1 1508 {
d0f76c4b
RG
1509 /* If we deal with a PHI for virtual operands, we can simply
1510 propagate these without fussing with folding or updating
1511 the stmt. */
1512 if (!is_gimple_reg (def))
1513 {
1514 imm_use_iterator iter;
1515 use_operand_p use_p;
726a989a 1516 gimple stmt;
d0f76c4b
RG
1517
1518 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1519 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1520 SET_USE (use_p, use);
1521 }
1522 else
1523 replace_uses_by (def, use);
726a989a
RB
1524
1525 remove_phi_node (&psi, true);
611021e1 1526 }
38965eb2
ZD
1527 }
1528
6de9cd9a
DN
1529 /* Ensure that B follows A. */
1530 move_block_after (b, a);
1531
c5cbcccf 1532 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1533 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a 1534
726a989a
RB
1535 /* Remove labels from B and set gimple_bb to A for other statements. */
1536 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
6de9cd9a 1537 {
726a989a 1538 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
be477406 1539 {
726a989a
RB
1540 gimple label = gsi_stmt (gsi);
1541
1542 gsi_remove (&gsi, false);
be477406 1543
be477406
JL
1544 /* Now that we can thread computed gotos, we might have
1545 a situation where we have a forced label in block B
1546 However, the label at the start of block B might still be
1547 used in other ways (think about the runtime checking for
1548 Fortran assigned gotos). So we can not just delete the
1549 label. Instead we move the label to the start of block A. */
726a989a 1550 if (FORCED_LABEL (gimple_label_label (label)))
be477406 1551 {
726a989a
RB
1552 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1553 gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
be477406
JL
1554 }
1555 }
6de9cd9a
DN
1556 else
1557 {
726a989a
RB
1558 gimple_set_bb (gsi_stmt (gsi), a);
1559 gsi_next (&gsi);
6de9cd9a
DN
1560 }
1561 }
1562
726a989a
RB
1563 /* Merge the sequences. */
1564 last = gsi_last_bb (a);
1565 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1566 set_bb_seq (b, NULL);
672987e8
ZD
1567
1568 if (cfgcleanup_altered_bbs)
1569 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1570}
1571
1572
bc23502b
PB
 1573/* Return the one of two successors of BB that is not reached by a
 1574 complex edge, if there is one. Else, return BB. We use
 1575 this in optimizations that use post-dominators for their heuristics,
 1576 to catch the cases in C++ where function calls are involved. */
6531d1be 1577
bc23502b 1578basic_block
6531d1be 1579single_noncomplex_succ (basic_block bb)
bc23502b
PB
1580{
1581 edge e0, e1;
1582 if (EDGE_COUNT (bb->succs) != 2)
1583 return bb;
6531d1be 1584
bc23502b
PB
1585 e0 = EDGE_SUCC (bb, 0);
1586 e1 = EDGE_SUCC (bb, 1);
1587 if (e0->flags & EDGE_COMPLEX)
1588 return e1->dest;
1589 if (e1->flags & EDGE_COMPLEX)
1590 return e0->dest;
6531d1be 1591
bc23502b 1592 return bb;
6531d1be 1593}
bc23502b
PB
1594
1595
6de9cd9a
DN
1596/* Walk the function tree removing unnecessary statements.
1597
1598 * Empty statement nodes are removed
1599
1600 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1601
1602 * Unnecessary COND_EXPRs are removed
1603
1604 * Some unnecessary BIND_EXPRs are removed
1605
726a989a
RB
1606 * GOTO_EXPRs immediately preceding destination are removed.
1607
6de9cd9a
DN
1608 Clearly more work could be done. The trick is doing the analysis
1609 and removal fast enough to be a net improvement in compile times.
1610
1611 Note that when we remove a control structure such as a COND_EXPR
1612 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1613 to ensure we eliminate all the useless code. */
1614
 1615struct rus_data
 1616{
6de9cd9a
DN
 /* Set when a simplification happened that makes another cleanup
    pass over the function worthwhile.  */
 1617 bool repeat;
 /* Set when the statements scanned so far may throw an exception.  */
 1618 bool may_throw;
 /* Set when control may leave the scanned statements early (e.g. a
    conditional/branch was seen).  */
 1619 bool may_branch;
 /* Presumably set when a label has been seen -- its consumers are in
    remove_useless_stmts_1, which is outside this chunk; confirm there.  */
 1620 bool has_label;
726a989a
RB
 /* Whether the most recently scanned statement was a goto, and the
    iterator position of that goto (recorded by remove_useless_stmts_cond
    when it rewrites a trivial conditional; consumed elsewhere).  */
 1621 bool last_was_goto;
 1622 gimple_stmt_iterator last_goto_gsi;
6de9cd9a
DN
 1623};
1624
726a989a
RB
1625
1626static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
1627
1628/* Given a statement sequence, find the first executable statement with
1629 location information, and warn that it is unreachable. When searching,
1630 descend into containers in execution order. */
6de9cd9a
DN
1631
1632static bool
726a989a 1633remove_useless_stmts_warn_notreached (gimple_seq stmts)
6de9cd9a 1634{
726a989a 1635 gimple_stmt_iterator gsi;
6de9cd9a 1636
726a989a 1637 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 1638 {
726a989a 1639 gimple stmt = gsi_stmt (gsi);
6de9cd9a 1640
726a989a
RB
1641 if (gimple_has_location (stmt))
1642 {
1643 location_t loc = gimple_location (stmt);
1644 if (LOCATION_LINE (loc) > 0)
1645 {
1646 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
1647 return true;
1648 }
1649 }
6de9cd9a 1650
726a989a
RB
1651 switch (gimple_code (stmt))
1652 {
1653 /* Unfortunately, we need the CFG now to detect unreachable
1654 branches in a conditional, so conditionals are not handled here. */
6de9cd9a 1655
726a989a
RB
1656 case GIMPLE_TRY:
1657 if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
1658 return true;
1659 if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
1660 return true;
1661 break;
6de9cd9a 1662
726a989a
RB
1663 case GIMPLE_CATCH:
1664 return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
1665
1666 case GIMPLE_EH_FILTER:
1667 return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
1668
1669 case GIMPLE_BIND:
1670 return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
1671
1672 default:
1673 break;
1674 }
6de9cd9a
DN
1675 }
1676
1677 return false;
1678}
1679
726a989a
RB
1680/* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
1681
6de9cd9a 1682static void
726a989a 1683remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1684{
726a989a 1685 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 1686
726a989a 1687 /* The folded result must still be a conditional statement. */
2586ba4b
RG
1688 fold_stmt (gsi);
1689 gcc_assert (gsi_stmt (*gsi) == stmt);
6de9cd9a 1690
726a989a 1691 data->may_branch = true;
6de9cd9a 1692
726a989a
RB
1693 /* Replace trivial conditionals with gotos. */
1694 if (gimple_cond_true_p (stmt))
6de9cd9a 1695 {
726a989a
RB
1696 /* Goto THEN label. */
1697 tree then_label = gimple_cond_true_label (stmt);
6de9cd9a 1698
726a989a
RB
1699 gsi_replace (gsi, gimple_build_goto (then_label), false);
1700 data->last_goto_gsi = *gsi;
1701 data->last_was_goto = true;
6de9cd9a
DN
1702 data->repeat = true;
1703 }
726a989a 1704 else if (gimple_cond_false_p (stmt))
6de9cd9a 1705 {
726a989a
RB
1706 /* Goto ELSE label. */
1707 tree else_label = gimple_cond_false_label (stmt);
1708
1709 gsi_replace (gsi, gimple_build_goto (else_label), false);
1710 data->last_goto_gsi = *gsi;
1711 data->last_was_goto = true;
6de9cd9a
DN
1712 data->repeat = true;
1713 }
6de9cd9a
DN
1714 else
1715 {
726a989a
RB
1716 tree then_label = gimple_cond_true_label (stmt);
1717 tree else_label = gimple_cond_false_label (stmt);
6de9cd9a 1718
726a989a
RB
1719 if (then_label == else_label)
1720 {
1721 /* Goto common destination. */
1722 gsi_replace (gsi, gimple_build_goto (then_label), false);
1723 data->last_goto_gsi = *gsi;
1724 data->last_was_goto = true;
6de9cd9a
DN
1725 data->repeat = true;
1726 }
6de9cd9a
DN
1727 }
1728
726a989a
RB
1729 gsi_next (gsi);
1730
1731 data->last_was_goto = false;
6de9cd9a
DN
1732}
1733
726a989a
RB
1734/* Helper for remove_useless_stmts_1.
1735 Handle the try-finally case for GIMPLE_TRY statements. */
6de9cd9a
DN
1736
1737static void
726a989a 1738remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a
DN
1739{
1740 bool save_may_branch, save_may_throw;
1741 bool this_may_branch, this_may_throw;
1742
726a989a
RB
1743 gimple_seq eval_seq, cleanup_seq;
1744 gimple_stmt_iterator eval_gsi, cleanup_gsi;
1745
1746 gimple stmt = gsi_stmt (*gsi);
1747
6de9cd9a
DN
1748 /* Collect may_branch and may_throw information for the body only. */
1749 save_may_branch = data->may_branch;
1750 save_may_throw = data->may_throw;
1751 data->may_branch = false;
1752 data->may_throw = false;
726a989a 1753 data->last_was_goto = false;
6de9cd9a 1754
726a989a
RB
1755 eval_seq = gimple_try_eval (stmt);
1756 eval_gsi = gsi_start (eval_seq);
1757 remove_useless_stmts_1 (&eval_gsi, data);
6de9cd9a
DN
1758
1759 this_may_branch = data->may_branch;
1760 this_may_throw = data->may_throw;
1761 data->may_branch |= save_may_branch;
1762 data->may_throw |= save_may_throw;
726a989a 1763 data->last_was_goto = false;
6de9cd9a 1764
726a989a
RB
1765 cleanup_seq = gimple_try_cleanup (stmt);
1766 cleanup_gsi = gsi_start (cleanup_seq);
1767 remove_useless_stmts_1 (&cleanup_gsi, data);
6de9cd9a
DN
1768
1769 /* If the body is empty, then we can emit the FINALLY block without
1770 the enclosing TRY_FINALLY_EXPR. */
726a989a 1771 if (gimple_seq_empty_p (eval_seq))
6de9cd9a 1772 {
726a989a
RB
1773 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1774 gsi_remove (gsi, false);
6de9cd9a
DN
1775 data->repeat = true;
1776 }
1777
1778 /* If the handler is empty, then we can emit the TRY block without
1779 the enclosing TRY_FINALLY_EXPR. */
726a989a 1780 else if (gimple_seq_empty_p (cleanup_seq))
6de9cd9a 1781 {
726a989a
RB
1782 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1783 gsi_remove (gsi, false);
6de9cd9a
DN
1784 data->repeat = true;
1785 }
1786
1787 /* If the body neither throws, nor branches, then we can safely
1788 string the TRY and FINALLY blocks together. */
1789 else if (!this_may_branch && !this_may_throw)
1790 {
726a989a
RB
1791 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1792 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1793 gsi_remove (gsi, false);
6de9cd9a
DN
1794 data->repeat = true;
1795 }
726a989a
RB
1796 else
1797 gsi_next (gsi);
6de9cd9a
DN
1798}
1799
726a989a
RB
1800/* Helper for remove_useless_stmts_1.
1801 Handle the try-catch case for GIMPLE_TRY statements. */
6de9cd9a
DN
1802
1803static void
726a989a 1804remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a
DN
1805{
1806 bool save_may_throw, this_may_throw;
726a989a
RB
1807
1808 gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
1809 gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
1810
1811 gimple stmt = gsi_stmt (*gsi);
6de9cd9a
DN
1812
1813 /* Collect may_throw information for the body only. */
1814 save_may_throw = data->may_throw;
1815 data->may_throw = false;
726a989a 1816 data->last_was_goto = false;
6de9cd9a 1817
726a989a
RB
1818 eval_seq = gimple_try_eval (stmt);
1819 eval_gsi = gsi_start (eval_seq);
1820 remove_useless_stmts_1 (&eval_gsi, data);
6de9cd9a
DN
1821
1822 this_may_throw = data->may_throw;
1823 data->may_throw = save_may_throw;
1824
726a989a
RB
1825 cleanup_seq = gimple_try_cleanup (stmt);
1826
6de9cd9a
DN
1827 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1828 if (!this_may_throw)
1829 {
1830 if (warn_notreached)
726a989a
RB
1831 {
1832 remove_useless_stmts_warn_notreached (cleanup_seq);
1833 }
1834 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1835 gsi_remove (gsi, false);
6de9cd9a
DN
1836 data->repeat = true;
1837 return;
1838 }
1839
1840 /* Process the catch clause specially. We may be able to tell that
1841 no exceptions propagate past this point. */
1842
1843 this_may_throw = true;
726a989a
RB
1844 cleanup_gsi = gsi_start (cleanup_seq);
1845 stmt = gsi_stmt (cleanup_gsi);
1846 data->last_was_goto = false;
6de9cd9a 1847
726a989a 1848 switch (gimple_code (stmt))
6de9cd9a 1849 {
726a989a
RB
1850 case GIMPLE_CATCH:
1851 /* If the first element is a catch, they all must be. */
1852 while (!gsi_end_p (cleanup_gsi))
1853 {
1854 stmt = gsi_stmt (cleanup_gsi);
6de9cd9a
DN
1855 /* If we catch all exceptions, then the body does not
1856 propagate exceptions past this point. */
726a989a 1857 if (gimple_catch_types (stmt) == NULL)
6de9cd9a 1858 this_may_throw = false;
726a989a
RB
1859 data->last_was_goto = false;
1860 handler_seq = gimple_catch_handler (stmt);
1861 handler_gsi = gsi_start (handler_seq);
1862 remove_useless_stmts_1 (&handler_gsi, data);
1863 gsi_next (&cleanup_gsi);
6de9cd9a 1864 }
726a989a 1865 gsi_next (gsi);
6de9cd9a
DN
1866 break;
1867
726a989a
RB
1868 case GIMPLE_EH_FILTER:
1869 /* If the first element is an eh_filter, it should stand alone. */
1870 if (gimple_eh_filter_must_not_throw (stmt))
6de9cd9a 1871 this_may_throw = false;
726a989a 1872 else if (gimple_eh_filter_types (stmt) == NULL)
6de9cd9a 1873 this_may_throw = false;
726a989a
RB
1874 failure_seq = gimple_eh_filter_failure (stmt);
1875 failure_gsi = gsi_start (failure_seq);
1876 remove_useless_stmts_1 (&failure_gsi, data);
1877 gsi_next (gsi);
6de9cd9a
DN
1878 break;
1879
1880 default:
726a989a
RB
1881 /* Otherwise this is a list of cleanup statements. */
1882 remove_useless_stmts_1 (&cleanup_gsi, data);
6de9cd9a
DN
1883
1884 /* If the cleanup is empty, then we can emit the TRY block without
1885 the enclosing TRY_CATCH_EXPR. */
726a989a 1886 if (gimple_seq_empty_p (cleanup_seq))
6de9cd9a 1887 {
726a989a
RB
1888 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1889 gsi_remove(gsi, false);
6de9cd9a
DN
1890 data->repeat = true;
1891 }
726a989a
RB
1892 else
1893 gsi_next (gsi);
6de9cd9a
DN
1894 break;
1895 }
726a989a 1896
6de9cd9a
DN
1897 data->may_throw |= this_may_throw;
1898}
1899
726a989a 1900/* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
6de9cd9a
DN
1901
1902static void
726a989a 1903remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
1904{
1905 tree block;
726a989a
RB
1906 gimple_seq body_seq, fn_body_seq;
1907 gimple_stmt_iterator body_gsi;
1908
1909 gimple stmt = gsi_stmt (*gsi);
6de9cd9a
DN
1910
1911 /* First remove anything underneath the BIND_EXPR. */
726a989a
RB
1912
1913 body_seq = gimple_bind_body (stmt);
1914 body_gsi = gsi_start (body_seq);
1915 remove_useless_stmts_1 (&body_gsi, data);
6de9cd9a 1916
726a989a
RB
1917 /* If the GIMPLE_BIND has no variables, then we can pull everything
1918 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1919 GIMPLE_BIND for the current function or an inlined function.
6de9cd9a
DN
1920
1921 When this situation occurs we will want to apply this
1922 optimization again. */
726a989a
RB
1923 block = gimple_bind_block (stmt);
1924 fn_body_seq = gimple_body (current_function_decl);
1925 if (gimple_bind_vars (stmt) == NULL_TREE
1926 && (gimple_seq_empty_p (fn_body_seq)
1927 || stmt != gimple_seq_first_stmt (fn_body_seq))
6de9cd9a
DN
1928 && (! block
1929 || ! BLOCK_ABSTRACT_ORIGIN (block)
1930 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1931 != FUNCTION_DECL)))
1932 {
ee0ee7e2
JJ
1933 tree var = NULL_TREE;
1934 /* Even if there are no gimple_bind_vars, there might be other
1935 decls in BLOCK_VARS rendering the GIMPLE_BIND not useless. */
9f0e7885 1936 if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
ee0ee7e2
JJ
1937 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1938 if (TREE_CODE (var) == IMPORTED_DECL)
1939 break;
9f0e7885 1940 if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
ee0ee7e2
JJ
1941 gsi_next (gsi);
1942 else
1943 {
1944 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1945 gsi_remove (gsi, false);
1946 data->repeat = true;
1947 }
6de9cd9a 1948 }
726a989a
RB
1949 else
1950 gsi_next (gsi);
6de9cd9a
DN
1951}
1952
726a989a 1953/* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
6de9cd9a
DN
1954
1955static void
726a989a 1956remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1957{
726a989a
RB
1958 gimple stmt = gsi_stmt (*gsi);
1959
1960 tree dest = gimple_goto_dest (stmt);
6de9cd9a
DN
1961
1962 data->may_branch = true;
726a989a 1963 data->last_was_goto = false;
6de9cd9a 1964
726a989a 1965 /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
6de9cd9a 1966 if (TREE_CODE (dest) == LABEL_DECL)
726a989a
RB
1967 {
1968 data->last_goto_gsi = *gsi;
1969 data->last_was_goto = true;
1970 }
1971
1972 gsi_next(gsi);
6de9cd9a
DN
1973}
1974
726a989a 1975/* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
6de9cd9a
DN
1976
1977static void
726a989a 1978remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1979{
726a989a
RB
1980 gimple stmt = gsi_stmt (*gsi);
1981
1982 tree label = gimple_label_label (stmt);
6de9cd9a
DN
1983
1984 data->has_label = true;
1985
1986 /* We do want to jump across non-local label receiver code. */
1987 if (DECL_NONLOCAL (label))
726a989a 1988 data->last_was_goto = false;
6de9cd9a 1989
726a989a
RB
1990 else if (data->last_was_goto
1991 && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
6de9cd9a 1992 {
726a989a
RB
1993 /* Replace the preceding GIMPLE_GOTO statement with
1994 a GIMPLE_NOP, which will be subsequently removed.
1995 In this way, we avoid invalidating other iterators
1996 active on the statement sequence. */
1997 gsi_replace(&data->last_goto_gsi, gimple_build_nop(), false);
1998 data->last_was_goto = false;
6de9cd9a
DN
1999 data->repeat = true;
2000 }
2001
2002 /* ??? Add something here to delete unused labels. */
6de9cd9a 2003
726a989a 2004 gsi_next (gsi);
6de9cd9a
DN
2005}
2006
2007
2008/* T is CALL_EXPR. Set current_function_calls_* flags. */
2009
2010void
726a989a 2011notice_special_calls (gimple call)
6de9cd9a 2012{
726a989a 2013 int flags = gimple_call_flags (call);
6de9cd9a
DN
2014
2015 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 2016 cfun->calls_alloca = true;
6de9cd9a 2017 if (flags & ECF_RETURNS_TWICE)
e3b5732b 2018 cfun->calls_setjmp = true;
6de9cd9a
DN
2019}
2020
2021
2022/* Clear flags set by notice_special_calls. Used by dead code removal
2023 to update the flags. */
2024
2025void
2026clear_special_calls (void)
2027{
e3b5732b
JH
2028 cfun->calls_alloca = false;
2029 cfun->calls_setjmp = false;
6de9cd9a
DN
2030}
2031
726a989a
RB
2032/* Remove useless statements from a statement sequence, and perform
2033 some preliminary simplifications. */
6de9cd9a
DN
2034
2035static void
726a989a 2036remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 2037{
726a989a 2038 while (!gsi_end_p (*gsi))
6de9cd9a 2039 {
726a989a 2040 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 2041
726a989a
RB
2042 switch (gimple_code (stmt))
2043 {
2044 case GIMPLE_COND:
2045 remove_useless_stmts_cond (gsi, data);
2046 break;
2047
2048 case GIMPLE_GOTO:
2049 remove_useless_stmts_goto (gsi, data);
2050 break;
2051
2052 case GIMPLE_LABEL:
2053 remove_useless_stmts_label (gsi, data);
2054 break;
2055
2056 case GIMPLE_ASSIGN:
2057 fold_stmt (gsi);
2058 stmt = gsi_stmt (*gsi);
2059 data->last_was_goto = false;
2060 if (stmt_could_throw_p (stmt))
2061 data->may_throw = true;
2062 gsi_next (gsi);
2063 break;
2064
2065 case GIMPLE_ASM:
2066 fold_stmt (gsi);
2067 data->last_was_goto = false;
2068 gsi_next (gsi);
2069 break;
2070
2071 case GIMPLE_CALL:
2072 fold_stmt (gsi);
2073 stmt = gsi_stmt (*gsi);
2074 data->last_was_goto = false;
2075 if (is_gimple_call (stmt))
2076 notice_special_calls (stmt);
2077
2078 /* We used to call update_gimple_call_flags here,
2079 which copied side-effects and nothrows status
2080 from the function decl to the call. In the new
2081 tuplified GIMPLE, the accessors for this information
2082 always consult the function decl, so this copying
2083 is no longer necessary. */
2084 if (stmt_could_throw_p (stmt))
2085 data->may_throw = true;
2086 gsi_next (gsi);
2087 break;
2088
2089 case GIMPLE_RETURN:
2090 fold_stmt (gsi);
2091 data->last_was_goto = false;
2092 data->may_branch = true;
2093 gsi_next (gsi);
2094 break;
2095
2096 case GIMPLE_BIND:
2097 remove_useless_stmts_bind (gsi, data);
2098 break;
2099
2100 case GIMPLE_TRY:
2101 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
2102 remove_useless_stmts_tc (gsi, data);
2103 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2104 remove_useless_stmts_tf (gsi, data);
2105 else
2106 gcc_unreachable ();
2107 break;
2108
2109 case GIMPLE_CATCH:
2110 gcc_unreachable ();
2111 break;
2112
2113 case GIMPLE_NOP:
2114 gsi_remove (gsi, false);
2115 break;
2116
2117 case GIMPLE_OMP_FOR:
2118 {
2119 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2120 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2121
2122 remove_useless_stmts_1 (&pre_body_gsi, data);
2123 data->last_was_goto = false;
2124 }
2125 /* FALLTHROUGH */
2126 case GIMPLE_OMP_CRITICAL:
2127 case GIMPLE_OMP_CONTINUE:
2128 case GIMPLE_OMP_MASTER:
2129 case GIMPLE_OMP_ORDERED:
2130 case GIMPLE_OMP_SECTION:
2131 case GIMPLE_OMP_SECTIONS:
2132 case GIMPLE_OMP_SINGLE:
2133 {
2134 gimple_seq body_seq = gimple_omp_body (stmt);
2135 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2136
2137 remove_useless_stmts_1 (&body_gsi, data);
2138 data->last_was_goto = false;
2139 gsi_next (gsi);
2140 }
2141 break;
2142
2143 case GIMPLE_OMP_PARALLEL:
2144 case GIMPLE_OMP_TASK:
2145 {
2146 /* Make sure the outermost GIMPLE_BIND isn't removed
2147 as useless. */
2148 gimple_seq body_seq = gimple_omp_body (stmt);
2149 gimple bind = gimple_seq_first_stmt (body_seq);
2150 gimple_seq bind_seq = gimple_bind_body (bind);
2151 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2152
2153 remove_useless_stmts_1 (&bind_gsi, data);
2154 data->last_was_goto = false;
2155 gsi_next (gsi);
2156 }
2157 break;
2158
2159 default:
2160 data->last_was_goto = false;
2161 gsi_next (gsi);
2162 break;
2163 }
6de9cd9a
DN
2164 }
2165}
2166
726a989a
RB
2167/* Walk the function tree, removing useless statements and performing
2168 some preliminary simplifications. */
2169
c2924966 2170static unsigned int
6de9cd9a
DN
2171remove_useless_stmts (void)
2172{
2173 struct rus_data data;
2174
2175 clear_special_calls ();
2176
2177 do
2178 {
726a989a
RB
2179 gimple_stmt_iterator gsi;
2180
2181 gsi = gsi_start (gimple_body (current_function_decl));
6de9cd9a 2182 memset (&data, 0, sizeof (data));
726a989a 2183 remove_useless_stmts_1 (&gsi, &data);
6de9cd9a
DN
2184 }
2185 while (data.repeat);
211ca15c
RG
2186
2187#ifdef ENABLE_TYPES_CHECKING
2188 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2189#endif
2190
c2924966 2191 return 0;
6de9cd9a
DN
2192}
2193
2194
8ddbbcae 2195struct gimple_opt_pass pass_remove_useless_stmts =
6de9cd9a 2196{
8ddbbcae
JH
2197 {
2198 GIMPLE_PASS,
6de9cd9a
DN
2199 "useless", /* name */
2200 NULL, /* gate */
2201 remove_useless_stmts, /* execute */
2202 NULL, /* sub */
2203 NULL, /* next */
2204 0, /* static_pass_number */
7072a650 2205 TV_NONE, /* tv_id */
9e5a3e6c
RH
2206 PROP_gimple_any, /* properties_required */
2207 0, /* properties_provided */
6de9cd9a
DN
2208 0, /* properties_destroyed */
2209 0, /* todo_flags_start */
8ddbbcae
JH
2210 TODO_dump_func /* todo_flags_finish */
2211 }
6de9cd9a
DN
2212};
2213
6de9cd9a
DN
2214/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2215
2216static void
2217remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2218{
6de9cd9a
DN
2219 /* Since this block is no longer reachable, we can just delete all
2220 of its PHI nodes. */
81b822d5 2221 remove_phi_nodes (bb);
6de9cd9a
DN
2222
2223 /* Remove edges to BB's successors. */
628f6a4e 2224 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 2225 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
2226}
2227
2228
2229/* Remove statements of basic block BB. */
2230
2231static void
2232remove_bb (basic_block bb)
2233{
726a989a 2234 gimple_stmt_iterator i;
dbce1570 2235 source_location loc = UNKNOWN_LOCATION;
6de9cd9a
DN
2236
2237 if (dump_file)
2238 {
2239 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2240 if (dump_flags & TDF_DETAILS)
2241 {
2242 dump_bb (bb, dump_file, 0);
2243 fprintf (dump_file, "\n");
2244 }
2245 }
2246
2b271002
ZD
2247 if (current_loops)
2248 {
2249 struct loop *loop = bb->loop_father;
2250
598ec7bd
ZD
2251 /* If a loop gets removed, clean up the information associated
2252 with it. */
2b271002
ZD
2253 if (loop->latch == bb
2254 || loop->header == bb)
598ec7bd 2255 free_numbers_of_iterations_estimates_loop (loop);
2b271002
ZD
2256 }
2257
6de9cd9a 2258 /* Remove all the instructions in the block. */
726a989a 2259 if (bb_seq (bb) != NULL)
6de9cd9a 2260 {
726a989a 2261 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
77568960 2262 {
726a989a
RB
2263 gimple stmt = gsi_stmt (i);
2264 if (gimple_code (stmt) == GIMPLE_LABEL
2265 && (FORCED_LABEL (gimple_label_label (stmt))
2266 || DECL_NONLOCAL (gimple_label_label (stmt))))
7506e1cb
ZD
2267 {
2268 basic_block new_bb;
726a989a 2269 gimple_stmt_iterator new_gsi;
7506e1cb
ZD
2270
2271 /* A non-reachable non-local label may still be referenced.
2272 But it no longer needs to carry the extra semantics of
2273 non-locality. */
726a989a 2274 if (DECL_NONLOCAL (gimple_label_label (stmt)))
7506e1cb 2275 {
726a989a
RB
2276 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2277 FORCED_LABEL (gimple_label_label (stmt)) = 1;
7506e1cb 2278 }
bb1ecfe8 2279
7506e1cb 2280 new_bb = bb->prev_bb;
726a989a
RB
2281 new_gsi = gsi_start_bb (new_bb);
2282 gsi_remove (&i, false);
2283 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
7506e1cb
ZD
2284 }
2285 else
bb1ecfe8 2286 {
7506e1cb
ZD
2287 /* Release SSA definitions if we are in SSA. Note that we
2288 may be called when not in SSA. For example,
2289 final_cleanup calls this function via
2290 cleanup_tree_cfg. */
2291 if (gimple_in_ssa_p (cfun))
2292 release_defs (stmt);
2293
726a989a 2294 gsi_remove (&i, true);
bb1ecfe8 2295 }
6531d1be 2296
7506e1cb
ZD
2297 /* Don't warn for removed gotos. Gotos are often removed due to
2298 jump threading, thus resulting in bogus warnings. Not great,
2299 since this way we lose warnings for gotos in the original
2300 program that are indeed unreachable. */
726a989a
RB
2301 if (gimple_code (stmt) != GIMPLE_GOTO
2302 && gimple_has_location (stmt)
2303 && !loc)
2304 loc = gimple_location (stmt);
43e05e45 2305 }
6de9cd9a
DN
2306 }
2307
2308 /* If requested, give a warning that the first statement in the
2309 block is unreachable. We walk statements backwards in the
2310 loop above, so the last statement we process is the first statement
2311 in the block. */
5ffeb913 2312 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
44c21c7f 2313 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
6de9cd9a
DN
2314
2315 remove_phi_nodes_and_edges_for_unreachable_block (bb);
726a989a 2316 bb->il.gimple = NULL;
6de9cd9a
DN
2317}
2318
6de9cd9a 2319
35920270
KH
2320/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2321 predicate VAL, return the edge that will be taken out of the block.
2322 If VAL does not match a unique edge, NULL is returned. */
6de9cd9a
DN
2323
2324edge
2325find_taken_edge (basic_block bb, tree val)
2326{
726a989a 2327 gimple stmt;
6de9cd9a
DN
2328
2329 stmt = last_stmt (bb);
2330
1e128c5f
GB
2331 gcc_assert (stmt);
2332 gcc_assert (is_ctrl_stmt (stmt));
6de9cd9a 2333
726a989a
RB
2334 if (val == NULL)
2335 return NULL;
2336
2337 if (!is_gimple_min_invariant (val))
6de9cd9a
DN
2338 return NULL;
2339
726a989a 2340 if (gimple_code (stmt) == GIMPLE_COND)
6de9cd9a
DN
2341 return find_taken_edge_cond_expr (bb, val);
2342
726a989a 2343 if (gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a
DN
2344 return find_taken_edge_switch_expr (bb, val);
2345
be477406 2346 if (computed_goto_p (stmt))
1799efef
JL
2347 {
2348 /* Only optimize if the argument is a label, if the argument is
2349 not a label then we can not construct a proper CFG.
2350
2351 It may be the case that we only need to allow the LABEL_REF to
2352 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2353 appear inside a LABEL_EXPR just to be safe. */
2354 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2355 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2356 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2357 return NULL;
2358 }
be477406 2359
35920270 2360 gcc_unreachable ();
6de9cd9a
DN
2361}
2362
be477406
JL
2363/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2364 statement, determine which of the outgoing edges will be taken out of the
2365 block. Return NULL if either edge may be taken. */
2366
2367static edge
2368find_taken_edge_computed_goto (basic_block bb, tree val)
2369{
2370 basic_block dest;
2371 edge e = NULL;
2372
2373 dest = label_to_block (val);
2374 if (dest)
2375 {
2376 e = find_edge (bb, dest);
2377 gcc_assert (e != NULL);
2378 }
2379
2380 return e;
2381}
6de9cd9a
DN
2382
2383/* Given a constant value VAL and the entry block BB to a COND_EXPR
2384 statement, determine which of the two edges will be taken out of the
2385 block. Return NULL if either edge may be taken. */
2386
2387static edge
2388find_taken_edge_cond_expr (basic_block bb, tree val)
2389{
2390 edge true_edge, false_edge;
2391
2392 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 2393
f1b19062 2394 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 2395 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
2396}
2397
fca01525 2398/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
2399 statement, determine which edge will be taken out of the block. Return
2400 NULL if any edge may be taken. */
2401
2402static edge
2403find_taken_edge_switch_expr (basic_block bb, tree val)
2404{
6de9cd9a
DN
2405 basic_block dest_bb;
2406 edge e;
726a989a
RB
2407 gimple switch_stmt;
2408 tree taken_case;
6de9cd9a 2409
726a989a
RB
2410 switch_stmt = last_stmt (bb);
2411 taken_case = find_case_label_for_value (switch_stmt, val);
6de9cd9a
DN
2412 dest_bb = label_to_block (CASE_LABEL (taken_case));
2413
2414 e = find_edge (bb, dest_bb);
1e128c5f 2415 gcc_assert (e);
6de9cd9a
DN
2416 return e;
2417}
2418
2419
726a989a 2420/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
f667741c
SB
2421 We can make optimal use here of the fact that the case labels are
2422 sorted: We can do a binary search for a case matching VAL. */
6de9cd9a
DN
2423
2424static tree
726a989a 2425find_case_label_for_value (gimple switch_stmt, tree val)
6de9cd9a 2426{
726a989a
RB
2427 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2428 tree default_case = gimple_switch_default_label (switch_stmt);
6de9cd9a 2429
726a989a 2430 for (low = 0, high = n; high - low > 1; )
6de9cd9a 2431 {
f667741c 2432 size_t i = (high + low) / 2;
726a989a 2433 tree t = gimple_switch_label (switch_stmt, i);
f667741c
SB
2434 int cmp;
2435
2436 /* Cache the result of comparing CASE_LOW and val. */
2437 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 2438
f667741c
SB
2439 if (cmp > 0)
2440 high = i;
2441 else
2442 low = i;
2443
2444 if (CASE_HIGH (t) == NULL)
6de9cd9a 2445 {
f667741c
SB
2446 /* A singe-valued case label. */
2447 if (cmp == 0)
6de9cd9a
DN
2448 return t;
2449 }
2450 else
2451 {
2452 /* A case range. We can only handle integer ranges. */
f667741c 2453 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
2454 return t;
2455 }
2456 }
2457
6de9cd9a
DN
2458 return default_case;
2459}
2460
2461
6de9cd9a
DN
2462/* Dump a basic block on stderr. */
2463
2464void
726a989a 2465gimple_debug_bb (basic_block bb)
6de9cd9a 2466{
726a989a 2467 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
2468}
2469
2470
2471/* Dump basic block with index N on stderr. */
2472
2473basic_block
726a989a 2474gimple_debug_bb_n (int n)
6de9cd9a 2475{
726a989a 2476 gimple_debug_bb (BASIC_BLOCK (n));
6de9cd9a 2477 return BASIC_BLOCK (n);
6531d1be 2478}
6de9cd9a
DN
2479
2480
2481/* Dump the CFG on stderr.
2482
2483 FLAGS are the same used by the tree dumping functions
6531d1be 2484 (see TDF_* in tree-pass.h). */
6de9cd9a
DN
2485
2486void
726a989a 2487gimple_debug_cfg (int flags)
6de9cd9a 2488{
726a989a 2489 gimple_dump_cfg (stderr, flags);
6de9cd9a
DN
2490}
2491
2492
2493/* Dump the program showing basic block boundaries on the given FILE.
2494
2495 FLAGS are the same used by the tree dumping functions (see TDF_* in
2496 tree.h). */
2497
2498void
726a989a 2499gimple_dump_cfg (FILE *file, int flags)
6de9cd9a
DN
2500{
2501 if (flags & TDF_DETAILS)
2502 {
2503 const char *funcname
673fda6b 2504 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2505
2506 fputc ('\n', file);
2507 fprintf (file, ";; Function %s\n\n", funcname);
2508 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2509 n_basic_blocks, n_edges, last_basic_block);
2510
2511 brief_dump_cfg (file);
2512 fprintf (file, "\n");
2513 }
2514
2515 if (flags & TDF_STATS)
2516 dump_cfg_stats (file);
2517
2518 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2519}
2520
2521
2522/* Dump CFG statistics on FILE. */
2523
2524void
2525dump_cfg_stats (FILE *file)
2526{
2527 static long max_num_merged_labels = 0;
2528 unsigned long size, total = 0;
7b0cab99 2529 long num_edges;
6de9cd9a
DN
2530 basic_block bb;
2531 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2532 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
cac50d94 2533 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
6de9cd9a
DN
2534 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2535 const char *funcname
673fda6b 2536 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2537
2538
2539 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2540
2541 fprintf (file, "---------------------------------------------------------\n");
2542 fprintf (file, fmt_str, "", " Number of ", "Memory");
2543 fprintf (file, fmt_str, "", " instances ", "used ");
2544 fprintf (file, "---------------------------------------------------------\n");
2545
2546 size = n_basic_blocks * sizeof (struct basic_block_def);
2547 total += size;
f7fda749
RH
2548 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2549 SCALE (size), LABEL (size));
6de9cd9a 2550
7b0cab99 2551 num_edges = 0;
6de9cd9a 2552 FOR_EACH_BB (bb)
7b0cab99
JH
2553 num_edges += EDGE_COUNT (bb->succs);
2554 size = num_edges * sizeof (struct edge_def);
6de9cd9a 2555 total += size;
cac50d94 2556 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
6de9cd9a 2557
6de9cd9a
DN
2558 fprintf (file, "---------------------------------------------------------\n");
2559 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2560 LABEL (total));
2561 fprintf (file, "---------------------------------------------------------\n");
2562 fprintf (file, "\n");
2563
2564 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2565 max_num_merged_labels = cfg_stats.num_merged_labels;
2566
2567 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2568 cfg_stats.num_merged_labels, max_num_merged_labels);
2569
2570 fprintf (file, "\n");
2571}
2572
2573
2574/* Dump CFG statistics on stderr. Keep extern so that it's always
2575 linked in the final executable. */
2576
2577void
2578debug_cfg_stats (void)
2579{
2580 dump_cfg_stats (stderr);
2581}
2582
2583
2584/* Dump the flowgraph to a .vcg FILE. */
2585
2586static void
726a989a 2587gimple_cfg2vcg (FILE *file)
6de9cd9a
DN
2588{
2589 edge e;
628f6a4e 2590 edge_iterator ei;
6de9cd9a
DN
2591 basic_block bb;
2592 const char *funcname
673fda6b 2593 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2594
2595 /* Write the file header. */
2596 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2597 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2598 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2599
2600 /* Write blocks and edges. */
628f6a4e 2601 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2602 {
2603 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2604 e->dest->index);
2605
2606 if (e->flags & EDGE_FAKE)
2607 fprintf (file, " linestyle: dotted priority: 10");
2608 else
2609 fprintf (file, " linestyle: solid priority: 100");
2610
2611 fprintf (file, " }\n");
2612 }
2613 fputc ('\n', file);
2614
2615 FOR_EACH_BB (bb)
2616 {
726a989a 2617 enum gimple_code head_code, end_code;
6de9cd9a
DN
2618 const char *head_name, *end_name;
2619 int head_line = 0;
2620 int end_line = 0;
726a989a
RB
2621 gimple first = first_stmt (bb);
2622 gimple last = last_stmt (bb);
6de9cd9a
DN
2623
2624 if (first)
2625 {
726a989a
RB
2626 head_code = gimple_code (first);
2627 head_name = gimple_code_name[head_code];
6de9cd9a
DN
2628 head_line = get_lineno (first);
2629 }
2630 else
2631 head_name = "no-statement";
2632
2633 if (last)
2634 {
726a989a
RB
2635 end_code = gimple_code (last);
2636 end_name = gimple_code_name[end_code];
6de9cd9a
DN
2637 end_line = get_lineno (last);
2638 }
2639 else
2640 end_name = "no-statement";
2641
2642 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2643 bb->index, bb->index, head_name, head_line, end_name,
2644 end_line);
2645
628f6a4e 2646 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2647 {
2648 if (e->dest == EXIT_BLOCK_PTR)
2649 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2650 else
2651 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2652
2653 if (e->flags & EDGE_FAKE)
2654 fprintf (file, " priority: 10 linestyle: dotted");
2655 else
2656 fprintf (file, " priority: 100 linestyle: solid");
2657
2658 fprintf (file, " }\n");
2659 }
2660
2661 if (bb->next_bb != EXIT_BLOCK_PTR)
2662 fputc ('\n', file);
2663 }
2664
2665 fputs ("}\n\n", file);
2666}
2667
2668
2669
2670/*---------------------------------------------------------------------------
2671 Miscellaneous helpers
2672---------------------------------------------------------------------------*/
2673
2674/* Return true if T represents a stmt that always transfers control. */
2675
2676bool
726a989a 2677is_ctrl_stmt (gimple t)
6de9cd9a 2678{
726a989a
RB
2679 return gimple_code (t) == GIMPLE_COND
2680 || gimple_code (t) == GIMPLE_SWITCH
2681 || gimple_code (t) == GIMPLE_GOTO
2682 || gimple_code (t) == GIMPLE_RETURN
2683 || gimple_code (t) == GIMPLE_RESX;
6de9cd9a
DN
2684}
2685
2686
2687/* Return true if T is a statement that may alter the flow of control
2688 (e.g., a call to a non-returning function). */
2689
2690bool
726a989a 2691is_ctrl_altering_stmt (gimple t)
6de9cd9a 2692{
1e128c5f 2693 gcc_assert (t);
726a989a
RB
2694
2695 if (is_gimple_call (t))
6de9cd9a 2696 {
726a989a
RB
2697 int flags = gimple_call_flags (t);
2698
2699 /* A non-pure/const call alters flow control if the current
6de9cd9a 2700 function has nonlocal labels. */
726a989a
RB
2701 if (!(flags & (ECF_CONST | ECF_PURE))
2702 && cfun->has_nonlocal_label)
6de9cd9a
DN
2703 return true;
2704
726a989a
RB
2705 /* A call also alters control flow if it does not return. */
2706 if (gimple_call_flags (t) & ECF_NORETURN)
6de9cd9a 2707 return true;
6de9cd9a
DN
2708 }
2709
50674e96 2710 /* OpenMP directives alter control flow. */
726a989a 2711 if (is_gimple_omp (t))
50674e96
DN
2712 return true;
2713
6de9cd9a 2714 /* If a statement can throw, it alters control flow. */
726a989a 2715 return stmt_can_throw_internal (t);
6de9cd9a
DN
2716}
2717
2718
4f6c2131 2719/* Return true if T is a simple local goto. */
6de9cd9a
DN
2720
2721bool
726a989a 2722simple_goto_p (gimple t)
6de9cd9a 2723{
726a989a
RB
2724 return (gimple_code (t) == GIMPLE_GOTO
2725 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
4f6c2131
EB
2726}
2727
2728
2729/* Return true if T can make an abnormal transfer of control flow.
2730 Transfers of control flow associated with EH are excluded. */
2731
2732bool
726a989a 2733stmt_can_make_abnormal_goto (gimple t)
4f6c2131
EB
2734{
2735 if (computed_goto_p (t))
2736 return true;
726a989a
RB
2737 if (is_gimple_call (t))
2738 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
4f6c2131 2739 return false;
6de9cd9a
DN
2740}
2741
2742
726a989a
RB
2743/* Return true if STMT should start a new basic block. PREV_STMT is
2744 the statement preceding STMT. It is used when STMT is a label or a
2745 case label. Labels should only start a new basic block if their
2746 previous statement wasn't a label. Otherwise, sequence of labels
2747 would generate unnecessary basic blocks that only contain a single
2748 label. */
6de9cd9a
DN
2749
2750static inline bool
726a989a 2751stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
6de9cd9a 2752{
726a989a 2753 if (stmt == NULL)
6de9cd9a
DN
2754 return false;
2755
726a989a
RB
2756 /* Labels start a new basic block only if the preceding statement
2757 wasn't a label of the same type. This prevents the creation of
2758 consecutive blocks that have nothing but a single label. */
2759 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
2760 {
2761 /* Nonlocal and computed GOTO targets always start a new block. */
726a989a
RB
2762 if (DECL_NONLOCAL (gimple_label_label (stmt))
2763 || FORCED_LABEL (gimple_label_label (stmt)))
6de9cd9a
DN
2764 return true;
2765
726a989a 2766 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
6de9cd9a 2767 {
726a989a 2768 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
6de9cd9a
DN
2769 return true;
2770
2771 cfg_stats.num_merged_labels++;
2772 return false;
2773 }
2774 else
2775 return true;
2776 }
2777
2778 return false;
2779}
2780
2781
2782/* Return true if T should end a basic block. */
2783
2784bool
726a989a 2785stmt_ends_bb_p (gimple t)
6de9cd9a
DN
2786{
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2788}
2789
726a989a 2790/* Remove block annotations and other data structures. */
6de9cd9a
DN
2791
2792void
242229bb 2793delete_tree_cfg_annotations (void)
6de9cd9a 2794{
6de9cd9a 2795 label_to_block_map = NULL;
6de9cd9a
DN
2796}
2797
2798
2799/* Return the first statement in basic block BB. */
2800
726a989a 2801gimple
6de9cd9a
DN
2802first_stmt (basic_block bb)
2803{
726a989a
RB
2804 gimple_stmt_iterator i = gsi_start_bb (bb);
2805 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
6de9cd9a
DN
2806}
2807
6c52e687
CC
2808/* Return the first non-label statement in basic block BB. */
2809
2810static gimple
2811first_non_label_stmt (basic_block bb)
2812{
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817}
2818
6de9cd9a
DN
2819/* Return the last statement in basic block BB. */
2820
726a989a 2821gimple
6de9cd9a
DN
2822last_stmt (basic_block bb)
2823{
726a989a
RB
2824 gimple_stmt_iterator b = gsi_last_bb (bb);
2825 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
6de9cd9a
DN
2826}
2827
6de9cd9a
DN
2828/* Return the last statement of an otherwise empty block. Return NULL
2829 if the block is totally empty, or if it contains more than one
2830 statement. */
2831
726a989a 2832gimple
6de9cd9a
DN
2833last_and_only_stmt (basic_block bb)
2834{
726a989a
RB
2835 gimple_stmt_iterator i = gsi_last_bb (bb);
2836 gimple last, prev;
6de9cd9a 2837
726a989a
RB
2838 if (gsi_end_p (i))
2839 return NULL;
6de9cd9a 2840
726a989a
RB
2841 last = gsi_stmt (i);
2842 gsi_prev (&i);
2843 if (gsi_end_p (i))
6de9cd9a
DN
2844 return last;
2845
2846 /* Empty statements should no longer appear in the instruction stream.
2847 Everything that might have appeared before should be deleted by
726a989a 2848 remove_useless_stmts, and the optimizers should just gsi_remove
6de9cd9a
DN
2849 instead of smashing with build_empty_stmt.
2850
2851 Thus the only thing that should appear here in a block containing
2852 one executable statement is a label. */
726a989a
RB
2853 prev = gsi_stmt (i);
2854 if (gimple_code (prev) == GIMPLE_LABEL)
6de9cd9a
DN
2855 return last;
2856 else
726a989a 2857 return NULL;
82b85a85 2858}
6de9cd9a 2859
4f7db7f7
KH
2860/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2861
2862static void
2863reinstall_phi_args (edge new_edge, edge old_edge)
2864{
ea7e6d5a
AH
2865 edge_var_map_vector v;
2866 edge_var_map *vm;
2867 int i;
726a989a
RB
2868 gimple_stmt_iterator phis;
2869
ea7e6d5a
AH
2870 v = redirect_edge_var_map_vector (old_edge);
2871 if (!v)
4f7db7f7 2872 return;
726a989a
RB
2873
2874 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2875 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2876 i++, gsi_next (&phis))
4f7db7f7 2877 {
726a989a 2878 gimple phi = gsi_stmt (phis);
ea7e6d5a
AH
2879 tree result = redirect_edge_var_map_result (vm);
2880 tree arg = redirect_edge_var_map_def (vm);
726a989a
RB
2881
2882 gcc_assert (result == gimple_phi_result (phi));
2883
d2e398df 2884 add_phi_arg (phi, arg, new_edge);
4f7db7f7 2885 }
726a989a 2886
ea7e6d5a 2887 redirect_edge_var_map_clear (old_edge);
4f7db7f7
KH
2888}
2889
2a8a8292 2890/* Returns the basic block after which the new basic block created
b9a66240
ZD
2891 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2892 near its "logical" location. This is of most help to humans looking
2893 at debugging dumps. */
2894
2895static basic_block
2896split_edge_bb_loc (edge edge_in)
2897{
2898 basic_block dest = edge_in->dest;
2899
2900 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2901 return edge_in->src;
2902 else
2903 return dest->prev_bb;
2904}
2905
6de9cd9a
DN
2906/* Split a (typically critical) edge EDGE_IN. Return the new block.
2907 Abort on abnormal edges. */
2908
2909static basic_block
726a989a 2910gimple_split_edge (edge edge_in)
6de9cd9a 2911{
4741d956 2912 basic_block new_bb, after_bb, dest;
6de9cd9a 2913 edge new_edge, e;
6de9cd9a
DN
2914
2915 /* Abnormal edges cannot be split. */
1e128c5f 2916 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a 2917
6de9cd9a
DN
2918 dest = edge_in->dest;
2919
b9a66240 2920 after_bb = split_edge_bb_loc (edge_in);
6de9cd9a
DN
2921
2922 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
2923 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2924 new_bb->count = edge_in->count;
6de9cd9a 2925 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
2926 new_edge->probability = REG_BR_PROB_BASE;
2927 new_edge->count = edge_in->count;
6de9cd9a 2928
1e128c5f 2929 e = redirect_edge_and_branch (edge_in, new_bb);
c7b852c8 2930 gcc_assert (e == edge_in);
4f7db7f7 2931 reinstall_phi_args (new_edge, e);
6de9cd9a
DN
2932
2933 return new_bb;
2934}
2935
6de9cd9a 2936/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
2937 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2938 inside a PHI node. */
6de9cd9a
DN
2939
2940static tree
2fbe90f2 2941verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
2942{
2943 tree t = *tp, x;
2944
2945 if (TYPE_P (t))
2946 *walk_subtrees = 0;
6531d1be 2947
e8ca4159 2948 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 2949#define CHECK_OP(N, MSG) \
e8ca4159 2950 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 2951 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
2952
2953 switch (TREE_CODE (t))
2954 {
2955 case SSA_NAME:
2956 if (SSA_NAME_IN_FREE_LIST (t))
2957 {
2958 error ("SSA name in freelist but still referenced");
2959 return *tp;
2960 }
2961 break;
2962
26de0bcb
AP
2963 case INDIRECT_REF:
2964 x = TREE_OPERAND (t, 0);
2965 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2966 {
2967 error ("Indirect reference's operand is not a register or a constant.");
2968 return x;
2969 }
2970 break;
2971
0bca51f0
DN
2972 case ASSERT_EXPR:
2973 x = fold (ASSERT_EXPR_COND (t));
2974 if (x == boolean_false_node)
2975 {
2976 error ("ASSERT_EXPR with an always-false condition");
2977 return *tp;
2978 }
2979 break;
2980
6de9cd9a 2981 case MODIFY_EXPR:
26de0bcb 2982 error ("MODIFY_EXPR not expected while having tuples.");
e57fcb68 2983 return *tp;
6de9cd9a
DN
2984
2985 case ADDR_EXPR:
81fc3052 2986 {
81fc3052
DB
2987 bool old_constant;
2988 bool old_side_effects;
81fc3052
DB
2989 bool new_constant;
2990 bool new_side_effects;
2991
51eed280
PB
2992 gcc_assert (is_gimple_address (t));
2993
81fc3052
DB
2994 old_constant = TREE_CONSTANT (t);
2995 old_side_effects = TREE_SIDE_EFFECTS (t);
2996
127203ac 2997 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
2998 new_side_effects = TREE_SIDE_EFFECTS (t);
2999 new_constant = TREE_CONSTANT (t);
3000
81fc3052
DB
3001 if (old_constant != new_constant)
3002 {
3003 error ("constant not recomputed when ADDR_EXPR changed");
3004 return t;
3005 }
3006 if (old_side_effects != new_side_effects)
3007 {
3008 error ("side effects not recomputed when ADDR_EXPR changed");
3009 return t;
3010 }
3011
3012 /* Skip any references (they will be checked when we recurse down the
3013 tree) and ensure that any variable used as a prefix is marked
3014 addressable. */
3015 for (x = TREE_OPERAND (t, 0);
3016 handled_component_p (x);
3017 x = TREE_OPERAND (x, 0))
3018 ;
3019
5006671f
RG
3020 if (!(TREE_CODE (x) == VAR_DECL
3021 || TREE_CODE (x) == PARM_DECL
3022 || TREE_CODE (x) == RESULT_DECL))
81fc3052
DB
3023 return NULL;
3024 if (!TREE_ADDRESSABLE (x))
3025 {
3026 error ("address taken, but ADDRESSABLE bit not set");
3027 return x;
3028 }
ba4d8f9d
RG
3029 if (DECL_GIMPLE_REG_P (x))
3030 {
3031 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3032 return x;
3033 }
bdb69bee 3034
81fc3052
DB
3035 break;
3036 }
6de9cd9a
DN
3037
3038 case COND_EXPR:
a6234684 3039 x = COND_EXPR_COND (t);
d40055ab 3040 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 3041 {
d40055ab 3042 error ("non-integral used in condition");
6de9cd9a
DN
3043 return x;
3044 }
9c691961
AP
3045 if (!is_gimple_condexpr (x))
3046 {
ab532386 3047 error ("invalid conditional operand");
9c691961
AP
3048 return x;
3049 }
6de9cd9a
DN
3050 break;
3051
a134e5f3
TB
3052 case NON_LVALUE_EXPR:
3053 gcc_unreachable ();
3054
1043771b 3055 CASE_CONVERT:
6de9cd9a 3056 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3057 case FLOAT_EXPR:
3058 case NEGATE_EXPR:
3059 case ABS_EXPR:
3060 case BIT_NOT_EXPR:
6de9cd9a 3061 case TRUTH_NOT_EXPR:
ab532386 3062 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
3063 break;
3064
3065 case REALPART_EXPR:
3066 case IMAGPART_EXPR:
2fbe90f2
RK
3067 case COMPONENT_REF:
3068 case ARRAY_REF:
3069 case ARRAY_RANGE_REF:
3070 case BIT_FIELD_REF:
3071 case VIEW_CONVERT_EXPR:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3075 the subtrees. */
afe84921 3076 while (handled_component_p (t))
2fbe90f2
RK
3077 {
3078 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
3080 else if (TREE_CODE (t) == ARRAY_REF
3081 || TREE_CODE (t) == ARRAY_RANGE_REF)
3082 {
ab532386 3083 CHECK_OP (1, "invalid array index");
2fbe90f2 3084 if (TREE_OPERAND (t, 2))
ab532386 3085 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 3086 if (TREE_OPERAND (t, 3))
ab532386 3087 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
3088 }
3089 else if (TREE_CODE (t) == BIT_FIELD_REF)
3090 {
e55f42fb
RG
3091 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3092 || !host_integerp (TREE_OPERAND (t, 2), 1))
3093 {
3094 error ("invalid position or size operand to BIT_FIELD_REF");
3095 return t;
3096 }
fc0f49f3
RG
3097 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3098 && (TYPE_PRECISION (TREE_TYPE (t))
3099 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3100 {
3101 error ("integral result type precision does not match "
3102 "field size of BIT_FIELD_REF");
3103 return t;
3104 }
3105 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3106 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3107 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3108 {
3109 error ("mode precision of non-integral result does not "
3110 "match field size of BIT_FIELD_REF");
3111 return t;
3112 }
2fbe90f2
RK
3113 }
3114
3115 t = TREE_OPERAND (t, 0);
3116 }
3117
bb0c55f6 3118 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 3119 {
ab532386 3120 error ("invalid reference prefix");
2fbe90f2
RK
3121 return t;
3122 }
3123 *walk_subtrees = 0;
6de9cd9a 3124 break;
5be014d5
AP
3125 case PLUS_EXPR:
3126 case MINUS_EXPR:
3127 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3128 POINTER_PLUS_EXPR. */
3129 if (POINTER_TYPE_P (TREE_TYPE (t)))
3130 {
3131 error ("invalid operand to plus/minus, type is a pointer");
3132 return t;
3133 }
3134 CHECK_OP (0, "invalid operand to binary operator");
3135 CHECK_OP (1, "invalid operand to binary operator");
3136 break;
6de9cd9a 3137
5be014d5
AP
3138 case POINTER_PLUS_EXPR:
3139 /* Check to make sure the first operand is a pointer or reference type. */
3140 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3141 {
3142 error ("invalid operand to pointer plus, first operand is not a pointer");
3143 return t;
3144 }
3145 /* Check to make sure the second operand is an integer with type of
3146 sizetype. */
36618b93
RG
3147 if (!useless_type_conversion_p (sizetype,
3148 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
3149 {
3150 error ("invalid operand to pointer plus, second operand is not an "
3151 "integer with type of sizetype.");
3152 return t;
3153 }
3154 /* FALLTHROUGH */
6de9cd9a
DN
3155 case LT_EXPR:
3156 case LE_EXPR:
3157 case GT_EXPR:
3158 case GE_EXPR:
3159 case EQ_EXPR:
3160 case NE_EXPR:
3161 case UNORDERED_EXPR:
3162 case ORDERED_EXPR:
3163 case UNLT_EXPR:
3164 case UNLE_EXPR:
3165 case UNGT_EXPR:
3166 case UNGE_EXPR:
3167 case UNEQ_EXPR:
d1a7edaf 3168 case LTGT_EXPR:
6de9cd9a
DN
3169 case MULT_EXPR:
3170 case TRUNC_DIV_EXPR:
3171 case CEIL_DIV_EXPR:
3172 case FLOOR_DIV_EXPR:
3173 case ROUND_DIV_EXPR:
3174 case TRUNC_MOD_EXPR:
3175 case CEIL_MOD_EXPR:
3176 case FLOOR_MOD_EXPR:
3177 case ROUND_MOD_EXPR:
3178 case RDIV_EXPR:
3179 case EXACT_DIV_EXPR:
3180 case MIN_EXPR:
3181 case MAX_EXPR:
3182 case LSHIFT_EXPR:
3183 case RSHIFT_EXPR:
3184 case LROTATE_EXPR:
3185 case RROTATE_EXPR:
3186 case BIT_IOR_EXPR:
3187 case BIT_XOR_EXPR:
3188 case BIT_AND_EXPR:
ab532386
JM
3189 CHECK_OP (0, "invalid operand to binary operator");
3190 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
3191 break;
3192
84816907
JM
3193 case CONSTRUCTOR:
3194 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3195 *walk_subtrees = 0;
3196 break;
3197
6de9cd9a
DN
3198 default:
3199 break;
3200 }
3201 return NULL;
2fbe90f2
RK
3202
3203#undef CHECK_OP
6de9cd9a
DN
3204}
3205
7e98624c
RG
3206
3207/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3208 Returns true if there is an error, otherwise false. */
3209
3210static bool
726a989a 3211verify_types_in_gimple_min_lval (tree expr)
7e98624c
RG
3212{
3213 tree op;
3214
3215 if (is_gimple_id (expr))
3216 return false;
3217
9f509004
RG
3218 if (!INDIRECT_REF_P (expr)
3219 && TREE_CODE (expr) != TARGET_MEM_REF)
7e98624c
RG
3220 {
3221 error ("invalid expression for min lvalue");
3222 return true;
3223 }
3224
9f509004
RG
3225 /* TARGET_MEM_REFs are strange beasts. */
3226 if (TREE_CODE (expr) == TARGET_MEM_REF)
3227 return false;
3228
7e98624c
RG
3229 op = TREE_OPERAND (expr, 0);
3230 if (!is_gimple_val (op))
3231 {
3232 error ("invalid operand in indirect reference");
3233 debug_generic_stmt (op);
3234 return true;
3235 }
3236 if (!useless_type_conversion_p (TREE_TYPE (expr),
3237 TREE_TYPE (TREE_TYPE (op))))
3238 {
3239 error ("type mismatch in indirect reference");
3240 debug_generic_stmt (TREE_TYPE (expr));
3241 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3242 return true;
3243 }
3244
3245 return false;
3246}
3247
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the reference chain outermost-first, checking each handled
     component against the type of the object it references.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      /* Index, lower bound, and stride of an array reference must all
	 be GIMPLE values (the latter two only when present).  */
      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* Real/imaginary part extraction must produce the complex type's
	 component type.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* A field access must produce the type of the FIELD_DECL
	 (operand 1).  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
	 is nothing to verify.  Gross mismatches at most invoke
	 undefined behavior.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && !handled_component_p (op))
	return false;

      expr = op;
    }

  /* EXPR is now the base of the reference chain.  An invariant base is
     fine unless an lvalue is required; otherwise it must be a valid
     minimal lvalue.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3328
20dcff2a
RG
3329/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3330 list of pointer-to types that is trivially convertible to DEST. */
3331
3332static bool
3333one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3334{
3335 tree src;
3336
3337 if (!TYPE_POINTER_TO (src_obj))
3338 return true;
3339
3340 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3341 if (useless_type_conversion_p (dest, src))
3342 return true;
3343
3344 return false;
3345}
3346
726a989a
RB
3347/* Return true if TYPE1 is a fixed-point type and if conversions to and
3348 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3349
3350static bool
3351valid_fixed_convert_types_p (tree type1, tree type2)
3352{
3353 return (FIXED_POINT_TYPE_P (type1)
3354 && (INTEGRAL_TYPE_P (type2)
3355 || SCALAR_FLOAT_TYPE_P (type2)
3356 || FIXED_POINT_TYPE_P (type2)));
3357}
3358
726a989a
RB
3359/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3360 is a problem, otherwise false. */
3361
3362static bool
b59d3976 3363verify_gimple_call (gimple stmt)
726a989a 3364{
b59d3976
RG
3365 tree fn = gimple_call_fn (stmt);
3366 tree fntype;
726a989a 3367
b59d3976
RG
3368 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3369 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3370 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3371 {
3372 error ("non-function in gimple call");
3373 return true;
3374 }
726a989a 3375
b59d3976
RG
3376 if (gimple_call_lhs (stmt)
3377 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3378 {
3379 error ("invalid LHS in gimple call");
3380 return true;
3381 }
726a989a 3382
b59d3976
RG
3383 fntype = TREE_TYPE (TREE_TYPE (fn));
3384 if (gimple_call_lhs (stmt)
3385 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3386 TREE_TYPE (fntype))
3387 /* ??? At least C++ misses conversions at assignments from
3388 void * call results.
3389 ??? Java is completely off. Especially with functions
3390 returning java.lang.Object.
3391 For now simply allow arbitrary pointer type conversions. */
3392 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3393 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3394 {
3395 error ("invalid conversion in gimple call");
3396 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3397 debug_generic_stmt (TREE_TYPE (fntype));
3398 return true;
3399 }
726a989a 3400
b59d3976
RG
3401 /* ??? The C frontend passes unpromoted arguments in case it
3402 didn't see a function declaration before the call. So for now
3403 leave the call arguments unverified. Once we gimplify
3404 unit-at-a-time we have a chance to fix this. */
726a989a 3405
b59d3976 3406 return false;
726a989a
RB
3407}
3408
b59d3976
RG
3409/* Verifies the gimple comparison with the result type TYPE and
3410 the operands OP0 and OP1. */
17d23165
RS
3411
3412static bool
b59d3976 3413verify_gimple_comparison (tree type, tree op0, tree op1)
17d23165 3414{
b59d3976
RG
3415 tree op0_type = TREE_TYPE (op0);
3416 tree op1_type = TREE_TYPE (op1);
726a989a 3417
b59d3976
RG
3418 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3419 {
3420 error ("invalid operands in gimple comparison");
3421 return true;
3422 }
17d23165 3423
b59d3976
RG
3424 /* For comparisons we do not have the operations type as the
3425 effective type the comparison is carried out in. Instead
3426 we require that either the first operand is trivially
3427 convertible into the second, or the other way around.
3428 The resulting type of a comparison may be any integral type.
3429 Because we special-case pointers to void we allow
3430 comparisons of pointers with the same mode as well. */
3431 if ((!useless_type_conversion_p (op0_type, op1_type)
3432 && !useless_type_conversion_p (op1_type, op0_type)
3433 && (!POINTER_TYPE_P (op0_type)
3434 || !POINTER_TYPE_P (op1_type)
3435 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3436 || !INTEGRAL_TYPE_P (type))
3437 {
3438 error ("type mismatch in comparison expression");
3439 debug_generic_expr (type);
3440 debug_generic_expr (op0_type);
3441 debug_generic_expr (op1_type);
3442 return true;
3443 }
3444
3445 return false;
3446}
726a989a 3447
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* The LHS of a unary operation must be a register; the single
     exception is complex-typed LHSs when not optimizing.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions between integral types and pointers only if
	   there is no sign or zero extension involved.
	   For targets were the precision of sizetype doesn't match that
	   of pointers we need to allow arbitrary conversions from and
	   to sizetype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type)
	     && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
		 || rhs1_type == sizetype))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || lhs_type == sizetype)))
	  return false;

	/* Allow conversion from integer to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && TREE_CODE (rhs1_type) == INTEGER_TYPE)
	    || (TREE_CODE (lhs_type) == INTEGER_TYPE
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* Fixed-point conversions must be valid in at least one
	   direction (see valid_fixed_convert_types_p).  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer to floating-point conversion.  */
	if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Floating-point to integer truncation.  */
	if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case TRUTH_NOT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      /* These operate within a single type; fall through to the
	 generic no-conversion check below.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3588
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* The LHS of a binary operation must be a register; the single
     exception is complex-typed LHSs when not optimizing.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* A complex value is built from two scalar components.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand is a vector of
	   integral, fixed-point, or float elements; the shift amount is
	   an integer or an integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of floating point components we
	   only allow shifting by a constant multiple of the element size.  */
	if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
      {
	/* We use regular PLUS_EXPR for vectors.
	   ??? This just makes the checker happy and may not be what is
	   intended.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE
	    && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    /* Strip the vector layer and check as a pointer plus of the
	       element types.  */
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	    rhs2_type = TREE_TYPE (rhs2_type);
	    /* PLUS_EXPR is commutative, so we might end up canonicalizing
	       the pointer to 2nd place.  */
	    if (POINTER_TYPE_P (rhs2_type))
	      {
		tree tem = rhs1_type;
		rhs1_type = rhs2_type;
		rhs2_type = tem;
	      }
	    goto do_pointer_plus_expr_check;
	  }
      }
      /* Fallthru.  */
    case MINUS_EXPR:
      {
	/* Pointer arithmetic must use POINTER_PLUS_EXPR, never
	   PLUS_EXPR/MINUS_EXPR.  */
	if (POINTER_TYPE_P (lhs_type)
	    || POINTER_TYPE_P (rhs1_type)
	    || POINTER_TYPE_P (rhs2_type))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
do_pointer_plus_expr_check:
	/* Pointer operand convertible to the LHS type; offset operand
	   convertible to sizetype.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !useless_type_conversion_p (sizetype, rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Short-circuit forms must have been lowered to control flow.  */
      gcc_unreachable ();

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      {
	/* We allow any kind of integral typed argument and result.  */
	if (!INTEGRAL_TYPE_P (rhs1_type)
	    || !INTEGRAL_TYPE_P (rhs2_type)
	    || !INTEGRAL_TYPE_P (lhs_type))
	  {
	    error ("type mismatch in binary truth expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic check: both operands must trivially convert to the LHS
     type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3850
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  /* RES accumulates errors that do not force an immediate return.  */
  bool res = false;

  /* A plain copy must not involve any conversion at all.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  /* A component LHS (store into a reference) must itself be a valid
     lvalue reference.  */
  if (handled_component_p (lhs))
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* The LHS must be a pointer type trivially convertible from
	   some pointer-to type of the object whose address is taken.  */
	if (!one_pointer_to_useless_type_conversion_p (lhs_type,
						       TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
      /* A memory store must have a register-valued RHS; a reference
	 RHS combined with a memory LHS of register type is invalid.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* A memory-to-memory copy of a register type is not valid
	 GIMPLE; one side must be a register.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case POLYNOMIAL_CHREC:
    case DOT_PROD_EXPR:
    case VEC_COND_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
3966
9f509004
RG
3967/* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3968 is a problem, otherwise false. */
3969
3970static bool
3971verify_gimple_assign (gimple stmt)
3972{
3973 switch (gimple_assign_rhs_class (stmt))
3974 {
3975 case GIMPLE_SINGLE_RHS:
3976 return verify_gimple_assign_single (stmt);
3977
3978 case GIMPLE_UNARY_RHS:
3979 return verify_gimple_assign_unary (stmt);
3980
3981 case GIMPLE_BINARY_RHS:
3982 return verify_gimple_assign_binary (stmt);
3983
3984 default:
3985 gcc_unreachable ();
3986 }
3987}
726a989a
RB
3988
3989/* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3990 is a problem, otherwise false. */
7e98624c
RG
3991
3992static bool
b59d3976 3993verify_gimple_return (gimple stmt)
7e98624c 3994{
726a989a 3995 tree op = gimple_return_retval (stmt);
b59d3976 3996 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
726a989a 3997
b59d3976
RG
3998 /* We cannot test for present return values as we do not fix up missing
3999 return values from the original source. */
726a989a
RB
4000 if (op == NULL)
4001 return false;
b59d3976
RG
4002
4003 if (!is_gimple_val (op)
4004 && TREE_CODE (op) != RESULT_DECL)
4005 {
4006 error ("invalid operand in return statement");
4007 debug_generic_stmt (op);
4008 return true;
4009 }
4010
4011 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
4012 /* ??? With C++ we can have the situation that the result
4013 decl is a reference type while the return type is an aggregate. */
4014 && !(TREE_CODE (op) == RESULT_DECL
4015 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
4016 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
4017 {
4018 error ("invalid conversion in return statement");
4019 debug_generic_stmt (restype);
4020 debug_generic_stmt (TREE_TYPE (op));
4021 return true;
4022 }
4023
4024 return false;
726a989a 4025}
7e98624c 4026
7e98624c 4027
b59d3976
RG
4028/* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4029 is a problem, otherwise false. */
4030
4031static bool
4032verify_gimple_goto (gimple stmt)
4033{
4034 tree dest = gimple_goto_dest (stmt);
4035
4036 /* ??? We have two canonical forms of direct goto destinations, a
4037 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4038 if (TREE_CODE (dest) != LABEL_DECL
4039 && (!is_gimple_val (dest)
4040 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4041 {
4042 error ("goto destination is neither a label nor a pointer");
4043 return true;
4044 }
4045
4046 return false;
4047}
4048
726a989a
RB
4049/* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4050 is a problem, otherwise false. */
4051
4052static bool
b59d3976 4053verify_gimple_switch (gimple stmt)
726a989a
RB
4054{
4055 if (!is_gimple_val (gimple_switch_index (stmt)))
7e98624c 4056 {
726a989a 4057 error ("invalid operand to switch statement");
b59d3976 4058 debug_generic_stmt (gimple_switch_index (stmt));
7e98624c
RG
4059 return true;
4060 }
4061
726a989a
RB
4062 return false;
4063}
7e98624c 4064
7e98624c 4065
726a989a
RB
4066/* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4067 and false otherwise. */
7e98624c 4068
726a989a 4069static bool
b59d3976 4070verify_gimple_phi (gimple stmt)
726a989a 4071{
b59d3976
RG
4072 tree type = TREE_TYPE (gimple_phi_result (stmt));
4073 unsigned i;
7e98624c 4074
b59d3976
RG
4075 if (!is_gimple_variable (gimple_phi_result (stmt)))
4076 {
4077 error ("Invalid PHI result");
4078 return true;
4079 }
7e98624c 4080
726a989a 4081 for (i = 0; i < gimple_phi_num_args (stmt); i++)
b59d3976
RG
4082 {
4083 tree arg = gimple_phi_arg_def (stmt, i);
9f509004
RG
4084 if ((is_gimple_reg (gimple_phi_result (stmt))
4085 && !is_gimple_val (arg))
4086 || (!is_gimple_reg (gimple_phi_result (stmt))
4087 && !is_gimple_addressable (arg)))
b59d3976
RG
4088 {
4089 error ("Invalid PHI argument");
4090 debug_generic_stmt (arg);
4091 return true;
4092 }
4093 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
4094 {
587aa063 4095 error ("Incompatible types in PHI argument %u", i);
b59d3976
RG
4096 debug_generic_stmt (type);
4097 debug_generic_stmt (TREE_TYPE (arg));
4098 return true;
4099 }
4100 }
726a989a 4101
7e98624c
RG
4102 return false;
4103}
4104
726a989a 4105
7e98624c
RG
4106/* Verify the GIMPLE statement STMT. Returns true if there is an
4107 error, otherwise false. */
4108
4109static bool
726a989a 4110verify_types_in_gimple_stmt (gimple stmt)
7e98624c 4111{
726a989a 4112 if (is_gimple_omp (stmt))
7e98624c
RG
4113 {
4114 /* OpenMP directives are validated by the FE and never operated
726a989a 4115 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
7e98624c
RG
4116 non-gimple expressions when the main index variable has had
4117 its address taken. This does not affect the loop itself
726a989a 4118 because the header of an GIMPLE_OMP_FOR is merely used to determine
7e98624c
RG
4119 how to setup the parallel iteration. */
4120 return false;
4121 }
4122
726a989a 4123 switch (gimple_code (stmt))
7e98624c 4124 {
726a989a 4125 case GIMPLE_ASSIGN:
9f509004 4126 return verify_gimple_assign (stmt);
7e98624c 4127
726a989a
RB
4128 case GIMPLE_LABEL:
4129 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
7e98624c 4130
726a989a 4131 case GIMPLE_CALL:
b59d3976 4132 return verify_gimple_call (stmt);
7e98624c 4133
726a989a 4134 case GIMPLE_COND:
b59d3976
RG
4135 return verify_gimple_comparison (boolean_type_node,
4136 gimple_cond_lhs (stmt),
4137 gimple_cond_rhs (stmt));
7e98624c 4138
726a989a 4139 case GIMPLE_GOTO:
b59d3976 4140 return verify_gimple_goto (stmt);
7e98624c 4141
726a989a 4142 case GIMPLE_SWITCH:
b59d3976 4143 return verify_gimple_switch (stmt);
7e98624c 4144
726a989a 4145 case GIMPLE_RETURN:
b59d3976 4146 return verify_gimple_return (stmt);
7e98624c 4147
726a989a 4148 case GIMPLE_ASM:
7e98624c
RG
4149 return false;
4150
726a989a 4151 case GIMPLE_PHI:
b59d3976
RG
4152 return verify_gimple_phi (stmt);
4153
4154 /* Tuples that do not have tree operands. */
4155 case GIMPLE_NOP:
4156 case GIMPLE_RESX:
4157 case GIMPLE_PREDICT:
4158 return false;
726a989a 4159
7e98624c
RG
4160 default:
4161 gcc_unreachable ();
4162 }
4163}
4164
726a989a 4165/* Verify the GIMPLE statements inside the sequence STMTS. */
7e98624c 4166
7dc83ebc 4167static bool
726a989a 4168verify_types_in_gimple_seq_2 (gimple_seq stmts)
7e98624c 4169{
726a989a 4170 gimple_stmt_iterator ittr;
7dc83ebc 4171 bool err = false;
7e98624c 4172
726a989a 4173 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
7e98624c 4174 {
726a989a 4175 gimple stmt = gsi_stmt (ittr);
7e98624c 4176
726a989a
RB
4177 switch (gimple_code (stmt))
4178 {
b59d3976
RG
4179 case GIMPLE_BIND:
4180 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4181 break;
4182
4183 case GIMPLE_TRY:
4184 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4185 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4186 break;
4187
4188 case GIMPLE_EH_FILTER:
4189 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4190 break;
4191
4192 case GIMPLE_CATCH:
4193 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4194 break;
7e98624c
RG
4195
4196 default:
7dc83ebc 4197 {
726a989a 4198 bool err2 = verify_types_in_gimple_stmt (stmt);
7dc83ebc 4199 if (err2)
726a989a 4200 debug_gimple_stmt (stmt);
7dc83ebc
RG
4201 err |= err2;
4202 }
7e98624c
RG
4203 }
4204 }
7dc83ebc
RG
4205
4206 return err;
4207}
4208
4209
4210/* Verify the GIMPLE statements inside the statement list STMTS. */
4211
4212void
726a989a 4213verify_types_in_gimple_seq (gimple_seq stmts)
7dc83ebc 4214{
726a989a 4215 if (verify_types_in_gimple_seq_2 (stmts))
7dc83ebc 4216 internal_error ("verify_gimple failed");
7e98624c
RG
4217}
4218
6de9cd9a
DN
4219
4220/* Verify STMT, return true if STMT is not in GIMPLE form.
4221 TODO: Implement type checking. */
4222
4223static bool
726a989a 4224verify_stmt (gimple_stmt_iterator *gsi)
6de9cd9a
DN
4225{
4226 tree addr;
726a989a
RB
4227 struct walk_stmt_info wi;
4228 bool last_in_block = gsi_one_before_end_p (*gsi);
4229 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 4230
726a989a 4231 if (is_gimple_omp (stmt))
50674e96
DN
4232 {
4233 /* OpenMP directives are validated by the FE and never operated
726a989a 4234 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
50674e96
DN
4235 non-gimple expressions when the main index variable has had
4236 its address taken. This does not affect the loop itself
726a989a 4237 because the header of an GIMPLE_OMP_FOR is merely used to determine
50674e96
DN
4238 how to setup the parallel iteration. */
4239 return false;
4240 }
4241
726a989a
RB
4242 /* FIXME. The C frontend passes unpromoted arguments in case it
4243 didn't see a function declaration before the call. */
4244 if (is_gimple_call (stmt))
6de9cd9a 4245 {
7c9577be 4246 tree decl;
726a989a 4247
7c9577be
RG
4248 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4249 {
4250 error ("invalid function in call statement");
4251 return true;
4252 }
4253
4254 decl = gimple_call_fndecl (stmt);
4255 if (decl
4256 && TREE_CODE (decl) == FUNCTION_DECL
726a989a
RB
4257 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4258 && (!DECL_PURE_P (decl))
4259 && (!TREE_READONLY (decl)))
4260 {
4261 error ("invalid pure const state for function");
4262 return true;
4263 }
6de9cd9a
DN
4264 }
4265
726a989a
RB
4266 memset (&wi, 0, sizeof (wi));
4267 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
6de9cd9a
DN
4268 if (addr)
4269 {
726a989a 4270 debug_generic_expr (addr);
1f5b3869 4271 inform (input_location, "in statement");
726a989a 4272 debug_gimple_stmt (stmt);
6de9cd9a
DN
4273 return true;
4274 }
4275
1eaba2f2
RH
4276 /* If the statement is marked as part of an EH region, then it is
4277 expected that the statement could throw. Verify that when we
4278 have optimizations that simplify statements such that we prove
4279 that they cannot throw, that we update other data structures
4280 to match. */
4281 if (lookup_stmt_eh_region (stmt) >= 0)
4282 {
2505c5ed
JH
4283 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4284 and they are updated on statements only after fixup_cfg
4285 is executed at beggining of expansion stage. */
4286 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
1eaba2f2 4287 {
ab532386 4288 error ("statement marked for throw, but doesn%'t");
1eaba2f2
RH
4289 goto fail;
4290 }
726a989a 4291 if (!last_in_block && stmt_can_throw_internal (stmt))
1eaba2f2 4292 {
ab532386 4293 error ("statement marked for throw in middle of block");
1eaba2f2
RH
4294 goto fail;
4295 }
4296 }
4297
6de9cd9a 4298 return false;
1eaba2f2
RH
4299
4300 fail:
726a989a 4301 debug_gimple_stmt (stmt);
1eaba2f2 4302 return true;
6de9cd9a
DN
4303}
4304
4305
4306/* Return true when the T can be shared. */
4307
4308static bool
4309tree_node_can_be_shared (tree t)
4310{
6615c446 4311 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 4312 || is_gimple_min_invariant (t)
5e23162d 4313 || TREE_CODE (t) == SSA_NAME
953ff289
DN
4314 || t == error_mark_node
4315 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
4316 return true;
4317
92b6dff3
JL
4318 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4319 return true;
4320
44de5aeb 4321 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
4322 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4323 || TREE_CODE (t) == COMPONENT_REF
4324 || TREE_CODE (t) == REALPART_EXPR
4325 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
4326 t = TREE_OPERAND (t, 0);
4327
4328 if (DECL_P (t))
4329 return true;
4330
4331 return false;
4332}
4333
4334
726a989a 4335/* Called via walk_gimple_stmt. Verify tree sharing. */
6de9cd9a
DN
4336
4337static tree
726a989a 4338verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 4339{
726a989a
RB
4340 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4341 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
6de9cd9a
DN
4342
4343 if (tree_node_can_be_shared (*tp))
4344 {
4345 *walk_subtrees = false;
4346 return NULL;
4347 }
4348
4437b50d
JH
4349 if (pointer_set_insert (visited, *tp))
4350 return *tp;
6de9cd9a
DN
4351
4352 return NULL;
4353}
4354
4355
4437b50d
JH
4356static bool eh_error_found;
4357static int
4358verify_eh_throw_stmt_node (void **slot, void *data)
4359{
4360 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4361 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4362
4363 if (!pointer_set_contains (visited, node->stmt))
4364 {
4365 error ("Dead STMT in EH table");
726a989a 4366 debug_gimple_stmt (node->stmt);
4437b50d
JH
4367 eh_error_found = true;
4368 }
c13edb67 4369 return 1;
4437b50d
JH
4370}
4371
726a989a
RB
4372
4373/* Verify the GIMPLE statements in every basic block. */
6de9cd9a
DN
4374
4375void
4376verify_stmts (void)
4377{
4378 basic_block bb;
726a989a 4379 gimple_stmt_iterator gsi;
6de9cd9a 4380 bool err = false;
4437b50d 4381 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a 4382 tree addr;
726a989a 4383 struct walk_stmt_info wi;
6de9cd9a
DN
4384
4385 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4386 visited = pointer_set_create ();
4387 visited_stmts = pointer_set_create ();
6de9cd9a 4388
726a989a
RB
4389 memset (&wi, 0, sizeof (wi));
4390 wi.info = (void *) visited;
4391
6de9cd9a
DN
4392 FOR_EACH_BB (bb)
4393 {
726a989a
RB
4394 gimple phi;
4395 size_t i;
6de9cd9a 4396
726a989a 4397 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4398 {
726a989a 4399 phi = gsi_stmt (gsi);
4437b50d 4400 pointer_set_insert (visited_stmts, phi);
726a989a 4401 if (gimple_bb (phi) != bb)
8de1fc1b 4402 {
726a989a 4403 error ("gimple_bb (phi) is set to a wrong basic block");
8de1fc1b
KH
4404 err |= true;
4405 }
4406
726a989a 4407 for (i = 0; i < gimple_phi_num_args (phi); i++)
6de9cd9a 4408 {
726a989a 4409 tree t = gimple_phi_arg_def (phi, i);
6de9cd9a
DN
4410 tree addr;
4411
e9705dc5
AO
4412 if (!t)
4413 {
4414 error ("missing PHI def");
726a989a 4415 debug_gimple_stmt (phi);
e9705dc5
AO
4416 err |= true;
4417 continue;
4418 }
6de9cd9a
DN
4419 /* Addressable variables do have SSA_NAMEs but they
4420 are not considered gimple values. */
e9705dc5
AO
4421 else if (TREE_CODE (t) != SSA_NAME
4422 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4423 && !is_gimple_min_invariant (t))
6de9cd9a 4424 {
726a989a
RB
4425 error ("PHI argument is not a GIMPLE value");
4426 debug_gimple_stmt (phi);
4427 debug_generic_expr (t);
6de9cd9a
DN
4428 err |= true;
4429 }
4430
4437b50d 4431 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4432 if (addr)
4433 {
ab532386 4434 error ("incorrect sharing of tree nodes");
726a989a
RB
4435 debug_gimple_stmt (phi);
4436 debug_generic_expr (addr);
6de9cd9a
DN
4437 err |= true;
4438 }
4439 }
211ca15c
RG
4440
4441#ifdef ENABLE_TYPES_CHECKING
4442 if (verify_gimple_phi (phi))
4443 {
4444 debug_gimple_stmt (phi);
4445 err |= true;
4446 }
4447#endif
6de9cd9a
DN
4448 }
4449
726a989a 4450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
6de9cd9a 4451 {
726a989a
RB
4452 gimple stmt = gsi_stmt (gsi);
4453
4454 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4455 || gimple_code (stmt) == GIMPLE_BIND)
4456 {
4457 error ("invalid GIMPLE statement");
4458 debug_gimple_stmt (stmt);
4459 err |= true;
4460 }
8de1fc1b 4461
4437b50d 4462 pointer_set_insert (visited_stmts, stmt);
07beea0d 4463
726a989a 4464 if (gimple_bb (stmt) != bb)
8de1fc1b 4465 {
726a989a 4466 error ("gimple_bb (stmt) is set to a wrong basic block");
8de1fc1b
KH
4467 err |= true;
4468 }
4469
726a989a
RB
4470 if (gimple_code (stmt) == GIMPLE_LABEL)
4471 {
4472 tree decl = gimple_label_label (stmt);
4473 int uid = LABEL_DECL_UID (decl);
4474
4475 if (uid == -1
4476 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4477 {
4478 error ("incorrect entry in label_to_block_map.\n");
4479 err |= true;
4480 }
4481 }
4482
4483 err |= verify_stmt (&gsi);
211ca15c
RG
4484
4485#ifdef ENABLE_TYPES_CHECKING
4486 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4487 {
4488 debug_gimple_stmt (stmt);
4489 err |= true;
4490 }
4491#endif
726a989a 4492 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
6de9cd9a
DN
4493 if (addr)
4494 {
ab532386 4495 error ("incorrect sharing of tree nodes");
726a989a
RB
4496 debug_gimple_stmt (stmt);
4497 debug_generic_expr (addr);
6de9cd9a
DN
4498 err |= true;
4499 }
726a989a 4500 gsi_next (&gsi);
6de9cd9a
DN
4501 }
4502 }
726a989a 4503
4437b50d
JH
4504 eh_error_found = false;
4505 if (get_eh_throw_stmt_table (cfun))
4506 htab_traverse (get_eh_throw_stmt_table (cfun),
4507 verify_eh_throw_stmt_node,
4508 visited_stmts);
6de9cd9a 4509
4437b50d 4510 if (err | eh_error_found)
ab532386 4511 internal_error ("verify_stmts failed");
6de9cd9a 4512
4437b50d
JH
4513 pointer_set_destroy (visited);
4514 pointer_set_destroy (visited_stmts);
6946b3f7 4515 verify_histograms ();
6de9cd9a
DN
4516 timevar_pop (TV_TREE_STMT_VERIFY);
4517}
4518
4519
4520/* Verifies that the flow information is OK. */
4521
4522static int
726a989a 4523gimple_verify_flow_info (void)
6de9cd9a
DN
4524{
4525 int err = 0;
4526 basic_block bb;
726a989a
RB
4527 gimple_stmt_iterator gsi;
4528 gimple stmt;
6de9cd9a 4529 edge e;
628f6a4e 4530 edge_iterator ei;
6de9cd9a 4531
726a989a 4532 if (ENTRY_BLOCK_PTR->il.gimple)
6de9cd9a 4533 {
7506e1cb 4534 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4535 err = 1;
4536 }
4537
726a989a 4538 if (EXIT_BLOCK_PTR->il.gimple)
6de9cd9a 4539 {
7506e1cb 4540 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4541 err = 1;
4542 }
4543
628f6a4e 4544 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4545 if (e->flags & EDGE_FALLTHRU)
4546 {
ab532386 4547 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4548 err = 1;
4549 }
4550
4551 FOR_EACH_BB (bb)
4552 {
4553 bool found_ctrl_stmt = false;
4554
726a989a 4555 stmt = NULL;
548414c6 4556
6de9cd9a 4557 /* Skip labels on the start of basic block. */
726a989a 4558 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4559 {
726a989a
RB
4560 tree label;
4561 gimple prev_stmt = stmt;
548414c6 4562
726a989a 4563 stmt = gsi_stmt (gsi);
548414c6 4564
726a989a 4565 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
4566 break;
4567
726a989a
RB
4568 label = gimple_label_label (stmt);
4569 if (prev_stmt && DECL_NONLOCAL (label))
548414c6 4570 {
953ff289 4571 error ("nonlocal label ");
726a989a 4572 print_generic_expr (stderr, label, 0);
953ff289
DN
4573 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4574 bb->index);
548414c6
KH
4575 err = 1;
4576 }
4577
726a989a 4578 if (label_to_block (label) != bb)
6de9cd9a 4579 {
953ff289 4580 error ("label ");
726a989a 4581 print_generic_expr (stderr, label, 0);
953ff289
DN
4582 fprintf (stderr, " to block does not match in bb %d",
4583 bb->index);
6de9cd9a
DN
4584 err = 1;
4585 }
4586
726a989a 4587 if (decl_function_context (label) != current_function_decl)
6de9cd9a 4588 {
953ff289 4589 error ("label ");
726a989a 4590 print_generic_expr (stderr, label, 0);
953ff289
DN
4591 fprintf (stderr, " has incorrect context in bb %d",
4592 bb->index);
6de9cd9a
DN
4593 err = 1;
4594 }
4595 }
4596
4597 /* Verify that body of basic block BB is free of control flow. */
726a989a 4598 for (; !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4599 {
726a989a 4600 gimple stmt = gsi_stmt (gsi);
6de9cd9a
DN
4601
4602 if (found_ctrl_stmt)
4603 {
ab532386 4604 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4605 bb->index);
4606 err = 1;
4607 }
4608
4609 if (stmt_ends_bb_p (stmt))
4610 found_ctrl_stmt = true;
4611
726a989a 4612 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a 4613 {
953ff289 4614 error ("label ");
726a989a 4615 print_generic_expr (stderr, gimple_label_label (stmt), 0);
953ff289 4616 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4617 err = 1;
4618 }
4619 }
953ff289 4620
726a989a
RB
4621 gsi = gsi_last_bb (bb);
4622 if (gsi_end_p (gsi))
6de9cd9a
DN
4623 continue;
4624
726a989a 4625 stmt = gsi_stmt (gsi);
6de9cd9a 4626
cc7220fd
JH
4627 err |= verify_eh_edges (stmt);
4628
6de9cd9a
DN
4629 if (is_ctrl_stmt (stmt))
4630 {
628f6a4e 4631 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4632 if (e->flags & EDGE_FALLTHRU)
4633 {
ab532386 4634 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4635 bb->index);
4636 err = 1;
4637 }
4638 }
4639
726a989a 4640 if (gimple_code (stmt) != GIMPLE_COND)
36b24193
ZD
4641 {
4642 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4643 after anything else but if statement. */
4644 FOR_EACH_EDGE (e, ei, bb->succs)
4645 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4646 {
726a989a 4647 error ("true/false edge after a non-GIMPLE_COND in bb %d",
36b24193
ZD
4648 bb->index);
4649 err = 1;
4650 }
4651 }
4652
726a989a 4653 switch (gimple_code (stmt))
6de9cd9a 4654 {
726a989a 4655 case GIMPLE_COND:
6de9cd9a
DN
4656 {
4657 edge true_edge;
4658 edge false_edge;
a9b77cd1 4659
6de9cd9a
DN
4660 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4661
726a989a
RB
4662 if (!true_edge
4663 || !false_edge
6de9cd9a
DN
4664 || !(true_edge->flags & EDGE_TRUE_VALUE)
4665 || !(false_edge->flags & EDGE_FALSE_VALUE)
4666 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4667 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4668 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4669 {
ab532386 4670 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4671 bb->index);
4672 err = 1;
4673 }
6de9cd9a
DN
4674 }
4675 break;
4676
726a989a 4677 case GIMPLE_GOTO:
6de9cd9a
DN
4678 if (simple_goto_p (stmt))
4679 {
ab532386 4680 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4681 err = 1;
6de9cd9a
DN
4682 }
4683 else
4684 {
6531d1be 4685 /* FIXME. We should double check that the labels in the
6de9cd9a 4686 destination blocks have their address taken. */
628f6a4e 4687 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4688 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4689 | EDGE_FALSE_VALUE))
4690 || !(e->flags & EDGE_ABNORMAL))
4691 {
ab532386 4692 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4693 bb->index);
4694 err = 1;
4695 }
4696 }
4697 break;
4698
726a989a 4699 case GIMPLE_RETURN:
c5cbcccf
ZD
4700 if (!single_succ_p (bb)
4701 || (single_succ_edge (bb)->flags
4702 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4703 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4704 {
ab532386 4705 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4706 err = 1;
4707 }
c5cbcccf 4708 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4709 {
ab532386 4710 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4711 bb->index);
4712 err = 1;
4713 }
4714 break;
4715
726a989a 4716 case GIMPLE_SWITCH:
6de9cd9a 4717 {
7853504d 4718 tree prev;
6de9cd9a
DN
4719 edge e;
4720 size_t i, n;
6de9cd9a 4721
726a989a 4722 n = gimple_switch_num_labels (stmt);
6de9cd9a
DN
4723
4724 /* Mark all the destination basic blocks. */
4725 for (i = 0; i < n; ++i)
4726 {
726a989a 4727 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a 4728 basic_block label_bb = label_to_block (lab);
1e128c5f 4729 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4730 label_bb->aux = (void *)1;
4731 }
4732
7853504d 4733 /* Verify that the case labels are sorted. */
726a989a 4734 prev = gimple_switch_label (stmt, 0);
b7814a18 4735 for (i = 1; i < n; ++i)
7853504d 4736 {
726a989a
RB
4737 tree c = gimple_switch_label (stmt, i);
4738 if (!CASE_LOW (c))
7853504d 4739 {
726a989a
RB
4740 error ("found default case not at the start of "
4741 "case vector");
4742 err = 1;
7853504d
SB
4743 continue;
4744 }
726a989a
RB
4745 if (CASE_LOW (prev)
4746 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
7853504d 4747 {
953ff289 4748 error ("case labels not sorted: ");
7853504d
SB
4749 print_generic_expr (stderr, prev, 0);
4750 fprintf (stderr," is greater than ");
4751 print_generic_expr (stderr, c, 0);
4752 fprintf (stderr," but comes before it.\n");
4753 err = 1;
4754 }
4755 prev = c;
4756 }
b7814a18
RG
4757 /* VRP will remove the default case if it can prove it will
4758 never be executed. So do not verify there always exists
4759 a default case here. */
7853504d 4760
628f6a4e 4761 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4762 {
4763 if (!e->dest->aux)
4764 {
ab532386 4765 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4766 bb->index, e->dest->index);
4767 err = 1;
4768 }
726a989a 4769
6de9cd9a
DN
4770 e->dest->aux = (void *)2;
4771 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4772 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4773 {
ab532386 4774 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4775 bb->index);
4776 err = 1;
4777 }
4778 }
4779
4780 /* Check that we have all of them. */
4781 for (i = 0; i < n; ++i)
4782 {
726a989a 4783 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a
DN
4784 basic_block label_bb = label_to_block (lab);
4785
4786 if (label_bb->aux != (void *)2)
4787 {
726a989a 4788 error ("missing edge %i->%i", bb->index, label_bb->index);
6de9cd9a
DN
4789 err = 1;
4790 }
4791 }
4792
628f6a4e 4793 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4794 e->dest->aux = (void *)0;
4795 }
4796
4797 default: ;
4798 }
4799 }
4800
2b28c07a 4801 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4802 verify_dominators (CDI_DOMINATORS);
4803
4804 return err;
4805}
4806
4807
f0b698c1 4808/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4809 by edge FALLTHRU. */
4810
4811static void
726a989a 4812gimple_make_forwarder_block (edge fallthru)
6de9cd9a
DN
4813{
4814 edge e;
628f6a4e 4815 edge_iterator ei;
6de9cd9a 4816 basic_block dummy, bb;
726a989a
RB
4817 tree var;
4818 gimple_stmt_iterator gsi;
6de9cd9a
DN
4819
4820 dummy = fallthru->src;
4821 bb = fallthru->dest;
4822
c5cbcccf 4823 if (single_pred_p (bb))
6de9cd9a
DN
4824 return;
4825
cfaab3a9 4826 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4827 start of BB. */
726a989a 4828 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4829 {
726a989a
RB
4830 gimple phi, new_phi;
4831
4832 phi = gsi_stmt (gsi);
4833 var = gimple_phi_result (phi);
6de9cd9a
DN
4834 new_phi = create_phi_node (var, bb);
4835 SSA_NAME_DEF_STMT (var) = new_phi;
726a989a
RB
4836 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4837 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
6de9cd9a
DN
4838 }
4839
6de9cd9a 4840 /* Add the arguments we have stored on edges. */
628f6a4e 4841 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4842 {
4843 if (e == fallthru)
4844 continue;
4845
71882046 4846 flush_pending_stmts (e);
6de9cd9a
DN
4847 }
4848}
4849
4850
6de9cd9a
DN
4851/* Return a non-special label in the head of basic block BLOCK.
4852 Create one if it doesn't exist. */
4853
d7621d3c 4854tree
726a989a 4855gimple_block_label (basic_block bb)
6de9cd9a 4856{
726a989a 4857 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6de9cd9a 4858 bool first = true;
726a989a
RB
4859 tree label;
4860 gimple stmt;
6de9cd9a 4861
726a989a 4862 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6de9cd9a 4863 {
726a989a
RB
4864 stmt = gsi_stmt (i);
4865 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 4866 break;
726a989a 4867 label = gimple_label_label (stmt);
6de9cd9a
DN
4868 if (!DECL_NONLOCAL (label))
4869 {
4870 if (!first)
726a989a 4871 gsi_move_before (&i, &s);
6de9cd9a
DN
4872 return label;
4873 }
4874 }
4875
4876 label = create_artificial_label ();
726a989a
RB
4877 stmt = gimple_build_label (label);
4878 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6de9cd9a
DN
4879 return label;
4880}
4881
4882
4883/* Attempt to perform edge redirection by replacing a possibly complex
4884 jump instruction by a goto or by removing the jump completely.
4885 This can apply only if all edges now point to the same block. The
4886 parameters and return values are equivalent to
4887 redirect_edge_and_branch. */
4888
4889static edge
726a989a 4890gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6de9cd9a
DN
4891{
4892 basic_block src = e->src;
726a989a
RB
4893 gimple_stmt_iterator i;
4894 gimple stmt;
6de9cd9a 4895
07b43a87
KH
4896 /* We can replace or remove a complex jump only when we have exactly
4897 two edges. */
4898 if (EDGE_COUNT (src->succs) != 2
4899 /* Verify that all targets will be TARGET. Specifically, the
4900 edge that is not E must also go to TARGET. */
4901 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4902 return NULL;
4903
726a989a
RB
4904 i = gsi_last_bb (src);
4905 if (gsi_end_p (i))
6de9cd9a 4906 return NULL;
6de9cd9a 4907
726a989a
RB
4908 stmt = gsi_stmt (i);
4909
4910 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a 4911 {
726a989a 4912 gsi_remove (&i, true);
6de9cd9a
DN
4913 e = ssa_redirect_edge (e, target);
4914 e->flags = EDGE_FALLTHRU;
4915 return e;
4916 }
4917
4918 return NULL;
4919}
4920
4921
4922/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4923 edge representing the redirected branch. */
4924
4925static edge
726a989a 4926gimple_redirect_edge_and_branch (edge e, basic_block dest)
6de9cd9a
DN
4927{
4928 basic_block bb = e->src;
726a989a 4929 gimple_stmt_iterator gsi;
6de9cd9a 4930 edge ret;
726a989a 4931 gimple stmt;
6de9cd9a 4932
4f6c2131 4933 if (e->flags & EDGE_ABNORMAL)
6de9cd9a
DN
4934 return NULL;
4935
6531d1be 4936 if (e->src != ENTRY_BLOCK_PTR
726a989a 4937 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
6de9cd9a
DN
4938 return ret;
4939
4940 if (e->dest == dest)
4941 return NULL;
4942
a3710436
JH
4943 if (e->flags & EDGE_EH)
4944 return redirect_eh_edge (e, dest);
4945
726a989a
RB
4946 gsi = gsi_last_bb (bb);
4947 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6de9cd9a 4948
726a989a 4949 switch (stmt ? gimple_code (stmt) : ERROR_MARK)
6de9cd9a 4950 {
726a989a 4951 case GIMPLE_COND:
a9b77cd1 4952 /* For COND_EXPR, we only need to redirect the edge. */
6de9cd9a
DN
4953 break;
4954
726a989a 4955 case GIMPLE_GOTO:
6de9cd9a
DN
4956 /* No non-abnormal edges should lead from a non-simple goto, and
4957 simple ones should be represented implicitly. */
1e128c5f 4958 gcc_unreachable ();
6de9cd9a 4959
726a989a 4960 case GIMPLE_SWITCH:
6de9cd9a 4961 {
726a989a 4962 tree label = gimple_block_label (dest);
d6be0d7f 4963 tree cases = get_cases_for_edge (e, stmt);
6de9cd9a 4964
d6be0d7f
JL
4965 /* If we have a list of cases associated with E, then use it
4966 as it's a lot faster than walking the entire case vector. */
4967 if (cases)
6de9cd9a 4968 {
4edbbd3f 4969 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4970 tree last, first;
4971
4972 first = cases;
4973 while (cases)
4974 {
4975 last = cases;
4976 CASE_LABEL (cases) = label;
4977 cases = TREE_CHAIN (cases);
4978 }
4979
4980 /* If there was already an edge in the CFG, then we need
4981 to move all the cases associated with E to E2. */
4982 if (e2)
4983 {
4984 tree cases2 = get_cases_for_edge (e2, stmt);
4985
4986 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4987 TREE_CHAIN (cases2) = first;
4988 }
6de9cd9a 4989 }
92b6dff3
JL
4990 else
4991 {
726a989a 4992 size_t i, n = gimple_switch_num_labels (stmt);
d6be0d7f
JL
4993
4994 for (i = 0; i < n; i++)
4995 {
726a989a 4996 tree elt = gimple_switch_label (stmt, i);
d6be0d7f
JL
4997 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4998 CASE_LABEL (elt) = label;
4999 }
92b6dff3 5000 }
d6be0d7f 5001
92b6dff3 5002 break;
6de9cd9a 5003 }
6de9cd9a 5004
726a989a
RB
5005 case GIMPLE_RETURN:
5006 gsi_remove (&gsi, true);
6de9cd9a
DN
5007 e->flags |= EDGE_FALLTHRU;
5008 break;
5009
726a989a
RB
5010 case GIMPLE_OMP_RETURN:
5011 case GIMPLE_OMP_CONTINUE:
5012 case GIMPLE_OMP_SECTIONS_SWITCH:
5013 case GIMPLE_OMP_FOR:
e5c95afe
ZD
5014 /* The edges from OMP constructs can be simply redirected. */
5015 break;
5016
6de9cd9a
DN
5017 default:
5018 /* Otherwise it must be a fallthru edge, and we don't need to
5019 do anything besides redirecting it. */
1e128c5f 5020 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
5021 break;
5022 }
5023
5024 /* Update/insert PHI nodes as necessary. */
5025
5026 /* Now update the edges in the CFG. */
5027 e = ssa_redirect_edge (e, dest);
5028
5029 return e;
5030}
5031
14fa2cc0
ZD
5032/* Returns true if it is possible to remove edge E by redirecting
5033 it to the destination of the other edge from E->src. */
5034
5035static bool
726a989a 5036gimple_can_remove_branch_p (const_edge e)
14fa2cc0 5037{
496a4ef5 5038 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
14fa2cc0
ZD
5039 return false;
5040
5041 return true;
5042}
6de9cd9a
DN
5043
5044/* Simple wrapper, as we can always redirect fallthru edges. */
5045
5046static basic_block
726a989a 5047gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6de9cd9a 5048{
726a989a 5049 e = gimple_redirect_edge_and_branch (e, dest);
1e128c5f 5050 gcc_assert (e);
6de9cd9a
DN
5051
5052 return NULL;
5053}
5054
5055
5056/* Splits basic block BB after statement STMT (but at least after the
5057 labels). If STMT is NULL, BB is split just after the labels. */
5058
5059static basic_block
726a989a 5060gimple_split_block (basic_block bb, void *stmt)
6de9cd9a 5061{
726a989a
RB
5062 gimple_stmt_iterator gsi;
5063 gimple_stmt_iterator gsi_tgt;
5064 gimple act;
5065 gimple_seq list;
6de9cd9a
DN
5066 basic_block new_bb;
5067 edge e;
628f6a4e 5068 edge_iterator ei;
6de9cd9a
DN
5069
5070 new_bb = create_empty_bb (bb);
5071
5072 /* Redirect the outgoing edges. */
628f6a4e
BE
5073 new_bb->succs = bb->succs;
5074 bb->succs = NULL;
5075 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
5076 e->src = new_bb;
5077
726a989a 5078 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
6de9cd9a
DN
5079 stmt = NULL;
5080
726a989a
RB
5081 /* Move everything from GSI to the new basic block. */
5082 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 5083 {
726a989a
RB
5084 act = gsi_stmt (gsi);
5085 if (gimple_code (act) == GIMPLE_LABEL)
6de9cd9a
DN
5086 continue;
5087
5088 if (!stmt)
5089 break;
5090
5091 if (stmt == act)
5092 {
726a989a 5093 gsi_next (&gsi);
6de9cd9a
DN
5094 break;
5095 }
5096 }
5097
726a989a 5098 if (gsi_end_p (gsi))
597ae074
JH
5099 return new_bb;
5100
5101 /* Split the statement list - avoid re-creating new containers as this
5102 brings ugly quadratic memory consumption in the inliner.
5103 (We are still quadratic since we need to update stmt BB pointers,
5104 sadly.) */
726a989a
RB
5105 list = gsi_split_seq_before (&gsi);
5106 set_bb_seq (new_bb, list);
5107 for (gsi_tgt = gsi_start (list);
5108 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5109 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6de9cd9a
DN
5110
5111 return new_bb;
5112}
5113
5114
5115/* Moves basic block BB after block AFTER. */
5116
5117static bool
726a989a 5118gimple_move_block_after (basic_block bb, basic_block after)
6de9cd9a
DN
5119{
5120 if (bb->prev_bb == after)
5121 return true;
5122
5123 unlink_block (bb);
5124 link_block (bb, after);
5125
5126 return true;
5127}
5128
5129
5130/* Return true if basic_block can be duplicated. */
5131
5132static bool
726a989a 5133gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6de9cd9a
DN
5134{
5135 return true;
5136}
5137
6de9cd9a
DN
5138/* Create a duplicate of the basic block BB. NOTE: This does not
5139 preserve SSA form. */
5140
5141static basic_block
726a989a 5142gimple_duplicate_bb (basic_block bb)
6de9cd9a
DN
5143{
5144 basic_block new_bb;
726a989a
RB
5145 gimple_stmt_iterator gsi, gsi_tgt;
5146 gimple_seq phis = phi_nodes (bb);
5147 gimple phi, stmt, copy;
6de9cd9a
DN
5148
5149 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 5150
84d65814
DN
5151 /* Copy the PHI nodes. We ignore PHI node arguments here because
5152 the incoming edges have not been setup yet. */
726a989a 5153 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
b0382c67 5154 {
726a989a
RB
5155 phi = gsi_stmt (gsi);
5156 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5157 create_new_def_for (gimple_phi_result (copy), copy,
5158 gimple_phi_result_ptr (copy));
b0382c67 5159 }
84d65814 5160
726a989a
RB
5161 gsi_tgt = gsi_start_bb (new_bb);
5162 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 5163 {
84d65814
DN
5164 def_operand_p def_p;
5165 ssa_op_iter op_iter;
cc7220fd 5166 int region;
6de9cd9a 5167
726a989a
RB
5168 stmt = gsi_stmt (gsi);
5169 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
5170 continue;
5171
84d65814
DN
5172 /* Create a new copy of STMT and duplicate STMT's virtual
5173 operands. */
726a989a
RB
5174 copy = gimple_copy (stmt);
5175 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
cc7220fd
JH
5176 region = lookup_stmt_eh_region (stmt);
5177 if (region >= 0)
5178 add_stmt_to_eh_region (copy, region);
6946b3f7 5179 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
84d65814
DN
5180
5181 /* Create new names for all the definitions created by COPY and
5182 add replacement mappings for each new name. */
5183 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5184 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6de9cd9a
DN
5185 }
5186
5187 return new_bb;
5188}
5189
5f40b3cb
ZD
5190/* Adds phi node arguments for edge E_COPY after basic block duplication. */
5191
5192static void
5193add_phi_args_after_copy_edge (edge e_copy)
5194{
5195 basic_block bb, bb_copy = e_copy->src, dest;
5196 edge e;
5197 edge_iterator ei;
726a989a
RB
5198 gimple phi, phi_copy;
5199 tree def;
5200 gimple_stmt_iterator psi, psi_copy;
5f40b3cb 5201
726a989a 5202 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5f40b3cb
ZD
5203 return;
5204
5205 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5206
5207 if (e_copy->dest->flags & BB_DUPLICATED)
5208 dest = get_bb_original (e_copy->dest);
5209 else
5210 dest = e_copy->dest;
5211
5212 e = find_edge (bb, dest);
5213 if (!e)
5214 {
5215 /* During loop unrolling the target of the latch edge is copied.
5216 In this case we are not looking for edge to dest, but to
5217 duplicated block whose original was dest. */
5218 FOR_EACH_EDGE (e, ei, bb->succs)
5219 {
5220 if ((e->dest->flags & BB_DUPLICATED)
5221 && get_bb_original (e->dest) == dest)
5222 break;
5223 }
5224
5225 gcc_assert (e != NULL);
5226 }
5227
726a989a
RB
5228 for (psi = gsi_start_phis (e->dest),
5229 psi_copy = gsi_start_phis (e_copy->dest);
5230 !gsi_end_p (psi);
5231 gsi_next (&psi), gsi_next (&psi_copy))
5f40b3cb 5232 {
726a989a
RB
5233 phi = gsi_stmt (psi);
5234 phi_copy = gsi_stmt (psi_copy);
5f40b3cb
ZD
5235 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5236 add_phi_arg (phi_copy, def, e_copy);
5237 }
5238}
5239
84d65814 5240
42759f1e
ZD
5241/* Basic block BB_COPY was created by code duplication. Add phi node
5242 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 5243 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
5244
5245void
5246add_phi_args_after_copy_bb (basic_block bb_copy)
5247{
5f40b3cb 5248 edge e_copy;
726a989a 5249 edge_iterator ei;
42759f1e 5250
628f6a4e 5251 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 5252 {
5f40b3cb 5253 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5254 }
5255}
5256
5257/* Blocks in REGION_COPY array of length N_REGION were created by
5258 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
5259 going from these blocks. If E_COPY is not NULL, also add
5260 phi node arguments for its destination.*/
42759f1e
ZD
5261
5262void
5f40b3cb
ZD
5263add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5264 edge e_copy)
42759f1e
ZD
5265{
5266 unsigned i;
5267
5268 for (i = 0; i < n_region; i++)
6580ee77 5269 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
5270
5271 for (i = 0; i < n_region; i++)
5272 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5273 if (e_copy)
5274 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5275
5276 for (i = 0; i < n_region; i++)
6580ee77 5277 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5278}
5279
42759f1e
ZD
5280/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5281 important exit edge EXIT. By important we mean that no SSA name defined
5282 inside region is live over the other exit edges of the region. All entry
5283 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5284 to the duplicate of the region. SSA form, dominance and loop information
5285 is updated. The new basic blocks are stored to REGION_COPY in the same
5286 order as they had in REGION, provided that REGION_COPY is not NULL.
5287 The function returns false if it is unable to copy the region,
5288 true otherwise. */
5289
5290bool
726a989a 5291gimple_duplicate_sese_region (edge entry, edge exit,
42759f1e
ZD
5292 basic_block *region, unsigned n_region,
5293 basic_block *region_copy)
5294{
66f97d31 5295 unsigned i;
42759f1e
ZD
5296 bool free_region_copy = false, copying_header = false;
5297 struct loop *loop = entry->dest->loop_father;
5298 edge exit_copy;
66f97d31 5299 VEC (basic_block, heap) *doms;
42759f1e 5300 edge redirected;
09bac500
JH
5301 int total_freq = 0, entry_freq = 0;
5302 gcov_type total_count = 0, entry_count = 0;
42759f1e
ZD
5303
5304 if (!can_copy_bbs_p (region, n_region))
5305 return false;
5306
5307 /* Some sanity checking. Note that we do not check for all possible
5308 missuses of the functions. I.e. if you ask to copy something weird,
5309 it will work, but the state of structures probably will not be
5310 correct. */
42759f1e
ZD
5311 for (i = 0; i < n_region; i++)
5312 {
5313 /* We do not handle subloops, i.e. all the blocks must belong to the
5314 same loop. */
5315 if (region[i]->loop_father != loop)
5316 return false;
5317
5318 if (region[i] != entry->dest
5319 && region[i] == loop->header)
5320 return false;
5321 }
5322
561e8a90 5323 set_loop_copy (loop, loop);
42759f1e
ZD
5324
5325 /* In case the function is used for loop header copying (which is the primary
5326 use), ensure that EXIT and its copy will be new latch and entry edges. */
5327 if (loop->header == entry->dest)
5328 {
5329 copying_header = true;
561e8a90 5330 set_loop_copy (loop, loop_outer (loop));
42759f1e
ZD
5331
5332 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5333 return false;
5334
5335 for (i = 0; i < n_region; i++)
5336 if (region[i] != exit->src
5337 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5338 return false;
5339 }
5340
5341 if (!region_copy)
5342 {
858904db 5343 region_copy = XNEWVEC (basic_block, n_region);
42759f1e
ZD
5344 free_region_copy = true;
5345 }
5346
5006671f 5347 gcc_assert (!need_ssa_update_p (cfun));
42759f1e 5348
5deaef19 5349 /* Record blocks outside the region that are dominated by something
42759f1e 5350 inside. */
66f97d31 5351 doms = NULL;
6580ee77
JH
5352 initialize_original_copy_tables ();
5353
66f97d31 5354 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
42759f1e 5355
09bac500
JH
5356 if (entry->dest->count)
5357 {
5358 total_count = entry->dest->count;
5359 entry_count = entry->count;
5360 /* Fix up corner cases, to avoid division by zero or creation of negative
5361 frequencies. */
5362 if (entry_count > total_count)
5363 entry_count = total_count;
5364 }
5365 else
5366 {
5367 total_freq = entry->dest->frequency;
5368 entry_freq = EDGE_FREQUENCY (entry);
5369 /* Fix up corner cases, to avoid division by zero or creation of negative
5370 frequencies. */
5371 if (total_freq == 0)
5372 total_freq = 1;
5373 else if (entry_freq > total_freq)
5374 entry_freq = total_freq;
5375 }
5deaef19 5376
b9a66240
ZD
5377 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5378 split_edge_bb_loc (entry));
09bac500
JH
5379 if (total_count)
5380 {
5381 scale_bbs_frequencies_gcov_type (region, n_region,
5382 total_count - entry_count,
5383 total_count);
5384 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6531d1be 5385 total_count);
09bac500
JH
5386 }
5387 else
5388 {
5389 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5390 total_freq);
5391 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5392 }
42759f1e
ZD
5393
5394 if (copying_header)
5395 {
5396 loop->header = exit->dest;
5397 loop->latch = exit->src;
5398 }
5399
5400 /* Redirect the entry and add the phi node arguments. */
6580ee77 5401 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
42759f1e 5402 gcc_assert (redirected != NULL);
71882046 5403 flush_pending_stmts (entry);
42759f1e
ZD
5404
5405 /* Concerning updating of dominators: We must recount dominators
84d65814
DN
5406 for entry block and its copy. Anything that is outside of the
5407 region, but was dominated by something inside needs recounting as
5408 well. */
42759f1e 5409 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
66f97d31
ZD
5410 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5411 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5f40b3cb 5412 VEC_free (basic_block, heap, doms);
42759f1e 5413
84d65814 5414 /* Add the other PHI node arguments. */
5f40b3cb
ZD
5415 add_phi_args_after_copy (region_copy, n_region, NULL);
5416
5417 /* Update the SSA web. */
5418 update_ssa (TODO_update_ssa);
5419
5420 if (free_region_copy)
5421 free (region_copy);
5422
5423 free_original_copy_tables ();
5424 return true;
5425}
5426
5427/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5428 are stored to REGION_COPY in the same order in that they appear
5429 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5430 the region, EXIT an exit from it. The condition guarding EXIT
5431 is moved to ENTRY. Returns true if duplication succeeds, false
5432 otherwise.
5433
5434 For example,
5435
5436 some_code;
5437 if (cond)
5438 A;
5439 else
5440 B;
5441
5442 is transformed to
5443
5444 if (cond)
5445 {
5446 some_code;
5447 A;
5448 }
5449 else
5450 {
5451 some_code;
5452 B;
5453 }
5454*/
5455
5456bool
726a989a
RB
5457gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5458 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5459 basic_block *region_copy ATTRIBUTE_UNUSED)
5f40b3cb
ZD
5460{
5461 unsigned i;
5462 bool free_region_copy = false;
5463 struct loop *loop = exit->dest->loop_father;
5464 struct loop *orig_loop = entry->dest->loop_father;
5465 basic_block switch_bb, entry_bb, nentry_bb;
5466 VEC (basic_block, heap) *doms;
5467 int total_freq = 0, exit_freq = 0;
5468 gcov_type total_count = 0, exit_count = 0;
5469 edge exits[2], nexits[2], e;
726a989a
RB
5470 gimple_stmt_iterator gsi;
5471 gimple cond_stmt;
5f40b3cb
ZD
5472 edge sorig, snew;
5473
5474 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5475 exits[0] = exit;
5476 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5477
5478 if (!can_copy_bbs_p (region, n_region))
5479 return false;
5480
5481 /* Some sanity checking. Note that we do not check for all possible
5482 missuses of the functions. I.e. if you ask to copy something weird
5483 (e.g., in the example, if there is a jump from inside to the middle
5484 of some_code, or come_code defines some of the values used in cond)
5485 it will work, but the resulting code will not be correct. */
5486 for (i = 0; i < n_region; i++)
5487 {
5488 /* We do not handle subloops, i.e. all the blocks must belong to the
5489 same loop. */
5490 if (region[i]->loop_father != orig_loop)
5491 return false;
5492
5493 if (region[i] == orig_loop->latch)
5494 return false;
5495 }
5496
5497 initialize_original_copy_tables ();
5498 set_loop_copy (orig_loop, loop);
5499
5500 if (!region_copy)
5501 {
5502 region_copy = XNEWVEC (basic_block, n_region);
5503 free_region_copy = true;
5504 }
5505
5006671f 5506 gcc_assert (!need_ssa_update_p (cfun));
5f40b3cb
ZD
5507
5508 /* Record blocks outside the region that are dominated by something
5509 inside. */
5510 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5511
5512 if (exit->src->count)
5513 {
5514 total_count = exit->src->count;
5515 exit_count = exit->count;
5516 /* Fix up corner cases, to avoid division by zero or creation of negative
5517 frequencies. */
5518 if (exit_count > total_count)
5519 exit_count = total_count;
5520 }
5521 else
5522 {
5523 total_freq = exit->src->frequency;
5524 exit_freq = EDGE_FREQUENCY (exit);
5525 /* Fix up corner cases, to avoid division by zero or creation of negative
5526 frequencies. */
5527 if (total_freq == 0)
5528 total_freq = 1;
5529 if (exit_freq > total_freq)
5530 exit_freq = total_freq;
5531 }
5532
5533 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5534 split_edge_bb_loc (exit));
5535 if (total_count)
5536 {
5537 scale_bbs_frequencies_gcov_type (region, n_region,
5538 total_count - exit_count,
5539 total_count);
5540 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5541 total_count);
5542 }
5543 else
5544 {
5545 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5546 total_freq);
5547 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5548 }
5549
5550 /* Create the switch block, and put the exit condition to it. */
5551 entry_bb = entry->dest;
5552 nentry_bb = get_bb_copy (entry_bb);
5553 if (!last_stmt (entry->src)
5554 || !stmt_ends_bb_p (last_stmt (entry->src)))
5555 switch_bb = entry->src;
5556 else
5557 switch_bb = split_edge (entry);
5558 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5559
726a989a
RB
5560 gsi = gsi_last_bb (switch_bb);
5561 cond_stmt = last_stmt (exit->src);
5562 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5563 cond_stmt = gimple_copy (cond_stmt);
5564 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5565 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5566 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5f40b3cb
ZD
5567
5568 sorig = single_succ_edge (switch_bb);
5569 sorig->flags = exits[1]->flags;
5570 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5571
5572 /* Register the new edge from SWITCH_BB in loop exit lists. */
5573 rescan_loop_exit (snew, true, false);
5574
5575 /* Add the PHI node arguments. */
5576 add_phi_args_after_copy (region_copy, n_region, snew);
5577
5578 /* Get rid of now superfluous conditions and associated edges (and phi node
5579 arguments). */
5580 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
726a989a 5581 PENDING_STMT (e) = NULL;
5f40b3cb 5582 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
726a989a 5583 PENDING_STMT (e) = NULL;
5f40b3cb
ZD
5584
5585 /* Anything that is outside of the region, but was dominated by something
5586 inside needs to update dominance info. */
5587 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5588 VEC_free (basic_block, heap, doms);
42759f1e 5589
84d65814
DN
5590 /* Update the SSA web. */
5591 update_ssa (TODO_update_ssa);
42759f1e
ZD
5592
5593 if (free_region_copy)
5594 free (region_copy);
5595
6580ee77 5596 free_original_copy_tables ();
42759f1e
ZD
5597 return true;
5598}
6de9cd9a 5599
50674e96
DN
5600/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5601 adding blocks when the dominator traversal reaches EXIT. This
5602 function silently assumes that ENTRY strictly dominates EXIT. */
5603
9f9f72aa 5604void
50674e96
DN
5605gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5606 VEC(basic_block,heap) **bbs_p)
5607{
5608 basic_block son;
5609
5610 for (son = first_dom_son (CDI_DOMINATORS, entry);
5611 son;
5612 son = next_dom_son (CDI_DOMINATORS, son))
5613 {
5614 VEC_safe_push (basic_block, heap, *bbs_p, son);
5615 if (son != exit)
5616 gather_blocks_in_sese_region (son, exit, bbs_p);
5617 }
5618}
5619
917948d3
ZD
5620/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5621 The duplicates are recorded in VARS_MAP. */
5622
5623static void
5624replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5625 tree to_context)
5626{
5627 tree t = *tp, new_t;
5628 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5629 void **loc;
5630
5631 if (DECL_CONTEXT (t) == to_context)
5632 return;
5633
5634 loc = pointer_map_contains (vars_map, t);
5635
5636 if (!loc)
5637 {
5638 loc = pointer_map_insert (vars_map, t);
5639
5640 if (SSA_VAR_P (t))
5641 {
5642 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
cb91fab0 5643 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
917948d3
ZD
5644 }
5645 else
5646 {
5647 gcc_assert (TREE_CODE (t) == CONST_DECL);
5648 new_t = copy_node (t);
5649 }
5650 DECL_CONTEXT (new_t) = to_context;
5651
5652 *loc = new_t;
5653 }
5654 else
3d9a9f94 5655 new_t = (tree) *loc;
917948d3
ZD
5656
5657 *tp = new_t;
5658}
5659
726a989a 5660
917948d3
ZD
5661/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5662 VARS_MAP maps old ssa names and var_decls to the new ones. */
5663
5664static tree
5665replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5666 tree to_context)
5667{
5668 void **loc;
5669 tree new_name, decl = SSA_NAME_VAR (name);
5670
5671 gcc_assert (is_gimple_reg (name));
5672
5673 loc = pointer_map_contains (vars_map, name);
5674
5675 if (!loc)
5676 {
5677 replace_by_duplicate_decl (&decl, vars_map, to_context);
5678
5679 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5680 if (gimple_in_ssa_p (cfun))
5681 add_referenced_var (decl);
5682
5683 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5684 if (SSA_NAME_IS_DEFAULT_DEF (name))
5685 set_default_def (decl, new_name);
5686 pop_cfun ();
5687
5688 loc = pointer_map_insert (vars_map, name);
5689 *loc = new_name;
5690 }
5691 else
3d9a9f94 5692 new_name = (tree) *loc;
917948d3
ZD
5693
5694 return new_name;
5695}
50674e96
DN
5696
5697struct move_stmt_d
5698{
b357f682
JJ
5699 tree orig_block;
5700 tree new_block;
50674e96
DN
5701 tree from_context;
5702 tree to_context;
917948d3 5703 struct pointer_map_t *vars_map;
fad41cd7 5704 htab_t new_label_map;
50674e96
DN
5705 bool remap_decls_p;
5706};
5707
5708/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
b357f682
JJ
5709 contained in *TP if it has been ORIG_BLOCK previously and change the
5710 DECL_CONTEXT of every local variable referenced in *TP. */
50674e96
DN
5711
5712static tree
726a989a 5713move_stmt_op (tree *tp, int *walk_subtrees, void *data)
50674e96 5714{
726a989a
RB
5715 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5716 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
fad41cd7 5717 tree t = *tp;
50674e96 5718
726a989a
RB
5719 if (EXPR_P (t))
5720 /* We should never have TREE_BLOCK set on non-statements. */
5721 gcc_assert (!TREE_BLOCK (t));
fad41cd7 5722
917948d3 5723 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
50674e96 5724 {
917948d3
ZD
5725 if (TREE_CODE (t) == SSA_NAME)
5726 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5727 else if (TREE_CODE (t) == LABEL_DECL)
fad41cd7
RH
5728 {
5729 if (p->new_label_map)
5730 {
5731 struct tree_map in, *out;
fc8600f9 5732 in.base.from = t;
3d9a9f94
KG
5733 out = (struct tree_map *)
5734 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
fad41cd7
RH
5735 if (out)
5736 *tp = t = out->to;
5737 }
50674e96 5738
fad41cd7
RH
5739 DECL_CONTEXT (t) = p->to_context;
5740 }
5741 else if (p->remap_decls_p)
50674e96 5742 {
917948d3
ZD
5743 /* Replace T with its duplicate. T should no longer appear in the
5744 parent function, so this looks wasteful; however, it may appear
5745 in referenced_vars, and more importantly, as virtual operands of
5746 statements, and in alias lists of other variables. It would be
5747 quite difficult to expunge it from all those places. ??? It might
5748 suffice to do this for addressable variables. */
5749 if ((TREE_CODE (t) == VAR_DECL
5750 && !is_global_var (t))
5751 || TREE_CODE (t) == CONST_DECL)
5752 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5753
5754 if (SSA_VAR_P (t)
5755 && gimple_in_ssa_p (cfun))
fad41cd7 5756 {
917948d3
ZD
5757 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5758 add_referenced_var (*tp);
5759 pop_cfun ();
fad41cd7 5760 }
50674e96 5761 }
917948d3 5762 *walk_subtrees = 0;
50674e96 5763 }
fad41cd7
RH
5764 else if (TYPE_P (t))
5765 *walk_subtrees = 0;
50674e96
DN
5766
5767 return NULL_TREE;
5768}
5769
726a989a
RB
5770/* Like move_stmt_op, but for gimple statements.
5771
5772 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5773 contained in the current statement in *GSI_P and change the
5774 DECL_CONTEXT of every local variable referenced in the current
5775 statement. */
5776
5777static tree
5778move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5779 struct walk_stmt_info *wi)
5780{
5781 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5782 gimple stmt = gsi_stmt (*gsi_p);
5783 tree block = gimple_block (stmt);
5784
5785 if (p->orig_block == NULL_TREE
5786 || block == p->orig_block
5787 || block == NULL_TREE)
5788 gimple_set_block (stmt, p->new_block);
5789#ifdef ENABLE_CHECKING
5790 else if (block != p->new_block)
5791 {
5792 while (block && block != p->orig_block)
5793 block = BLOCK_SUPERCONTEXT (block);
5794 gcc_assert (block);
5795 }
5796#endif
5797
5798 if (is_gimple_omp (stmt)
5799 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5800 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5801 {
5802 /* Do not remap variables inside OMP directives. Variables
5803 referenced in clauses and directive header belong to the
5804 parent function and should not be moved into the child
5805 function. */
5806 bool save_remap_decls_p = p->remap_decls_p;
5807 p->remap_decls_p = false;
5808 *handled_ops_p = true;
5809
5810 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5811
5812 p->remap_decls_p = save_remap_decls_p;
5813 }
5814
5815 return NULL_TREE;
5816}
5817
917948d3
ZD
5818/* Marks virtual operands of all statements in basic blocks BBS for
5819 renaming. */
5820
dea61d92
SP
5821void
5822mark_virtual_ops_in_bb (basic_block bb)
917948d3 5823{
726a989a 5824 gimple_stmt_iterator gsi;
dea61d92 5825
726a989a
RB
5826 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5827 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92 5828
726a989a
RB
5829 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5830 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92
SP
5831}
5832
50674e96
DN
5833/* Move basic block BB from function CFUN to function DEST_FN. The
5834 block is moved out of the original linked list and placed after
5835 block AFTER in the new list. Also, the block is removed from the
5836 original array of blocks and placed in DEST_FN's array of blocks.
5837 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5838 updated to reflect the moved edges.
6531d1be 5839
917948d3
ZD
5840 The local variables are remapped to new instances, VARS_MAP is used
5841 to record the mapping. */
50674e96
DN
5842
5843static void
5844move_block_to_fn (struct function *dest_cfun, basic_block bb,
5845 basic_block after, bool update_edge_count_p,
b357f682 5846 struct move_stmt_d *d, int eh_offset)
50674e96
DN
5847{
5848 struct control_flow_graph *cfg;
5849 edge_iterator ei;
5850 edge e;
726a989a 5851 gimple_stmt_iterator si;
728b26bb 5852 unsigned old_len, new_len;
50674e96 5853
3722506a
ZD
5854 /* Remove BB from dominance structures. */
5855 delete_from_dominance_info (CDI_DOMINATORS, bb);
5f40b3cb
ZD
5856 if (current_loops)
5857 remove_bb_from_loops (bb);
3722506a 5858
50674e96
DN
5859 /* Link BB to the new linked list. */
5860 move_block_after (bb, after);
5861
5862 /* Update the edge count in the corresponding flowgraphs. */
5863 if (update_edge_count_p)
5864 FOR_EACH_EDGE (e, ei, bb->succs)
5865 {
5866 cfun->cfg->x_n_edges--;
5867 dest_cfun->cfg->x_n_edges++;
5868 }
5869
5870 /* Remove BB from the original basic block array. */
5871 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5872 cfun->cfg->x_n_basic_blocks--;
5873
5874 /* Grow DEST_CFUN's basic block array if needed. */
5875 cfg = dest_cfun->cfg;
5876 cfg->x_n_basic_blocks++;
3722506a
ZD
5877 if (bb->index >= cfg->x_last_basic_block)
5878 cfg->x_last_basic_block = bb->index + 1;
50674e96 5879
728b26bb
DN
5880 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5881 if ((unsigned) cfg->x_last_basic_block >= old_len)
50674e96 5882 {
728b26bb 5883 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
a590ac65
KH
5884 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5885 new_len);
50674e96
DN
5886 }
5887
5888 VEC_replace (basic_block, cfg->x_basic_block_info,
e0310afb 5889 bb->index, bb);
50674e96 5890
917948d3 5891 /* Remap the variables in phi nodes. */
726a989a 5892 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
917948d3 5893 {
726a989a 5894 gimple phi = gsi_stmt (si);
917948d3
ZD
5895 use_operand_p use;
5896 tree op = PHI_RESULT (phi);
5897 ssa_op_iter oi;
5898
5899 if (!is_gimple_reg (op))
5f40b3cb
ZD
5900 {
5901 /* Remove the phi nodes for virtual operands (alias analysis will be
5902 run for the new function, anyway). */
726a989a 5903 remove_phi_node (&si, true);
5f40b3cb
ZD
5904 continue;
5905 }
917948d3 5906
b357f682
JJ
5907 SET_PHI_RESULT (phi,
5908 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3
ZD
5909 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5910 {
5911 op = USE_FROM_PTR (use);
5912 if (TREE_CODE (op) == SSA_NAME)
b357f682 5913 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3 5914 }
726a989a
RB
5915
5916 gsi_next (&si);
917948d3
ZD
5917 }
5918
726a989a 5919 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
50674e96 5920 {
726a989a 5921 gimple stmt = gsi_stmt (si);
fad41cd7 5922 int region;
726a989a 5923 struct walk_stmt_info wi;
50674e96 5924
726a989a
RB
5925 memset (&wi, 0, sizeof (wi));
5926 wi.info = d;
5927 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
50674e96 5928
726a989a 5929 if (gimple_code (stmt) == GIMPLE_LABEL)
50674e96 5930 {
726a989a 5931 tree label = gimple_label_label (stmt);
50674e96
DN
5932 int uid = LABEL_DECL_UID (label);
5933
5934 gcc_assert (uid > -1);
5935
5936 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5937 if (old_len <= (unsigned) uid)
5938 {
5006671f 5939 new_len = 3 * uid / 2 + 1;
a590ac65
KH
5940 VEC_safe_grow_cleared (basic_block, gc,
5941 cfg->x_label_to_block_map, new_len);
50674e96
DN
5942 }
5943
5944 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5945 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5946
5947 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5948
cb91fab0
JH
5949 if (uid >= dest_cfun->cfg->last_label_uid)
5950 dest_cfun->cfg->last_label_uid = uid + 1;
50674e96 5951 }
726a989a
RB
5952 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5953 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
fad41cd7
RH
5954
5955 region = lookup_stmt_eh_region (stmt);
5956 if (region >= 0)
5957 {
5958 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5959 remove_stmt_from_eh_region (stmt);
6946b3f7
JH
5960 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5961 gimple_remove_stmt_histograms (cfun, stmt);
fad41cd7 5962 }
917948d3 5963
5f40b3cb
ZD
5964 /* We cannot leave any operands allocated from the operand caches of
5965 the current function. */
5966 free_stmt_operands (stmt);
5967 push_cfun (dest_cfun);
917948d3 5968 update_stmt (stmt);
5f40b3cb 5969 pop_cfun ();
fad41cd7 5970 }
7241571e
JJ
5971
5972 FOR_EACH_EDGE (e, ei, bb->succs)
5973 if (e->goto_locus)
5974 {
5975 tree block = e->goto_block;
5976 if (d->orig_block == NULL_TREE
5977 || block == d->orig_block)
5978 e->goto_block = d->new_block;
5979#ifdef ENABLE_CHECKING
5980 else if (block != d->new_block)
5981 {
5982 while (block && block != d->orig_block)
5983 block = BLOCK_SUPERCONTEXT (block);
5984 gcc_assert (block);
5985 }
5986#endif
5987 }
fad41cd7
RH
5988}
5989
5990/* Examine the statements in BB (which is in SRC_CFUN); find and return
5991 the outermost EH region. Use REGION as the incoming base EH region. */
5992
5993static int
5994find_outermost_region_in_block (struct function *src_cfun,
5995 basic_block bb, int region)
5996{
726a989a 5997 gimple_stmt_iterator si;
6531d1be 5998
726a989a 5999 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
fad41cd7 6000 {
726a989a 6001 gimple stmt = gsi_stmt (si);
fad41cd7 6002 int stmt_region;
1799e5d5 6003
726a989a
RB
6004 if (gimple_code (stmt) == GIMPLE_RESX)
6005 stmt_region = gimple_resx_region (stmt);
07ed51c9
JJ
6006 else
6007 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
7e2df4a1
JJ
6008 if (stmt_region > 0)
6009 {
6010 if (region < 0)
6011 region = stmt_region;
6012 else if (stmt_region != region)
6013 {
6014 region = eh_region_outermost (src_cfun, stmt_region, region);
6015 gcc_assert (region != -1);
6016 }
6017 }
50674e96 6018 }
fad41cd7
RH
6019
6020 return region;
50674e96
DN
6021}
6022
fad41cd7
RH
6023static tree
6024new_label_mapper (tree decl, void *data)
6025{
6026 htab_t hash = (htab_t) data;
6027 struct tree_map *m;
6028 void **slot;
6029
6030 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6031
3d9a9f94 6032 m = XNEW (struct tree_map);
fad41cd7 6033 m->hash = DECL_UID (decl);
fc8600f9 6034 m->base.from = decl;
fad41cd7
RH
6035 m->to = create_artificial_label ();
6036 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
cb91fab0
JH
6037 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6038 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
6039
6040 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6041 gcc_assert (*slot == NULL);
6042
6043 *slot = m;
6044
6045 return m->to;
6046}
50674e96 6047
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  Variables that have a duplicate registered in VARS_MAP
   are replaced in the chain by that duplicate (with context
   TO_CONTEXT).  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* Walk the variable chain through a pointer-to-pointer so the chain
     link can be rewritten in place when a duplicate replaces *TP.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
    {
      t = *tp;
      /* Only variables and constants are remapped; other decls
	 (e.g. types, labels) are left untouched.  */
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      /* Carry the DECL_VALUE_EXPR over to the duplicate.  */
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of *TP.  */
	  TREE_CHAIN (t) = TREE_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6078
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb, tree orig_block)
{
  VEC(basic_block,heap) *bbs, *dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag, eh_offset;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map;
  struct loop *loop = entry_bb->loop_father;
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     VEC_address (basic_block, bbs),
				     VEC_length (basic_block, bbs));

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      /* Save probability, flags and source before removing the edge;
	 they are re-applied to the replacement block below.  */
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_offset = 0;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      int region = -1;

      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != -1)
	{
	  /* Duplicate the outermost EH region into DEST_CFUN; labels
	     referenced from EH data get fresh copies via
	     new_label_mapper.  */
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
					    new_label_map, region, 0);
	}
    }

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.vars_map = vars_map;
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.new_label_map = new_label_map;
  d.remap_decls_p = true;
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);

  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
      after = bb;
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  /* The replacement block takes over the dominance role of the
     removed region.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  VEC_free (basic_block, heap, dom_bbs);

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
6295
84d65814 6296
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
   */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  /* Print the formal parameter list: "type name, type name, ...".  */
  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fn, 2);

  dsf = DECL_STRUCT_FUNCTION (fn);
  if (dsf && (flags & TDF_DETAILS))
    dump_eh_tree (file, dsf);

  /* Raw dump requested and no GIMPLE body yet: fall back to a plain
     tree-node dump.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fn))
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  push_cfun (DECL_STRUCT_FUNCTION (fn));

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->local_decls)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  if (flags & TDF_VERBOSE)
	    print_node (file, "", var, 4);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	gimple_dump_bb (bb, file, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else if (DECL_SAVED_TREE (fn) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fn);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* The outer braces were already printed for the local
		 declarations; skip the topmost BIND_EXPR itself.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  pop_cfun ();
}
6437
6438
6439/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6440
6441void
6442debug_function (tree fn, int flags)
6443{
6444 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6445}
6446
6447
d7770457 6448/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6449
6450static void
628f6a4e 6451print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6452{
628f6a4e
BE
6453 edge e;
6454 edge_iterator ei;
6455
6456 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6457 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6458}
6459
6460
d7770457 6461/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6462
6463static void
628f6a4e 6464print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6465{
628f6a4e
BE
6466 edge e;
6467 edge_iterator ei;
6468
6469 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6470 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6471}
6472
0c8efed8
SP
6473/* Print to FILE the basic block BB following the VERBOSITY level. */
6474
6475void
6476print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6477{
6478 char *s_indent = (char *) alloca ((size_t) indent + 1);
6479 memset ((void *) s_indent, ' ', (size_t) indent);
6480 s_indent[indent] = '\0';
6481
6482 /* Print basic_block's header. */
6483 if (verbosity >= 2)
6484 {
6485 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6486 print_pred_bbs (file, bb);
6487 fprintf (file, "}, succs = {");
6488 print_succ_bbs (file, bb);
6489 fprintf (file, "})\n");
6490 }
6491
6492 /* Print basic_block's body. */
6493 if (verbosity >= 3)
6494 {
6495 fprintf (file, "%s {\n", s_indent);
726a989a 6496 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
0c8efed8
SP
6497 fprintf (file, "%s }\n", s_indent);
6498 }
6499}
6500
6501static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6502
0c8efed8
SP
6503/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6504 VERBOSITY level this outputs the contents of the loop, or just its
6505 structure. */
6de9cd9a
DN
6506
6507static void
0c8efed8 6508print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6509{
6510 char *s_indent;
6511 basic_block bb;
6531d1be 6512
6de9cd9a
DN
6513 if (loop == NULL)
6514 return;
6515
6516 s_indent = (char *) alloca ((size_t) indent + 1);
6517 memset ((void *) s_indent, ' ', (size_t) indent);
6518 s_indent[indent] = '\0';
6519
0c8efed8
SP
6520 /* Print loop's header. */
6521 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6522 loop->num, loop->header->index, loop->latch->index);
6523 fprintf (file, ", niter = ");
6524 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6525
0c8efed8
SP
6526 if (loop->any_upper_bound)
6527 {
6528 fprintf (file, ", upper_bound = ");
6529 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6530 }
6531d1be 6531
0c8efed8
SP
6532 if (loop->any_estimate)
6533 {
6534 fprintf (file, ", estimate = ");
6535 dump_double_int (file, loop->nb_iterations_estimate, true);
6536 }
6537 fprintf (file, ")\n");
6538
6539 /* Print loop's body. */
6540 if (verbosity >= 1)
6541 {
6542 fprintf (file, "%s{\n", s_indent);
6543 FOR_EACH_BB (bb)
6544 if (bb->loop_father == loop)
6545 print_loops_bb (file, bb, indent, verbosity);
6546
6547 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6548 fprintf (file, "%s}\n", s_indent);
6549 }
6de9cd9a
DN
6550}
6551
0c8efed8
SP
6552/* Print the LOOP and its sibling loops on FILE, indented INDENT
6553 spaces. Following VERBOSITY level this outputs the contents of the
6554 loop, or just its structure. */
6555
6556static void
6557print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6558{
6559 if (loop == NULL)
6560 return;
6561
6562 print_loop (file, loop, indent, verbosity);
6563 print_loop_and_siblings (file, loop->next, indent, verbosity);
6564}
6de9cd9a
DN
6565
6566/* Follow a CFG edge from the entry point of the program, and on entry
6567 of a loop, pretty print the loop structure on FILE. */
6568
6531d1be 6569void
0c8efed8 6570print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6571{
6572 basic_block bb;
6531d1be 6573
f8bf9252 6574 bb = ENTRY_BLOCK_PTR;
6de9cd9a 6575 if (bb && bb->loop_father)
0c8efed8 6576 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6577}
6578
6579
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

void
debug_loops (int verbosity)
{
  /* Debugger convenience wrapper around print_loops.  */
  print_loops (stderr, verbosity);
}
6587
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

void
debug_loop (struct loop *loop, int verbosity)
{
  /* Debugger convenience wrapper around print_loop.  */
  print_loop (stderr, loop, 0, verbosity);
}
6595
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look the loop up by number, then defer to debug_loop.  */
  debug_loop (get_loop (num), verbosity);
}
6de9cd9a
DN
6604
6605/* Return true if BB ends with a call, possibly followed by some
6606 instructions that must stay with the call. Return false,
6607 otherwise. */
6608
6609static bool
726a989a 6610gimple_block_ends_with_call_p (basic_block bb)
6de9cd9a 6611{
726a989a
RB
6612 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6613 return is_gimple_call (gsi_stmt (gsi));
6de9cd9a
DN
6614}
6615
6616
6617/* Return true if BB ends with a conditional branch. Return false,
6618 otherwise. */
6619
6620static bool
726a989a 6621gimple_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6622{
726a989a
RB
6623 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6624 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6de9cd9a
DN
6625}
6626
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A nothrow builtin that cannot return twice never needs a fake
     edge -- except for fork, see below.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  /* Any other call that may return needs a fake edge.  */
  if (is_gimple_call (t)
      && !(call_flags & ECF_NORETURN))
    return true;

  /* Volatile asm and asm with no outputs may transfer control in ways
     the CFG does not model.  */
  if (gimple_code (t) == GIMPLE_ASM
      && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
    return true;

  return false;
}
6673
6674
6675/* Add fake edges to the function exit for any non constant and non
6676 noreturn calls, volatile inline assembly in the bitmap of blocks
6677 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6678 the number of blocks that were split.
6679
6680 The goal is to expose cases in which entering a basic block does
6681 not imply that all subsequent instructions must be executed. */
6682
6683static int
726a989a 6684gimple_flow_call_edges_add (sbitmap blocks)
6de9cd9a
DN
6685{
6686 int i;
6687 int blocks_split = 0;
6688 int last_bb = last_basic_block;
6689 bool check_last_block = false;
6690
24bd1a0b 6691 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6692 return 0;
6693
6694 if (! blocks)
6695 check_last_block = true;
6696 else
6697 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6698
6699 /* In the last basic block, before epilogue generation, there will be
6700 a fallthru edge to EXIT. Special care is required if the last insn
6701 of the last basic block is a call because make_edge folds duplicate
6702 edges, which would result in the fallthru edge also being marked
6703 fake, which would result in the fallthru edge being removed by
6704 remove_fake_edges, which would result in an invalid CFG.
6705
6706 Moreover, we can't elide the outgoing fake edge, since the block
6707 profiler needs to take this into account in order to solve the minimal
6708 spanning tree in the case that the call doesn't return.
6709
6710 Handle this by adding a dummy instruction in a new last basic block. */
6711 if (check_last_block)
6712 {
6713 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
726a989a
RB
6714 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6715 gimple t = NULL;
6716
6717 if (!gsi_end_p (gsi))
6718 t = gsi_stmt (gsi);
6de9cd9a 6719
6a60530d 6720 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6721 {
6722 edge e;
6723
9ff3d2de
JL
6724 e = find_edge (bb, EXIT_BLOCK_PTR);
6725 if (e)
6726 {
726a989a
RB
6727 gsi_insert_on_edge (e, gimple_build_nop ());
6728 gsi_commit_edge_inserts ();
9ff3d2de 6729 }
6de9cd9a
DN
6730 }
6731 }
6732
6733 /* Now add fake edges to the function exit for any non constant
6734 calls since there is no way that we can determine if they will
6735 return or not... */
6736 for (i = 0; i < last_bb; i++)
6737 {
6738 basic_block bb = BASIC_BLOCK (i);
726a989a
RB
6739 gimple_stmt_iterator gsi;
6740 gimple stmt, last_stmt;
6de9cd9a
DN
6741
6742 if (!bb)
6743 continue;
6744
6745 if (blocks && !TEST_BIT (blocks, i))
6746 continue;
6747
726a989a
RB
6748 gsi = gsi_last_bb (bb);
6749 if (!gsi_end_p (gsi))
6de9cd9a 6750 {
726a989a 6751 last_stmt = gsi_stmt (gsi);
6de9cd9a
DN
6752 do
6753 {
726a989a 6754 stmt = gsi_stmt (gsi);
6de9cd9a
DN
6755 if (need_fake_edge_p (stmt))
6756 {
6757 edge e;
726a989a 6758
6de9cd9a
DN
6759 /* The handling above of the final block before the
6760 epilogue should be enough to verify that there is
6761 no edge to the exit block in CFG already.
6762 Calling make_edge in such case would cause us to
6763 mark that edge as fake and remove it later. */
6764#ifdef ENABLE_CHECKING
6765 if (stmt == last_stmt)
628f6a4e 6766 {
9ff3d2de
JL
6767 e = find_edge (bb, EXIT_BLOCK_PTR);
6768 gcc_assert (e == NULL);
628f6a4e 6769 }
6de9cd9a
DN
6770#endif
6771
6772 /* Note that the following may create a new basic block
6773 and renumber the existing basic blocks. */
6774 if (stmt != last_stmt)
6775 {
6776 e = split_block (bb, stmt);
6777 if (e)
6778 blocks_split++;
6779 }
6780 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6781 }
726a989a 6782 gsi_prev (&gsi);
6de9cd9a 6783 }
726a989a 6784 while (!gsi_end_p (gsi));
6de9cd9a
DN
6785 }
6786 }
6787
6788 if (blocks_split)
6789 verify_flow_info ();
6790
6791 return blocks_split;
6792}
6793
/* Purge dead abnormal call edges from basic block BB.
   Returns true if any edge (EH or abnormal) was removed.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  /* First purge dead EH edges; fold its result into ours.  */
  bool changed = gimple_purge_dead_eh_edges (bb);

  if (cfun->has_nonlocal_label)
    {
      gimple stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If the last statement can no longer perform an abnormal goto,
	 every outgoing abnormal edge is dead.  Iterate without
	 advancing after a removal, as remove_edge shifts the vector.  */
      if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      ei_next (&ei);
	  }

      /* See gimple_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
6826
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.*/

void
remove_edge_and_dominated_blocks (edge e)
{
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominators we cannot compute what becomes unreachable;
     just drop the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF := successors of the removed blocks, minus the removed
	 blocks themselves -- the dominance frontier of E.  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	delete_basic_block (bb);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
6948
4f6c2131
EB
6949/* Purge dead EH edges from basic block BB. */
6950
1eaba2f2 6951bool
726a989a 6952gimple_purge_dead_eh_edges (basic_block bb)
1eaba2f2
RH
6953{
6954 bool changed = false;
628f6a4e
BE
6955 edge e;
6956 edge_iterator ei;
726a989a 6957 gimple stmt = last_stmt (bb);
1eaba2f2 6958
726a989a 6959 if (stmt && stmt_can_throw_internal (stmt))
1eaba2f2
RH
6960 return false;
6961
628f6a4e 6962 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6963 {
1eaba2f2
RH
6964 if (e->flags & EDGE_EH)
6965 {
672987e8 6966 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6967 changed = true;
6968 }
628f6a4e
BE
6969 else
6970 ei_next (&ei);
1eaba2f2
RH
6971 }
6972
6973 return changed;
6974}
6975
6976bool
726a989a 6977gimple_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6978{
6979 bool changed = false;
3cd8c58a 6980 unsigned i;
87c476a2 6981 bitmap_iterator bi;
1eaba2f2 6982
87c476a2
ZD
6983 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6984 {
833ee764
JJ
6985 basic_block bb = BASIC_BLOCK (i);
6986
6987 /* Earlier gimple_purge_dead_eh_edges could have removed
6988 this basic block already. */
6989 gcc_assert (bb || changed);
6990 if (bb != NULL)
6991 changed |= gimple_purge_dead_eh_edges (bb);
87c476a2 6992 }
1eaba2f2
RH
6993
6994 return changed;
6995}
6de9cd9a 6996
a100ac1e
KH
6997/* This function is called whenever a new edge is created or
6998 redirected. */
6999
7000static void
726a989a 7001gimple_execute_on_growing_pred (edge e)
a100ac1e
KH
7002{
7003 basic_block bb = e->dest;
7004
7005 if (phi_nodes (bb))
7006 reserve_phi_args_for_new_edge (bb);
7007}
7008
e51546f8
KH
7009/* This function is called immediately before edge E is removed from
7010 the edge vector E->dest->preds. */
7011
7012static void
726a989a 7013gimple_execute_on_shrinking_pred (edge e)
e51546f8
KH
7014{
7015 if (phi_nodes (e->dest))
7016 remove_phi_args (e);
7017}
7018
1cb7dfc3
MH
7019/*---------------------------------------------------------------------------
7020 Helper functions for Loop versioning
7021 ---------------------------------------------------------------------------*/
7022
7023/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7024 of 'first'. Both of them are dominated by 'new_head' basic block. When
7025 'new_head' was created by 'second's incoming edge it received phi arguments
7026 on the edge by split_edge(). Later, additional edge 'e' was created to
6531d1be
BF
7027 connect 'new_head' and 'first'. Now this routine adds phi args on this
7028 additional edge 'e' that new_head to second edge received as part of edge
726a989a 7029 splitting. */
1cb7dfc3
MH
7030
7031static void
726a989a
RB
7032gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7033 basic_block new_head, edge e)
1cb7dfc3 7034{
726a989a
RB
7035 gimple phi1, phi2;
7036 gimple_stmt_iterator psi1, psi2;
7037 tree def;
d0e12fc6
KH
7038 edge e2 = find_edge (new_head, second);
7039
7040 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7041 edge, we should always have an edge from NEW_HEAD to SECOND. */
7042 gcc_assert (e2 != NULL);
1cb7dfc3
MH
7043
7044 /* Browse all 'second' basic block phi nodes and add phi args to
7045 edge 'e' for 'first' head. PHI args are always in correct order. */
7046
726a989a
RB
7047 for (psi2 = gsi_start_phis (second),
7048 psi1 = gsi_start_phis (first);
7049 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7050 gsi_next (&psi2), gsi_next (&psi1))
1cb7dfc3 7051 {
726a989a
RB
7052 phi1 = gsi_stmt (psi1);
7053 phi2 = gsi_stmt (psi2);
7054 def = PHI_ARG_DEF (phi2, e2->dest_idx);
d0e12fc6 7055 add_phi_arg (phi1, def, e);
1cb7dfc3
MH
7056 }
7057}
7058
726a989a 7059
6531d1be
BF
7060/* Adds a if else statement to COND_BB with condition COND_EXPR.
7061 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
1cb7dfc3 7062 the destination of the ELSE part. */
726a989a 7063
1cb7dfc3 7064static void
726a989a
RB
7065gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7066 basic_block second_head ATTRIBUTE_UNUSED,
7067 basic_block cond_bb, void *cond_e)
1cb7dfc3 7068{
726a989a
RB
7069 gimple_stmt_iterator gsi;
7070 gimple new_cond_expr;
1cb7dfc3
MH
7071 tree cond_expr = (tree) cond_e;
7072 edge e0;
7073
7074 /* Build new conditional expr */
726a989a
RB
7075 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7076 NULL_TREE, NULL_TREE);
1cb7dfc3 7077
6531d1be 7078 /* Add new cond in cond_bb. */
726a989a
RB
7079 gsi = gsi_last_bb (cond_bb);
7080 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7081
1cb7dfc3
MH
7082 /* Adjust edges appropriately to connect new head with first head
7083 as well as second head. */
7084 e0 = single_succ_edge (cond_bb);
7085 e0->flags &= ~EDGE_FALLTHRU;
7086 e0->flags |= EDGE_FALSE_VALUE;
7087}
7088
726a989a
RB
/* Table of CFG manipulation hooks used while the IL is in GIMPLE form.
   The entry order is fixed by struct cfg_hooks (see cfghooks.h); each
   slot is annotated with the hook it fills.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb, 		/* dump_bb */
  create_bb, 			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p, 	/* can_remove_branch_p */
  remove_bb, 			/* delete_basic_block */
  gimple_split_block, 		/* split_block */
  gimple_move_block_after, 	/* move_block_after */
  gimple_can_merge_blocks_p, 	/* can_merge_blocks_p */
  gimple_merge_blocks, 		/* merge_blocks */
  gimple_predict_edge, 		/* predict_edge */
  gimple_predicted_by_p, 	/* predicted_by_p */
  gimple_can_duplicate_bb_p, 	/* can_duplicate_block_p */
  gimple_duplicate_bb, 		/* duplicate_block */
  gimple_split_edge, 		/* split_edge */
  gimple_make_forwarder_block, 	/* make_forwarder_block */
  NULL, 			/* tidy_fallthru_edge */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
7120
7121
7122/* Split all critical edges. */
7123
c2924966 7124static unsigned int
6de9cd9a
DN
7125split_critical_edges (void)
7126{
7127 basic_block bb;
7128 edge e;
628f6a4e 7129 edge_iterator ei;
6de9cd9a 7130
d6be0d7f
JL
7131 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7132 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7133 mappings around the calls to split_edge. */
7134 start_recording_case_labels ();
6de9cd9a
DN
7135 FOR_ALL_BB (bb)
7136 {
628f6a4e 7137 FOR_EACH_EDGE (e, ei, bb->succs)
496a4ef5
JH
7138 {
7139 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6de9cd9a 7140 split_edge (e);
496a4ef5
JH
7141 /* PRE inserts statements to edges and expects that
7142 since split_critical_edges was done beforehand, committing edge
7143 insertions will not split more edges. In addition to critical
7144 edges we must split edges that have multiple successors and
7145 end by control flow statements, such as RESX.
7146 Go ahead and split them too. This matches the logic in
7147 gimple_find_edge_insert_loc. */
7148 else if ((!single_pred_p (e->dest)
7149 || phi_nodes (e->dest)
7150 || e->dest == EXIT_BLOCK_PTR)
7151 && e->src != ENTRY_BLOCK_PTR
7152 && !(e->flags & EDGE_ABNORMAL))
7153 {
7154 gimple_stmt_iterator gsi;
7155
7156 gsi = gsi_last_bb (e->src);
7157 if (!gsi_end_p (gsi)
7158 && stmt_ends_bb_p (gsi_stmt (gsi))
7159 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7160 split_edge (e);
7161 }
7162 }
6de9cd9a 7163 }
d6be0d7f 7164 end_recording_case_labels ();
c2924966 7165 return 0;
6de9cd9a
DN
7166}
7167
8ddbbcae 7168struct gimple_opt_pass pass_split_crit_edges =
6de9cd9a 7169{
8ddbbcae
JH
7170 {
7171 GIMPLE_PASS,
5d44aeed 7172 "crited", /* name */
6de9cd9a
DN
7173 NULL, /* gate */
7174 split_critical_edges, /* execute */
7175 NULL, /* sub */
7176 NULL, /* next */
7177 0, /* static_pass_number */
7178 TV_TREE_SPLIT_EDGES, /* tv_id */
7179 PROP_cfg, /* properties required */
7180 PROP_no_crit_edges, /* properties_provided */
7181 0, /* properties_destroyed */
7182 0, /* todo_flags_start */
9187e02d 7183 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
8ddbbcae 7184 }
6de9cd9a 7185};
26277d41 7186
26277d41 7187
726a989a 7188/* Build a ternary operation and gimplify it. Emit code before GSI.
26277d41
PB
7189 Return the gimple_val holding the result. */
7190
7191tree
726a989a 7192gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7193 tree type, tree a, tree b, tree c)
7194{
7195 tree ret;
7196
987b67bc 7197 ret = fold_build3 (code, type, a, b, c);
26277d41
PB
7198 STRIP_NOPS (ret);
7199
726a989a
RB
7200 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7201 GSI_SAME_STMT);
26277d41
PB
7202}
7203
726a989a 7204/* Build a binary operation and gimplify it. Emit code before GSI.
26277d41
PB
7205 Return the gimple_val holding the result. */
7206
7207tree
726a989a 7208gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7209 tree type, tree a, tree b)
7210{
7211 tree ret;
7212
987b67bc 7213 ret = fold_build2 (code, type, a, b);
26277d41
PB
7214 STRIP_NOPS (ret);
7215
726a989a
RB
7216 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7217 GSI_SAME_STMT);
26277d41
PB
7218}
7219
726a989a 7220/* Build a unary operation and gimplify it. Emit code before GSI.
26277d41
PB
7221 Return the gimple_val holding the result. */
7222
7223tree
726a989a 7224gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
26277d41
PB
7225 tree a)
7226{
7227 tree ret;
7228
987b67bc 7229 ret = fold_build1 (code, type, a);
26277d41
PB
7230 STRIP_NOPS (ret);
7231
726a989a
RB
7232 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7233 GSI_SAME_STMT);
26277d41
PB
7234}
7235
7236
6de9cd9a
DN
7237\f
7238/* Emit return warnings. */
7239
c2924966 7240static unsigned int
6de9cd9a
DN
7241execute_warn_function_return (void)
7242{
9506ac2b 7243 source_location location;
726a989a 7244 gimple last;
6de9cd9a 7245 edge e;
628f6a4e 7246 edge_iterator ei;
6de9cd9a 7247
6de9cd9a
DN
7248 /* If we have a path to EXIT, then we do return. */
7249 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 7250 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 7251 {
9506ac2b 7252 location = UNKNOWN_LOCATION;
628f6a4e 7253 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7254 {
7255 last = last_stmt (e->src);
726a989a
RB
7256 if (gimple_code (last) == GIMPLE_RETURN
7257 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
6de9cd9a
DN
7258 break;
7259 }
9506ac2b
PB
7260 if (location == UNKNOWN_LOCATION)
7261 location = cfun->function_end_locus;
d4ee4d25 7262 warning (0, "%H%<noreturn%> function does return", &location);
6de9cd9a
DN
7263 }
7264
7265 /* If we see "return;" in some basic block, then we do reach the end
7266 without returning a value. */
7267 else if (warn_return_type
089efaa4 7268 && !TREE_NO_WARNING (cfun->decl)
628f6a4e 7269 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
7270 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7271 {
628f6a4e 7272 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a 7273 {
726a989a
RB
7274 gimple last = last_stmt (e->src);
7275 if (gimple_code (last) == GIMPLE_RETURN
7276 && gimple_return_retval (last) == NULL
7277 && !gimple_no_warning_p (last))
6de9cd9a 7278 {
726a989a 7279 location = gimple_location (last);
9506ac2b
PB
7280 if (location == UNKNOWN_LOCATION)
7281 location = cfun->function_end_locus;
aa14403d 7282 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
089efaa4 7283 TREE_NO_WARNING (cfun->decl) = 1;
6de9cd9a
DN
7284 break;
7285 }
7286 }
7287 }
c2924966 7288 return 0;
6de9cd9a
DN
7289}
7290
7291
7292/* Given a basic block B which ends with a conditional and has
7293 precisely two successors, determine which of the edges is taken if
7294 the conditional is true and which is taken if the conditional is
7295 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7296
7297void
7298extract_true_false_edges_from_block (basic_block b,
7299 edge *true_edge,
7300 edge *false_edge)
7301{
628f6a4e 7302 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7303
7304 if (e->flags & EDGE_TRUE_VALUE)
7305 {
7306 *true_edge = e;
628f6a4e 7307 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7308 }
7309 else
7310 {
7311 *false_edge = e;
628f6a4e 7312 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7313 }
7314}
7315
8ddbbcae 7316struct gimple_opt_pass pass_warn_function_return =
6de9cd9a 7317{
8ddbbcae
JH
7318 {
7319 GIMPLE_PASS,
6de9cd9a
DN
7320 NULL, /* name */
7321 NULL, /* gate */
7322 execute_warn_function_return, /* execute */
7323 NULL, /* sub */
7324 NULL, /* next */
7325 0, /* static_pass_number */
7072a650 7326 TV_NONE, /* tv_id */
00bfee6f 7327 PROP_cfg, /* properties_required */
6de9cd9a
DN
7328 0, /* properties_provided */
7329 0, /* properties_destroyed */
7330 0, /* todo_flags_start */
8ddbbcae
JH
7331 0 /* todo_flags_finish */
7332 }
6de9cd9a 7333};
aa313ed4
JH
7334
7335/* Emit noreturn warnings. */
7336
c2924966 7337static unsigned int
aa313ed4
JH
7338execute_warn_function_noreturn (void)
7339{
7340 if (warn_missing_noreturn
7341 && !TREE_THIS_VOLATILE (cfun->decl)
7342 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7343 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
3176a0c2
DD
7344 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7345 "for attribute %<noreturn%>",
aa313ed4 7346 cfun->decl);
c2924966 7347 return 0;
aa313ed4
JH
7348}
7349
8ddbbcae 7350struct gimple_opt_pass pass_warn_function_noreturn =
aa313ed4 7351{
8ddbbcae
JH
7352 {
7353 GIMPLE_PASS,
aa313ed4
JH
7354 NULL, /* name */
7355 NULL, /* gate */
7356 execute_warn_function_noreturn, /* execute */
7357 NULL, /* sub */
7358 NULL, /* next */
7359 0, /* static_pass_number */
7072a650 7360 TV_NONE, /* tv_id */
aa313ed4
JH
7361 PROP_cfg, /* properties_required */
7362 0, /* properties_provided */
7363 0, /* properties_destroyed */
7364 0, /* todo_flags_start */
8ddbbcae
JH
7365 0 /* todo_flags_finish */
7366 }
aa313ed4 7367};