]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-cfg.c
re PR c/39902 (x * 1.0DF gets wrong value)
[thirdparty/gcc.git] / gcc / tree-cfg.c
CommitLineData
6de9cd9a 1/* Control flow functions for trees.
66647d44 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
56e84019 3 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
9dcd6f09 10the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
6de9cd9a
DN
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
42759f1e 45#include "cfglayout.h"
9af0df6b 46#include "tree-ssa-propagate.h"
6946b3f7 47#include "value-prof.h"
4437b50d 48#include "pointer-set.h"
917948d3 49#include "tree-inline.h"
6de9cd9a
DN
50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
54/* Local declarations. */
55
56/* Initial capacity for the basic block array. */
57static const int initial_cfg_capacity = 20;
58
d6be0d7f
JL
59/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
726a989a 62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
92b6dff3 63
d6be0d7f
JL
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
92b6dff3 66
d6be0d7f
JL
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
71
15814ba0 72static struct pointer_map_t *edge_to_cases;
92b6dff3 73
6de9cd9a
DN
74/* CFG statistics. */
75struct cfg_stats_d
76{
77 long num_merged_labels;
78};
79
80static struct cfg_stats_d cfg_stats;
81
82/* Nonzero if we found a computed goto while building basic blocks. */
83static bool found_computed_goto;
84
6c52e687
CC
85/* Hash table to store last discriminator assigned for each locus. */
86struct locus_discrim_map
87{
88 location_t locus;
89 int discriminator;
90};
91static htab_t discriminator_per_locus;
92
6de9cd9a 93/* Basic blocks and flowgraphs. */
726a989a 94static void make_blocks (gimple_seq);
6de9cd9a 95static void factor_computed_gotos (void);
6de9cd9a
DN
96
97/* Edges. */
98static void make_edges (void);
6de9cd9a 99static void make_cond_expr_edges (basic_block);
726a989a 100static void make_gimple_switch_edges (basic_block);
6de9cd9a 101static void make_goto_expr_edges (basic_block);
6c52e687
CC
102static unsigned int locus_map_hash (const void *);
103static int locus_map_eq (const void *, const void *);
104static void assign_discriminator (location_t, basic_block);
726a989a
RB
105static edge gimple_redirect_edge_and_branch (edge, basic_block);
106static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
c2924966 107static unsigned int split_critical_edges (void);
6de9cd9a
DN
108
109/* Various helpers. */
726a989a
RB
110static inline bool stmt_starts_bb_p (gimple, gimple);
111static int gimple_verify_flow_info (void);
112static void gimple_make_forwarder_block (edge);
113static void gimple_cfg2vcg (FILE *);
6c52e687 114static gimple first_non_label_stmt (basic_block);
6de9cd9a
DN
115
116/* Flowgraph optimization and cleanup. */
726a989a
RB
117static void gimple_merge_blocks (basic_block, basic_block);
118static bool gimple_can_merge_blocks_p (basic_block, basic_block);
6de9cd9a 119static void remove_bb (basic_block);
be477406 120static edge find_taken_edge_computed_goto (basic_block, tree);
6de9cd9a
DN
121static edge find_taken_edge_cond_expr (basic_block, tree);
122static edge find_taken_edge_switch_expr (basic_block, tree);
726a989a 123static tree find_case_label_for_value (gimple, tree);
6de9cd9a 124
a930a4ef 125void
9defb1fe 126init_empty_tree_cfg_for_function (struct function *fn)
a930a4ef
JH
127{
128 /* Initialize the basic block array. */
9defb1fe
DN
129 init_flow (fn);
130 profile_status_for_function (fn) = PROFILE_ABSENT;
131 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
132 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
133 basic_block_info_for_function (fn)
134 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
135 VEC_safe_grow_cleared (basic_block, gc,
136 basic_block_info_for_function (fn),
a590ac65 137 initial_cfg_capacity);
a930a4ef
JH
138
139 /* Build a mapping of labels to their associated blocks. */
9defb1fe
DN
140 label_to_block_map_for_function (fn)
141 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
142 VEC_safe_grow_cleared (basic_block, gc,
143 label_to_block_map_for_function (fn),
a590ac65 144 initial_cfg_capacity);
a930a4ef 145
9defb1fe
DN
146 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
147 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
148 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
149 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
150
151 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
152 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
153 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
154 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
155}
156
157void
158init_empty_tree_cfg (void)
159{
160 init_empty_tree_cfg_for_function (cfun);
a930a4ef 161}
6de9cd9a
DN
162
163/*---------------------------------------------------------------------------
164 Create basic blocks
165---------------------------------------------------------------------------*/
166
726a989a 167/* Entry point to the CFG builder for trees. SEQ is the sequence of
6de9cd9a
DN
168 statements to be added to the flowgraph. */
169
170static void
726a989a 171build_gimple_cfg (gimple_seq seq)
6de9cd9a 172{
726a989a
RB
173 /* Register specific gimple functions. */
174 gimple_register_cfg_hooks ();
6de9cd9a 175
6de9cd9a
DN
176 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
177
a930a4ef 178 init_empty_tree_cfg ();
6de9cd9a
DN
179
180 found_computed_goto = 0;
726a989a 181 make_blocks (seq);
6de9cd9a
DN
182
183 /* Computed gotos are hell to deal with, especially if there are
184 lots of them with a large number of destinations. So we factor
185 them to a common computed goto location before we build the
186 edge list. After we convert back to normal form, we will un-factor
187 the computed gotos since factoring introduces an unwanted jump. */
188 if (found_computed_goto)
189 factor_computed_gotos ();
190
f0b698c1 191 /* Make sure there is always at least one block, even if it's empty. */
24bd1a0b 192 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
193 create_empty_bb (ENTRY_BLOCK_PTR);
194
6de9cd9a 195 /* Adjust the size of the array. */
68f9b844 196 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
a590ac65 197 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
6de9cd9a 198
f667741c
SB
199 /* To speed up statement iterator walks, we first purge dead labels. */
200 cleanup_dead_labels ();
201
202 /* Group case nodes to reduce the number of edges.
203 We do this after cleaning up dead labels because otherwise we miss
204 a lot of obvious case merging opportunities. */
205 group_case_labels ();
206
6de9cd9a 207 /* Create the edges of the flowgraph. */
6c52e687
CC
208 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
209 free);
6de9cd9a 210 make_edges ();
8b11009b 211 cleanup_dead_labels ();
6c52e687 212 htab_delete (discriminator_per_locus);
6de9cd9a
DN
213
214 /* Debugging dumps. */
215
216 /* Write the flowgraph to a VCG file. */
217 {
218 int local_dump_flags;
10d22567
ZD
219 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
220 if (vcg_file)
6de9cd9a 221 {
726a989a 222 gimple_cfg2vcg (vcg_file);
10d22567 223 dump_end (TDI_vcg, vcg_file);
6de9cd9a
DN
224 }
225 }
226
81cfbbc2
JH
227#ifdef ENABLE_CHECKING
228 verify_stmts ();
229#endif
6de9cd9a
DN
230}
231
c2924966 232static unsigned int
6de9cd9a
DN
233execute_build_cfg (void)
234{
39ecc018
JH
235 gimple_seq body = gimple_body (current_function_decl);
236
237 build_gimple_cfg (body);
238 gimple_set_body (current_function_decl, NULL);
cff7525f
JH
239 if (dump_file && (dump_flags & TDF_DETAILS))
240 {
241 fprintf (dump_file, "Scope blocks:\n");
242 dump_scope_blocks (dump_file, dump_flags);
243 }
c2924966 244 return 0;
6de9cd9a
DN
245}
246
8ddbbcae 247struct gimple_opt_pass pass_build_cfg =
6de9cd9a 248{
8ddbbcae
JH
249 {
250 GIMPLE_PASS,
6de9cd9a
DN
251 "cfg", /* name */
252 NULL, /* gate */
253 execute_build_cfg, /* execute */
254 NULL, /* sub */
255 NULL, /* next */
256 0, /* static_pass_number */
257 TV_TREE_CFG, /* tv_id */
726a989a 258 PROP_gimple_leh, /* properties_required */
6de9cd9a
DN
259 PROP_cfg, /* properties_provided */
260 0, /* properties_destroyed */
261 0, /* todo_flags_start */
11b08ee9
RG
262 TODO_verify_stmts | TODO_cleanup_cfg
263 | TODO_dump_func /* todo_flags_finish */
8ddbbcae 264 }
6de9cd9a
DN
265};
266
726a989a
RB
267
268/* Return true if T is a computed goto. */
269
270static bool
271computed_goto_p (gimple t)
272{
273 return (gimple_code (t) == GIMPLE_GOTO
274 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
275}
276
277
6531d1be 278/* Search the CFG for any computed gotos. If found, factor them to a
6de9cd9a 279 common computed goto site. Also record the location of that site so
6531d1be 280 that we can un-factor the gotos after we have converted back to
6de9cd9a
DN
281 normal form. */
282
283static void
284factor_computed_gotos (void)
285{
286 basic_block bb;
287 tree factored_label_decl = NULL;
288 tree var = NULL;
726a989a
RB
289 gimple factored_computed_goto_label = NULL;
290 gimple factored_computed_goto = NULL;
6de9cd9a
DN
291
292 /* We know there are one or more computed gotos in this function.
293 Examine the last statement in each basic block to see if the block
294 ends with a computed goto. */
6531d1be 295
6de9cd9a
DN
296 FOR_EACH_BB (bb)
297 {
726a989a
RB
298 gimple_stmt_iterator gsi = gsi_last_bb (bb);
299 gimple last;
6de9cd9a 300
726a989a 301 if (gsi_end_p (gsi))
6de9cd9a 302 continue;
726a989a
RB
303
304 last = gsi_stmt (gsi);
6de9cd9a
DN
305
306 /* Ignore the computed goto we create when we factor the original
307 computed gotos. */
308 if (last == factored_computed_goto)
309 continue;
310
311 /* If the last statement is a computed goto, factor it. */
312 if (computed_goto_p (last))
313 {
726a989a 314 gimple assignment;
6de9cd9a
DN
315
316 /* The first time we find a computed goto we need to create
317 the factored goto block and the variable each original
318 computed goto will use for their goto destination. */
726a989a 319 if (!factored_computed_goto)
6de9cd9a
DN
320 {
321 basic_block new_bb = create_empty_bb (bb);
726a989a 322 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
6de9cd9a
DN
323
324 /* Create the destination of the factored goto. Each original
325 computed goto will put its desired destination into this
326 variable and jump to the label we create immediately
327 below. */
328 var = create_tmp_var (ptr_type_node, "gotovar");
329
330 /* Build a label for the new block which will contain the
331 factored computed goto. */
c2255bc4 332 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 333 factored_computed_goto_label
726a989a
RB
334 = gimple_build_label (factored_label_decl);
335 gsi_insert_after (&new_gsi, factored_computed_goto_label,
336 GSI_NEW_STMT);
6de9cd9a
DN
337
338 /* Build our new computed goto. */
726a989a
RB
339 factored_computed_goto = gimple_build_goto (var);
340 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
6de9cd9a
DN
341 }
342
343 /* Copy the original computed goto's destination into VAR. */
726a989a
RB
344 assignment = gimple_build_assign (var, gimple_goto_dest (last));
345 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
6de9cd9a
DN
346
347 /* And re-vector the computed goto to the new destination. */
726a989a 348 gimple_goto_set_dest (last, factored_label_decl);
6de9cd9a
DN
349 }
350 }
351}
352
353
726a989a 354/* Build a flowgraph for the sequence of stmts SEQ. */
6de9cd9a
DN
355
356static void
726a989a 357make_blocks (gimple_seq seq)
6de9cd9a 358{
726a989a
RB
359 gimple_stmt_iterator i = gsi_start (seq);
360 gimple stmt = NULL;
6de9cd9a 361 bool start_new_block = true;
726a989a 362 bool first_stmt_of_seq = true;
6de9cd9a
DN
363 basic_block bb = ENTRY_BLOCK_PTR;
364
726a989a 365 while (!gsi_end_p (i))
6de9cd9a 366 {
726a989a 367 gimple prev_stmt;
6de9cd9a
DN
368
369 prev_stmt = stmt;
726a989a 370 stmt = gsi_stmt (i);
6de9cd9a
DN
371
372 /* If the statement starts a new basic block or if we have determined
373 in a previous pass that we need to create a new block for STMT, do
374 so now. */
375 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
376 {
726a989a
RB
377 if (!first_stmt_of_seq)
378 seq = gsi_split_seq_before (&i);
379 bb = create_basic_block (seq, NULL, bb);
6de9cd9a
DN
380 start_new_block = false;
381 }
382
383 /* Now add STMT to BB and create the subgraphs for special statement
384 codes. */
726a989a 385 gimple_set_bb (stmt, bb);
6de9cd9a
DN
386
387 if (computed_goto_p (stmt))
388 found_computed_goto = true;
389
390 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
391 next iteration. */
392 if (stmt_ends_bb_p (stmt))
54634841
RG
393 {
394 /* If the stmt can make abnormal goto use a new temporary
395 for the assignment to the LHS. This makes sure the old value
396 of the LHS is available on the abnormal edge. Otherwise
397 we will end up with overlapping life-ranges for abnormal
398 SSA names. */
399 if (gimple_has_lhs (stmt)
400 && stmt_can_make_abnormal_goto (stmt)
401 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
402 {
403 tree lhs = gimple_get_lhs (stmt);
404 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
405 gimple s = gimple_build_assign (lhs, tmp);
406 gimple_set_location (s, gimple_location (stmt));
407 gimple_set_block (s, gimple_block (stmt));
408 gimple_set_lhs (stmt, tmp);
409 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
410 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
411 DECL_GIMPLE_REG_P (tmp) = 1;
412 gsi_insert_after (&i, s, GSI_SAME_STMT);
413 }
414 start_new_block = true;
415 }
6de9cd9a 416
726a989a
RB
417 gsi_next (&i);
418 first_stmt_of_seq = false;
6de9cd9a
DN
419 }
420}
421
422
423/* Create and return a new empty basic block after bb AFTER. */
424
425static basic_block
426create_bb (void *h, void *e, basic_block after)
427{
428 basic_block bb;
429
1e128c5f 430 gcc_assert (!e);
6de9cd9a 431
27fd69fa
KH
432 /* Create and initialize a new basic block. Since alloc_block uses
433 ggc_alloc_cleared to allocate a basic block, we do not have to
434 clear the newly allocated basic block here. */
6de9cd9a 435 bb = alloc_block ();
6de9cd9a
DN
436
437 bb->index = last_basic_block;
438 bb->flags = BB_NEW;
726a989a
RB
439 bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
440 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
6de9cd9a
DN
441
442 /* Add the new block to the linked list of blocks. */
443 link_block (bb, after);
444
445 /* Grow the basic block array if needed. */
68f9b844 446 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
6de9cd9a
DN
447 {
448 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
a590ac65 449 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
6de9cd9a
DN
450 }
451
452 /* Add the newly created block to the array. */
68f9b844 453 SET_BASIC_BLOCK (last_basic_block, bb);
6de9cd9a 454
6de9cd9a
DN
455 n_basic_blocks++;
456 last_basic_block++;
457
6de9cd9a
DN
458 return bb;
459}
460
461
462/*---------------------------------------------------------------------------
463 Edge creation
464---------------------------------------------------------------------------*/
465
fca01525
KH
466/* Fold COND_EXPR_COND of each COND_EXPR. */
467
e21aff8a 468void
fca01525
KH
469fold_cond_expr_cond (void)
470{
471 basic_block bb;
472
473 FOR_EACH_BB (bb)
474 {
726a989a 475 gimple stmt = last_stmt (bb);
fca01525 476
726a989a 477 if (stmt && gimple_code (stmt) == GIMPLE_COND)
fca01525 478 {
db3927fb 479 location_t loc = gimple_location (stmt);
6ac01510
ILT
480 tree cond;
481 bool zerop, onep;
482
483 fold_defer_overflow_warnings ();
db3927fb 484 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
726a989a
RB
485 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
486 if (cond)
487 {
488 zerop = integer_zerop (cond);
489 onep = integer_onep (cond);
490 }
491 else
492 zerop = onep = false;
493
e233ac97 494 fold_undefer_overflow_warnings (zerop || onep,
4df28528 495 stmt,
6ac01510
ILT
496 WARN_STRICT_OVERFLOW_CONDITIONAL);
497 if (zerop)
726a989a 498 gimple_cond_make_false (stmt);
6ac01510 499 else if (onep)
726a989a 500 gimple_cond_make_true (stmt);
fca01525
KH
501 }
502 }
503}
504
6de9cd9a
DN
505/* Join all the blocks in the flowgraph. */
506
507static void
508make_edges (void)
509{
510 basic_block bb;
bed575d5 511 struct omp_region *cur_region = NULL;
6de9cd9a
DN
512
513 /* Create an edge from entry to the first block with executable
514 statements in it. */
24bd1a0b 515 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
6de9cd9a 516
adb35797 517 /* Traverse the basic block array placing edges. */
6de9cd9a
DN
518 FOR_EACH_BB (bb)
519 {
726a989a 520 gimple last = last_stmt (bb);
56e84019 521 bool fallthru;
6de9cd9a 522
56e84019 523 if (last)
6de9cd9a 524 {
726a989a 525 enum gimple_code code = gimple_code (last);
bed575d5 526 switch (code)
56e84019 527 {
726a989a 528 case GIMPLE_GOTO:
56e84019
RH
529 make_goto_expr_edges (bb);
530 fallthru = false;
531 break;
726a989a 532 case GIMPLE_RETURN:
56e84019
RH
533 make_edge (bb, EXIT_BLOCK_PTR, 0);
534 fallthru = false;
535 break;
726a989a 536 case GIMPLE_COND:
56e84019
RH
537 make_cond_expr_edges (bb);
538 fallthru = false;
539 break;
726a989a
RB
540 case GIMPLE_SWITCH:
541 make_gimple_switch_edges (bb);
56e84019
RH
542 fallthru = false;
543 break;
726a989a 544 case GIMPLE_RESX:
56e84019
RH
545 make_eh_edges (last);
546 fallthru = false;
547 break;
548
726a989a 549 case GIMPLE_CALL:
56e84019
RH
550 /* If this function receives a nonlocal goto, then we need to
551 make edges from this call site to all the nonlocal goto
552 handlers. */
726a989a 553 if (stmt_can_make_abnormal_goto (last))
4f6c2131 554 make_abnormal_goto_edges (bb, true);
6de9cd9a 555
56e84019
RH
556 /* If this statement has reachable exception handlers, then
557 create abnormal edges to them. */
558 make_eh_edges (last);
559
560 /* Some calls are known not to return. */
726a989a 561 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
56e84019
RH
562 break;
563
726a989a
RB
564 case GIMPLE_ASSIGN:
565 /* A GIMPLE_ASSIGN may throw internally and thus be considered
566 control-altering. */
56e84019
RH
567 if (is_ctrl_altering_stmt (last))
568 {
56e84019
RH
569 make_eh_edges (last);
570 }
571 fallthru = true;
572 break;
573
726a989a
RB
574 case GIMPLE_OMP_PARALLEL:
575 case GIMPLE_OMP_TASK:
576 case GIMPLE_OMP_FOR:
577 case GIMPLE_OMP_SINGLE:
578 case GIMPLE_OMP_MASTER:
579 case GIMPLE_OMP_ORDERED:
580 case GIMPLE_OMP_CRITICAL:
581 case GIMPLE_OMP_SECTION:
bed575d5 582 cur_region = new_omp_region (bb, code, cur_region);
56e84019
RH
583 fallthru = true;
584 break;
585
726a989a 586 case GIMPLE_OMP_SECTIONS:
bed575d5 587 cur_region = new_omp_region (bb, code, cur_region);
e5c95afe
ZD
588 fallthru = true;
589 break;
590
726a989a 591 case GIMPLE_OMP_SECTIONS_SWITCH:
7e2df4a1 592 fallthru = false;
777f7f9a
RH
593 break;
594
a509ebb5 595
726a989a
RB
596 case GIMPLE_OMP_ATOMIC_LOAD:
597 case GIMPLE_OMP_ATOMIC_STORE:
a509ebb5
RL
598 fallthru = true;
599 break;
600
601
726a989a
RB
602 case GIMPLE_OMP_RETURN:
603 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
604 somewhere other than the next block. This will be
605 created later. */
bed575d5 606 cur_region->exit = bb;
726a989a 607 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
bed575d5
RS
608 cur_region = cur_region->outer;
609 break;
610
726a989a 611 case GIMPLE_OMP_CONTINUE:
bed575d5
RS
612 cur_region->cont = bb;
613 switch (cur_region->type)
614 {
726a989a
RB
615 case GIMPLE_OMP_FOR:
616 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
617 succs edges as abnormal to prevent splitting
618 them. */
135a171d 619 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
e5c95afe 620 /* Make the loopback edge. */
135a171d
JJ
621 make_edge (bb, single_succ (cur_region->entry),
622 EDGE_ABNORMAL);
623
726a989a
RB
624 /* Create an edge from GIMPLE_OMP_FOR to exit, which
625 corresponds to the case that the body of the loop
626 is not executed at all. */
135a171d
JJ
627 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
628 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
629 fallthru = false;
bed575d5
RS
630 break;
631
726a989a 632 case GIMPLE_OMP_SECTIONS:
bed575d5 633 /* Wire up the edges into and out of the nested sections. */
bed575d5 634 {
e5c95afe
ZD
635 basic_block switch_bb = single_succ (cur_region->entry);
636
bed575d5
RS
637 struct omp_region *i;
638 for (i = cur_region->inner; i ; i = i->next)
639 {
726a989a 640 gcc_assert (i->type == GIMPLE_OMP_SECTION);
e5c95afe 641 make_edge (switch_bb, i->entry, 0);
bed575d5
RS
642 make_edge (i->exit, bb, EDGE_FALLTHRU);
643 }
e5c95afe
ZD
644
645 /* Make the loopback edge to the block with
726a989a 646 GIMPLE_OMP_SECTIONS_SWITCH. */
e5c95afe
ZD
647 make_edge (bb, switch_bb, 0);
648
649 /* Make the edge from the switch to exit. */
650 make_edge (switch_bb, bb->next_bb, 0);
651 fallthru = false;
bed575d5
RS
652 }
653 break;
6531d1be 654
bed575d5
RS
655 default:
656 gcc_unreachable ();
657 }
bed575d5
RS
658 break;
659
56e84019
RH
660 default:
661 gcc_assert (!stmt_ends_bb_p (last));
662 fallthru = true;
663 }
6de9cd9a 664 }
56e84019
RH
665 else
666 fallthru = true;
6de9cd9a 667
56e84019 668 if (fallthru)
6c52e687
CC
669 {
670 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
671 if (last)
672 assign_discriminator (gimple_location (last), bb->next_bb);
673 }
6de9cd9a
DN
674 }
675
bed575d5
RS
676 if (root_omp_region)
677 free_omp_regions ();
678
fca01525
KH
679 /* Fold COND_EXPR_COND of each COND_EXPR. */
680 fold_cond_expr_cond ();
6de9cd9a
DN
681}
682
6c52e687
CC
683/* Trivial hash function for a location_t. ITEM is a pointer to
684 a hash table entry that maps a location_t to a discriminator. */
685
686static unsigned int
687locus_map_hash (const void *item)
688{
689 return ((const struct locus_discrim_map *) item)->locus;
690}
691
692/* Equality function for the locus-to-discriminator map. VA and VB
693 point to the two hash table entries to compare. */
694
695static int
696locus_map_eq (const void *va, const void *vb)
697{
698 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
699 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
700 return a->locus == b->locus;
701}
702
703/* Find the next available discriminator value for LOCUS. The
704 discriminator distinguishes among several basic blocks that
705 share a common locus, allowing for more accurate sample-based
706 profiling. */
707
708static int
709next_discriminator_for_locus (location_t locus)
710{
711 struct locus_discrim_map item;
712 struct locus_discrim_map **slot;
713
714 item.locus = locus;
715 item.discriminator = 0;
716 slot = (struct locus_discrim_map **)
717 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
718 (hashval_t) locus, INSERT);
719 gcc_assert (slot);
720 if (*slot == HTAB_EMPTY_ENTRY)
721 {
722 *slot = XNEW (struct locus_discrim_map);
723 gcc_assert (*slot);
724 (*slot)->locus = locus;
725 (*slot)->discriminator = 0;
726 }
727 (*slot)->discriminator++;
728 return (*slot)->discriminator;
729}
730
731/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
732
733static bool
734same_line_p (location_t locus1, location_t locus2)
735{
736 expanded_location from, to;
737
738 if (locus1 == locus2)
739 return true;
740
741 from = expand_location (locus1);
742 to = expand_location (locus2);
743
744 if (from.line != to.line)
745 return false;
746 if (from.file == to.file)
747 return true;
748 return (from.file != NULL
749 && to.file != NULL
750 && strcmp (from.file, to.file) == 0);
751}
752
753/* Assign a unique discriminator value to block BB if it begins at the same
754 LOCUS as its predecessor block. */
755
756static void
757assign_discriminator (location_t locus, basic_block bb)
758{
cbea518e 759 gimple first_in_to_bb, last_in_to_bb;
6c52e687
CC
760
761 if (locus == 0 || bb->discriminator != 0)
762 return;
763
cbea518e
CC
764 first_in_to_bb = first_non_label_stmt (bb);
765 last_in_to_bb = last_stmt (bb);
2a2869d6
CC
766 if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
767 || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
6c52e687
CC
768 bb->discriminator = next_discriminator_for_locus (locus);
769}
6de9cd9a 770
726a989a 771/* Create the edges for a GIMPLE_COND starting at block BB. */
6de9cd9a
DN
772
773static void
774make_cond_expr_edges (basic_block bb)
775{
726a989a
RB
776 gimple entry = last_stmt (bb);
777 gimple then_stmt, else_stmt;
6de9cd9a
DN
778 basic_block then_bb, else_bb;
779 tree then_label, else_label;
d783b2a2 780 edge e;
6c52e687 781 location_t entry_locus;
6de9cd9a 782
1e128c5f 783 gcc_assert (entry);
726a989a 784 gcc_assert (gimple_code (entry) == GIMPLE_COND);
6de9cd9a 785
6c52e687
CC
786 entry_locus = gimple_location (entry);
787
6de9cd9a 788 /* Entry basic blocks for each component. */
726a989a
RB
789 then_label = gimple_cond_true_label (entry);
790 else_label = gimple_cond_false_label (entry);
6de9cd9a
DN
791 then_bb = label_to_block (then_label);
792 else_bb = label_to_block (else_label);
726a989a
RB
793 then_stmt = first_stmt (then_bb);
794 else_stmt = first_stmt (else_bb);
6de9cd9a 795
d783b2a2 796 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
6c52e687 797 assign_discriminator (entry_locus, then_bb);
726a989a 798 e->goto_locus = gimple_location (then_stmt);
cc2a64dd
JJ
799 if (e->goto_locus)
800 e->goto_block = gimple_block (then_stmt);
d783b2a2
JH
801 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
802 if (e)
7241571e 803 {
6c52e687 804 assign_discriminator (entry_locus, else_bb);
7241571e 805 e->goto_locus = gimple_location (else_stmt);
cc2a64dd
JJ
806 if (e->goto_locus)
807 e->goto_block = gimple_block (else_stmt);
7241571e 808 }
a9b77cd1 809
726a989a
RB
810 /* We do not need the labels anymore. */
811 gimple_cond_set_true_label (entry, NULL_TREE);
812 gimple_cond_set_false_label (entry, NULL_TREE);
6de9cd9a
DN
813}
814
92b6dff3 815
d6be0d7f
JL
816/* Called for each element in the hash table (P) as we delete the
817 edge to cases hash table.
818
6531d1be 819 Clear all the TREE_CHAINs to prevent problems with copying of
d6be0d7f
JL
820 SWITCH_EXPRs and structure sharing rules, then free the hash table
821 element. */
822
15814ba0 823static bool
ac7d7749 824edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
15814ba0 825 void *data ATTRIBUTE_UNUSED)
d6be0d7f 826{
d6be0d7f
JL
827 tree t, next;
828
15814ba0 829 for (t = (tree) *value; t; t = next)
d6be0d7f
JL
830 {
831 next = TREE_CHAIN (t);
832 TREE_CHAIN (t) = NULL;
833 }
15814ba0
PB
834
835 *value = NULL;
836 return false;
d6be0d7f
JL
837}
838
839/* Start recording information mapping edges to case labels. */
840
c9784e6d 841void
d6be0d7f
JL
842start_recording_case_labels (void)
843{
844 gcc_assert (edge_to_cases == NULL);
15814ba0 845 edge_to_cases = pointer_map_create ();
d6be0d7f
JL
846}
847
848/* Return nonzero if we are recording information for case labels. */
849
850static bool
851recording_case_labels_p (void)
852{
853 return (edge_to_cases != NULL);
854}
855
856/* Stop recording information mapping edges to case labels and
857 remove any information we have recorded. */
c9784e6d 858void
d6be0d7f
JL
859end_recording_case_labels (void)
860{
15814ba0
PB
861 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
862 pointer_map_destroy (edge_to_cases);
d6be0d7f
JL
863 edge_to_cases = NULL;
864}
865
d6be0d7f
JL
866/* If we are inside a {start,end}_recording_cases block, then return
867 a chain of CASE_LABEL_EXPRs from T which reference E.
868
869 Otherwise return NULL. */
92b6dff3
JL
870
871static tree
726a989a 872get_cases_for_edge (edge e, gimple t)
92b6dff3 873{
92b6dff3 874 void **slot;
d6be0d7f 875 size_t i, n;
92b6dff3 876
d6be0d7f
JL
877 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
878 chains available. Return NULL so the caller can detect this case. */
879 if (!recording_case_labels_p ())
880 return NULL;
6531d1be 881
15814ba0 882 slot = pointer_map_contains (edge_to_cases, e);
92b6dff3 883 if (slot)
15814ba0 884 return (tree) *slot;
92b6dff3 885
d6be0d7f
JL
886 /* If we did not find E in the hash table, then this must be the first
887 time we have been queried for information about E & T. Add all the
888 elements from T to the hash table then perform the query again. */
92b6dff3 889
726a989a 890 n = gimple_switch_num_labels (t);
92b6dff3
JL
891 for (i = 0; i < n; i++)
892 {
726a989a 893 tree elt = gimple_switch_label (t, i);
15814ba0 894 tree lab = CASE_LABEL (elt);
d6be0d7f 895 basic_block label_bb = label_to_block (lab);
15814ba0
PB
896 edge this_edge = find_edge (e->src, label_bb);
897
898 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
899 a new chain. */
900 slot = pointer_map_insert (edge_to_cases, this_edge);
901 TREE_CHAIN (elt) = (tree) *slot;
902 *slot = elt;
92b6dff3 903 }
15814ba0
PB
904
905 return (tree) *pointer_map_contains (edge_to_cases, e);
92b6dff3 906}
6de9cd9a 907
726a989a 908/* Create the edges for a GIMPLE_SWITCH starting at block BB. */
6de9cd9a
DN
909
910static void
726a989a 911make_gimple_switch_edges (basic_block bb)
6de9cd9a 912{
726a989a 913 gimple entry = last_stmt (bb);
6c52e687 914 location_t entry_locus;
6de9cd9a 915 size_t i, n;
6de9cd9a 916
6c52e687
CC
917 entry_locus = gimple_location (entry);
918
726a989a 919 n = gimple_switch_num_labels (entry);
6de9cd9a
DN
920
921 for (i = 0; i < n; ++i)
922 {
726a989a 923 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
6de9cd9a 924 basic_block label_bb = label_to_block (lab);
d6be0d7f 925 make_edge (bb, label_bb, 0);
6c52e687 926 assign_discriminator (entry_locus, label_bb);
6de9cd9a
DN
927 }
928}
929
930
931/* Return the basic block holding label DEST. */
932
933basic_block
997de8ed 934label_to_block_fn (struct function *ifun, tree dest)
6de9cd9a 935{
242229bb
JH
936 int uid = LABEL_DECL_UID (dest);
937
f0b698c1
KH
938 /* We would die hard when faced by an undefined label. Emit a label to
939 the very first basic block. This will hopefully make even the dataflow
242229bb
JH
940 and undefined variable warnings quite right. */
941 if ((errorcount || sorrycount) && uid < 0)
942 {
726a989a
RB
943 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
944 gimple stmt;
242229bb 945
726a989a
RB
946 stmt = gimple_build_label (dest);
947 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
242229bb
JH
948 uid = LABEL_DECL_UID (dest);
949 }
e597f337
KH
950 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
951 <= (unsigned int) uid)
98f464e0 952 return NULL;
e597f337 953 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
6de9cd9a
DN
954}
955
4f6c2131
EB
956/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
957 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
958
959void
960make_abnormal_goto_edges (basic_block bb, bool for_call)
961{
962 basic_block target_bb;
726a989a 963 gimple_stmt_iterator gsi;
4f6c2131
EB
964
965 FOR_EACH_BB (target_bb)
726a989a 966 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
4f6c2131 967 {
726a989a
RB
968 gimple label_stmt = gsi_stmt (gsi);
969 tree target;
4f6c2131 970
726a989a 971 if (gimple_code (label_stmt) != GIMPLE_LABEL)
4f6c2131
EB
972 break;
973
726a989a 974 target = gimple_label_label (label_stmt);
4f6c2131
EB
975
976 /* Make an edge to every label block that has been marked as a
977 potential target for a computed goto or a non-local goto. */
978 if ((FORCED_LABEL (target) && !for_call)
979 || (DECL_NONLOCAL (target) && for_call))
980 {
981 make_edge (bb, target_bb, EDGE_ABNORMAL);
982 break;
983 }
984 }
985}
986
6de9cd9a
DN
987/* Create edges for a goto statement at block BB. */
988
989static void
990make_goto_expr_edges (basic_block bb)
991{
726a989a
RB
992 gimple_stmt_iterator last = gsi_last_bb (bb);
993 gimple goto_t = gsi_stmt (last);
6de9cd9a 994
4f6c2131
EB
995 /* A simple GOTO creates normal edges. */
996 if (simple_goto_p (goto_t))
6de9cd9a 997 {
726a989a 998 tree dest = gimple_goto_dest (goto_t);
6c52e687
CC
999 basic_block label_bb = label_to_block (dest);
1000 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
726a989a 1001 e->goto_locus = gimple_location (goto_t);
6c52e687 1002 assign_discriminator (e->goto_locus, label_bb);
cc2a64dd
JJ
1003 if (e->goto_locus)
1004 e->goto_block = gimple_block (goto_t);
726a989a 1005 gsi_remove (&last, true);
4f6c2131 1006 return;
6de9cd9a
DN
1007 }
1008
4f6c2131
EB
1009 /* A computed GOTO creates abnormal edges. */
1010 make_abnormal_goto_edges (bb, false);
6de9cd9a
DN
1011}
1012
1013
1014/*---------------------------------------------------------------------------
1015 Flowgraph analysis
1016---------------------------------------------------------------------------*/
1017
f698d217
SB
1018/* Cleanup useless labels in basic blocks. This is something we wish
1019 to do early because it allows us to group case labels before creating
1020 the edges for the CFG, and it speeds up block statement iterators in
1021 all passes later on.
8b11009b
ZD
1022 We rerun this pass after CFG is created, to get rid of the labels that
1023 are no longer referenced. After then we do not run it any more, since
1024 (almost) no new labels should be created. */
f698d217
SB
1025
1026/* A map from basic block index to the leading label of that block. */
8b11009b
ZD
1027static struct label_record
1028{
1029 /* The label. */
1030 tree label;
1031
1032 /* True if the label is referenced from somewhere. */
1033 bool used;
1034} *label_for_bb;
f698d217
SB
1035
1036/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
1037static void
7e5487a2 1038update_eh_label (struct eh_region_d *region)
f698d217
SB
1039{
1040 tree old_label = get_eh_region_tree_label (region);
1041 if (old_label)
1042 {
165b54c3
SB
1043 tree new_label;
1044 basic_block bb = label_to_block (old_label);
1045
1046 /* ??? After optimizing, there may be EH regions with labels
1047 that have already been removed from the function body, so
1048 there is no basic block for them. */
1049 if (! bb)
1050 return;
1051
8b11009b
ZD
1052 new_label = label_for_bb[bb->index].label;
1053 label_for_bb[bb->index].used = true;
f698d217
SB
1054 set_eh_region_tree_label (region, new_label);
1055 }
1056}
1057
726a989a 1058
242229bb 1059/* Given LABEL return the first label in the same basic block. */
726a989a 1060
242229bb
JH
1061static tree
1062main_block_label (tree label)
1063{
1064 basic_block bb = label_to_block (label);
8b11009b 1065 tree main_label = label_for_bb[bb->index].label;
242229bb
JH
1066
1067 /* label_to_block possibly inserted undefined label into the chain. */
8b11009b
ZD
1068 if (!main_label)
1069 {
1070 label_for_bb[bb->index].label = label;
1071 main_label = label;
1072 }
1073
1074 label_for_bb[bb->index].used = true;
1075 return main_label;
242229bb
JH
1076}
1077
b986ebf3 1078/* Cleanup redundant labels. This is a three-step process:
f698d217
SB
1079 1) Find the leading label for each block.
1080 2) Redirect all references to labels to the leading labels.
1081 3) Cleanup all useless labels. */
6de9cd9a 1082
165b54c3 1083void
6de9cd9a
DN
1084cleanup_dead_labels (void)
1085{
1086 basic_block bb;
8b11009b 1087 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
1088
1089 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 1090 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
1091 FOR_EACH_BB (bb)
1092 {
726a989a 1093 gimple_stmt_iterator i;
6de9cd9a 1094
726a989a 1095 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
6de9cd9a 1096 {
726a989a
RB
1097 tree label;
1098 gimple stmt = gsi_stmt (i);
6de9cd9a 1099
726a989a 1100 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1101 break;
1102
726a989a 1103 label = gimple_label_label (stmt);
6de9cd9a
DN
1104
1105 /* If we have not yet seen a label for the current block,
1106 remember this one and see if there are more labels. */
8b11009b 1107 if (!label_for_bb[bb->index].label)
6de9cd9a 1108 {
8b11009b 1109 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1110 continue;
1111 }
1112
1113 /* If we did see a label for the current block already, but it
1114 is an artificially created label, replace it if the current
1115 label is a user defined label. */
8b11009b
ZD
1116 if (!DECL_ARTIFICIAL (label)
1117 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 1118 {
8b11009b 1119 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1120 break;
1121 }
1122 }
1123 }
1124
f698d217
SB
1125 /* Now redirect all jumps/branches to the selected label.
1126 First do so for each block ending in a control statement. */
6de9cd9a
DN
1127 FOR_EACH_BB (bb)
1128 {
726a989a 1129 gimple stmt = last_stmt (bb);
6de9cd9a
DN
1130 if (!stmt)
1131 continue;
1132
726a989a 1133 switch (gimple_code (stmt))
6de9cd9a 1134 {
726a989a 1135 case GIMPLE_COND:
6de9cd9a 1136 {
726a989a
RB
1137 tree true_label = gimple_cond_true_label (stmt);
1138 tree false_label = gimple_cond_false_label (stmt);
6de9cd9a 1139
726a989a
RB
1140 if (true_label)
1141 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1142 if (false_label)
1143 gimple_cond_set_false_label (stmt, main_block_label (false_label));
6de9cd9a
DN
1144 break;
1145 }
6531d1be 1146
726a989a 1147 case GIMPLE_SWITCH:
6de9cd9a 1148 {
726a989a 1149 size_t i, n = gimple_switch_num_labels (stmt);
6531d1be 1150
6de9cd9a
DN
1151 /* Replace all destination labels. */
1152 for (i = 0; i < n; ++i)
92b6dff3 1153 {
726a989a
RB
1154 tree case_label = gimple_switch_label (stmt, i);
1155 tree label = main_block_label (CASE_LABEL (case_label));
1156 CASE_LABEL (case_label) = label;
92b6dff3 1157 }
6de9cd9a
DN
1158 break;
1159 }
1160
726a989a 1161 /* We have to handle gotos until they're removed, and we don't
f667741c 1162 remove them until after we've created the CFG edges. */
726a989a
RB
1163 case GIMPLE_GOTO:
1164 if (!computed_goto_p (stmt))
242229bb 1165 {
726a989a
RB
1166 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1167 gimple_goto_set_dest (stmt, new_dest);
242229bb
JH
1168 break;
1169 }
f667741c 1170
6de9cd9a
DN
1171 default:
1172 break;
1173 }
1174 }
1175
f698d217
SB
1176 for_each_eh_region (update_eh_label);
1177
6de9cd9a 1178 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1179 can be the target of non-local gotos and labels which have their
1180 address taken are preserved. */
6de9cd9a
DN
1181 FOR_EACH_BB (bb)
1182 {
726a989a 1183 gimple_stmt_iterator i;
8b11009b 1184 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1185
8b11009b 1186 if (!label_for_this_bb)
6de9cd9a
DN
1187 continue;
1188
8b11009b
ZD
1189 /* If the main label of the block is unused, we may still remove it. */
1190 if (!label_for_bb[bb->index].used)
1191 label_for_this_bb = NULL;
1192
726a989a 1193 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
6de9cd9a 1194 {
726a989a
RB
1195 tree label;
1196 gimple stmt = gsi_stmt (i);
6de9cd9a 1197
726a989a 1198 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1199 break;
1200
726a989a 1201 label = gimple_label_label (stmt);
6de9cd9a
DN
1202
1203 if (label == label_for_this_bb
726a989a 1204 || !DECL_ARTIFICIAL (label)
cea0f4f1
AP
1205 || DECL_NONLOCAL (label)
1206 || FORCED_LABEL (label))
726a989a 1207 gsi_next (&i);
6de9cd9a 1208 else
726a989a 1209 gsi_remove (&i, true);
6de9cd9a
DN
1210 }
1211 }
1212
1213 free (label_for_bb);
1214}
1215
f667741c
SB
1216/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1217 and scan the sorted vector of cases. Combine the ones jumping to the
1218 same label.
1219 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1220
165b54c3 1221void
f667741c
SB
1222group_case_labels (void)
1223{
1224 basic_block bb;
1225
1226 FOR_EACH_BB (bb)
1227 {
726a989a
RB
1228 gimple stmt = last_stmt (bb);
1229 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
f667741c 1230 {
726a989a 1231 int old_size = gimple_switch_num_labels (stmt);
f667741c 1232 int i, j, new_size = old_size;
b7814a18
RG
1233 tree default_case = NULL_TREE;
1234 tree default_label = NULL_TREE;
726a989a 1235 bool has_default;
29c4d22b 1236
726a989a 1237 /* The default label is always the first case in a switch
b7814a18 1238 statement after gimplification if it was not optimized
726a989a
RB
1239 away */
1240 if (!CASE_LOW (gimple_switch_default_label (stmt))
1241 && !CASE_HIGH (gimple_switch_default_label (stmt)))
b7814a18 1242 {
726a989a 1243 default_case = gimple_switch_default_label (stmt);
b7814a18 1244 default_label = CASE_LABEL (default_case);
726a989a 1245 has_default = true;
b7814a18 1246 }
726a989a
RB
1247 else
1248 has_default = false;
f667741c 1249
b7814a18 1250 /* Look for possible opportunities to merge cases. */
726a989a
RB
1251 if (has_default)
1252 i = 1;
1253 else
1254 i = 0;
b7814a18 1255 while (i < old_size)
f667741c 1256 {
ed9cef22 1257 tree base_case, base_label, base_high;
726a989a 1258 base_case = gimple_switch_label (stmt, i);
f667741c 1259
1e128c5f 1260 gcc_assert (base_case);
f667741c 1261 base_label = CASE_LABEL (base_case);
31e9eea2
SB
1262
1263 /* Discard cases that have the same destination as the
1264 default case. */
1265 if (base_label == default_label)
1266 {
726a989a 1267 gimple_switch_set_label (stmt, i, NULL_TREE);
31e9eea2 1268 i++;
29c4d22b 1269 new_size--;
31e9eea2
SB
1270 continue;
1271 }
1272
726a989a
RB
1273 base_high = CASE_HIGH (base_case)
1274 ? CASE_HIGH (base_case)
1275 : CASE_LOW (base_case);
d717e500 1276 i++;
726a989a 1277
f667741c
SB
1278 /* Try to merge case labels. Break out when we reach the end
1279 of the label vector or when we cannot merge the next case
1280 label with the current one. */
b7814a18 1281 while (i < old_size)
f667741c 1282 {
726a989a 1283 tree merge_case = gimple_switch_label (stmt, i);
f667741c
SB
1284 tree merge_label = CASE_LABEL (merge_case);
1285 tree t = int_const_binop (PLUS_EXPR, base_high,
1286 integer_one_node, 1);
1287
1288 /* Merge the cases if they jump to the same place,
1289 and their ranges are consecutive. */
1290 if (merge_label == base_label
1291 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1292 {
1293 base_high = CASE_HIGH (merge_case) ?
1294 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1295 CASE_HIGH (base_case) = base_high;
726a989a 1296 gimple_switch_set_label (stmt, i, NULL_TREE);
f667741c 1297 new_size--;
d717e500 1298 i++;
f667741c
SB
1299 }
1300 else
1301 break;
1302 }
1303 }
1304
1305 /* Compress the case labels in the label vector, and adjust the
1306 length of the vector. */
1307 for (i = 0, j = 0; i < new_size; i++)
1308 {
726a989a 1309 while (! gimple_switch_label (stmt, j))
f667741c 1310 j++;
726a989a
RB
1311 gimple_switch_set_label (stmt, i,
1312 gimple_switch_label (stmt, j++));
f667741c 1313 }
726a989a
RB
1314
1315 gcc_assert (new_size <= old_size);
1316 gimple_switch_set_num_labels (stmt, new_size);
f667741c
SB
1317 }
1318 }
1319}
6de9cd9a
DN
1320
1321/* Checks whether we can merge block B into block A. */
1322
1323static bool
726a989a 1324gimple_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1325{
726a989a
RB
1326 gimple stmt;
1327 gimple_stmt_iterator gsi;
1328 gimple_seq phis;
6de9cd9a 1329
c5cbcccf 1330 if (!single_succ_p (a))
6de9cd9a
DN
1331 return false;
1332
496a4ef5 1333 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
6de9cd9a
DN
1334 return false;
1335
c5cbcccf 1336 if (single_succ (a) != b)
6de9cd9a
DN
1337 return false;
1338
c5cbcccf 1339 if (!single_pred_p (b))
6de9cd9a
DN
1340 return false;
1341
26e75214
KH
1342 if (b == EXIT_BLOCK_PTR)
1343 return false;
6531d1be 1344
6de9cd9a
DN
1345 /* If A ends by a statement causing exceptions or something similar, we
1346 cannot merge the blocks. */
726a989a 1347 stmt = last_stmt (a);
6de9cd9a
DN
1348 if (stmt && stmt_ends_bb_p (stmt))
1349 return false;
1350
1351 /* Do not allow a block with only a non-local label to be merged. */
726a989a
RB
1352 if (stmt
1353 && gimple_code (stmt) == GIMPLE_LABEL
1354 && DECL_NONLOCAL (gimple_label_label (stmt)))
6de9cd9a
DN
1355 return false;
1356
38965eb2 1357 /* It must be possible to eliminate all phi nodes in B. If ssa form
8f8bb1d2
ZD
1358 is not up-to-date, we cannot eliminate any phis; however, if only
1359 some symbols as whole are marked for renaming, this is not a problem,
1360 as phi nodes for those symbols are irrelevant in updating anyway. */
726a989a
RB
1361 phis = phi_nodes (b);
1362 if (!gimple_seq_empty_p (phis))
38965eb2 1363 {
726a989a
RB
1364 gimple_stmt_iterator i;
1365
8f8bb1d2 1366 if (name_mappings_registered_p ())
38965eb2
ZD
1367 return false;
1368
726a989a
RB
1369 for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
1370 {
1371 gimple phi = gsi_stmt (i);
1372
1373 if (!is_gimple_reg (gimple_phi_result (phi))
1374 && !may_propagate_copy (gimple_phi_result (phi),
1375 gimple_phi_arg_def (phi, 0)))
1376 return false;
1377 }
38965eb2 1378 }
6de9cd9a
DN
1379
1380 /* Do not remove user labels. */
726a989a 1381 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 1382 {
726a989a
RB
1383 stmt = gsi_stmt (gsi);
1384 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 1385 break;
726a989a 1386 if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
6de9cd9a
DN
1387 return false;
1388 }
1389
2b271002
ZD
1390 /* Protect the loop latches. */
1391 if (current_loops
1392 && b->loop_father->latch == b)
1393 return false;
1394
6de9cd9a
DN
1395 return true;
1396}
1397
38965eb2
ZD
1398/* Replaces all uses of NAME by VAL. */
1399
684aaf29 1400void
38965eb2
ZD
1401replace_uses_by (tree name, tree val)
1402{
1403 imm_use_iterator imm_iter;
1404 use_operand_p use;
726a989a 1405 gimple stmt;
38965eb2 1406 edge e;
38965eb2 1407
6c00f606 1408 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1409 {
6c00f606
AM
1410 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1411 {
1412 replace_exp (use, val);
38965eb2 1413
726a989a 1414 if (gimple_code (stmt) == GIMPLE_PHI)
38965eb2 1415 {
726a989a 1416 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
6c00f606
AM
1417 if (e->flags & EDGE_ABNORMAL)
1418 {
1419 /* This can only occur for virtual operands, since
1420 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1421 would prevent replacement. */
1422 gcc_assert (!is_gimple_reg (name));
1423 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1424 }
38965eb2
ZD
1425 }
1426 }
cfaab3a9 1427
726a989a 1428 if (gimple_code (stmt) != GIMPLE_PHI)
6c00f606 1429 {
726a989a 1430 size_t i;
9af0df6b 1431
6c00f606 1432 fold_stmt_inplace (stmt);
672987e8 1433 if (cfgcleanup_altered_bbs)
726a989a 1434 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
cfaab3a9 1435
cff4e50d 1436 /* FIXME. This should go in update_stmt. */
726a989a
RB
1437 for (i = 0; i < gimple_num_ops (stmt); i++)
1438 {
1439 tree op = gimple_op (stmt, i);
1440 /* Operands may be empty here. For example, the labels
1441 of a GIMPLE_COND are nulled out following the creation
1442 of the corresponding CFG edges. */
1443 if (op && TREE_CODE (op) == ADDR_EXPR)
1444 recompute_tree_invariant_for_addr_expr (op);
1445 }
9af0df6b 1446
6c00f606 1447 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cff4e50d 1448 update_stmt (stmt);
6c00f606 1449 }
38965eb2 1450 }
6531d1be 1451
40b448ef 1452 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1453
1454 /* Also update the trees stored in loop structures. */
1455 if (current_loops)
1456 {
1457 struct loop *loop;
42fd6772 1458 loop_iterator li;
d5ab5675 1459
42fd6772 1460 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1461 {
42fd6772 1462 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1463 }
1464 }
38965eb2 1465}
6de9cd9a
DN
1466
1467/* Merge block B into block A. */
1468
1469static void
726a989a 1470gimple_merge_blocks (basic_block a, basic_block b)
6de9cd9a 1471{
726a989a
RB
1472 gimple_stmt_iterator last, gsi, psi;
1473 gimple_seq phis = phi_nodes (b);
6de9cd9a
DN
1474
1475 if (dump_file)
1476 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1477
c4f548b8
DN
1478 /* Remove all single-valued PHI nodes from block B of the form
1479 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
726a989a
RB
1480 gsi = gsi_last_bb (a);
1481 for (psi = gsi_start (phis); !gsi_end_p (psi); )
38965eb2 1482 {
726a989a
RB
1483 gimple phi = gsi_stmt (psi);
1484 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1485 gimple copy;
1486 bool may_replace_uses = !is_gimple_reg (def)
1487 || may_propagate_copy (def, use);
d7f0e25c 1488
7c8eb293
ZD
1489 /* In case we maintain loop closed ssa form, do not propagate arguments
1490 of loop exit phi nodes. */
d7f0e25c 1491 if (current_loops
f87000d0 1492 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1493 && is_gimple_reg (def)
1494 && TREE_CODE (use) == SSA_NAME
1495 && a->loop_father != b->loop_father)
1496 may_replace_uses = false;
1497
1498 if (!may_replace_uses)
38965eb2
ZD
1499 {
1500 gcc_assert (is_gimple_reg (def));
1501
128a79fb 1502 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1503 with ordering of phi nodes. This is because A is the single
1504 predecessor of B, therefore results of the phi nodes cannot
1505 appear as arguments of the phi nodes. */
726a989a
RB
1506 copy = gimple_build_assign (def, use);
1507 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1508 remove_phi_node (&psi, false);
38965eb2
ZD
1509 }
1510 else
611021e1 1511 {
d0f76c4b
RG
1512 /* If we deal with a PHI for virtual operands, we can simply
1513 propagate these without fussing with folding or updating
1514 the stmt. */
1515 if (!is_gimple_reg (def))
1516 {
1517 imm_use_iterator iter;
1518 use_operand_p use_p;
726a989a 1519 gimple stmt;
d0f76c4b
RG
1520
1521 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1522 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1523 SET_USE (use_p, use);
1524 }
1525 else
1526 replace_uses_by (def, use);
726a989a
RB
1527
1528 remove_phi_node (&psi, true);
611021e1 1529 }
38965eb2
ZD
1530 }
1531
6de9cd9a
DN
1532 /* Ensure that B follows A. */
1533 move_block_after (b, a);
1534
c5cbcccf 1535 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1536 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a 1537
726a989a
RB
1538 /* Remove labels from B and set gimple_bb to A for other statements. */
1539 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
6de9cd9a 1540 {
726a989a 1541 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
be477406 1542 {
726a989a
RB
1543 gimple label = gsi_stmt (gsi);
1544
1545 gsi_remove (&gsi, false);
be477406 1546
be477406
JL
1547 /* Now that we can thread computed gotos, we might have
1548 a situation where we have a forced label in block B
1549 However, the label at the start of block B might still be
1550 used in other ways (think about the runtime checking for
1551 Fortran assigned gotos). So we can not just delete the
1552 label. Instead we move the label to the start of block A. */
726a989a 1553 if (FORCED_LABEL (gimple_label_label (label)))
be477406 1554 {
726a989a
RB
1555 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1556 gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
be477406
JL
1557 }
1558 }
6de9cd9a
DN
1559 else
1560 {
726a989a
RB
1561 gimple_set_bb (gsi_stmt (gsi), a);
1562 gsi_next (&gsi);
6de9cd9a
DN
1563 }
1564 }
1565
726a989a
RB
1566 /* Merge the sequences. */
1567 last = gsi_last_bb (a);
1568 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1569 set_bb_seq (b, NULL);
672987e8
ZD
1570
1571 if (cfgcleanup_altered_bbs)
1572 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1573}
1574
1575
bc23502b 1576/* Return the one of two successors of BB that is not reachable by a
2cd713a0 1577 complex edge, if there is one. Else, return BB. We use
bc23502b
PB
1578 this in optimizations that use post-dominators for their heuristics,
1579 to catch the cases in C++ where function calls are involved. */
6531d1be 1580
bc23502b 1581basic_block
6531d1be 1582single_noncomplex_succ (basic_block bb)
bc23502b
PB
1583{
1584 edge e0, e1;
1585 if (EDGE_COUNT (bb->succs) != 2)
1586 return bb;
6531d1be 1587
bc23502b
PB
1588 e0 = EDGE_SUCC (bb, 0);
1589 e1 = EDGE_SUCC (bb, 1);
1590 if (e0->flags & EDGE_COMPLEX)
1591 return e1->dest;
1592 if (e1->flags & EDGE_COMPLEX)
1593 return e0->dest;
6531d1be 1594
bc23502b 1595 return bb;
6531d1be 1596}
bc23502b
PB
1597
1598
6de9cd9a
DN
1599/* Walk the function tree removing unnecessary statements.
1600
1601 * Empty statement nodes are removed
1602
1603 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1604
1605 * Unnecessary COND_EXPRs are removed
1606
1607 * Some unnecessary BIND_EXPRs are removed
1608
726a989a
RB
1609 * GOTO_EXPRs immediately preceding destination are removed.
1610
6de9cd9a
DN
1611 Clearly more work could be done. The trick is doing the analysis
1612 and removal fast enough to be a net improvement in compile times.
1613
1614 Note that when we remove a control structure such as a COND_EXPR
1615 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1616 to ensure we eliminate all the useless code. */
1617
1618struct rus_data
1619{
6de9cd9a
DN
1620 bool repeat;
1621 bool may_throw;
1622 bool may_branch;
1623 bool has_label;
726a989a
RB
1624 bool last_was_goto;
1625 gimple_stmt_iterator last_goto_gsi;
6de9cd9a
DN
1626};
1627
726a989a
RB
1628
1629static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
1630
1631/* Given a statement sequence, find the first executable statement with
1632 location information, and warn that it is unreachable. When searching,
1633 descend into containers in execution order. */
6de9cd9a
DN
1634
1635static bool
726a989a 1636remove_useless_stmts_warn_notreached (gimple_seq stmts)
6de9cd9a 1637{
726a989a 1638 gimple_stmt_iterator gsi;
6de9cd9a 1639
726a989a 1640 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 1641 {
726a989a 1642 gimple stmt = gsi_stmt (gsi);
6de9cd9a 1643
d665b6e5
MLI
1644 if (gimple_no_warning_p (stmt)) return false;
1645
726a989a
RB
1646 if (gimple_has_location (stmt))
1647 {
1648 location_t loc = gimple_location (stmt);
1649 if (LOCATION_LINE (loc) > 0)
1650 {
fab922b1 1651 warning_at (loc, OPT_Wunreachable_code, "will never be executed");
726a989a
RB
1652 return true;
1653 }
1654 }
6de9cd9a 1655
726a989a
RB
1656 switch (gimple_code (stmt))
1657 {
1658 /* Unfortunately, we need the CFG now to detect unreachable
1659 branches in a conditional, so conditionals are not handled here. */
6de9cd9a 1660
726a989a
RB
1661 case GIMPLE_TRY:
1662 if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
1663 return true;
1664 if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
1665 return true;
1666 break;
6de9cd9a 1667
726a989a
RB
1668 case GIMPLE_CATCH:
1669 return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
1670
1671 case GIMPLE_EH_FILTER:
1672 return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
1673
1674 case GIMPLE_BIND:
1675 return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
1676
1677 default:
1678 break;
1679 }
6de9cd9a
DN
1680 }
1681
1682 return false;
1683}
1684
726a989a
RB
1685/* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
1686
6de9cd9a 1687static void
726a989a 1688remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1689{
726a989a 1690 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 1691
726a989a 1692 /* The folded result must still be a conditional statement. */
2586ba4b
RG
1693 fold_stmt (gsi);
1694 gcc_assert (gsi_stmt (*gsi) == stmt);
6de9cd9a 1695
726a989a 1696 data->may_branch = true;
6de9cd9a 1697
726a989a
RB
1698 /* Replace trivial conditionals with gotos. */
1699 if (gimple_cond_true_p (stmt))
6de9cd9a 1700 {
726a989a
RB
1701 /* Goto THEN label. */
1702 tree then_label = gimple_cond_true_label (stmt);
6de9cd9a 1703
726a989a
RB
1704 gsi_replace (gsi, gimple_build_goto (then_label), false);
1705 data->last_goto_gsi = *gsi;
1706 data->last_was_goto = true;
6de9cd9a
DN
1707 data->repeat = true;
1708 }
726a989a 1709 else if (gimple_cond_false_p (stmt))
6de9cd9a 1710 {
726a989a
RB
1711 /* Goto ELSE label. */
1712 tree else_label = gimple_cond_false_label (stmt);
1713
1714 gsi_replace (gsi, gimple_build_goto (else_label), false);
1715 data->last_goto_gsi = *gsi;
1716 data->last_was_goto = true;
6de9cd9a
DN
1717 data->repeat = true;
1718 }
6de9cd9a
DN
1719 else
1720 {
726a989a
RB
1721 tree then_label = gimple_cond_true_label (stmt);
1722 tree else_label = gimple_cond_false_label (stmt);
6de9cd9a 1723
726a989a
RB
1724 if (then_label == else_label)
1725 {
1726 /* Goto common destination. */
1727 gsi_replace (gsi, gimple_build_goto (then_label), false);
1728 data->last_goto_gsi = *gsi;
1729 data->last_was_goto = true;
6de9cd9a
DN
1730 data->repeat = true;
1731 }
6de9cd9a
DN
1732 }
1733
726a989a
RB
1734 gsi_next (gsi);
1735
1736 data->last_was_goto = false;
6de9cd9a
DN
1737}
1738
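/* Illustrative example for the helper above (GIMPLE pseudo-code): a
   trivially true condition

       if (1 != 0) goto <L1>; else goto <L2>;

   is replaced by the unconditional

       goto <L1>;

   and DATA->repeat is set so that code reachable only through <L2> can
   be cleaned up on a later iteration of the pass.  */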
726a989a
RB
1739/* Helper for remove_useless_stmts_1.
1740 Handle the try-finally case for GIMPLE_TRY statements. */
6de9cd9a
DN
1741
1742static void
726a989a 1743remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a
DN
1744{
1745 bool save_may_branch, save_may_throw;
1746 bool this_may_branch, this_may_throw;
1747
726a989a
RB
1748 gimple_seq eval_seq, cleanup_seq;
1749 gimple_stmt_iterator eval_gsi, cleanup_gsi;
1750
1751 gimple stmt = gsi_stmt (*gsi);
1752
6de9cd9a
DN
1753 /* Collect may_branch and may_throw information for the body only. */
1754 save_may_branch = data->may_branch;
1755 save_may_throw = data->may_throw;
1756 data->may_branch = false;
1757 data->may_throw = false;
726a989a 1758 data->last_was_goto = false;
6de9cd9a 1759
726a989a
RB
1760 eval_seq = gimple_try_eval (stmt);
1761 eval_gsi = gsi_start (eval_seq);
1762 remove_useless_stmts_1 (&eval_gsi, data);
6de9cd9a
DN
1763
1764 this_may_branch = data->may_branch;
1765 this_may_throw = data->may_throw;
1766 data->may_branch |= save_may_branch;
1767 data->may_throw |= save_may_throw;
726a989a 1768 data->last_was_goto = false;
6de9cd9a 1769
726a989a
RB
1770 cleanup_seq = gimple_try_cleanup (stmt);
1771 cleanup_gsi = gsi_start (cleanup_seq);
1772 remove_useless_stmts_1 (&cleanup_gsi, data);
6de9cd9a
DN
1773
1774 /* If the body is empty, then we can emit the FINALLY block without
1775 the enclosing TRY_FINALLY_EXPR. */
726a989a 1776 if (gimple_seq_empty_p (eval_seq))
6de9cd9a 1777 {
726a989a
RB
1778 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1779 gsi_remove (gsi, false);
6de9cd9a
DN
1780 data->repeat = true;
1781 }
1782
1783 /* If the handler is empty, then we can emit the TRY block without
1784 the enclosing TRY_FINALLY_EXPR. */
726a989a 1785 else if (gimple_seq_empty_p (cleanup_seq))
6de9cd9a 1786 {
726a989a
RB
1787 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1788 gsi_remove (gsi, false);
6de9cd9a
DN
1789 data->repeat = true;
1790 }
1791
1792 /* If the body neither throws, nor branches, then we can safely
1793 string the TRY and FINALLY blocks together. */
1794 else if (!this_may_branch && !this_may_throw)
1795 {
726a989a
RB
1796 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1797 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1798 gsi_remove (gsi, false);
6de9cd9a
DN
1799 data->repeat = true;
1800 }
726a989a
RB
1801 else
1802 gsi_next (gsi);
6de9cd9a
DN
1803}
1804
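/* Summary of the simplifications performed above, in GIMPLE pseudo-code
   (illustrative shapes only):

       try { } finally { C; }      ==>   C;
       try { B; } finally { }      ==>   B;
       try { B; } finally { C; }   ==>   B; C;   (B neither throws nor branches)

   In all other cases the GIMPLE_TRY is kept and the walk simply moves on.  */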
726a989a
RB
1805/* Helper for remove_useless_stmts_1.
1806 Handle the try-catch case for GIMPLE_TRY statements. */
6de9cd9a
DN
1807
1808static void
726a989a 1809remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a
DN
1810{
1811 bool save_may_throw, this_may_throw;
726a989a
RB
1812
1813 gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
1814 gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
1815
1816 gimple stmt = gsi_stmt (*gsi);
6de9cd9a
DN
1817
1818 /* Collect may_throw information for the body only. */
1819 save_may_throw = data->may_throw;
1820 data->may_throw = false;
726a989a 1821 data->last_was_goto = false;
6de9cd9a 1822
726a989a
RB
1823 eval_seq = gimple_try_eval (stmt);
1824 eval_gsi = gsi_start (eval_seq);
1825 remove_useless_stmts_1 (&eval_gsi, data);
6de9cd9a
DN
1826
1827 this_may_throw = data->may_throw;
1828 data->may_throw = save_may_throw;
1829
726a989a
RB
1830 cleanup_seq = gimple_try_cleanup (stmt);
1831
6de9cd9a
DN
1832 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1833 if (!this_may_throw)
1834 {
1835 if (warn_notreached)
726a989a
RB
1836 {
1837 remove_useless_stmts_warn_notreached (cleanup_seq);
1838 }
1839 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1840 gsi_remove (gsi, false);
6de9cd9a
DN
1841 data->repeat = true;
1842 return;
1843 }
1844
1845 /* Process the catch clause specially. We may be able to tell that
1846 no exceptions propagate past this point. */
1847
1848 this_may_throw = true;
726a989a
RB
1849 cleanup_gsi = gsi_start (cleanup_seq);
1850 stmt = gsi_stmt (cleanup_gsi);
1851 data->last_was_goto = false;
6de9cd9a 1852
726a989a 1853 switch (gimple_code (stmt))
6de9cd9a 1854 {
726a989a
RB
1855 case GIMPLE_CATCH:
1856 /* If the first element is a catch, they all must be. */
1857 while (!gsi_end_p (cleanup_gsi))
1858 {
1859 stmt = gsi_stmt (cleanup_gsi);
6de9cd9a
DN
1860 /* If we catch all exceptions, then the body does not
1861 propagate exceptions past this point. */
726a989a 1862 if (gimple_catch_types (stmt) == NULL)
6de9cd9a 1863 this_may_throw = false;
726a989a
RB
1864 data->last_was_goto = false;
1865 handler_seq = gimple_catch_handler (stmt);
1866 handler_gsi = gsi_start (handler_seq);
1867 remove_useless_stmts_1 (&handler_gsi, data);
1868 gsi_next (&cleanup_gsi);
6de9cd9a 1869 }
726a989a 1870 gsi_next (gsi);
6de9cd9a
DN
1871 break;
1872
726a989a
RB
1873 case GIMPLE_EH_FILTER:
1874 /* If the first element is an eh_filter, it should stand alone. */
1875 if (gimple_eh_filter_must_not_throw (stmt))
6de9cd9a 1876 this_may_throw = false;
726a989a 1877 else if (gimple_eh_filter_types (stmt) == NULL)
6de9cd9a 1878 this_may_throw = false;
726a989a
RB
1879 failure_seq = gimple_eh_filter_failure (stmt);
1880 failure_gsi = gsi_start (failure_seq);
1881 remove_useless_stmts_1 (&failure_gsi, data);
1882 gsi_next (gsi);
6de9cd9a
DN
1883 break;
1884
1885 default:
726a989a
RB
1886 /* Otherwise this is a list of cleanup statements. */
1887 remove_useless_stmts_1 (&cleanup_gsi, data);
6de9cd9a
DN
1888
1889 /* If the cleanup is empty, then we can emit the TRY block without
1890 the enclosing TRY_CATCH_EXPR. */
726a989a 1891 if (gimple_seq_empty_p (cleanup_seq))
6de9cd9a 1892 {
726a989a
RB
1893 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
 1894 gsi_remove (gsi, false);
6de9cd9a
DN
1895 data->repeat = true;
1896 }
726a989a
RB
1897 else
1898 gsi_next (gsi);
6de9cd9a
DN
1899 break;
1900 }
726a989a 1901
6de9cd9a
DN
1902 data->may_throw |= this_may_throw;
1903}
1904
726a989a 1905/* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
6de9cd9a
DN
1906
1907static void
726a989a 1908remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
1909{
1910 tree block;
726a989a
RB
1911 gimple_seq body_seq, fn_body_seq;
1912 gimple_stmt_iterator body_gsi;
1913
1914 gimple stmt = gsi_stmt (*gsi);
6de9cd9a
DN
1915
 1916 /* First remove anything underneath the GIMPLE_BIND. */
726a989a
RB
1917
1918 body_seq = gimple_bind_body (stmt);
1919 body_gsi = gsi_start (body_seq);
1920 remove_useless_stmts_1 (&body_gsi, data);
6de9cd9a 1921
726a989a
RB
1922 /* If the GIMPLE_BIND has no variables, then we can pull everything
1923 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1924 GIMPLE_BIND for the current function or an inlined function.
6de9cd9a
DN
1925
1926 When this situation occurs we will want to apply this
1927 optimization again. */
726a989a
RB
1928 block = gimple_bind_block (stmt);
1929 fn_body_seq = gimple_body (current_function_decl);
1930 if (gimple_bind_vars (stmt) == NULL_TREE
1931 && (gimple_seq_empty_p (fn_body_seq)
1932 || stmt != gimple_seq_first_stmt (fn_body_seq))
6de9cd9a
DN
1933 && (! block
1934 || ! BLOCK_ABSTRACT_ORIGIN (block)
1935 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1936 != FUNCTION_DECL)))
1937 {
ee0ee7e2
JJ
1938 tree var = NULL_TREE;
1939 /* Even if there are no gimple_bind_vars, there might be other
1940 decls in BLOCK_VARS rendering the GIMPLE_BIND not useless. */
9f0e7885 1941 if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
ee0ee7e2
JJ
1942 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1943 if (TREE_CODE (var) == IMPORTED_DECL)
1944 break;
9f0e7885 1945 if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
ee0ee7e2
JJ
1946 gsi_next (gsi);
1947 else
1948 {
1949 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1950 gsi_remove (gsi, false);
1951 data->repeat = true;
1952 }
6de9cd9a 1953 }
726a989a
RB
1954 else
1955 gsi_next (gsi);
6de9cd9a
DN
1956}
1957
726a989a 1958/* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
6de9cd9a
DN
1959
1960static void
726a989a 1961remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1962{
726a989a
RB
1963 gimple stmt = gsi_stmt (*gsi);
1964
1965 tree dest = gimple_goto_dest (stmt);
6de9cd9a
DN
1966
1967 data->may_branch = true;
726a989a 1968 data->last_was_goto = false;
6de9cd9a 1969
726a989a 1970 /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
6de9cd9a 1971 if (TREE_CODE (dest) == LABEL_DECL)
726a989a
RB
1972 {
1973 data->last_goto_gsi = *gsi;
1974 data->last_was_goto = true;
1975 }
1976
 1977 gsi_next (gsi);
6de9cd9a
DN
1978}
1979
726a989a 1980/* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
6de9cd9a
DN
1981
1982static void
726a989a 1983remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 1984{
726a989a
RB
1985 gimple stmt = gsi_stmt (*gsi);
1986
1987 tree label = gimple_label_label (stmt);
6de9cd9a
DN
1988
1989 data->has_label = true;
1990
1991 /* We do want to jump across non-local label receiver code. */
1992 if (DECL_NONLOCAL (label))
726a989a 1993 data->last_was_goto = false;
6de9cd9a 1994
726a989a
RB
1995 else if (data->last_was_goto
1996 && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
6de9cd9a 1997 {
726a989a
RB
1998 /* Replace the preceding GIMPLE_GOTO statement with
1999 a GIMPLE_NOP, which will be subsequently removed.
2000 In this way, we avoid invalidating other iterators
2001 active on the statement sequence. */
 2002 gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
2003 data->last_was_goto = false;
6de9cd9a
DN
2004 data->repeat = true;
2005 }
2006
2007 /* ??? Add something here to delete unused labels. */
6de9cd9a 2008
726a989a 2009 gsi_next (gsi);
6de9cd9a
DN
2010}
2011
2012
 2013/* CALL is a GIMPLE_CALL statement. Set the current_function_calls_* flags. */
2014
2015void
726a989a 2016notice_special_calls (gimple call)
6de9cd9a 2017{
726a989a 2018 int flags = gimple_call_flags (call);
6de9cd9a
DN
2019
2020 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 2021 cfun->calls_alloca = true;
6de9cd9a 2022 if (flags & ECF_RETURNS_TWICE)
e3b5732b 2023 cfun->calls_setjmp = true;
6de9cd9a
DN
2024}
2025
2026
2027/* Clear flags set by notice_special_calls. Used by dead code removal
2028 to update the flags. */
2029
2030void
2031clear_special_calls (void)
2032{
e3b5732b
JH
2033 cfun->calls_alloca = false;
2034 cfun->calls_setjmp = false;
6de9cd9a
DN
2035}
2036
726a989a
RB
2037/* Remove useless statements from a statement sequence, and perform
2038 some preliminary simplifications. */
6de9cd9a
DN
2039
2040static void
726a989a 2041remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
6de9cd9a 2042{
726a989a 2043 while (!gsi_end_p (*gsi))
6de9cd9a 2044 {
726a989a 2045 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 2046
726a989a
RB
2047 switch (gimple_code (stmt))
2048 {
2049 case GIMPLE_COND:
2050 remove_useless_stmts_cond (gsi, data);
2051 break;
2052
2053 case GIMPLE_GOTO:
2054 remove_useless_stmts_goto (gsi, data);
2055 break;
2056
2057 case GIMPLE_LABEL:
2058 remove_useless_stmts_label (gsi, data);
2059 break;
2060
2061 case GIMPLE_ASSIGN:
2062 fold_stmt (gsi);
2063 stmt = gsi_stmt (*gsi);
2064 data->last_was_goto = false;
2065 if (stmt_could_throw_p (stmt))
2066 data->may_throw = true;
2067 gsi_next (gsi);
2068 break;
2069
2070 case GIMPLE_ASM:
2071 fold_stmt (gsi);
2072 data->last_was_goto = false;
2073 gsi_next (gsi);
2074 break;
2075
2076 case GIMPLE_CALL:
2077 fold_stmt (gsi);
2078 stmt = gsi_stmt (*gsi);
2079 data->last_was_goto = false;
2080 if (is_gimple_call (stmt))
2081 notice_special_calls (stmt);
2082
2083 /* We used to call update_gimple_call_flags here,
2084 which copied side-effects and nothrows status
2085 from the function decl to the call. In the new
2086 tuplified GIMPLE, the accessors for this information
2087 always consult the function decl, so this copying
2088 is no longer necessary. */
2089 if (stmt_could_throw_p (stmt))
2090 data->may_throw = true;
2091 gsi_next (gsi);
2092 break;
2093
2094 case GIMPLE_RETURN:
2095 fold_stmt (gsi);
2096 data->last_was_goto = false;
2097 data->may_branch = true;
2098 gsi_next (gsi);
2099 break;
2100
2101 case GIMPLE_BIND:
2102 remove_useless_stmts_bind (gsi, data);
2103 break;
2104
2105 case GIMPLE_TRY:
2106 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
2107 remove_useless_stmts_tc (gsi, data);
2108 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2109 remove_useless_stmts_tf (gsi, data);
2110 else
2111 gcc_unreachable ();
2112 break;
2113
2114 case GIMPLE_CATCH:
2115 gcc_unreachable ();
2116 break;
2117
2118 case GIMPLE_NOP:
2119 gsi_remove (gsi, false);
2120 break;
2121
2122 case GIMPLE_OMP_FOR:
2123 {
2124 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2125 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2126
2127 remove_useless_stmts_1 (&pre_body_gsi, data);
2128 data->last_was_goto = false;
2129 }
2130 /* FALLTHROUGH */
2131 case GIMPLE_OMP_CRITICAL:
2132 case GIMPLE_OMP_CONTINUE:
2133 case GIMPLE_OMP_MASTER:
2134 case GIMPLE_OMP_ORDERED:
2135 case GIMPLE_OMP_SECTION:
2136 case GIMPLE_OMP_SECTIONS:
2137 case GIMPLE_OMP_SINGLE:
2138 {
2139 gimple_seq body_seq = gimple_omp_body (stmt);
2140 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2141
2142 remove_useless_stmts_1 (&body_gsi, data);
2143 data->last_was_goto = false;
2144 gsi_next (gsi);
2145 }
2146 break;
2147
2148 case GIMPLE_OMP_PARALLEL:
2149 case GIMPLE_OMP_TASK:
2150 {
2151 /* Make sure the outermost GIMPLE_BIND isn't removed
2152 as useless. */
2153 gimple_seq body_seq = gimple_omp_body (stmt);
2154 gimple bind = gimple_seq_first_stmt (body_seq);
2155 gimple_seq bind_seq = gimple_bind_body (bind);
2156 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2157
2158 remove_useless_stmts_1 (&bind_gsi, data);
2159 data->last_was_goto = false;
2160 gsi_next (gsi);
2161 }
2162 break;
2163
2164 default:
2165 data->last_was_goto = false;
2166 gsi_next (gsi);
2167 break;
2168 }
6de9cd9a
DN
2169 }
2170}
2171
726a989a
RB
2172/* Walk the function tree, removing useless statements and performing
2173 some preliminary simplifications. */
2174
c2924966 2175static unsigned int
6de9cd9a
DN
2176remove_useless_stmts (void)
2177{
2178 struct rus_data data;
2179
2180 clear_special_calls ();
2181
2182 do
2183 {
726a989a
RB
2184 gimple_stmt_iterator gsi;
2185
2186 gsi = gsi_start (gimple_body (current_function_decl));
6de9cd9a 2187 memset (&data, 0, sizeof (data));
726a989a 2188 remove_useless_stmts_1 (&gsi, &data);
6de9cd9a
DN
2189 }
2190 while (data.repeat);
211ca15c
RG
2191
2192#ifdef ENABLE_TYPES_CHECKING
2193 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2194#endif
2195
c2924966 2196 return 0;
6de9cd9a
DN
2197}
2198
2199
8ddbbcae 2200struct gimple_opt_pass pass_remove_useless_stmts =
6de9cd9a 2201{
8ddbbcae
JH
2202 {
2203 GIMPLE_PASS,
6de9cd9a
DN
2204 "useless", /* name */
2205 NULL, /* gate */
2206 remove_useless_stmts, /* execute */
2207 NULL, /* sub */
2208 NULL, /* next */
2209 0, /* static_pass_number */
7072a650 2210 TV_NONE, /* tv_id */
9e5a3e6c
RH
2211 PROP_gimple_any, /* properties_required */
2212 0, /* properties_provided */
6de9cd9a
DN
2213 0, /* properties_destroyed */
2214 0, /* todo_flags_start */
8ddbbcae
JH
2215 TODO_dump_func /* todo_flags_finish */
2216 }
6de9cd9a
DN
2217};
2218
6de9cd9a
DN
2219/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2220
2221static void
2222remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2223{
6de9cd9a
DN
2224 /* Since this block is no longer reachable, we can just delete all
2225 of its PHI nodes. */
81b822d5 2226 remove_phi_nodes (bb);
6de9cd9a
DN
2227
2228 /* Remove edges to BB's successors. */
628f6a4e 2229 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 2230 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
2231}
2232
2233
2234/* Remove statements of basic block BB. */
2235
2236static void
2237remove_bb (basic_block bb)
2238{
726a989a 2239 gimple_stmt_iterator i;
dbce1570 2240 source_location loc = UNKNOWN_LOCATION;
6de9cd9a
DN
2241
2242 if (dump_file)
2243 {
2244 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2245 if (dump_flags & TDF_DETAILS)
2246 {
2247 dump_bb (bb, dump_file, 0);
2248 fprintf (dump_file, "\n");
2249 }
2250 }
2251
2b271002
ZD
2252 if (current_loops)
2253 {
2254 struct loop *loop = bb->loop_father;
2255
598ec7bd
ZD
2256 /* If a loop gets removed, clean up the information associated
2257 with it. */
2b271002
ZD
2258 if (loop->latch == bb
2259 || loop->header == bb)
598ec7bd 2260 free_numbers_of_iterations_estimates_loop (loop);
2b271002
ZD
2261 }
2262
6de9cd9a 2263 /* Remove all the instructions in the block. */
726a989a 2264 if (bb_seq (bb) != NULL)
6de9cd9a 2265 {
726a989a 2266 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
77568960 2267 {
726a989a
RB
2268 gimple stmt = gsi_stmt (i);
2269 if (gimple_code (stmt) == GIMPLE_LABEL
2270 && (FORCED_LABEL (gimple_label_label (stmt))
2271 || DECL_NONLOCAL (gimple_label_label (stmt))))
7506e1cb
ZD
2272 {
2273 basic_block new_bb;
726a989a 2274 gimple_stmt_iterator new_gsi;
7506e1cb
ZD
2275
2276 /* A non-reachable non-local label may still be referenced.
2277 But it no longer needs to carry the extra semantics of
2278 non-locality. */
726a989a 2279 if (DECL_NONLOCAL (gimple_label_label (stmt)))
7506e1cb 2280 {
726a989a
RB
2281 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2282 FORCED_LABEL (gimple_label_label (stmt)) = 1;
7506e1cb 2283 }
bb1ecfe8 2284
7506e1cb 2285 new_bb = bb->prev_bb;
726a989a
RB
2286 new_gsi = gsi_start_bb (new_bb);
2287 gsi_remove (&i, false);
2288 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
7506e1cb
ZD
2289 }
2290 else
bb1ecfe8 2291 {
7506e1cb
ZD
2292 /* Release SSA definitions if we are in SSA. Note that we
2293 may be called when not in SSA. For example,
2294 final_cleanup calls this function via
2295 cleanup_tree_cfg. */
2296 if (gimple_in_ssa_p (cfun))
2297 release_defs (stmt);
2298
726a989a 2299 gsi_remove (&i, true);
bb1ecfe8 2300 }
6531d1be 2301
7506e1cb
ZD
2302 /* Don't warn for removed gotos. Gotos are often removed due to
2303 jump threading, thus resulting in bogus warnings. Not great,
2304 since this way we lose warnings for gotos in the original
2305 program that are indeed unreachable. */
726a989a
RB
2306 if (gimple_code (stmt) != GIMPLE_GOTO
2307 && gimple_has_location (stmt)
2308 && !loc)
2309 loc = gimple_location (stmt);
43e05e45 2310 }
6de9cd9a
DN
2311 }
2312
 2313 /* If requested, give a warning that the first statement in the
 2314 block is unreachable. The loop above records a location only the
 2315 first time one is seen, so LOC refers to the first statement with
 2316 location information in the block. */
5ffeb913 2317 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
fab922b1 2318 warning_at (loc, OPT_Wunreachable_code, "will never be executed");
6de9cd9a
DN
2319
2320 remove_phi_nodes_and_edges_for_unreachable_block (bb);
726a989a 2321 bb->il.gimple = NULL;
6de9cd9a
DN
2322}
2323
6de9cd9a 2324
35920270
KH
2325/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2326 predicate VAL, return the edge that will be taken out of the block.
2327 If VAL does not match a unique edge, NULL is returned. */
6de9cd9a
DN
2328
2329edge
2330find_taken_edge (basic_block bb, tree val)
2331{
726a989a 2332 gimple stmt;
6de9cd9a
DN
2333
2334 stmt = last_stmt (bb);
2335
1e128c5f
GB
2336 gcc_assert (stmt);
2337 gcc_assert (is_ctrl_stmt (stmt));
6de9cd9a 2338
726a989a
RB
2339 if (val == NULL)
2340 return NULL;
2341
2342 if (!is_gimple_min_invariant (val))
6de9cd9a
DN
2343 return NULL;
2344
726a989a 2345 if (gimple_code (stmt) == GIMPLE_COND)
6de9cd9a
DN
2346 return find_taken_edge_cond_expr (bb, val);
2347
726a989a 2348 if (gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a
DN
2349 return find_taken_edge_switch_expr (bb, val);
2350
be477406 2351 if (computed_goto_p (stmt))
1799efef
JL
2352 {
 2353 /* Only optimize if the argument is a label; if the argument is
 2354 not a label then we cannot construct a proper CFG.
2355
2356 It may be the case that we only need to allow the LABEL_REF to
2357 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2358 appear inside a LABEL_EXPR just to be safe. */
2359 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2360 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2361 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2362 return NULL;
2363 }
be477406 2364
35920270 2365 gcc_unreachable ();
6de9cd9a
DN
2366}
2367
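/* Usage sketch (assumed caller, not from this file): CFG cleanup and
   constant-propagation passes use find_taken_edge to thread a constant
   predicate to its unique destination, e.g.

       edge taken = find_taken_edge (bb, integer_zero_node);
       if (taken)
         ... only TAKEN->dest remains reachable from BB ...

   A NULL result means VAL does not select a unique outgoing edge.  */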
be477406
JL
2368/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2369 statement, determine which of the outgoing edges will be taken out of the
2370 block. Return NULL if either edge may be taken. */
2371
2372static edge
2373find_taken_edge_computed_goto (basic_block bb, tree val)
2374{
2375 basic_block dest;
2376 edge e = NULL;
2377
2378 dest = label_to_block (val);
2379 if (dest)
2380 {
2381 e = find_edge (bb, dest);
2382 gcc_assert (e != NULL);
2383 }
2384
2385 return e;
2386}
6de9cd9a
DN
2387
2388/* Given a constant value VAL and the entry block BB to a COND_EXPR
2389 statement, determine which of the two edges will be taken out of the
2390 block. Return NULL if either edge may be taken. */
2391
2392static edge
2393find_taken_edge_cond_expr (basic_block bb, tree val)
2394{
2395 edge true_edge, false_edge;
2396
2397 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 2398
f1b19062 2399 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 2400 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
2401}
2402
fca01525 2403/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
2404 statement, determine which edge will be taken out of the block. Return
2405 NULL if any edge may be taken. */
2406
2407static edge
2408find_taken_edge_switch_expr (basic_block bb, tree val)
2409{
6de9cd9a
DN
2410 basic_block dest_bb;
2411 edge e;
726a989a
RB
2412 gimple switch_stmt;
2413 tree taken_case;
6de9cd9a 2414
726a989a
RB
2415 switch_stmt = last_stmt (bb);
2416 taken_case = find_case_label_for_value (switch_stmt, val);
6de9cd9a
DN
2417 dest_bb = label_to_block (CASE_LABEL (taken_case));
2418
2419 e = find_edge (bb, dest_bb);
1e128c5f 2420 gcc_assert (e);
6de9cd9a
DN
2421 return e;
2422}
2423
2424
726a989a 2425/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
f667741c
SB
2426 We can make optimal use here of the fact that the case labels are
2427 sorted: We can do a binary search for a case matching VAL. */
6de9cd9a
DN
2428
2429static tree
726a989a 2430find_case_label_for_value (gimple switch_stmt, tree val)
6de9cd9a 2431{
726a989a
RB
2432 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2433 tree default_case = gimple_switch_default_label (switch_stmt);
6de9cd9a 2434
726a989a 2435 for (low = 0, high = n; high - low > 1; )
6de9cd9a 2436 {
f667741c 2437 size_t i = (high + low) / 2;
726a989a 2438 tree t = gimple_switch_label (switch_stmt, i);
f667741c
SB
2439 int cmp;
2440
2441 /* Cache the result of comparing CASE_LOW and val. */
2442 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 2443
f667741c
SB
2444 if (cmp > 0)
2445 high = i;
2446 else
2447 low = i;
2448
2449 if (CASE_HIGH (t) == NULL)
6de9cd9a 2450 {
f667741c
SB
 2451 /* A single-valued case label. */
2452 if (cmp == 0)
6de9cd9a
DN
2453 return t;
2454 }
2455 else
2456 {
2457 /* A case range. We can only handle integer ranges. */
f667741c 2458 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
2459 return t;
2460 }
2461 }
2462
6de9cd9a
DN
2463 return default_case;
2464}
2465
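/* Example (illustrative): for a switch whose sorted labels are
   case 1, case 5, case 10 ... 20 plus a default, looking up VAL == 12
   repeatedly halves the [low, high) range until the range label
   10 ... 20 is found; a value matching no label yields the default.
   find_taken_edge_switch_expr above then maps the chosen label to an
   outgoing edge via

       dest_bb = label_to_block (CASE_LABEL (taken_case));
       e = find_edge (bb, dest_bb);  */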
2466
6de9cd9a
DN
2467/* Dump a basic block on stderr. */
2468
2469void
726a989a 2470gimple_debug_bb (basic_block bb)
6de9cd9a 2471{
726a989a 2472 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
2473}
2474
2475
2476/* Dump basic block with index N on stderr. */
2477
2478basic_block
726a989a 2479gimple_debug_bb_n (int n)
6de9cd9a 2480{
726a989a 2481 gimple_debug_bb (BASIC_BLOCK (n));
6de9cd9a 2482 return BASIC_BLOCK (n);
6531d1be 2483}
6de9cd9a
DN
2484
2485
2486/* Dump the CFG on stderr.
2487
2488 FLAGS are the same used by the tree dumping functions
6531d1be 2489 (see TDF_* in tree-pass.h). */
6de9cd9a
DN
2490
2491void
726a989a 2492gimple_debug_cfg (int flags)
6de9cd9a 2493{
726a989a 2494 gimple_dump_cfg (stderr, flags);
6de9cd9a
DN
2495}
2496
2497
2498/* Dump the program showing basic block boundaries on the given FILE.
2499
2500 FLAGS are the same used by the tree dumping functions (see TDF_* in
2501 tree.h). */
2502
2503void
726a989a 2504gimple_dump_cfg (FILE *file, int flags)
6de9cd9a
DN
2505{
2506 if (flags & TDF_DETAILS)
2507 {
2508 const char *funcname
673fda6b 2509 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2510
2511 fputc ('\n', file);
2512 fprintf (file, ";; Function %s\n\n", funcname);
2513 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2514 n_basic_blocks, n_edges, last_basic_block);
2515
2516 brief_dump_cfg (file);
2517 fprintf (file, "\n");
2518 }
2519
2520 if (flags & TDF_STATS)
2521 dump_cfg_stats (file);
2522
2523 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2524}
2525
2526
2527/* Dump CFG statistics on FILE. */
2528
2529void
2530dump_cfg_stats (FILE *file)
2531{
2532 static long max_num_merged_labels = 0;
2533 unsigned long size, total = 0;
7b0cab99 2534 long num_edges;
6de9cd9a
DN
2535 basic_block bb;
2536 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2537 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
cac50d94 2538 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
6de9cd9a
DN
2539 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2540 const char *funcname
673fda6b 2541 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2542
2543
2544 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2545
2546 fprintf (file, "---------------------------------------------------------\n");
2547 fprintf (file, fmt_str, "", " Number of ", "Memory");
2548 fprintf (file, fmt_str, "", " instances ", "used ");
2549 fprintf (file, "---------------------------------------------------------\n");
2550
2551 size = n_basic_blocks * sizeof (struct basic_block_def);
2552 total += size;
f7fda749
RH
2553 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2554 SCALE (size), LABEL (size));
6de9cd9a 2555
7b0cab99 2556 num_edges = 0;
6de9cd9a 2557 FOR_EACH_BB (bb)
7b0cab99
JH
2558 num_edges += EDGE_COUNT (bb->succs);
2559 size = num_edges * sizeof (struct edge_def);
6de9cd9a 2560 total += size;
cac50d94 2561 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
6de9cd9a 2562
6de9cd9a
DN
2563 fprintf (file, "---------------------------------------------------------\n");
2564 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2565 LABEL (total));
2566 fprintf (file, "---------------------------------------------------------\n");
2567 fprintf (file, "\n");
2568
2569 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2570 max_num_merged_labels = cfg_stats.num_merged_labels;
2571
2572 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2573 cfg_stats.num_merged_labels, max_num_merged_labels);
2574
2575 fprintf (file, "\n");
2576}
2577
2578
2579/* Dump CFG statistics on stderr. Keep extern so that it's always
2580 linked in the final executable. */
2581
2582void
2583debug_cfg_stats (void)
2584{
2585 dump_cfg_stats (stderr);
2586}
2587
2588
2589/* Dump the flowgraph to a .vcg FILE. */
2590
2591static void
726a989a 2592gimple_cfg2vcg (FILE *file)
6de9cd9a
DN
2593{
2594 edge e;
628f6a4e 2595 edge_iterator ei;
6de9cd9a
DN
2596 basic_block bb;
2597 const char *funcname
673fda6b 2598 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2599
2600 /* Write the file header. */
2601 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2602 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2603 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2604
2605 /* Write blocks and edges. */
628f6a4e 2606 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2607 {
2608 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2609 e->dest->index);
2610
2611 if (e->flags & EDGE_FAKE)
2612 fprintf (file, " linestyle: dotted priority: 10");
2613 else
2614 fprintf (file, " linestyle: solid priority: 100");
2615
2616 fprintf (file, " }\n");
2617 }
2618 fputc ('\n', file);
2619
2620 FOR_EACH_BB (bb)
2621 {
726a989a 2622 enum gimple_code head_code, end_code;
6de9cd9a
DN
2623 const char *head_name, *end_name;
2624 int head_line = 0;
2625 int end_line = 0;
726a989a
RB
2626 gimple first = first_stmt (bb);
2627 gimple last = last_stmt (bb);
6de9cd9a
DN
2628
2629 if (first)
2630 {
726a989a
RB
2631 head_code = gimple_code (first);
2632 head_name = gimple_code_name[head_code];
6de9cd9a
DN
2633 head_line = get_lineno (first);
2634 }
2635 else
2636 head_name = "no-statement";
2637
2638 if (last)
2639 {
726a989a
RB
2640 end_code = gimple_code (last);
2641 end_name = gimple_code_name[end_code];
6de9cd9a
DN
2642 end_line = get_lineno (last);
2643 }
2644 else
2645 end_name = "no-statement";
2646
2647 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2648 bb->index, bb->index, head_name, head_line, end_name,
2649 end_line);
2650
628f6a4e 2651 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2652 {
2653 if (e->dest == EXIT_BLOCK_PTR)
2654 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2655 else
2656 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2657
2658 if (e->flags & EDGE_FAKE)
2659 fprintf (file, " priority: 10 linestyle: dotted");
2660 else
2661 fprintf (file, " priority: 100 linestyle: solid");
2662
2663 fprintf (file, " }\n");
2664 }
2665
2666 if (bb->next_bb != EXIT_BLOCK_PTR)
2667 fputc ('\n', file);
2668 }
2669
2670 fputs ("}\n\n", file);
2671}
2672
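/* Abridged, approximate sample of the emitted VCG text (illustration
   only; exact node labels depend on gimple_code_name entries and source
   line numbers):

       graph: { title: "foo"
       node: { title: "ENTRY" label: "ENTRY" }
       node: { title: "EXIT" label: "EXIT" }
       edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

       node: { title: "2" label: "#2\ngimple_label (10)\ngimple_cond (12)"}
       edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
       }

   Fake edges are drawn dotted, real edges solid.  */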
2673
2674
2675/*---------------------------------------------------------------------------
2676 Miscellaneous helpers
2677---------------------------------------------------------------------------*/
2678
2679/* Return true if T represents a stmt that always transfers control. */
2680
2681bool
726a989a 2682is_ctrl_stmt (gimple t)
6de9cd9a 2683{
726a989a
RB
2684 return gimple_code (t) == GIMPLE_COND
2685 || gimple_code (t) == GIMPLE_SWITCH
2686 || gimple_code (t) == GIMPLE_GOTO
2687 || gimple_code (t) == GIMPLE_RETURN
2688 || gimple_code (t) == GIMPLE_RESX;
6de9cd9a
DN
2689}
2690
2691
2692/* Return true if T is a statement that may alter the flow of control
2693 (e.g., a call to a non-returning function). */
2694
2695bool
726a989a 2696is_ctrl_altering_stmt (gimple t)
6de9cd9a 2697{
1e128c5f 2698 gcc_assert (t);
726a989a
RB
2699
2700 if (is_gimple_call (t))
6de9cd9a 2701 {
726a989a
RB
2702 int flags = gimple_call_flags (t);
2703
2704 /* A non-pure/const call alters flow control if the current
6de9cd9a 2705 function has nonlocal labels. */
726a989a
RB
2706 if (!(flags & (ECF_CONST | ECF_PURE))
2707 && cfun->has_nonlocal_label)
6de9cd9a
DN
2708 return true;
2709
726a989a
RB
2710 /* A call also alters control flow if it does not return. */
2711 if (gimple_call_flags (t) & ECF_NORETURN)
6de9cd9a 2712 return true;
6de9cd9a
DN
2713 }
2714
50674e96 2715 /* OpenMP directives alter control flow. */
726a989a 2716 if (is_gimple_omp (t))
50674e96
DN
2717 return true;
2718
6de9cd9a 2719 /* If a statement can throw, it alters control flow. */
726a989a 2720 return stmt_can_throw_internal (t);
6de9cd9a
DN
2721}
2722
2723
4f6c2131 2724/* Return true if T is a simple local goto. */
6de9cd9a
DN
2725
2726bool
726a989a 2727simple_goto_p (gimple t)
6de9cd9a 2728{
726a989a
RB
2729 return (gimple_code (t) == GIMPLE_GOTO
2730 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
4f6c2131
EB
2731}
2732
2733
2734/* Return true if T can make an abnormal transfer of control flow.
2735 Transfers of control flow associated with EH are excluded. */
2736
2737bool
726a989a 2738stmt_can_make_abnormal_goto (gimple t)
4f6c2131
EB
2739{
2740 if (computed_goto_p (t))
2741 return true;
726a989a
RB
2742 if (is_gimple_call (t))
2743 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
4f6c2131 2744 return false;
6de9cd9a
DN
2745}
2746
2747
726a989a
RB
2748/* Return true if STMT should start a new basic block. PREV_STMT is
2749 the statement preceding STMT. It is used when STMT is a label or a
2750 case label. Labels should only start a new basic block if their
 2751 previous statement wasn't a label. Otherwise, a sequence of labels
2752 would generate unnecessary basic blocks that only contain a single
2753 label. */
6de9cd9a
DN
2754
2755static inline bool
726a989a 2756stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
6de9cd9a 2757{
726a989a 2758 if (stmt == NULL)
6de9cd9a
DN
2759 return false;
2760
726a989a
RB
2761 /* Labels start a new basic block only if the preceding statement
2762 wasn't a label of the same type. This prevents the creation of
2763 consecutive blocks that have nothing but a single label. */
2764 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
2765 {
2766 /* Nonlocal and computed GOTO targets always start a new block. */
726a989a
RB
2767 if (DECL_NONLOCAL (gimple_label_label (stmt))
2768 || FORCED_LABEL (gimple_label_label (stmt)))
6de9cd9a
DN
2769 return true;
2770
726a989a 2771 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
6de9cd9a 2772 {
726a989a 2773 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
6de9cd9a
DN
2774 return true;
2775
2776 cfg_stats.num_merged_labels++;
2777 return false;
2778 }
2779 else
2780 return true;
2781 }
2782
2783 return false;
2784}
2785
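/* Illustrative example of the label-merging rule above (GIMPLE
   pseudo-code): the sequence

       <L1>:
       <L2>:
         x = 1;

   yields a single basic block starting at <L1>; the second label is
   counted in cfg_stats.num_merged_labels.  A nonlocal or FORCED_LABEL
   label in the same position would still force a new block.  */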
2786
2787/* Return true if T should end a basic block. */
2788
2789bool
726a989a 2790stmt_ends_bb_p (gimple t)
6de9cd9a
DN
2791{
2792 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2793}
2794
726a989a 2795/* Remove block annotations and other data structures. */
6de9cd9a
DN
2796
2797void
242229bb 2798delete_tree_cfg_annotations (void)
6de9cd9a 2799{
6de9cd9a 2800 label_to_block_map = NULL;
6de9cd9a
DN
2801}
2802
2803
2804/* Return the first statement in basic block BB. */
2805
726a989a 2806gimple
6de9cd9a
DN
2807first_stmt (basic_block bb)
2808{
726a989a
RB
2809 gimple_stmt_iterator i = gsi_start_bb (bb);
2810 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
6de9cd9a
DN
2811}
2812
6c52e687
CC
2813/* Return the first non-label statement in basic block BB. */
2814
2815static gimple
2816first_non_label_stmt (basic_block bb)
2817{
2818 gimple_stmt_iterator i = gsi_start_bb (bb);
2819 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2820 gsi_next (&i);
2821 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2822}
2823
6de9cd9a
DN
2824/* Return the last statement in basic block BB. */
2825
726a989a 2826gimple
6de9cd9a
DN
2827last_stmt (basic_block bb)
2828{
726a989a
RB
2829 gimple_stmt_iterator b = gsi_last_bb (bb);
2830 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
6de9cd9a
DN
2831}
2832
6de9cd9a
DN
2833/* Return the last statement of an otherwise empty block. Return NULL
2834 if the block is totally empty, or if it contains more than one
2835 statement. */
2836
726a989a 2837gimple
6de9cd9a
DN
2838last_and_only_stmt (basic_block bb)
2839{
726a989a
RB
2840 gimple_stmt_iterator i = gsi_last_bb (bb);
2841 gimple last, prev;
6de9cd9a 2842
726a989a
RB
2843 if (gsi_end_p (i))
2844 return NULL;
6de9cd9a 2845
726a989a
RB
2846 last = gsi_stmt (i);
2847 gsi_prev (&i);
2848 if (gsi_end_p (i))
6de9cd9a
DN
2849 return last;
2850
2851 /* Empty statements should no longer appear in the instruction stream.
2852 Everything that might have appeared before should be deleted by
726a989a 2853 remove_useless_stmts, and the optimizers should just gsi_remove
6de9cd9a
DN
2854 instead of smashing with build_empty_stmt.
2855
2856 Thus the only thing that should appear here in a block containing
2857 one executable statement is a label. */
726a989a
RB
2858 prev = gsi_stmt (i);
2859 if (gimple_code (prev) == GIMPLE_LABEL)
6de9cd9a
DN
2860 return last;
2861 else
726a989a 2862 return NULL;
82b85a85 2863}
6de9cd9a 2864
4f7db7f7
KH
2865/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2866
2867static void
2868reinstall_phi_args (edge new_edge, edge old_edge)
2869{
ea7e6d5a
AH
2870 edge_var_map_vector v;
2871 edge_var_map *vm;
2872 int i;
726a989a
RB
2873 gimple_stmt_iterator phis;
2874
ea7e6d5a
AH
2875 v = redirect_edge_var_map_vector (old_edge);
2876 if (!v)
4f7db7f7 2877 return;
726a989a
RB
2878
2879 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2880 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2881 i++, gsi_next (&phis))
4f7db7f7 2882 {
726a989a 2883 gimple phi = gsi_stmt (phis);
ea7e6d5a
AH
2884 tree result = redirect_edge_var_map_result (vm);
2885 tree arg = redirect_edge_var_map_def (vm);
726a989a
RB
2886
2887 gcc_assert (result == gimple_phi_result (phi));
2888
d2e398df 2889 add_phi_arg (phi, arg, new_edge);
4f7db7f7 2890 }
726a989a 2891
ea7e6d5a 2892 redirect_edge_var_map_clear (old_edge);
4f7db7f7
KH
2893}
2894
2a8a8292 2895/* Returns the basic block after which the new basic block created
b9a66240
ZD
2896 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2897 near its "logical" location. This is of most help to humans looking
2898 at debugging dumps. */
2899
2900static basic_block
2901split_edge_bb_loc (edge edge_in)
2902{
2903 basic_block dest = edge_in->dest;
2904
2905 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2906 return edge_in->src;
2907 else
2908 return dest->prev_bb;
2909}
2910
6de9cd9a
DN
2911/* Split a (typically critical) edge EDGE_IN. Return the new block.
2912 Abort on abnormal edges. */
2913
2914static basic_block
726a989a 2915gimple_split_edge (edge edge_in)
6de9cd9a 2916{
4741d956 2917 basic_block new_bb, after_bb, dest;
6de9cd9a 2918 edge new_edge, e;
6de9cd9a
DN
2919
2920 /* Abnormal edges cannot be split. */
1e128c5f 2921 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a 2922
6de9cd9a
DN
2923 dest = edge_in->dest;
2924
b9a66240 2925 after_bb = split_edge_bb_loc (edge_in);
6de9cd9a
DN
2926
2927 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
2928 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2929 new_bb->count = edge_in->count;
6de9cd9a 2930 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
2931 new_edge->probability = REG_BR_PROB_BASE;
2932 new_edge->count = edge_in->count;
6de9cd9a 2933
1e128c5f 2934 e = redirect_edge_and_branch (edge_in, new_bb);
c7b852c8 2935 gcc_assert (e == edge_in);
4f7db7f7 2936 reinstall_phi_args (new_edge, e);
6de9cd9a
DN
2937
2938 return new_bb;
2939}
2940
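/* Before/after sketch for gimple_split_edge (illustrative):

       A ---> B        becomes        A ---> N ---> B

   The new block N is empty, inherits the count and frequency of the
   original edge, and reinstall_phi_args moves any PHI arguments queued
   on the redirected edge onto the new N->B edge.  */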
6de9cd9a 2941/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
2942 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2943 inside a PHI node. */
6de9cd9a
DN
2944
2945static tree
2fbe90f2 2946verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
2947{
2948 tree t = *tp, x;
2949
2950 if (TYPE_P (t))
2951 *walk_subtrees = 0;
6531d1be 2952
e8ca4159 2953 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 2954#define CHECK_OP(N, MSG) \
e8ca4159 2955 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 2956 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
2957
2958 switch (TREE_CODE (t))
2959 {
2960 case SSA_NAME:
2961 if (SSA_NAME_IN_FREE_LIST (t))
2962 {
2963 error ("SSA name in freelist but still referenced");
2964 return *tp;
2965 }
2966 break;
2967
26de0bcb
AP
2968 case INDIRECT_REF:
2969 x = TREE_OPERAND (t, 0);
2970 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2971 {
2972 error ("Indirect reference's operand is not a register or a constant.");
2973 return x;
2974 }
2975 break;
2976
0bca51f0
DN
2977 case ASSERT_EXPR:
2978 x = fold (ASSERT_EXPR_COND (t));
2979 if (x == boolean_false_node)
2980 {
2981 error ("ASSERT_EXPR with an always-false condition");
2982 return *tp;
2983 }
2984 break;
2985
6de9cd9a 2986 case MODIFY_EXPR:
26de0bcb 2987 error ("MODIFY_EXPR not expected while having tuples.");
e57fcb68 2988 return *tp;
6de9cd9a
DN
2989
2990 case ADDR_EXPR:
81fc3052 2991 {
81fc3052
DB
2992 bool old_constant;
2993 bool old_side_effects;
81fc3052
DB
2994 bool new_constant;
2995 bool new_side_effects;
2996
51eed280
PB
2997 gcc_assert (is_gimple_address (t));
2998
81fc3052
DB
2999 old_constant = TREE_CONSTANT (t);
3000 old_side_effects = TREE_SIDE_EFFECTS (t);
3001
127203ac 3002 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
3003 new_side_effects = TREE_SIDE_EFFECTS (t);
3004 new_constant = TREE_CONSTANT (t);
3005
81fc3052
DB
3006 if (old_constant != new_constant)
3007 {
3008 error ("constant not recomputed when ADDR_EXPR changed");
3009 return t;
3010 }
3011 if (old_side_effects != new_side_effects)
3012 {
3013 error ("side effects not recomputed when ADDR_EXPR changed");
3014 return t;
3015 }
3016
3017 /* Skip any references (they will be checked when we recurse down the
3018 tree) and ensure that any variable used as a prefix is marked
3019 addressable. */
3020 for (x = TREE_OPERAND (t, 0);
3021 handled_component_p (x);
3022 x = TREE_OPERAND (x, 0))
3023 ;
3024
5006671f
RG
3025 if (!(TREE_CODE (x) == VAR_DECL
3026 || TREE_CODE (x) == PARM_DECL
3027 || TREE_CODE (x) == RESULT_DECL))
81fc3052
DB
3028 return NULL;
3029 if (!TREE_ADDRESSABLE (x))
3030 {
3031 error ("address taken, but ADDRESSABLE bit not set");
3032 return x;
3033 }
ba4d8f9d
RG
3034 if (DECL_GIMPLE_REG_P (x))
3035 {
3036 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3037 return x;
3038 }
bdb69bee 3039
81fc3052
DB
3040 break;
3041 }
6de9cd9a
DN
3042
3043 case COND_EXPR:
a6234684 3044 x = COND_EXPR_COND (t);
d40055ab 3045 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 3046 {
d40055ab 3047 error ("non-integral used in condition");
6de9cd9a
DN
3048 return x;
3049 }
9c691961
AP
3050 if (!is_gimple_condexpr (x))
3051 {
ab532386 3052 error ("invalid conditional operand");
9c691961
AP
3053 return x;
3054 }
6de9cd9a
DN
3055 break;
3056
a134e5f3
TB
3057 case NON_LVALUE_EXPR:
3058 gcc_unreachable ();
3059
1043771b 3060 CASE_CONVERT:
6de9cd9a 3061 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3062 case FLOAT_EXPR:
3063 case NEGATE_EXPR:
3064 case ABS_EXPR:
3065 case BIT_NOT_EXPR:
6de9cd9a 3066 case TRUTH_NOT_EXPR:
ab532386 3067 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
3068 break;
3069
3070 case REALPART_EXPR:
3071 case IMAGPART_EXPR:
2fbe90f2
RK
3072 case COMPONENT_REF:
3073 case ARRAY_REF:
3074 case ARRAY_RANGE_REF:
3075 case BIT_FIELD_REF:
3076 case VIEW_CONVERT_EXPR:
3077 /* We have a nest of references. Verify that each of the operands
3078 that determine where to reference is either a constant or a variable,
3079 verify that the base is valid, and then show we've already checked
3080 the subtrees. */
afe84921 3081 while (handled_component_p (t))
2fbe90f2
RK
3082 {
3083 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 3084 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
3085 else if (TREE_CODE (t) == ARRAY_REF
3086 || TREE_CODE (t) == ARRAY_RANGE_REF)
3087 {
ab532386 3088 CHECK_OP (1, "invalid array index");
2fbe90f2 3089 if (TREE_OPERAND (t, 2))
ab532386 3090 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 3091 if (TREE_OPERAND (t, 3))
ab532386 3092 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
3093 }
3094 else if (TREE_CODE (t) == BIT_FIELD_REF)
3095 {
e55f42fb
RG
3096 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3097 || !host_integerp (TREE_OPERAND (t, 2), 1))
3098 {
3099 error ("invalid position or size operand to BIT_FIELD_REF");
3100 return t;
3101 }
fc0f49f3
RG
3102 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3103 && (TYPE_PRECISION (TREE_TYPE (t))
3104 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3105 {
3106 error ("integral result type precision does not match "
3107 "field size of BIT_FIELD_REF");
3108 return t;
3109 }
3110 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3111 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3112 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3113 {
3114 error ("mode precision of non-integral result does not "
3115 "match field size of BIT_FIELD_REF");
3116 return t;
3117 }
2fbe90f2
RK
3118 }
3119
3120 t = TREE_OPERAND (t, 0);
3121 }
3122
bb0c55f6 3123 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 3124 {
ab532386 3125 error ("invalid reference prefix");
2fbe90f2
RK
3126 return t;
3127 }
3128 *walk_subtrees = 0;
6de9cd9a 3129 break;
5be014d5
AP
3130 case PLUS_EXPR:
3131 case MINUS_EXPR:
3132 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3133 POINTER_PLUS_EXPR. */
3134 if (POINTER_TYPE_P (TREE_TYPE (t)))
3135 {
3136 error ("invalid operand to plus/minus, type is a pointer");
3137 return t;
3138 }
3139 CHECK_OP (0, "invalid operand to binary operator");
3140 CHECK_OP (1, "invalid operand to binary operator");
3141 break;
6de9cd9a 3142
5be014d5
AP
3143 case POINTER_PLUS_EXPR:
3144 /* Check to make sure the first operand is a pointer or reference type. */
3145 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3146 {
3147 error ("invalid operand to pointer plus, first operand is not a pointer");
3148 return t;
3149 }
3150 /* Check to make sure the second operand is an integer with type of
3151 sizetype. */
36618b93
RG
3152 if (!useless_type_conversion_p (sizetype,
3153 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
3154 {
3155 error ("invalid operand to pointer plus, second operand is not an "
3156 "integer with type of sizetype.");
3157 return t;
3158 }
3159 /* FALLTHROUGH */
6de9cd9a
DN
3160 case LT_EXPR:
3161 case LE_EXPR:
3162 case GT_EXPR:
3163 case GE_EXPR:
3164 case EQ_EXPR:
3165 case NE_EXPR:
3166 case UNORDERED_EXPR:
3167 case ORDERED_EXPR:
3168 case UNLT_EXPR:
3169 case UNLE_EXPR:
3170 case UNGT_EXPR:
3171 case UNGE_EXPR:
3172 case UNEQ_EXPR:
d1a7edaf 3173 case LTGT_EXPR:
6de9cd9a
DN
3174 case MULT_EXPR:
3175 case TRUNC_DIV_EXPR:
3176 case CEIL_DIV_EXPR:
3177 case FLOOR_DIV_EXPR:
3178 case ROUND_DIV_EXPR:
3179 case TRUNC_MOD_EXPR:
3180 case CEIL_MOD_EXPR:
3181 case FLOOR_MOD_EXPR:
3182 case ROUND_MOD_EXPR:
3183 case RDIV_EXPR:
3184 case EXACT_DIV_EXPR:
3185 case MIN_EXPR:
3186 case MAX_EXPR:
3187 case LSHIFT_EXPR:
3188 case RSHIFT_EXPR:
3189 case LROTATE_EXPR:
3190 case RROTATE_EXPR:
3191 case BIT_IOR_EXPR:
3192 case BIT_XOR_EXPR:
3193 case BIT_AND_EXPR:
ab532386
JM
3194 CHECK_OP (0, "invalid operand to binary operator");
3195 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
3196 break;
3197
84816907
JM
3198 case CONSTRUCTOR:
3199 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3200 *walk_subtrees = 0;
3201 break;
3202
6de9cd9a
DN
3203 default:
3204 break;
3205 }
3206 return NULL;
2fbe90f2
RK
3207
3208#undef CHECK_OP
6de9cd9a
DN
3209}
3210
7e98624c
RG
3211
3212/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3213 Returns true if there is an error, otherwise false. */
3214
3215static bool
726a989a 3216verify_types_in_gimple_min_lval (tree expr)
7e98624c
RG
3217{
3218 tree op;
3219
3220 if (is_gimple_id (expr))
3221 return false;
3222
9f509004
RG
3223 if (!INDIRECT_REF_P (expr)
3224 && TREE_CODE (expr) != TARGET_MEM_REF)
7e98624c
RG
3225 {
3226 error ("invalid expression for min lvalue");
3227 return true;
3228 }
3229
9f509004
RG
3230 /* TARGET_MEM_REFs are strange beasts. */
3231 if (TREE_CODE (expr) == TARGET_MEM_REF)
3232 return false;
3233
7e98624c
RG
3234 op = TREE_OPERAND (expr, 0);
3235 if (!is_gimple_val (op))
3236 {
3237 error ("invalid operand in indirect reference");
3238 debug_generic_stmt (op);
3239 return true;
3240 }
3241 if (!useless_type_conversion_p (TREE_TYPE (expr),
3242 TREE_TYPE (TREE_TYPE (op))))
3243 {
3244 error ("type mismatch in indirect reference");
3245 debug_generic_stmt (TREE_TYPE (expr));
3246 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3247 return true;
3248 }
3249
3250 return false;
3251}
3252
3a19701a
RG
3253/* Verify if EXPR is a valid GIMPLE reference expression. If
3254 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
7e98624c
RG
3255 if there is an error, otherwise false. */
3256
3257static bool
3a19701a 3258verify_types_in_gimple_reference (tree expr, bool require_lvalue)
7e98624c
RG
3259{
3260 while (handled_component_p (expr))
3261 {
3262 tree op = TREE_OPERAND (expr, 0);
3263
3264 if (TREE_CODE (expr) == ARRAY_REF
3265 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3266 {
3267 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3268 || (TREE_OPERAND (expr, 2)
3269 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3270 || (TREE_OPERAND (expr, 3)
3271 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3272 {
3273 error ("invalid operands to array reference");
3274 debug_generic_stmt (expr);
3275 return true;
3276 }
3277 }
3278
3279 /* Verify if the reference array element types are compatible. */
3280 if (TREE_CODE (expr) == ARRAY_REF
3281 && !useless_type_conversion_p (TREE_TYPE (expr),
3282 TREE_TYPE (TREE_TYPE (op))))
3283 {
3284 error ("type mismatch in array reference");
3285 debug_generic_stmt (TREE_TYPE (expr));
3286 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3287 return true;
3288 }
3289 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3290 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3291 TREE_TYPE (TREE_TYPE (op))))
3292 {
3293 error ("type mismatch in array range reference");
3294 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3295 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3296 return true;
3297 }
3298
3299 if ((TREE_CODE (expr) == REALPART_EXPR
3300 || TREE_CODE (expr) == IMAGPART_EXPR)
3301 && !useless_type_conversion_p (TREE_TYPE (expr),
3302 TREE_TYPE (TREE_TYPE (op))))
3303 {
3304 error ("type mismatch in real/imagpart reference");
3305 debug_generic_stmt (TREE_TYPE (expr));
3306 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3307 return true;
3308 }
3309
3310 if (TREE_CODE (expr) == COMPONENT_REF
3311 && !useless_type_conversion_p (TREE_TYPE (expr),
3312 TREE_TYPE (TREE_OPERAND (expr, 1))))
3313 {
3314 error ("type mismatch in component reference");
3315 debug_generic_stmt (TREE_TYPE (expr));
3316 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3317 return true;
3318 }
3319
3320 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3321 is nothing to verify. Gross mismatches at most invoke
3322 undefined behavior. */
9f509004
RG
3323 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3324 && !handled_component_p (op))
3325 return false;
7e98624c
RG
3326
3327 expr = op;
3328 }
3329
3a19701a
RG
3330 return ((require_lvalue || !is_gimple_min_invariant (expr))
3331 && verify_types_in_gimple_min_lval (expr));
7e98624c
RG
3332}
3333
20dcff2a
RG
3334/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3335 list of pointer-to types that is trivially convertible to DEST. */
3336
3337static bool
3338one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3339{
3340 tree src;
3341
3342 if (!TYPE_POINTER_TO (src_obj))
3343 return true;
3344
3345 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3346 if (useless_type_conversion_p (dest, src))
3347 return true;
3348
3349 return false;
3350}
3351
726a989a
RB
3352/* Return true if TYPE1 is a fixed-point type and if conversions to and
3353 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3354
3355static bool
3356valid_fixed_convert_types_p (tree type1, tree type2)
3357{
3358 return (FIXED_POINT_TYPE_P (type1)
3359 && (INTEGRAL_TYPE_P (type2)
3360 || SCALAR_FLOAT_TYPE_P (type2)
3361 || FIXED_POINT_TYPE_P (type2)));
3362}
3363
726a989a
RB
3364/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3365 is a problem, otherwise false. */
3366
3367static bool
b59d3976 3368verify_gimple_call (gimple stmt)
726a989a 3369{
b59d3976
RG
3370 tree fn = gimple_call_fn (stmt);
3371 tree fntype;
726a989a 3372
b59d3976
RG
3373 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3374 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3375 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3376 {
3377 error ("non-function in gimple call");
3378 return true;
3379 }
726a989a 3380
b59d3976
RG
3381 if (gimple_call_lhs (stmt)
3382 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3383 {
3384 error ("invalid LHS in gimple call");
3385 return true;
3386 }
726a989a 3387
b59d3976
RG
3388 fntype = TREE_TYPE (TREE_TYPE (fn));
3389 if (gimple_call_lhs (stmt)
3390 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3391 TREE_TYPE (fntype))
3392 /* ??? At least C++ misses conversions at assignments from
3393 void * call results.
3394 ??? Java is completely off. Especially with functions
3395 returning java.lang.Object.
3396 For now simply allow arbitrary pointer type conversions. */
3397 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3398 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3399 {
3400 error ("invalid conversion in gimple call");
3401 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3402 debug_generic_stmt (TREE_TYPE (fntype));
3403 return true;
3404 }
726a989a 3405
b59d3976
RG
3406 /* ??? The C frontend passes unpromoted arguments in case it
3407 didn't see a function declaration before the call. So for now
3408 leave the call arguments unverified. Once we gimplify
3409 unit-at-a-time we have a chance to fix this. */
726a989a 3410
b59d3976 3411 return false;
726a989a
RB
3412}
3413
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     The resulting type of a comparison may be any integral type.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if ((!useless_type_conversion_p (op0_type, op1_type)
       && !useless_type_conversion_p (op1_type, op0_type)
       && (!POINTER_TYPE_P (op0_type)
           || !POINTER_TYPE_P (op1_type)
           || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
      || !INTEGRAL_TYPE_P (type))
    {
      error ("type mismatch in comparison expression");
      debug_generic_expr (type);
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  return false;
}

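/* Illustrative example of what the check above accepts: for
   "flag_1 = a_2 < b_3" the operand types must be trivially convertible
   into each other (or be same-mode pointers) and the result type must
   be integral, e.g. boolean_type_node as used for GIMPLE_COND below.  */
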
3453/* Verify a gimple assignment statement STMT with an unary rhs.
3454 Returns true if anything is wrong. */
7e98624c
RG
3455
3456static bool
9f509004 3457verify_gimple_assign_unary (gimple stmt)
7e98624c 3458{
726a989a
RB
3459 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3460 tree lhs = gimple_assign_lhs (stmt);
726a989a 3461 tree lhs_type = TREE_TYPE (lhs);
9f509004 3462 tree rhs1 = gimple_assign_rhs1 (stmt);
726a989a 3463 tree rhs1_type = TREE_TYPE (rhs1);
7e98624c 3464
9f509004
RG
3465 if (!is_gimple_reg (lhs)
3466 && !(optimize == 0
3467 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3468 {
3469 error ("non-register as LHS of unary operation");
3470 return true;
3471 }
3472
3473 if (!is_gimple_val (rhs1))
3474 {
3475 error ("invalid operand in unary operation");
3476 return true;
3477 }
3478
3479 /* First handle conversions. */
726a989a 3480 switch (rhs_code)
7e98624c 3481 {
1043771b 3482 CASE_CONVERT:
7e98624c 3483 {
7e98624c 3484 /* Allow conversions between integral types and pointers only if
3485 there is no sign or zero extension involved.
 3486 For targets where the precision of sizetype doesn't match that
3487 of pointers we need to allow arbitrary conversions from and
3488 to sizetype. */
3489 if ((POINTER_TYPE_P (lhs_type)
3490 && INTEGRAL_TYPE_P (rhs1_type)
3491 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3492 || rhs1_type == sizetype))
3493 || (POINTER_TYPE_P (rhs1_type)
3494 && INTEGRAL_TYPE_P (lhs_type)
3495 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3496 || lhs_type == sizetype)))
3497 return false;
3498
3499 /* Allow conversion from integer to offset type and vice versa. */
726a989a
RB
3500 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3501 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3502 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3503 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3504 return false;
3505
3506 /* Otherwise assert we are converting between types of the
3507 same kind. */
726a989a 3508 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3509 {
3510 error ("invalid types in nop conversion");
726a989a
RB
3511 debug_generic_expr (lhs_type);
3512 debug_generic_expr (rhs1_type);
7e98624c
RG
3513 return true;
3514 }
3515
3516 return false;
3517 }
3518
17d23165
RS
3519 case FIXED_CONVERT_EXPR:
3520 {
726a989a
RB
3521 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3522 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
17d23165
RS
3523 {
3524 error ("invalid types in fixed-point conversion");
726a989a
RB
3525 debug_generic_expr (lhs_type);
3526 debug_generic_expr (rhs1_type);
17d23165
RS
3527 return true;
3528 }
3529
3530 return false;
3531 }
3532
7e98624c
RG
3533 case FLOAT_EXPR:
3534 {
726a989a 3535 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3536 {
3537 error ("invalid types in conversion to floating point");
726a989a
RB
3538 debug_generic_expr (lhs_type);
3539 debug_generic_expr (rhs1_type);
7e98624c
RG
3540 return true;
3541 }
726a989a 3542
7e98624c
RG
3543 return false;
3544 }
3545
3546 case FIX_TRUNC_EXPR:
3547 {
726a989a 3548 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3549 {
3550 error ("invalid types in conversion to integer");
726a989a
RB
3551 debug_generic_expr (lhs_type);
3552 debug_generic_expr (rhs1_type);
7e98624c
RG
3553 return true;
3554 }
726a989a 3555
7e98624c
RG
3556 return false;
3557 }
3558
587aa063
RG
3559 case VEC_UNPACK_HI_EXPR:
3560 case VEC_UNPACK_LO_EXPR:
3561 case REDUC_MAX_EXPR:
3562 case REDUC_MIN_EXPR:
3563 case REDUC_PLUS_EXPR:
3564 case VEC_UNPACK_FLOAT_HI_EXPR:
3565 case VEC_UNPACK_FLOAT_LO_EXPR:
3566 /* FIXME. */
3567 return false;
9f509004 3568
587aa063 3569 case TRUTH_NOT_EXPR:
9f509004
RG
3570 case NEGATE_EXPR:
3571 case ABS_EXPR:
3572 case BIT_NOT_EXPR:
3573 case PAREN_EXPR:
3574 case NON_LVALUE_EXPR:
3575 case CONJ_EXPR:
9f509004
RG
3576 break;
3577
3578 default:
3579 gcc_unreachable ();
3580 }
3581
3582 /* For the remaining codes assert there is no conversion involved. */
3583 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3584 {
3585 error ("non-trivial conversion in unary operation");
3586 debug_generic_expr (lhs_type);
3587 debug_generic_expr (rhs1_type);
3588 return true;
3589 }
3590
3591 return false;
3592}
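/* Illustrative examples for the conversion checks above (hypothetical
   GIMPLE): "i_2 = (int) p_1" is a valid nop conversion only when no
   sign or zero extension of the pointer value is required (or sizetype
   is involved), while "f_2 = (float) i_1" must use FLOAT_EXPR and
   "i_2 = (int) f_1" must use FIX_TRUNC_EXPR rather than a plain
   conversion.  */
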
3593
3594/* Verify a gimple assignment statement STMT with a binary rhs.
3595 Returns true if anything is wrong. */
3596
3597static bool
3598verify_gimple_assign_binary (gimple stmt)
3599{
3600 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3601 tree lhs = gimple_assign_lhs (stmt);
3602 tree lhs_type = TREE_TYPE (lhs);
3603 tree rhs1 = gimple_assign_rhs1 (stmt);
3604 tree rhs1_type = TREE_TYPE (rhs1);
3605 tree rhs2 = gimple_assign_rhs2 (stmt);
3606 tree rhs2_type = TREE_TYPE (rhs2);
3607
3608 if (!is_gimple_reg (lhs)
3609 && !(optimize == 0
3610 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3611 {
3612 error ("non-register as LHS of binary operation");
3613 return true;
3614 }
726a989a 3615
9f509004
RG
3616 if (!is_gimple_val (rhs1)
3617 || !is_gimple_val (rhs2))
3618 {
3619 error ("invalid operands in binary operation");
3620 return true;
3621 }
3622
3623 /* First handle operations that involve different types. */
3624 switch (rhs_code)
3625 {
3626 case COMPLEX_EXPR:
3627 {
3628 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3629 || !(INTEGRAL_TYPE_P (rhs1_type)
726a989a 3630 || SCALAR_FLOAT_TYPE_P (rhs1_type))
9f509004 3631 || !(INTEGRAL_TYPE_P (rhs2_type)
726a989a 3632 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3633 {
3634 error ("type mismatch in complex expression");
726a989a
RB
3635 debug_generic_expr (lhs_type);
3636 debug_generic_expr (rhs1_type);
3637 debug_generic_expr (rhs2_type);
7e98624c
RG
3638 return true;
3639 }
726a989a 3640
7e98624c
RG
3641 return false;
3642 }
3643
7e98624c
RG
3644 case LSHIFT_EXPR:
3645 case RSHIFT_EXPR:
3646 case LROTATE_EXPR:
3647 case RROTATE_EXPR:
3648 {
587aa063
RG
3649 /* Shifts and rotates are ok on integral types, fixed point
3650 types and integer vector types. */
3651 if ((!INTEGRAL_TYPE_P (rhs1_type)
3652 && !FIXED_POINT_TYPE_P (rhs1_type)
3653 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3654 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3655 || (!INTEGRAL_TYPE_P (rhs2_type)
3656 /* Vector shifts of vectors are also ok. */
3657 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3658 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3659 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3660 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
726a989a 3661 || !useless_type_conversion_p (lhs_type, rhs1_type))
3662 {
3663 error ("type mismatch in shift expression");
726a989a
RB
3664 debug_generic_expr (lhs_type);
3665 debug_generic_expr (rhs1_type);
3666 debug_generic_expr (rhs2_type);
7e98624c
RG
3667 return true;
3668 }
726a989a 3669
7e98624c
RG
3670 return false;
3671 }
3672
9f509004
RG
3673 case VEC_LSHIFT_EXPR:
3674 case VEC_RSHIFT_EXPR:
7e98624c 3675 {
9f509004 3676 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
0009b473 3677 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3678 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3679 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3680 || (!INTEGRAL_TYPE_P (rhs2_type)
3681 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3682 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3683 || !useless_type_conversion_p (lhs_type, rhs1_type))
7e98624c 3684 {
9f509004
RG
3685 error ("type mismatch in vector shift expression");
3686 debug_generic_expr (lhs_type);
3687 debug_generic_expr (rhs1_type);
3688 debug_generic_expr (rhs2_type);
7e98624c
RG
3689 return true;
3690 }
1fe479fd
RG
3691 /* For shifting a vector of floating point components we
3692 only allow shifting by a constant multiple of the element size. */
3693 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3694 && (TREE_CODE (rhs2) != INTEGER_CST
3695 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3696 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3697 {
3698 error ("non-element sized vector shift of floating point vector");
3699 return true;
3700 }
726a989a 3701
9f509004 3702 return false;
7e98624c
RG
3703 }
3704
646bea10
RG
3705 case PLUS_EXPR:
3706 {
3707 /* We use regular PLUS_EXPR for vectors.
3708 ??? This just makes the checker happy and may not be what is
3709 intended. */
3710 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3711 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3712 {
3713 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3714 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3715 {
3716 error ("invalid non-vector operands to vector valued plus");
3717 return true;
3718 }
3719 lhs_type = TREE_TYPE (lhs_type);
3720 rhs1_type = TREE_TYPE (rhs1_type);
3721 rhs2_type = TREE_TYPE (rhs2_type);
3722 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3723 the pointer to 2nd place. */
3724 if (POINTER_TYPE_P (rhs2_type))
3725 {
3726 tree tem = rhs1_type;
3727 rhs1_type = rhs2_type;
3728 rhs2_type = tem;
3729 }
3730 goto do_pointer_plus_expr_check;
3731 }
3732 }
3733 /* Fallthru. */
3734 case MINUS_EXPR:
3735 {
3736 if (POINTER_TYPE_P (lhs_type)
3737 || POINTER_TYPE_P (rhs1_type)
3738 || POINTER_TYPE_P (rhs2_type))
3739 {
3740 error ("invalid (pointer) operands to plus/minus");
3741 return true;
3742 }
3743
3744 /* Continue with generic binary expression handling. */
3745 break;
3746 }
3747
7e98624c
RG
3748 case POINTER_PLUS_EXPR:
3749 {
646bea10 3750do_pointer_plus_expr_check:
726a989a
RB
3751 if (!POINTER_TYPE_P (rhs1_type)
3752 || !useless_type_conversion_p (lhs_type, rhs1_type)
3753 || !useless_type_conversion_p (sizetype, rhs2_type))
7e98624c
RG
3754 {
3755 error ("type mismatch in pointer plus expression");
726a989a
RB
3756 debug_generic_stmt (lhs_type);
3757 debug_generic_stmt (rhs1_type);
3758 debug_generic_stmt (rhs2_type);
7e98624c
RG
3759 return true;
3760 }
7e98624c 3761
726a989a
RB
3762 return false;
3763 }
7e98624c 3764
7e98624c
RG
3765 case TRUTH_ANDIF_EXPR:
3766 case TRUTH_ORIF_EXPR:
2893f753
RAE
3767 gcc_unreachable ();
3768
7e98624c
RG
3769 case TRUTH_AND_EXPR:
3770 case TRUTH_OR_EXPR:
3771 case TRUTH_XOR_EXPR:
3772 {
7e98624c 3773 /* We allow any kind of integral typed argument and result. */
726a989a
RB
3774 if (!INTEGRAL_TYPE_P (rhs1_type)
3775 || !INTEGRAL_TYPE_P (rhs2_type)
3776 || !INTEGRAL_TYPE_P (lhs_type))
7e98624c
RG
3777 {
3778 error ("type mismatch in binary truth expression");
726a989a
RB
3779 debug_generic_expr (lhs_type);
3780 debug_generic_expr (rhs1_type);
3781 debug_generic_expr (rhs2_type);
7e98624c
RG
3782 return true;
3783 }
3784
3785 return false;
3786 }
3787
9f509004
RG
3788 case LT_EXPR:
3789 case LE_EXPR:
3790 case GT_EXPR:
3791 case GE_EXPR:
3792 case EQ_EXPR:
3793 case NE_EXPR:
3794 case UNORDERED_EXPR:
3795 case ORDERED_EXPR:
3796 case UNLT_EXPR:
3797 case UNLE_EXPR:
3798 case UNGT_EXPR:
3799 case UNGE_EXPR:
3800 case UNEQ_EXPR:
3801 case LTGT_EXPR:
3802 /* Comparisons are also binary, but the result type is not
3803 connected to the operand types. */
3804 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
7e98624c 3805
587aa063
RG
3806 case WIDEN_SUM_EXPR:
3807 case WIDEN_MULT_EXPR:
3808 case VEC_WIDEN_MULT_HI_EXPR:
3809 case VEC_WIDEN_MULT_LO_EXPR:
3810 case VEC_PACK_TRUNC_EXPR:
3811 case VEC_PACK_SAT_EXPR:
3812 case VEC_PACK_FIX_TRUNC_EXPR:
3813 case VEC_EXTRACT_EVEN_EXPR:
3814 case VEC_EXTRACT_ODD_EXPR:
3815 case VEC_INTERLEAVE_HIGH_EXPR:
3816 case VEC_INTERLEAVE_LOW_EXPR:
3817 /* FIXME. */
3818 return false;
3819
9f509004
RG
3820 case MULT_EXPR:
3821 case TRUNC_DIV_EXPR:
3822 case CEIL_DIV_EXPR:
3823 case FLOOR_DIV_EXPR:
3824 case ROUND_DIV_EXPR:
3825 case TRUNC_MOD_EXPR:
3826 case CEIL_MOD_EXPR:
3827 case FLOOR_MOD_EXPR:
3828 case ROUND_MOD_EXPR:
3829 case RDIV_EXPR:
3830 case EXACT_DIV_EXPR:
3831 case MIN_EXPR:
3832 case MAX_EXPR:
3833 case BIT_IOR_EXPR:
3834 case BIT_XOR_EXPR:
3835 case BIT_AND_EXPR:
9f509004
RG
3836 /* Continue with generic binary expression handling. */
3837 break;
7e98624c 3838
9f509004
RG
3839 default:
3840 gcc_unreachable ();
3841 }
b691d4b0 3842
9f509004
RG
3843 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3844 || !useless_type_conversion_p (lhs_type, rhs2_type))
3845 {
3846 error ("type mismatch in binary expression");
3847 debug_generic_stmt (lhs_type);
3848 debug_generic_stmt (rhs1_type);
3849 debug_generic_stmt (rhs2_type);
3850 return true;
3851 }
3852
3853 return false;
3854}
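/* Illustrative note: pointer arithmetic must be expressed as
   POINTER_PLUS_EXPR whose second operand already has sizetype; a plain
   PLUS_EXPR or MINUS_EXPR with pointer operands is rejected by the
   checks above.  */
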
3855
3856/* Verify a gimple assignment statement STMT with a single rhs.
3857 Returns true if anything is wrong. */
3858
3859static bool
3860verify_gimple_assign_single (gimple stmt)
3861{
3862 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3863 tree lhs = gimple_assign_lhs (stmt);
3864 tree lhs_type = TREE_TYPE (lhs);
3865 tree rhs1 = gimple_assign_rhs1 (stmt);
3866 tree rhs1_type = TREE_TYPE (rhs1);
3867 bool res = false;
3868
3869 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3870 {
3871 error ("non-trivial conversion at assignment");
3872 debug_generic_expr (lhs_type);
3873 debug_generic_expr (rhs1_type);
3874 return true;
7e98624c
RG
3875 }
3876
9f509004 3877 if (handled_component_p (lhs))
3a19701a 3878 res |= verify_types_in_gimple_reference (lhs, true);
9f509004
RG
3879
3880 /* Special codes we cannot handle via their class. */
3881 switch (rhs_code)
7e98624c 3882 {
9f509004
RG
3883 case ADDR_EXPR:
3884 {
3885 tree op = TREE_OPERAND (rhs1, 0);
3886 if (!is_gimple_addressable (op))
3887 {
3888 error ("invalid operand in unary expression");
3889 return true;
3890 }
f5e85907 3891
3892 if (!one_pointer_to_useless_type_conversion_p (lhs_type,
3893 TREE_TYPE (op)))
9f509004
RG
3894 {
3895 error ("type mismatch in address expression");
3896 debug_generic_stmt (lhs_type);
3897 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3898 return true;
3899 }
3900
3a19701a 3901 return verify_types_in_gimple_reference (op, true);
9f509004
RG
3902 }
3903
3904 /* tcc_reference */
3905 case COMPONENT_REF:
3906 case BIT_FIELD_REF:
3907 case INDIRECT_REF:
3908 case ALIGN_INDIRECT_REF:
3909 case MISALIGNED_INDIRECT_REF:
3910 case ARRAY_REF:
3911 case ARRAY_RANGE_REF:
3912 case VIEW_CONVERT_EXPR:
3913 case REALPART_EXPR:
3914 case IMAGPART_EXPR:
3915 case TARGET_MEM_REF:
3916 if (!is_gimple_reg (lhs)
3917 && is_gimple_reg_type (TREE_TYPE (lhs)))
f5e85907 3918 {
9f509004
RG
3919 error ("invalid rhs for gimple memory store");
3920 debug_generic_stmt (lhs);
3921 debug_generic_stmt (rhs1);
726a989a
RB
3922 return true;
3923 }
3a19701a 3924 return res || verify_types_in_gimple_reference (rhs1, false);
7e98624c 3925
9f509004
RG
3926 /* tcc_constant */
3927 case SSA_NAME:
3928 case INTEGER_CST:
3929 case REAL_CST:
3930 case FIXED_CST:
3931 case COMPLEX_CST:
3932 case VECTOR_CST:
3933 case STRING_CST:
3934 return res;
3935
3936 /* tcc_declaration */
3937 case CONST_DECL:
3938 return res;
3939 case VAR_DECL:
3940 case PARM_DECL:
3941 if (!is_gimple_reg (lhs)
3942 && !is_gimple_reg (rhs1)
3943 && is_gimple_reg_type (TREE_TYPE (lhs)))
2f9864e6 3944 {
9f509004
RG
3945 error ("invalid rhs for gimple memory store");
3946 debug_generic_stmt (lhs);
3947 debug_generic_stmt (rhs1);
2f9864e6
RG
3948 return true;
3949 }
9f509004 3950 return res;
7e98624c 3951
9f509004
RG
3952 case COND_EXPR:
3953 case CONSTRUCTOR:
3954 case OBJ_TYPE_REF:
3955 case ASSERT_EXPR:
3956 case WITH_SIZE_EXPR:
3957 case EXC_PTR_EXPR:
3958 case FILTER_EXPR:
3959 case POLYNOMIAL_CHREC:
3960 case DOT_PROD_EXPR:
3961 case VEC_COND_EXPR:
3962 case REALIGN_LOAD_EXPR:
3963 /* FIXME. */
3964 return res;
7e98624c 3965
726a989a 3966 default:;
7e98624c
RG
3967 }
3968
9f509004 3969 return res;
7e98624c
RG
3970}
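/* Illustrative note: a store such as "*p_1 = x_2" may only take a
   register value or constant on the right-hand side when the stored
   type is a register type; memory-to-memory copies are allowed only
   for non-register (e.g. aggregate) types, which is what the
   VAR_DECL/PARM_DECL case above enforces.  */
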
3971
/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_assign (gimple stmt)
{
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      return verify_gimple_assign_single (stmt);

    case GIMPLE_UNARY_RHS:
      return verify_gimple_assign_unary (stmt);

    case GIMPLE_BINARY_RHS:
      return verify_gimple_assign_binary (stmt);

    default:
      gcc_unreachable ();
    }
}
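/* The rhs classes correspond to assignment shapes roughly as follows
   (illustrative): GIMPLE_SINGLE_RHS "x_1 = y_2", GIMPLE_UNARY_RHS
   "x_1 = -y_2" or a conversion, GIMPLE_BINARY_RHS "x_1 = y_2 + z_3".  */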

/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (gimple stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  if (!useless_type_conversion_p (restype, TREE_TYPE (op))
      /* ??? With C++ we can have the situation that the result
         decl is a reference type while the return type is an aggregate.  */
      && !(TREE_CODE (op) == RESULT_DECL
           && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
           && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}

/* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_goto (gimple stmt)
{
  tree dest = gimple_goto_dest (stmt);

  /* ??? We have two canonical forms of direct goto destinations, a
     bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
  if (TREE_CODE (dest) != LABEL_DECL
      && (!is_gimple_val (dest)
          || !POINTER_TYPE_P (TREE_TYPE (dest))))
    {
      error ("goto destination is neither a label nor a pointer");
      return true;
    }

  return false;
}

/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gimple stmt)
{
  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  return false;
}


/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple stmt)
{
  tree type = TREE_TYPE (gimple_phi_result (stmt));
  unsigned i;

  if (!is_gimple_variable (gimple_phi_result (stmt)))
    {
      error ("Invalid PHI result");
      return true;
    }

  for (i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if ((is_gimple_reg (gimple_phi_result (stmt))
           && !is_gimple_val (arg))
          || (!is_gimple_reg (gimple_phi_result (stmt))
              && !is_gimple_addressable (arg)))
        {
          error ("Invalid PHI argument");
          debug_generic_stmt (arg);
          return true;
        }
      if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
        {
          error ("Incompatible types in PHI argument %u", i);
          debug_generic_stmt (type);
          debug_generic_stmt (TREE_TYPE (arg));
          return true;
        }
    }

  return false;
}

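/* Illustrative PHI shape checked above: "x_3 = PHI <x_1(2), x_2(4)>".
   Each argument must be a GIMPLE value when the result is a register
   (or merely addressable for memory results) and must be trivially
   convertible to the result type.  */
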

/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_types_in_gimple_stmt (gimple stmt)
{
  if (is_gimple_omp (stmt))
    {
      /* OpenMP directives are validated by the FE and never operated
         on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
         non-gimple expressions when the main index variable has had
         its address taken.  This does not affect the loop itself
         because the header of a GIMPLE_OMP_FOR is merely used to determine
         how to setup the parallel iteration.  */
      return false;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      return verify_gimple_comparison (boolean_type_node,
                                       gimple_cond_lhs (stmt),
                                       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      return false;

    case GIMPLE_PHI:
      return verify_gimple_phi (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Verify the GIMPLE statements inside the sequence STMTS.  */

static bool
verify_types_in_gimple_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
          break;

        case GIMPLE_TRY:
          err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
          err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
          break;

        case GIMPLE_EH_FILTER:
          err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
          break;

        case GIMPLE_CATCH:
          err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
          break;

        default:
          {
            bool err2 = verify_types_in_gimple_stmt (stmt);
            if (err2)
              debug_gimple_stmt (stmt);
            err |= err2;
          }
        }
    }

  return err;
}


/* Verify the GIMPLE statements inside the statement list STMTS.  */

void
verify_types_in_gimple_seq (gimple_seq stmts)
{
  if (verify_types_in_gimple_seq_2 (stmts))
    internal_error ("verify_gimple failed");
}

DN
4224
4225/* Verify STMT, return true if STMT is not in GIMPLE form.
4226 TODO: Implement type checking. */
4227
4228static bool
726a989a 4229verify_stmt (gimple_stmt_iterator *gsi)
6de9cd9a
DN
4230{
4231 tree addr;
726a989a
RB
4232 struct walk_stmt_info wi;
4233 bool last_in_block = gsi_one_before_end_p (*gsi);
4234 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 4235
726a989a 4236 if (is_gimple_omp (stmt))
50674e96
DN
4237 {
4238 /* OpenMP directives are validated by the FE and never operated
726a989a 4239 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
50674e96
DN
4240 non-gimple expressions when the main index variable has had
4241 its address taken. This does not affect the loop itself
 4242 because the header of a GIMPLE_OMP_FOR is merely used to determine
50674e96
DN
4243 how to setup the parallel iteration. */
4244 return false;
4245 }
4246
726a989a
RB
4247 /* FIXME. The C frontend passes unpromoted arguments in case it
4248 didn't see a function declaration before the call. */
4249 if (is_gimple_call (stmt))
6de9cd9a 4250 {
7c9577be 4251 tree decl;
726a989a 4252
7c9577be
RG
4253 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4254 {
4255 error ("invalid function in call statement");
4256 return true;
4257 }
4258
4259 decl = gimple_call_fndecl (stmt);
4260 if (decl
4261 && TREE_CODE (decl) == FUNCTION_DECL
726a989a
RB
4262 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4263 && (!DECL_PURE_P (decl))
4264 && (!TREE_READONLY (decl)))
4265 {
4266 error ("invalid pure const state for function");
4267 return true;
4268 }
6de9cd9a
DN
4269 }
4270
726a989a
RB
4271 memset (&wi, 0, sizeof (wi));
4272 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
6de9cd9a
DN
4273 if (addr)
4274 {
726a989a 4275 debug_generic_expr (addr);
c2255bc4 4276 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
726a989a 4277 debug_gimple_stmt (stmt);
6de9cd9a
DN
4278 return true;
4279 }
4280
1eaba2f2
RH
4281 /* If the statement is marked as part of an EH region, then it is
4282 expected that the statement could throw. Verify that when we
4283 have optimizations that simplify statements such that we prove
4284 that they cannot throw, that we update other data structures
4285 to match. */
4286 if (lookup_stmt_eh_region (stmt) >= 0)
4287 {
2505c5ed
JH
4288 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4289 and they are updated on statements only after fixup_cfg
 4290 is executed at the beginning of the expansion stage. */
4291 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
1eaba2f2 4292 {
ab532386 4293 error ("statement marked for throw, but doesn%'t");
1eaba2f2
RH
4294 goto fail;
4295 }
726a989a 4296 if (!last_in_block && stmt_can_throw_internal (stmt))
1eaba2f2 4297 {
ab532386 4298 error ("statement marked for throw in middle of block");
1eaba2f2
RH
4299 goto fail;
4300 }
4301 }
4302
6de9cd9a 4303 return false;
1eaba2f2
RH
4304
4305 fail:
726a989a 4306 debug_gimple_stmt (stmt);
1eaba2f2 4307 return true;
6de9cd9a
DN
4308}
4309
4310
4311/* Return true when the T can be shared. */
4312
4313static bool
4314tree_node_can_be_shared (tree t)
4315{
6615c446 4316 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 4317 || is_gimple_min_invariant (t)
5e23162d 4318 || TREE_CODE (t) == SSA_NAME
953ff289
DN
4319 || t == error_mark_node
4320 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
4321 return true;
4322
92b6dff3
JL
4323 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4324 return true;
4325
44de5aeb 4326 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
4327 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4328 || TREE_CODE (t) == COMPONENT_REF
4329 || TREE_CODE (t) == REALPART_EXPR
4330 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
4331 t = TREE_OPERAND (t, 0);
4332
4333 if (DECL_P (t))
4334 return true;
4335
4336 return false;
4337}
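/* Illustrative: types, decls, constants and SSA names may legitimately
   appear in many statements; a freshly built expression node may not,
   and verify_node_sharing below reports such duplicated sub-trees.  */
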
4338
4339
726a989a 4340/* Called via walk_gimple_stmt. Verify tree sharing. */
6de9cd9a
DN
4341
4342static tree
726a989a 4343verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 4344{
726a989a
RB
4345 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4346 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
6de9cd9a
DN
4347
4348 if (tree_node_can_be_shared (*tp))
4349 {
4350 *walk_subtrees = false;
4351 return NULL;
4352 }
4353
4437b50d
JH
4354 if (pointer_set_insert (visited, *tp))
4355 return *tp;
6de9cd9a
DN
4356
4357 return NULL;
4358}
4359
4360
4437b50d
JH
4361static bool eh_error_found;
4362static int
4363verify_eh_throw_stmt_node (void **slot, void *data)
4364{
4365 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4366 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4367
4368 if (!pointer_set_contains (visited, node->stmt))
4369 {
4370 error ("Dead STMT in EH table");
726a989a 4371 debug_gimple_stmt (node->stmt);
4437b50d
JH
4372 eh_error_found = true;
4373 }
c13edb67 4374 return 1;
4437b50d
JH
4375}
4376
726a989a
RB
4377
4378/* Verify the GIMPLE statements in every basic block. */
6de9cd9a
DN
4379
4380void
4381verify_stmts (void)
4382{
4383 basic_block bb;
726a989a 4384 gimple_stmt_iterator gsi;
6de9cd9a 4385 bool err = false;
4437b50d 4386 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a 4387 tree addr;
726a989a 4388 struct walk_stmt_info wi;
6de9cd9a
DN
4389
4390 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4391 visited = pointer_set_create ();
4392 visited_stmts = pointer_set_create ();
6de9cd9a 4393
726a989a
RB
4394 memset (&wi, 0, sizeof (wi));
4395 wi.info = (void *) visited;
4396
6de9cd9a
DN
4397 FOR_EACH_BB (bb)
4398 {
726a989a
RB
4399 gimple phi;
4400 size_t i;
6de9cd9a 4401
726a989a 4402 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4403 {
726a989a 4404 phi = gsi_stmt (gsi);
4437b50d 4405 pointer_set_insert (visited_stmts, phi);
726a989a 4406 if (gimple_bb (phi) != bb)
8de1fc1b 4407 {
726a989a 4408 error ("gimple_bb (phi) is set to a wrong basic block");
8de1fc1b
KH
4409 err |= true;
4410 }
4411
726a989a 4412 for (i = 0; i < gimple_phi_num_args (phi); i++)
6de9cd9a 4413 {
726a989a 4414 tree t = gimple_phi_arg_def (phi, i);
6de9cd9a
DN
4415 tree addr;
4416
e9705dc5
AO
4417 if (!t)
4418 {
4419 error ("missing PHI def");
726a989a 4420 debug_gimple_stmt (phi);
e9705dc5
AO
4421 err |= true;
4422 continue;
4423 }
6de9cd9a
DN
4424 /* Addressable variables do have SSA_NAMEs but they
4425 are not considered gimple values. */
e9705dc5
AO
4426 else if (TREE_CODE (t) != SSA_NAME
4427 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4428 && !is_gimple_min_invariant (t))
6de9cd9a 4429 {
726a989a
RB
4430 error ("PHI argument is not a GIMPLE value");
4431 debug_gimple_stmt (phi);
4432 debug_generic_expr (t);
6de9cd9a
DN
4433 err |= true;
4434 }
4435
4437b50d 4436 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4437 if (addr)
4438 {
ab532386 4439 error ("incorrect sharing of tree nodes");
726a989a
RB
4440 debug_gimple_stmt (phi);
4441 debug_generic_expr (addr);
6de9cd9a
DN
4442 err |= true;
4443 }
4444 }
211ca15c
RG
4445
4446#ifdef ENABLE_TYPES_CHECKING
4447 if (verify_gimple_phi (phi))
4448 {
4449 debug_gimple_stmt (phi);
4450 err |= true;
4451 }
4452#endif
6de9cd9a
DN
4453 }
4454
726a989a 4455 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
6de9cd9a 4456 {
726a989a
RB
4457 gimple stmt = gsi_stmt (gsi);
4458
4459 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4460 || gimple_code (stmt) == GIMPLE_BIND)
4461 {
4462 error ("invalid GIMPLE statement");
4463 debug_gimple_stmt (stmt);
4464 err |= true;
4465 }
8de1fc1b 4466
4437b50d 4467 pointer_set_insert (visited_stmts, stmt);
07beea0d 4468
726a989a 4469 if (gimple_bb (stmt) != bb)
8de1fc1b 4470 {
726a989a 4471 error ("gimple_bb (stmt) is set to a wrong basic block");
2cd713a0 4472 debug_gimple_stmt (stmt);
8de1fc1b
KH
4473 err |= true;
4474 }
4475
726a989a
RB
4476 if (gimple_code (stmt) == GIMPLE_LABEL)
4477 {
4478 tree decl = gimple_label_label (stmt);
4479 int uid = LABEL_DECL_UID (decl);
4480
4481 if (uid == -1
4482 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4483 {
4484 error ("incorrect entry in label_to_block_map.\n");
4485 err |= true;
4486 }
4487 }
4488
4489 err |= verify_stmt (&gsi);
211ca15c
RG
4490
4491#ifdef ENABLE_TYPES_CHECKING
4492 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4493 {
4494 debug_gimple_stmt (stmt);
4495 err |= true;
4496 }
4497#endif
726a989a 4498 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
6de9cd9a
DN
4499 if (addr)
4500 {
ab532386 4501 error ("incorrect sharing of tree nodes");
726a989a
RB
4502 debug_gimple_stmt (stmt);
4503 debug_generic_expr (addr);
6de9cd9a
DN
4504 err |= true;
4505 }
726a989a 4506 gsi_next (&gsi);
6de9cd9a
DN
4507 }
4508 }
726a989a 4509
4437b50d
JH
4510 eh_error_found = false;
4511 if (get_eh_throw_stmt_table (cfun))
4512 htab_traverse (get_eh_throw_stmt_table (cfun),
4513 verify_eh_throw_stmt_node,
4514 visited_stmts);
6de9cd9a 4515
4437b50d 4516 if (err | eh_error_found)
ab532386 4517 internal_error ("verify_stmts failed");
6de9cd9a 4518
4437b50d
JH
4519 pointer_set_destroy (visited);
4520 pointer_set_destroy (visited_stmts);
6946b3f7 4521 verify_histograms ();
6de9cd9a
DN
4522 timevar_pop (TV_TREE_STMT_VERIFY);
4523}
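/* Note (illustrative): this is the heavyweight consistency check
   typically run between passes in checking-enabled builds; it verifies
   tree sharing, the per-statement checks above, and that the EH
   throw-statement table contains no dead statements.  */
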
4524
4525
4526/* Verifies that the flow information is OK. */
4527
4528static int
726a989a 4529gimple_verify_flow_info (void)
6de9cd9a
DN
4530{
4531 int err = 0;
4532 basic_block bb;
726a989a
RB
4533 gimple_stmt_iterator gsi;
4534 gimple stmt;
6de9cd9a 4535 edge e;
628f6a4e 4536 edge_iterator ei;
6de9cd9a 4537
726a989a 4538 if (ENTRY_BLOCK_PTR->il.gimple)
6de9cd9a 4539 {
7506e1cb 4540 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4541 err = 1;
4542 }
4543
726a989a 4544 if (EXIT_BLOCK_PTR->il.gimple)
6de9cd9a 4545 {
7506e1cb 4546 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4547 err = 1;
4548 }
4549
628f6a4e 4550 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4551 if (e->flags & EDGE_FALLTHRU)
4552 {
ab532386 4553 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4554 err = 1;
4555 }
4556
4557 FOR_EACH_BB (bb)
4558 {
4559 bool found_ctrl_stmt = false;
4560
726a989a 4561 stmt = NULL;
548414c6 4562
6de9cd9a 4563 /* Skip labels on the start of basic block. */
726a989a 4564 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4565 {
726a989a
RB
4566 tree label;
4567 gimple prev_stmt = stmt;
548414c6 4568
726a989a 4569 stmt = gsi_stmt (gsi);
548414c6 4570
726a989a 4571 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
4572 break;
4573
726a989a
RB
4574 label = gimple_label_label (stmt);
4575 if (prev_stmt && DECL_NONLOCAL (label))
548414c6 4576 {
953ff289 4577 error ("nonlocal label ");
726a989a 4578 print_generic_expr (stderr, label, 0);
953ff289
DN
4579 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4580 bb->index);
548414c6
KH
4581 err = 1;
4582 }
4583
726a989a 4584 if (label_to_block (label) != bb)
6de9cd9a 4585 {
953ff289 4586 error ("label ");
726a989a 4587 print_generic_expr (stderr, label, 0);
953ff289
DN
4588 fprintf (stderr, " to block does not match in bb %d",
4589 bb->index);
6de9cd9a
DN
4590 err = 1;
4591 }
4592
726a989a 4593 if (decl_function_context (label) != current_function_decl)
6de9cd9a 4594 {
953ff289 4595 error ("label ");
726a989a 4596 print_generic_expr (stderr, label, 0);
953ff289
DN
4597 fprintf (stderr, " has incorrect context in bb %d",
4598 bb->index);
6de9cd9a
DN
4599 err = 1;
4600 }
4601 }
4602
4603 /* Verify that body of basic block BB is free of control flow. */
726a989a 4604 for (; !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4605 {
726a989a 4606 gimple stmt = gsi_stmt (gsi);
6de9cd9a
DN
4607
4608 if (found_ctrl_stmt)
4609 {
ab532386 4610 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4611 bb->index);
4612 err = 1;
4613 }
4614
4615 if (stmt_ends_bb_p (stmt))
4616 found_ctrl_stmt = true;
4617
726a989a 4618 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a 4619 {
953ff289 4620 error ("label ");
726a989a 4621 print_generic_expr (stderr, gimple_label_label (stmt), 0);
953ff289 4622 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4623 err = 1;
4624 }
4625 }
953ff289 4626
726a989a
RB
4627 gsi = gsi_last_bb (bb);
4628 if (gsi_end_p (gsi))
6de9cd9a
DN
4629 continue;
4630
726a989a 4631 stmt = gsi_stmt (gsi);
6de9cd9a 4632
cc7220fd
JH
4633 err |= verify_eh_edges (stmt);
4634
6de9cd9a
DN
4635 if (is_ctrl_stmt (stmt))
4636 {
628f6a4e 4637 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4638 if (e->flags & EDGE_FALLTHRU)
4639 {
ab532386 4640 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4641 bb->index);
4642 err = 1;
4643 }
4644 }
4645
726a989a 4646 if (gimple_code (stmt) != GIMPLE_COND)
36b24193
ZD
4647 {
4648 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4649 after anything else but if statement. */
4650 FOR_EACH_EDGE (e, ei, bb->succs)
4651 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4652 {
726a989a 4653 error ("true/false edge after a non-GIMPLE_COND in bb %d",
36b24193
ZD
4654 bb->index);
4655 err = 1;
4656 }
4657 }
4658
726a989a 4659 switch (gimple_code (stmt))
6de9cd9a 4660 {
726a989a 4661 case GIMPLE_COND:
6de9cd9a
DN
4662 {
4663 edge true_edge;
4664 edge false_edge;
a9b77cd1 4665
6de9cd9a
DN
4666 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4667
726a989a
RB
4668 if (!true_edge
4669 || !false_edge
6de9cd9a
DN
4670 || !(true_edge->flags & EDGE_TRUE_VALUE)
4671 || !(false_edge->flags & EDGE_FALSE_VALUE)
4672 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4673 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4674 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4675 {
ab532386 4676 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4677 bb->index);
4678 err = 1;
4679 }
6de9cd9a
DN
4680 }
4681 break;
4682
726a989a 4683 case GIMPLE_GOTO:
6de9cd9a
DN
4684 if (simple_goto_p (stmt))
4685 {
ab532386 4686 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4687 err = 1;
6de9cd9a
DN
4688 }
4689 else
4690 {
6531d1be 4691 /* FIXME. We should double check that the labels in the
6de9cd9a 4692 destination blocks have their address taken. */
628f6a4e 4693 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4694 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4695 | EDGE_FALSE_VALUE))
4696 || !(e->flags & EDGE_ABNORMAL))
4697 {
ab532386 4698 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4699 bb->index);
4700 err = 1;
4701 }
4702 }
4703 break;
4704
726a989a 4705 case GIMPLE_RETURN:
c5cbcccf
ZD
4706 if (!single_succ_p (bb)
4707 || (single_succ_edge (bb)->flags
4708 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4709 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4710 {
ab532386 4711 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4712 err = 1;
4713 }
c5cbcccf 4714 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4715 {
ab532386 4716 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4717 bb->index);
4718 err = 1;
4719 }
4720 break;
4721
726a989a 4722 case GIMPLE_SWITCH:
6de9cd9a 4723 {
7853504d 4724 tree prev;
6de9cd9a
DN
4725 edge e;
4726 size_t i, n;
6de9cd9a 4727
726a989a 4728 n = gimple_switch_num_labels (stmt);
6de9cd9a
DN
4729
4730 /* Mark all the destination basic blocks. */
4731 for (i = 0; i < n; ++i)
4732 {
726a989a 4733 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a 4734 basic_block label_bb = label_to_block (lab);
1e128c5f 4735 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4736 label_bb->aux = (void *)1;
4737 }
4738
7853504d 4739 /* Verify that the case labels are sorted. */
726a989a 4740 prev = gimple_switch_label (stmt, 0);
b7814a18 4741 for (i = 1; i < n; ++i)
7853504d 4742 {
726a989a
RB
4743 tree c = gimple_switch_label (stmt, i);
4744 if (!CASE_LOW (c))
7853504d 4745 {
726a989a
RB
4746 error ("found default case not at the start of "
4747 "case vector");
4748 err = 1;
7853504d
SB
4749 continue;
4750 }
726a989a
RB
4751 if (CASE_LOW (prev)
4752 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
7853504d 4753 {
953ff289 4754 error ("case labels not sorted: ");
7853504d
SB
4755 print_generic_expr (stderr, prev, 0);
4756 fprintf (stderr," is greater than ");
4757 print_generic_expr (stderr, c, 0);
4758 fprintf (stderr," but comes before it.\n");
4759 err = 1;
4760 }
4761 prev = c;
4762 }
b7814a18
RG
4763 /* VRP will remove the default case if it can prove it will
4764 never be executed. So do not verify there always exists
4765 a default case here. */
7853504d 4766
628f6a4e 4767 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4768 {
4769 if (!e->dest->aux)
4770 {
ab532386 4771 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4772 bb->index, e->dest->index);
4773 err = 1;
4774 }
726a989a 4775
6de9cd9a
DN
4776 e->dest->aux = (void *)2;
4777 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4778 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4779 {
ab532386 4780 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4781 bb->index);
4782 err = 1;
4783 }
4784 }
4785
4786 /* Check that we have all of them. */
4787 for (i = 0; i < n; ++i)
4788 {
726a989a 4789 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a
DN
4790 basic_block label_bb = label_to_block (lab);
4791
4792 if (label_bb->aux != (void *)2)
4793 {
726a989a 4794 error ("missing edge %i->%i", bb->index, label_bb->index);
6de9cd9a
DN
4795 err = 1;
4796 }
4797 }
4798
628f6a4e 4799 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4800 e->dest->aux = (void *)0;
4801 }
4802
4803 default: ;
4804 }
4805 }
4806
2b28c07a 4807 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4808 verify_dominators (CDI_DOMINATORS);
4809
4810 return err;
4811}
4812
4813
f0b698c1 4814/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4815 by edge FALLTHRU. */
4816
4817static void
726a989a 4818gimple_make_forwarder_block (edge fallthru)
6de9cd9a
DN
4819{
4820 edge e;
628f6a4e 4821 edge_iterator ei;
6de9cd9a 4822 basic_block dummy, bb;
726a989a
RB
4823 tree var;
4824 gimple_stmt_iterator gsi;
6de9cd9a
DN
4825
4826 dummy = fallthru->src;
4827 bb = fallthru->dest;
4828
c5cbcccf 4829 if (single_pred_p (bb))
6de9cd9a
DN
4830 return;
4831
cfaab3a9 4832 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4833 start of BB. */
726a989a 4834 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4835 {
726a989a
RB
4836 gimple phi, new_phi;
4837
4838 phi = gsi_stmt (gsi);
4839 var = gimple_phi_result (phi);
6de9cd9a
DN
4840 new_phi = create_phi_node (var, bb);
4841 SSA_NAME_DEF_STMT (var) = new_phi;
726a989a
RB
4842 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4843 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
6de9cd9a
DN
4844 }
4845
6de9cd9a 4846 /* Add the arguments we have stored on edges. */
628f6a4e 4847 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4848 {
4849 if (e == fallthru)
4850 continue;
4851
71882046 4852 flush_pending_stmts (e);
6de9cd9a
DN
4853 }
4854}
4855
4856
6de9cd9a
DN
4857/* Return a non-special label in the head of basic block BLOCK.
4858 Create one if it doesn't exist. */
4859
d7621d3c 4860tree
726a989a 4861gimple_block_label (basic_block bb)
6de9cd9a 4862{
726a989a 4863 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6de9cd9a 4864 bool first = true;
726a989a
RB
4865 tree label;
4866 gimple stmt;
6de9cd9a 4867
726a989a 4868 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6de9cd9a 4869 {
726a989a
RB
4870 stmt = gsi_stmt (i);
4871 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 4872 break;
726a989a 4873 label = gimple_label_label (stmt);
6de9cd9a
DN
4874 if (!DECL_NONLOCAL (label))
4875 {
4876 if (!first)
726a989a 4877 gsi_move_before (&i, &s);
6de9cd9a
DN
4878 return label;
4879 }
4880 }
4881
c2255bc4 4882 label = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
4883 stmt = gimple_build_label (label);
4884 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6de9cd9a
DN
4885 return label;
4886}
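/* Illustrative: for a block that already starts with a usable label
   "L1:" this returns L1 (moving it to the head of the block if needed);
   otherwise a new artificial label is created and inserted first, so
   callers such as the edge-redirection code below can always build a
   goto target.  */
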
4887
4888
4889/* Attempt to perform edge redirection by replacing a possibly complex
4890 jump instruction by a goto or by removing the jump completely.
4891 This can apply only if all edges now point to the same block. The
4892 parameters and return values are equivalent to
4893 redirect_edge_and_branch. */
4894
4895static edge
726a989a 4896gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6de9cd9a
DN
4897{
4898 basic_block src = e->src;
726a989a
RB
4899 gimple_stmt_iterator i;
4900 gimple stmt;
6de9cd9a 4901
07b43a87
KH
4902 /* We can replace or remove a complex jump only when we have exactly
4903 two edges. */
4904 if (EDGE_COUNT (src->succs) != 2
4905 /* Verify that all targets will be TARGET. Specifically, the
4906 edge that is not E must also go to TARGET. */
4907 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4908 return NULL;
4909
726a989a
RB
4910 i = gsi_last_bb (src);
4911 if (gsi_end_p (i))
6de9cd9a 4912 return NULL;
6de9cd9a 4913
726a989a
RB
4914 stmt = gsi_stmt (i);
4915
4916 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a 4917 {
726a989a 4918 gsi_remove (&i, true);
6de9cd9a
DN
4919 e = ssa_redirect_edge (e, target);
4920 e->flags = EDGE_FALLTHRU;
4921 return e;
4922 }
4923
4924 return NULL;
4925}
4926
4927
4928/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4929 edge representing the redirected branch. */
4930
4931static edge
726a989a 4932gimple_redirect_edge_and_branch (edge e, basic_block dest)
6de9cd9a
DN
4933{
4934 basic_block bb = e->src;
726a989a 4935 gimple_stmt_iterator gsi;
6de9cd9a 4936 edge ret;
726a989a 4937 gimple stmt;
6de9cd9a 4938
4f6c2131 4939 if (e->flags & EDGE_ABNORMAL)
6de9cd9a
DN
4940 return NULL;
4941
6531d1be 4942 if (e->src != ENTRY_BLOCK_PTR
726a989a 4943 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
6de9cd9a
DN
4944 return ret;
4945
4946 if (e->dest == dest)
4947 return NULL;
4948
a3710436
JH
4949 if (e->flags & EDGE_EH)
4950 return redirect_eh_edge (e, dest);
4951
726a989a
RB
4952 gsi = gsi_last_bb (bb);
4953 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6de9cd9a 4954
d130ae11 4955 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6de9cd9a 4956 {
726a989a 4957 case GIMPLE_COND:
a9b77cd1 4958 /* For COND_EXPR, we only need to redirect the edge. */
6de9cd9a
DN
4959 break;
4960
726a989a 4961 case GIMPLE_GOTO:
6de9cd9a
DN
4962 /* No non-abnormal edges should lead from a non-simple goto, and
4963 simple ones should be represented implicitly. */
1e128c5f 4964 gcc_unreachable ();
6de9cd9a 4965
726a989a 4966 case GIMPLE_SWITCH:
6de9cd9a 4967 {
726a989a 4968 tree label = gimple_block_label (dest);
d6be0d7f 4969 tree cases = get_cases_for_edge (e, stmt);
6de9cd9a 4970
d6be0d7f
JL
4971 /* If we have a list of cases associated with E, then use it
4972 as it's a lot faster than walking the entire case vector. */
4973 if (cases)
6de9cd9a 4974 {
4edbbd3f 4975 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4976 tree last, first;
4977
4978 first = cases;
4979 while (cases)
4980 {
4981 last = cases;
4982 CASE_LABEL (cases) = label;
4983 cases = TREE_CHAIN (cases);
4984 }
4985
4986 /* If there was already an edge in the CFG, then we need
4987 to move all the cases associated with E to E2. */
4988 if (e2)
4989 {
4990 tree cases2 = get_cases_for_edge (e2, stmt);
4991
4992 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4993 TREE_CHAIN (cases2) = first;
4994 }
6de9cd9a 4995 }
92b6dff3
JL
4996 else
4997 {
726a989a 4998 size_t i, n = gimple_switch_num_labels (stmt);
d6be0d7f
JL
4999
5000 for (i = 0; i < n; i++)
5001 {
726a989a 5002 tree elt = gimple_switch_label (stmt, i);
d6be0d7f
JL
5003 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5004 CASE_LABEL (elt) = label;
5005 }
92b6dff3 5006 }
d6be0d7f 5007
92b6dff3 5008 break;
6de9cd9a 5009 }
6de9cd9a 5010
726a989a
RB
5011 case GIMPLE_RETURN:
5012 gsi_remove (&gsi, true);
6de9cd9a
DN
5013 e->flags |= EDGE_FALLTHRU;
5014 break;
5015
726a989a
RB
5016 case GIMPLE_OMP_RETURN:
5017 case GIMPLE_OMP_CONTINUE:
5018 case GIMPLE_OMP_SECTIONS_SWITCH:
5019 case GIMPLE_OMP_FOR:
e5c95afe
ZD
5020 /* The edges from OMP constructs can be simply redirected. */
5021 break;
5022
6de9cd9a
DN
5023 default:
5024 /* Otherwise it must be a fallthru edge, and we don't need to
5025 do anything besides redirecting it. */
1e128c5f 5026 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
5027 break;
5028 }
5029
5030 /* Update/insert PHI nodes as necessary. */
5031
5032 /* Now update the edges in the CFG. */
5033 e = ssa_redirect_edge (e, dest);
5034
5035 return e;
5036}
5037
14fa2cc0
ZD
5038/* Returns true if it is possible to remove edge E by redirecting
5039 it to the destination of the other edge from E->src. */
5040
5041static bool
726a989a 5042gimple_can_remove_branch_p (const_edge e)
14fa2cc0 5043{
496a4ef5 5044 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
14fa2cc0
ZD
5045 return false;
5046
5047 return true;
5048}
6de9cd9a
DN
5049
5050/* Simple wrapper, as we can always redirect fallthru edges. */
5051
5052static basic_block
726a989a 5053gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6de9cd9a 5054{
726a989a 5055 e = gimple_redirect_edge_and_branch (e, dest);
1e128c5f 5056 gcc_assert (e);
6de9cd9a
DN
5057
5058 return NULL;
5059}
5060
5061
5062/* Splits basic block BB after statement STMT (but at least after the
5063 labels). If STMT is NULL, BB is split just after the labels. */
5064
5065static basic_block
726a989a 5066gimple_split_block (basic_block bb, void *stmt)
6de9cd9a 5067{
726a989a
RB
5068 gimple_stmt_iterator gsi;
5069 gimple_stmt_iterator gsi_tgt;
5070 gimple act;
5071 gimple_seq list;
6de9cd9a
DN
5072 basic_block new_bb;
5073 edge e;
628f6a4e 5074 edge_iterator ei;
6de9cd9a
DN
5075
5076 new_bb = create_empty_bb (bb);
5077
5078 /* Redirect the outgoing edges. */
628f6a4e
BE
5079 new_bb->succs = bb->succs;
5080 bb->succs = NULL;
5081 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
5082 e->src = new_bb;
5083
726a989a 5084 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
6de9cd9a
DN
5085 stmt = NULL;
5086
726a989a
RB
5087 /* Move everything from GSI to the new basic block. */
5088 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 5089 {
726a989a
RB
5090 act = gsi_stmt (gsi);
5091 if (gimple_code (act) == GIMPLE_LABEL)
6de9cd9a
DN
5092 continue;
5093
5094 if (!stmt)
5095 break;
5096
5097 if (stmt == act)
5098 {
726a989a 5099 gsi_next (&gsi);
6de9cd9a
DN
5100 break;
5101 }
5102 }
5103
726a989a 5104 if (gsi_end_p (gsi))
597ae074
JH
5105 return new_bb;
5106
5107 /* Split the statement list - avoid re-creating new containers as this
5108 brings ugly quadratic memory consumption in the inliner.
5109 (We are still quadratic since we need to update stmt BB pointers,
5110 sadly.) */
726a989a
RB
5111 list = gsi_split_seq_before (&gsi);
5112 set_bb_seq (new_bb, list);
5113 for (gsi_tgt = gsi_start (list);
5114 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5115 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6de9cd9a
DN
5116
5117 return new_bb;
5118}
5119
5120
5121/* Moves basic block BB after block AFTER. */
5122
5123static bool
726a989a 5124gimple_move_block_after (basic_block bb, basic_block after)
6de9cd9a
DN
5125{
5126 if (bb->prev_bb == after)
5127 return true;
5128
5129 unlink_block (bb);
5130 link_block (bb, after);
5131
5132 return true;
5133}
5134
5135
5136/* Return true if basic_block can be duplicated. */
5137
5138static bool
ca89096d 5139gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6de9cd9a
DN
5140{
5141 return true;
5142}
5143
6de9cd9a
DN
5144/* Create a duplicate of the basic block BB. NOTE: This does not
5145 preserve SSA form. */
5146
5147static basic_block
726a989a 5148gimple_duplicate_bb (basic_block bb)
6de9cd9a
DN
5149{
5150 basic_block new_bb;
726a989a
RB
5151 gimple_stmt_iterator gsi, gsi_tgt;
5152 gimple_seq phis = phi_nodes (bb);
5153 gimple phi, stmt, copy;
6de9cd9a
DN
5154
5155 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 5156
84d65814
DN
5157 /* Copy the PHI nodes. We ignore PHI node arguments here because
5158 the incoming edges have not been setup yet. */
726a989a 5159 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
b0382c67 5160 {
726a989a
RB
5161 phi = gsi_stmt (gsi);
5162 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5163 create_new_def_for (gimple_phi_result (copy), copy,
5164 gimple_phi_result_ptr (copy));
b0382c67 5165 }
84d65814 5166
726a989a
RB
5167 gsi_tgt = gsi_start_bb (new_bb);
5168 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 5169 {
84d65814
DN
5170 def_operand_p def_p;
5171 ssa_op_iter op_iter;
cc7220fd 5172 int region;
6de9cd9a 5173
726a989a
RB
5174 stmt = gsi_stmt (gsi);
5175 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
5176 continue;
5177
84d65814
DN
5178 /* Create a new copy of STMT and duplicate STMT's virtual
5179 operands. */
726a989a
RB
5180 copy = gimple_copy (stmt);
5181 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
cc7220fd
JH
5182 region = lookup_stmt_eh_region (stmt);
5183 if (region >= 0)
5184 add_stmt_to_eh_region (copy, region);
6946b3f7 5185 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
84d65814
DN
5186
5187 /* Create new names for all the definitions created by COPY and
5188 add replacement mappings for each new name. */
5189 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5190 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6de9cd9a
DN
5191 }
5192
5193 return new_bb;
5194}
5195
5f40b3cb
ZD
5196/* Adds phi node arguments for edge E_COPY after basic block duplication. */
5197
5198static void
5199add_phi_args_after_copy_edge (edge e_copy)
5200{
5201 basic_block bb, bb_copy = e_copy->src, dest;
5202 edge e;
5203 edge_iterator ei;
726a989a
RB
5204 gimple phi, phi_copy;
5205 tree def;
5206 gimple_stmt_iterator psi, psi_copy;
5f40b3cb 5207
726a989a 5208 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5f40b3cb
ZD
5209 return;
5210
5211 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5212
5213 if (e_copy->dest->flags & BB_DUPLICATED)
5214 dest = get_bb_original (e_copy->dest);
5215 else
5216 dest = e_copy->dest;
5217
5218 e = find_edge (bb, dest);
5219 if (!e)
5220 {
5221 /* During loop unrolling the target of the latch edge is copied.
5222 In this case we are not looking for edge to dest, but to
5223 duplicated block whose original was dest. */
5224 FOR_EACH_EDGE (e, ei, bb->succs)
5225 {
5226 if ((e->dest->flags & BB_DUPLICATED)
5227 && get_bb_original (e->dest) == dest)
5228 break;
5229 }
5230
5231 gcc_assert (e != NULL);
5232 }
5233
726a989a
RB
5234 for (psi = gsi_start_phis (e->dest),
5235 psi_copy = gsi_start_phis (e_copy->dest);
5236 !gsi_end_p (psi);
5237 gsi_next (&psi), gsi_next (&psi_copy))
5f40b3cb 5238 {
726a989a
RB
5239 phi = gsi_stmt (psi);
5240 phi_copy = gsi_stmt (psi_copy);
5f40b3cb
ZD
5241 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5242 add_phi_arg (phi_copy, def, e_copy);
5243 }
5244}
5245
84d65814 5246
42759f1e
ZD
5247/* Basic block BB_COPY was created by code duplication. Add phi node
5248 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 5249 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
5250
5251void
5252add_phi_args_after_copy_bb (basic_block bb_copy)
5253{
5f40b3cb 5254 edge e_copy;
726a989a 5255 edge_iterator ei;
42759f1e 5256
628f6a4e 5257 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 5258 {
5f40b3cb 5259 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5260 }
5261}
5262
5263/* Blocks in REGION_COPY array of length N_REGION were created by
5264 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
5265 going from these blocks. If E_COPY is not NULL, also add
5266 phi node arguments for its destination.*/
42759f1e
ZD
5267
5268void
5f40b3cb
ZD
5269add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5270 edge e_copy)
42759f1e
ZD
5271{
5272 unsigned i;
5273
5274 for (i = 0; i < n_region; i++)
6580ee77 5275 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
5276
5277 for (i = 0; i < n_region; i++)
5278 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5279 if (e_copy)
5280 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5281
5282 for (i = 0; i < n_region; i++)
6580ee77 5283 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5284}
5285
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  SSA form, dominance and loop information
   is updated.  The new basic blocks are stored to REGION_COPY in the same
   order as they had in REGION, provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
                              basic_block *region, unsigned n_region,
                              basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  VEC (basic_block, heap) *doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
         same loop.  */
      if (region[i]->loop_father != loop)
        return false;

      if (region[i] != entry->dest
          && region[i] == loop->header)
        return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
        return false;

      for (i = 0; i < n_region; i++)
        if (region[i] != exit->src
            && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
          return false;
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = NULL;
  initialize_original_copy_tables ();

  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
         frequencies.  */
      if (entry_count > total_count)
        entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
         frequencies.  */
      if (total_freq == 0)
        total_freq = 1;
      else if (entry_freq > total_freq)
        entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
            split_edge_bb_loc (entry));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
                                       total_count - entry_count,
                                       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
                                       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
                                 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}

/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry, edge exit,
                            basic_block *region, unsigned n_region,
                            basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  VEC (basic_block, heap) *doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple cond_stmt;
  edge sorig, snew;

  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
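  /* The other successor of the condition at the end of EXIT->src; the
     inner EDGE_SUCC test yields index 1 when successor 0 is EXIT and
     index 0 otherwise.  */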
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird
     (e.g., in the example, if there is a jump from inside to the middle
     of some_code, or some_code defines some of the values used in cond)
     it will work, but the resulting code will not be correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
         same loop.  */
      if (region[i]->loop_father != orig_loop)
        return false;

      if (region[i] == orig_loop->latch)
        return false;
    }

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
         frequencies.  */
      if (exit_count > total_count)
        exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
         frequencies.  */
      if (total_freq == 0)
        total_freq = 1;
      if (exit_freq > total_freq)
        exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
            split_edge_bb_loc (exit));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
                                       total_count - exit_count,
                                       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
                                       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
                                 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);
  gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
  gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}

/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
   adding blocks when the dominator traversal reaches EXIT.  This
   function silently assumes that ENTRY strictly dominates EXIT.  */

void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
                              VEC(basic_block,heap) **bbs_p)
{
  basic_block son;

  for (son = first_dom_son (CDI_DOMINATORS, entry);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      VEC_safe_push (basic_block, heap, *bbs_p, son);
      if (son != exit)
        gather_blocks_in_sese_region (son, exit, bbs_p);
    }
}

/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
                           tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    {
      loc = pointer_map_insert (vars_map, t);

      if (SSA_VAR_P (t))
        {
          new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
          f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
        }
      else
        {
          gcc_assert (TREE_CODE (t) == CONST_DECL);
          new_t = copy_node (t);
        }
      DECL_CONTEXT (new_t) = to_context;

      *loc = new_t;
    }
  else
    new_t = (tree) *loc;

  *tp = new_t;
}


/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
                  tree to_context)
{
  void **loc;
  tree new_name, decl = SSA_NAME_VAR (name);

  gcc_assert (is_gimple_reg (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      replace_by_duplicate_decl (&decl, vars_map, to_context);

      push_cfun (DECL_STRUCT_FUNCTION (to_context));
      if (gimple_in_ssa_p (cfun))
        add_referenced_var (decl);

      new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        set_default_def (decl, new_name);
      pop_cfun ();

      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = (tree) *loc;

  return new_name;
}
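
/* Data passed via walk_stmt_info.info to move_stmt_op and move_stmt_r
   below while relocating the statements of a block into TO_CONTEXT.  */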
50674e96
DN
5702
5703struct move_stmt_d
5704{
b357f682
JJ
5705 tree orig_block;
5706 tree new_block;
50674e96
DN
5707 tree from_context;
5708 tree to_context;
917948d3 5709 struct pointer_map_t *vars_map;
fad41cd7 5710 htab_t new_label_map;
50674e96
DN
5711 bool remap_decls_p;
5712};
5713
5714/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
b357f682
JJ
5715 contained in *TP if it has been ORIG_BLOCK previously and change the
5716 DECL_CONTEXT of every local variable referenced in *TP. */
50674e96
DN
5717
5718static tree
726a989a 5719move_stmt_op (tree *tp, int *walk_subtrees, void *data)
50674e96 5720{
726a989a
RB
5721 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5722 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
fad41cd7 5723 tree t = *tp;
50674e96 5724
726a989a
RB
5725 if (EXPR_P (t))
5726 /* We should never have TREE_BLOCK set on non-statements. */
5727 gcc_assert (!TREE_BLOCK (t));
fad41cd7 5728
917948d3 5729 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
50674e96 5730 {
917948d3
ZD
5731 if (TREE_CODE (t) == SSA_NAME)
5732 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5733 else if (TREE_CODE (t) == LABEL_DECL)
fad41cd7
RH
5734 {
5735 if (p->new_label_map)
5736 {
5737 struct tree_map in, *out;
fc8600f9 5738 in.base.from = t;
3d9a9f94
KG
5739 out = (struct tree_map *)
5740 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
fad41cd7
RH
5741 if (out)
5742 *tp = t = out->to;
5743 }
50674e96 5744
fad41cd7
RH
5745 DECL_CONTEXT (t) = p->to_context;
5746 }
5747 else if (p->remap_decls_p)
50674e96 5748 {
917948d3
ZD
5749 /* Replace T with its duplicate. T should no longer appear in the
5750 parent function, so this looks wasteful; however, it may appear
5751 in referenced_vars, and more importantly, as virtual operands of
5752 statements, and in alias lists of other variables. It would be
5753 quite difficult to expunge it from all those places. ??? It might
5754 suffice to do this for addressable variables. */
5755 if ((TREE_CODE (t) == VAR_DECL
5756 && !is_global_var (t))
5757 || TREE_CODE (t) == CONST_DECL)
5758 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5759
5760 if (SSA_VAR_P (t)
5761 && gimple_in_ssa_p (cfun))
fad41cd7 5762 {
917948d3
ZD
5763 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5764 add_referenced_var (*tp);
5765 pop_cfun ();
fad41cd7 5766 }
50674e96 5767 }
917948d3 5768 *walk_subtrees = 0;
50674e96 5769 }
fad41cd7
RH
5770 else if (TYPE_P (t))
5771 *walk_subtrees = 0;
50674e96
DN
5772
5773 return NULL_TREE;
5774}
5775
726a989a
RB
5776/* Like move_stmt_op, but for gimple statements.
5777
5778 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5779 contained in the current statement in *GSI_P and change the
5780 DECL_CONTEXT of every local variable referenced in the current
5781 statement. */
5782
5783static tree
5784move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5785 struct walk_stmt_info *wi)
5786{
5787 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5788 gimple stmt = gsi_stmt (*gsi_p);
5789 tree block = gimple_block (stmt);
5790
5791 if (p->orig_block == NULL_TREE
5792 || block == p->orig_block
5793 || block == NULL_TREE)
5794 gimple_set_block (stmt, p->new_block);
5795#ifdef ENABLE_CHECKING
5796 else if (block != p->new_block)
5797 {
5798 while (block && block != p->orig_block)
5799 block = BLOCK_SUPERCONTEXT (block);
5800 gcc_assert (block);
5801 }
5802#endif
5803
5804 if (is_gimple_omp (stmt)
5805 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5806 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5807 {
5808 /* Do not remap variables inside OMP directives. Variables
5809 referenced in clauses and directive header belong to the
5810 parent function and should not be moved into the child
5811 function. */
5812 bool save_remap_decls_p = p->remap_decls_p;
5813 p->remap_decls_p = false;
5814 *handled_ops_p = true;
5815
5816 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5817
5818 p->remap_decls_p = save_remap_decls_p;
5819 }
5820
5821 return NULL_TREE;
5822}
5823
917948d3
ZD
5824/* Marks virtual operands of all statements in basic blocks BBS for
5825 renaming. */
5826
dea61d92
SP
5827void
5828mark_virtual_ops_in_bb (basic_block bb)
917948d3 5829{
726a989a 5830 gimple_stmt_iterator gsi;
dea61d92 5831
726a989a
RB
5832 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5833 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92 5834
726a989a
RB
5835 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5836 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92
SP
5837}
5838
50674e96
DN
5839/* Move basic block BB from function CFUN to function DEST_FN. The
5840 block is moved out of the original linked list and placed after
5841 block AFTER in the new list. Also, the block is removed from the
5842 original array of blocks and placed in DEST_FN's array of blocks.
5843 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5844 updated to reflect the moved edges.
6531d1be 5845
917948d3
ZD
5846 The local variables are remapped to new instances, VARS_MAP is used
5847 to record the mapping. */
50674e96
DN
5848
5849static void
5850move_block_to_fn (struct function *dest_cfun, basic_block bb,
5851 basic_block after, bool update_edge_count_p,
b357f682 5852 struct move_stmt_d *d, int eh_offset)
50674e96
DN
5853{
5854 struct control_flow_graph *cfg;
5855 edge_iterator ei;
5856 edge e;
726a989a 5857 gimple_stmt_iterator si;
728b26bb 5858 unsigned old_len, new_len;
50674e96 5859
3722506a
ZD
5860 /* Remove BB from dominance structures. */
5861 delete_from_dominance_info (CDI_DOMINATORS, bb);
5f40b3cb
ZD
5862 if (current_loops)
5863 remove_bb_from_loops (bb);
3722506a 5864
50674e96
DN
5865 /* Link BB to the new linked list. */
5866 move_block_after (bb, after);
5867
5868 /* Update the edge count in the corresponding flowgraphs. */
5869 if (update_edge_count_p)
5870 FOR_EACH_EDGE (e, ei, bb->succs)
5871 {
5872 cfun->cfg->x_n_edges--;
5873 dest_cfun->cfg->x_n_edges++;
5874 }
5875
5876 /* Remove BB from the original basic block array. */
5877 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5878 cfun->cfg->x_n_basic_blocks--;
5879
5880 /* Grow DEST_CFUN's basic block array if needed. */
5881 cfg = dest_cfun->cfg;
5882 cfg->x_n_basic_blocks++;
3722506a
ZD
5883 if (bb->index >= cfg->x_last_basic_block)
5884 cfg->x_last_basic_block = bb->index + 1;
50674e96 5885
728b26bb
DN
5886 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5887 if ((unsigned) cfg->x_last_basic_block >= old_len)
50674e96 5888 {
728b26bb 5889 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
a590ac65
KH
5890 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5891 new_len);
50674e96
DN
5892 }
5893
5894 VEC_replace (basic_block, cfg->x_basic_block_info,
e0310afb 5895 bb->index, bb);
50674e96 5896
917948d3 5897 /* Remap the variables in phi nodes. */
726a989a 5898 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
917948d3 5899 {
726a989a 5900 gimple phi = gsi_stmt (si);
917948d3
ZD
5901 use_operand_p use;
5902 tree op = PHI_RESULT (phi);
5903 ssa_op_iter oi;
5904
5905 if (!is_gimple_reg (op))
5f40b3cb
ZD
5906 {
5907 /* Remove the phi nodes for virtual operands (alias analysis will be
5908 run for the new function, anyway). */
726a989a 5909 remove_phi_node (&si, true);
5f40b3cb
ZD
5910 continue;
5911 }
917948d3 5912
b357f682
JJ
5913 SET_PHI_RESULT (phi,
5914 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3
ZD
5915 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5916 {
5917 op = USE_FROM_PTR (use);
5918 if (TREE_CODE (op) == SSA_NAME)
b357f682 5919 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3 5920 }
726a989a
RB
5921
5922 gsi_next (&si);
917948d3
ZD
5923 }
5924
726a989a 5925 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
50674e96 5926 {
726a989a 5927 gimple stmt = gsi_stmt (si);
fad41cd7 5928 int region;
726a989a 5929 struct walk_stmt_info wi;
50674e96 5930
726a989a
RB
5931 memset (&wi, 0, sizeof (wi));
5932 wi.info = d;
5933 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
50674e96 5934
726a989a 5935 if (gimple_code (stmt) == GIMPLE_LABEL)
50674e96 5936 {
726a989a 5937 tree label = gimple_label_label (stmt);
50674e96
DN
5938 int uid = LABEL_DECL_UID (label);
5939
5940 gcc_assert (uid > -1);
5941
5942 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5943 if (old_len <= (unsigned) uid)
5944 {
5006671f 5945 new_len = 3 * uid / 2 + 1;
a590ac65
KH
5946 VEC_safe_grow_cleared (basic_block, gc,
5947 cfg->x_label_to_block_map, new_len);
50674e96
DN
5948 }
5949
5950 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5951 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5952
5953 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5954
cb91fab0
JH
5955 if (uid >= dest_cfun->cfg->last_label_uid)
5956 dest_cfun->cfg->last_label_uid = uid + 1;
50674e96 5957 }
726a989a
RB
5958 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5959 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
fad41cd7
RH
5960
5961 region = lookup_stmt_eh_region (stmt);
5962 if (region >= 0)
5963 {
5964 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5965 remove_stmt_from_eh_region (stmt);
6946b3f7
JH
5966 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5967 gimple_remove_stmt_histograms (cfun, stmt);
fad41cd7 5968 }
917948d3 5969
5f40b3cb
ZD
5970 /* We cannot leave any operands allocated from the operand caches of
5971 the current function. */
5972 free_stmt_operands (stmt);
5973 push_cfun (dest_cfun);
917948d3 5974 update_stmt (stmt);
5f40b3cb 5975 pop_cfun ();
fad41cd7 5976 }
7241571e
JJ
5977
5978 FOR_EACH_EDGE (e, ei, bb->succs)
5979 if (e->goto_locus)
5980 {
5981 tree block = e->goto_block;
5982 if (d->orig_block == NULL_TREE
5983 || block == d->orig_block)
5984 e->goto_block = d->new_block;
5985#ifdef ENABLE_CHECKING
5986 else if (block != d->new_block)
5987 {
5988 while (block && block != d->orig_block)
5989 block = BLOCK_SUPERCONTEXT (block);
5990 gcc_assert (block);
5991 }
5992#endif
5993 }
fad41cd7
RH
5994}
5995
5996/* Examine the statements in BB (which is in SRC_CFUN); find and return
5997 the outermost EH region. Use REGION as the incoming base EH region. */
5998
5999static int
6000find_outermost_region_in_block (struct function *src_cfun,
6001 basic_block bb, int region)
6002{
726a989a 6003 gimple_stmt_iterator si;
6531d1be 6004
726a989a 6005 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
fad41cd7 6006 {
726a989a 6007 gimple stmt = gsi_stmt (si);
fad41cd7 6008 int stmt_region;
1799e5d5 6009
726a989a
RB
6010 if (gimple_code (stmt) == GIMPLE_RESX)
6011 stmt_region = gimple_resx_region (stmt);
07ed51c9
JJ
6012 else
6013 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
7e2df4a1
JJ
6014 if (stmt_region > 0)
6015 {
6016 if (region < 0)
6017 region = stmt_region;
6018 else if (stmt_region != region)
6019 {
6020 region = eh_region_outermost (src_cfun, stmt_region, region);
6021 gcc_assert (region != -1);
6022 }
6023 }
50674e96 6024 }
fad41cd7
RH
6025
6026 return region;
50674e96
DN
6027}
6028
fad41cd7
RH
6029static tree
6030new_label_mapper (tree decl, void *data)
6031{
6032 htab_t hash = (htab_t) data;
6033 struct tree_map *m;
6034 void **slot;
6035
6036 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6037
3d9a9f94 6038 m = XNEW (struct tree_map);
fad41cd7 6039 m->hash = DECL_UID (decl);
fc8600f9 6040 m->base.from = decl;
c2255bc4 6041 m->to = create_artificial_label (UNKNOWN_LOCATION);
fad41cd7 6042 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
cb91fab0
JH
6043 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6044 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
6045
6046 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6047 gcc_assert (*slot == NULL);
6048
6049 *slot = m;
6050
6051 return m->to;
6052}
50674e96 6053
b357f682
JJ
6054/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6055 subblocks. */
6056
6057static void
6058replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6059 tree to_context)
6060{
6061 tree *tp, t;
6062
6063 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
6064 {
6065 t = *tp;
e1e2bac4
JJ
6066 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6067 continue;
b357f682
JJ
6068 replace_by_duplicate_decl (&t, vars_map, to_context);
6069 if (t != *tp)
6070 {
6071 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6072 {
6073 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6074 DECL_HAS_VALUE_EXPR_P (t) = 1;
6075 }
6076 TREE_CHAIN (t) = TREE_CHAIN (*tp);
6077 *tp = t;
6078 }
6079 }
6080
6081 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6082 replace_block_vars_by_duplicates (block, vars_map, to_context);
6083}
6084
50674e96
DN
6085/* Move a single-entry, single-exit region delimited by ENTRY_BB and
6086 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6087 single basic block in the original CFG and the new basic block is
6088 returned. DEST_CFUN must not have a CFG yet.
6089
6090 Note that the region need not be a pure SESE region. Blocks inside
6091 the region may contain calls to abort/exit. The only restriction
6092 is that ENTRY_BB should be the only entry point and it must
6093 dominate EXIT_BB.
6094
b357f682
JJ
6095 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6096 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6097 to the new function.
6098
50674e96
DN
6099 All local variables referenced in the region are assumed to be in
6100 the corresponding BLOCK_VARS and unexpanded variable lists
6101 associated with DEST_CFUN. */
6102
6103basic_block
6104move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
b357f682 6105 basic_block exit_bb, tree orig_block)
50674e96 6106{
917948d3
ZD
6107 VEC(basic_block,heap) *bbs, *dom_bbs;
6108 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6109 basic_block after, bb, *entry_pred, *exit_succ, abb;
6110 struct function *saved_cfun = cfun;
fad41cd7 6111 int *entry_flag, *exit_flag, eh_offset;
917948d3 6112 unsigned *entry_prob, *exit_prob;
50674e96
DN
6113 unsigned i, num_entry_edges, num_exit_edges;
6114 edge e;
6115 edge_iterator ei;
fad41cd7 6116 htab_t new_label_map;
917948d3 6117 struct pointer_map_t *vars_map;
5f40b3cb 6118 struct loop *loop = entry_bb->loop_father;
b357f682 6119 struct move_stmt_d d;
50674e96
DN
6120
6121 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6122 region. */
6123 gcc_assert (entry_bb != exit_bb
2aee3e57
JJ
6124 && (!exit_bb
6125 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
50674e96 6126
917948d3
ZD
6127 /* Collect all the blocks in the region. Manually add ENTRY_BB
6128 because it won't be added by dfs_enumerate_from. */
50674e96
DN
6129 bbs = NULL;
6130 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6131 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6132
917948d3
ZD
6133 /* The blocks that used to be dominated by something in BBS will now be
6134 dominated by the new block. */
6135 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6136 VEC_address (basic_block, bbs),
6137 VEC_length (basic_block, bbs));
6138
50674e96
DN
6139 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6140 the predecessor edges to ENTRY_BB and the successor edges to
6141 EXIT_BB so that we can re-attach them to the new basic block that
6142 will replace the region. */
6143 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6144 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6145 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
917948d3 6146 entry_prob = XNEWVEC (unsigned, num_entry_edges);
50674e96
DN
6147 i = 0;
6148 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6149 {
917948d3 6150 entry_prob[i] = e->probability;
50674e96
DN
6151 entry_flag[i] = e->flags;
6152 entry_pred[i++] = e->src;
6153 remove_edge (e);
6154 }
6155
2aee3e57 6156 if (exit_bb)
50674e96 6157 {
2aee3e57
JJ
6158 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6159 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6160 sizeof (basic_block));
6161 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
917948d3 6162 exit_prob = XNEWVEC (unsigned, num_exit_edges);
2aee3e57
JJ
6163 i = 0;
6164 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6165 {
917948d3 6166 exit_prob[i] = e->probability;
2aee3e57
JJ
6167 exit_flag[i] = e->flags;
6168 exit_succ[i++] = e->dest;
6169 remove_edge (e);
6170 }
6171 }
6172 else
6173 {
6174 num_exit_edges = 0;
6175 exit_succ = NULL;
6176 exit_flag = NULL;
917948d3 6177 exit_prob = NULL;
50674e96
DN
6178 }
6179
6180 /* Switch context to the child function to initialize DEST_FN's CFG. */
6181 gcc_assert (dest_cfun->cfg == NULL);
917948d3 6182 push_cfun (dest_cfun);
fad41cd7 6183
50674e96 6184 init_empty_tree_cfg ();
fad41cd7
RH
6185
6186 /* Initialize EH information for the new function. */
6187 eh_offset = 0;
6188 new_label_map = NULL;
6189 if (saved_cfun->eh)
6190 {
6191 int region = -1;
6192
6193 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6194 region = find_outermost_region_in_block (saved_cfun, bb, region);
6195
6196 init_eh_for_function ();
6197 if (region != -1)
6198 {
6199 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6200 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6201 new_label_map, region, 0);
6202 }
6203 }
6204
917948d3
ZD
6205 pop_cfun ();
6206
50674e96
DN
6207 /* Move blocks from BBS into DEST_CFUN. */
6208 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6209 after = dest_cfun->cfg->x_entry_block_ptr;
917948d3 6210 vars_map = pointer_map_create ();
b357f682
JJ
6211
6212 memset (&d, 0, sizeof (d));
6213 d.vars_map = vars_map;
6214 d.from_context = cfun->decl;
6215 d.to_context = dest_cfun->decl;
6216 d.new_label_map = new_label_map;
6217 d.remap_decls_p = true;
6218 d.orig_block = orig_block;
6219 d.new_block = DECL_INITIAL (dest_cfun->decl);
6220
50674e96
DN
6221 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6222 {
6223 /* No need to update edge counts on the last block. It has
6224 already been updated earlier when we detached the region from
6225 the original CFG. */
b357f682 6226 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
50674e96
DN
6227 after = bb;
6228 }
6229
b357f682
JJ
6230 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6231 if (orig_block)
6232 {
6233 tree block;
6234 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6235 == NULL_TREE);
6236 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6237 = BLOCK_SUBBLOCKS (orig_block);
6238 for (block = BLOCK_SUBBLOCKS (orig_block);
6239 block; block = BLOCK_CHAIN (block))
6240 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6241 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6242 }
6243
6244 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6245 vars_map, dest_cfun->decl);
6246
fad41cd7
RH
6247 if (new_label_map)
6248 htab_delete (new_label_map);
917948d3 6249 pointer_map_destroy (vars_map);
50674e96
DN
6250
6251 /* Rewire the entry and exit blocks. The successor to the entry
6252 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6253 the child function. Similarly, the predecessor of DEST_FN's
6254 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6255 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6256 various CFG manipulation function get to the right CFG.
6257
6258 FIXME, this is silly. The CFG ought to become a parameter to
6259 these helpers. */
917948d3 6260 push_cfun (dest_cfun);
50674e96 6261 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
2aee3e57
JJ
6262 if (exit_bb)
6263 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
917948d3 6264 pop_cfun ();
50674e96
DN
6265
6266 /* Back in the original function, the SESE region has disappeared,
6267 create a new basic block in its place. */
6268 bb = create_empty_bb (entry_pred[0]);
5f40b3cb
ZD
6269 if (current_loops)
6270 add_bb_to_loop (bb, loop);
50674e96 6271 for (i = 0; i < num_entry_edges; i++)
917948d3
ZD
6272 {
6273 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6274 e->probability = entry_prob[i];
6275 }
50674e96
DN
6276
6277 for (i = 0; i < num_exit_edges; i++)
917948d3
ZD
6278 {
6279 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6280 e->probability = exit_prob[i];
6281 }
6282
6283 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6284 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6285 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6286 VEC_free (basic_block, heap, dom_bbs);
50674e96 6287
2aee3e57
JJ
6288 if (exit_bb)
6289 {
917948d3 6290 free (exit_prob);
2aee3e57
JJ
6291 free (exit_flag);
6292 free (exit_succ);
6293 }
917948d3 6294 free (entry_prob);
50674e96
DN
6295 free (entry_flag);
6296 free (entry_pred);
50674e96
DN
6297 VEC_free (basic_block, heap, bbs);
6298
6299 return bb;
6300}
6301
84d65814 6302
726a989a
RB
6303/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
6304 */
6de9cd9a
DN
6305
6306void
6307dump_function_to_file (tree fn, FILE *file, int flags)
6308{
6309 tree arg, vars, var;
459ffad3 6310 struct function *dsf;
6de9cd9a
DN
6311 bool ignore_topmost_bind = false, any_var = false;
6312 basic_block bb;
6313 tree chain;
6531d1be 6314
673fda6b 6315 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
6316
6317 arg = DECL_ARGUMENTS (fn);
6318 while (arg)
6319 {
2f9ea521
RG
6320 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6321 fprintf (file, " ");
6de9cd9a 6322 print_generic_expr (file, arg, dump_flags);
3e894af1
KZ
6323 if (flags & TDF_VERBOSE)
6324 print_node (file, "", arg, 4);
6de9cd9a
DN
6325 if (TREE_CHAIN (arg))
6326 fprintf (file, ", ");
6327 arg = TREE_CHAIN (arg);
6328 }
6329 fprintf (file, ")\n");
6330
3e894af1
KZ
6331 if (flags & TDF_VERBOSE)
6332 print_node (file, "", fn, 2);
6333
459ffad3
EB
6334 dsf = DECL_STRUCT_FUNCTION (fn);
6335 if (dsf && (flags & TDF_DETAILS))
6336 dump_eh_tree (file, dsf);
6337
39ecc018 6338 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6de9cd9a
DN
6339 {
6340 dump_node (fn, TDF_SLIM | flags, file);
6341 return;
6342 }
6343
953ff289 6344 /* Switch CFUN to point to FN. */
db2960f4 6345 push_cfun (DECL_STRUCT_FUNCTION (fn));
953ff289 6346
6de9cd9a
DN
6347 /* When GIMPLE is lowered, the variables are no longer available in
6348 BIND_EXPRs, so display them separately. */
cb91fab0 6349 if (cfun && cfun->decl == fn && cfun->local_decls)
6de9cd9a
DN
6350 {
6351 ignore_topmost_bind = true;
6352
6353 fprintf (file, "{\n");
cb91fab0 6354 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6de9cd9a
DN
6355 {
6356 var = TREE_VALUE (vars);
6357
6358 print_generic_decl (file, var, flags);
3e894af1
KZ
6359 if (flags & TDF_VERBOSE)
6360 print_node (file, "", var, 4);
6de9cd9a
DN
6361 fprintf (file, "\n");
6362
6363 any_var = true;
6364 }
6365 }
6366
32a87d45 6367 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6de9cd9a 6368 {
726a989a 6369 /* If the CFG has been built, emit a CFG-based dump. */
878f99d2 6370 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
6371 if (!ignore_topmost_bind)
6372 fprintf (file, "{\n");
6373
6374 if (any_var && n_basic_blocks)
6375 fprintf (file, "\n");
6376
6377 FOR_EACH_BB (bb)
726a989a 6378 gimple_dump_bb (bb, file, 2, flags);
6531d1be 6379
6de9cd9a 6380 fprintf (file, "}\n");
878f99d2 6381 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a 6382 }
726a989a
RB
6383 else if (DECL_SAVED_TREE (fn) == NULL)
6384 {
6385 /* The function is now in GIMPLE form but the CFG has not been
6386 built yet. Emit the single sequence of GIMPLE statements
6387 that make up its body. */
6388 gimple_seq body = gimple_body (fn);
6389
6390 if (gimple_seq_first_stmt (body)
6391 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6392 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6393 print_gimple_seq (file, body, 0, flags);
6394 else
6395 {
6396 if (!ignore_topmost_bind)
6397 fprintf (file, "{\n");
6398
6399 if (any_var)
6400 fprintf (file, "\n");
6401
6402 print_gimple_seq (file, body, 2, flags);
6403 fprintf (file, "}\n");
6404 }
6405 }
6de9cd9a
DN
6406 else
6407 {
6408 int indent;
6409
6410 /* Make a tree based dump. */
6411 chain = DECL_SAVED_TREE (fn);
6412
953ff289 6413 if (chain && TREE_CODE (chain) == BIND_EXPR)
6de9cd9a
DN
6414 {
6415 if (ignore_topmost_bind)
6416 {
6417 chain = BIND_EXPR_BODY (chain);
6418 indent = 2;
6419 }
6420 else
6421 indent = 0;
6422 }
6423 else
6424 {
6425 if (!ignore_topmost_bind)
6426 fprintf (file, "{\n");
6427 indent = 2;
6428 }
6429
6430 if (any_var)
6431 fprintf (file, "\n");
6432
6433 print_generic_stmt_indented (file, chain, flags, indent);
6434 if (ignore_topmost_bind)
6435 fprintf (file, "}\n");
6436 }
6437
6438 fprintf (file, "\n\n");
953ff289
DN
6439
6440 /* Restore CFUN. */
db2960f4 6441 pop_cfun ();
953ff289
DN
6442}
6443
6444
6445/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6446
6447void
6448debug_function (tree fn, int flags)
6449{
6450 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6451}
6452
6453
d7770457 6454/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6455
6456static void
628f6a4e 6457print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6458{
628f6a4e
BE
6459 edge e;
6460 edge_iterator ei;
6461
6462 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6463 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6464}
6465
6466
d7770457 6467/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6468
6469static void
628f6a4e 6470print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6471{
628f6a4e
BE
6472 edge e;
6473 edge_iterator ei;
6474
6475 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6476 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6477}
6478
0c8efed8
SP
6479/* Print to FILE the basic block BB following the VERBOSITY level. */
6480
6481void
6482print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6483{
6484 char *s_indent = (char *) alloca ((size_t) indent + 1);
6485 memset ((void *) s_indent, ' ', (size_t) indent);
6486 s_indent[indent] = '\0';
6487
6488 /* Print basic_block's header. */
6489 if (verbosity >= 2)
6490 {
6491 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6492 print_pred_bbs (file, bb);
6493 fprintf (file, "}, succs = {");
6494 print_succ_bbs (file, bb);
6495 fprintf (file, "})\n");
6496 }
6497
6498 /* Print basic_block's body. */
6499 if (verbosity >= 3)
6500 {
6501 fprintf (file, "%s {\n", s_indent);
726a989a 6502 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
0c8efed8
SP
6503 fprintf (file, "%s }\n", s_indent);
6504 }
6505}
6506
6507static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6508
0c8efed8
SP
6509/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6510 VERBOSITY level this outputs the contents of the loop, or just its
6511 structure. */
6de9cd9a
DN
6512
6513static void
0c8efed8 6514print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6515{
6516 char *s_indent;
6517 basic_block bb;
6531d1be 6518
6de9cd9a
DN
6519 if (loop == NULL)
6520 return;
6521
6522 s_indent = (char *) alloca ((size_t) indent + 1);
6523 memset ((void *) s_indent, ' ', (size_t) indent);
6524 s_indent[indent] = '\0';
6525
0c8efed8
SP
6526 /* Print loop's header. */
6527 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6528 loop->num, loop->header->index, loop->latch->index);
6529 fprintf (file, ", niter = ");
6530 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6531
0c8efed8
SP
6532 if (loop->any_upper_bound)
6533 {
6534 fprintf (file, ", upper_bound = ");
6535 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6536 }
6531d1be 6537
0c8efed8
SP
6538 if (loop->any_estimate)
6539 {
6540 fprintf (file, ", estimate = ");
6541 dump_double_int (file, loop->nb_iterations_estimate, true);
6542 }
6543 fprintf (file, ")\n");
6544
6545 /* Print loop's body. */
6546 if (verbosity >= 1)
6547 {
6548 fprintf (file, "%s{\n", s_indent);
6549 FOR_EACH_BB (bb)
6550 if (bb->loop_father == loop)
6551 print_loops_bb (file, bb, indent, verbosity);
6552
6553 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6554 fprintf (file, "%s}\n", s_indent);
6555 }
6de9cd9a
DN
6556}
6557
0c8efed8
SP
6558/* Print the LOOP and its sibling loops on FILE, indented INDENT
6559 spaces. Following VERBOSITY level this outputs the contents of the
6560 loop, or just its structure. */
6561
6562static void
6563print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6564{
6565 if (loop == NULL)
6566 return;
6567
6568 print_loop (file, loop, indent, verbosity);
6569 print_loop_and_siblings (file, loop->next, indent, verbosity);
6570}
6de9cd9a
DN
6571
6572/* Follow a CFG edge from the entry point of the program, and on entry
6573 of a loop, pretty print the loop structure on FILE. */
6574
6531d1be 6575void
0c8efed8 6576print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6577{
6578 basic_block bb;
6531d1be 6579
f8bf9252 6580 bb = ENTRY_BLOCK_PTR;
6de9cd9a 6581 if (bb && bb->loop_father)
0c8efed8 6582 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6583}
6584
6585
0c8efed8
SP
6586/* Debugging loops structure at tree level, at some VERBOSITY level. */
6587
6588void
6589debug_loops (int verbosity)
6590{
6591 print_loops (stderr, verbosity);
6592}
6593
6594/* Print on stderr the code of LOOP, at some VERBOSITY level. */
6de9cd9a 6595
6531d1be 6596void
0c8efed8 6597debug_loop (struct loop *loop, int verbosity)
6de9cd9a 6598{
0c8efed8 6599 print_loop (stderr, loop, 0, verbosity);
6de9cd9a
DN
6600}
6601
0c8efed8
SP
6602/* Print on stderr the code of loop number NUM, at some VERBOSITY
6603 level. */
6604
6605void
6606debug_loop_num (unsigned num, int verbosity)
6607{
6608 debug_loop (get_loop (num), verbosity);
6609}
6de9cd9a
DN
6610
6611/* Return true if BB ends with a call, possibly followed by some
6612 instructions that must stay with the call. Return false,
6613 otherwise. */
6614
6615static bool
726a989a 6616gimple_block_ends_with_call_p (basic_block bb)
6de9cd9a 6617{
726a989a
RB
6618 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6619 return is_gimple_call (gsi_stmt (gsi));
6de9cd9a
DN
6620}
6621
6622
6623/* Return true if BB ends with a conditional branch. Return false,
6624 otherwise. */
6625
6626static bool
726a989a 6627gimple_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6628{
726a989a
RB
6629 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6630 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6de9cd9a
DN
6631}
6632
6633
6634/* Return true if we need to add fake edge to exit at statement T.
726a989a 6635 Helper function for gimple_flow_call_edges_add. */
6de9cd9a
DN
6636
6637static bool
726a989a 6638need_fake_edge_p (gimple t)
6de9cd9a 6639{
726a989a
RB
6640 tree fndecl = NULL_TREE;
6641 int call_flags = 0;
6de9cd9a
DN
6642
6643 /* NORETURN and LONGJMP calls already have an edge to exit.
321cf1f2 6644 CONST and PURE calls do not need one.
6de9cd9a
DN
6645 We don't currently check for CONST and PURE here, although
6646 it would be a good idea, because those attributes are
6647 figured out from the RTL in mark_constant_function, and
6648 the counter incrementation code from -fprofile-arcs
6649 leads to different results from -fbranch-probabilities. */
726a989a 6650 if (is_gimple_call (t))
23ef6d21 6651 {
726a989a
RB
6652 fndecl = gimple_call_fndecl (t);
6653 call_flags = gimple_call_flags (t);
23ef6d21
BE
6654 }
6655
726a989a
RB
6656 if (is_gimple_call (t)
6657 && fndecl
6658 && DECL_BUILT_IN (fndecl)
23ef6d21 6659 && (call_flags & ECF_NOTHROW)
3cfa762b
RG
6660 && !(call_flags & ECF_RETURNS_TWICE)
6661 /* fork() doesn't really return twice, but the effect of
6662 wrapping it in __gcov_fork() which calls __gcov_flush()
6663 and clears the counters before forking has the same
6664 effect as returning twice. Force a fake edge. */
6665 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6666 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6667 return false;
23ef6d21 6668
726a989a
RB
6669 if (is_gimple_call (t)
6670 && !(call_flags & ECF_NORETURN))
6de9cd9a
DN
6671 return true;
6672
e0c68ce9 6673 if (gimple_code (t) == GIMPLE_ASM
726a989a 6674 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6de9cd9a
DN
6675 return true;
6676
6677 return false;
6678}
6679
6680
6681/* Add fake edges to the function exit for any non constant and non
6682 noreturn calls, volatile inline assembly in the bitmap of blocks
6683 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6684 the number of blocks that were split.
6685
6686 The goal is to expose cases in which entering a basic block does
6687 not imply that all subsequent instructions must be executed. */
6688
6689static int
726a989a 6690gimple_flow_call_edges_add (sbitmap blocks)
6de9cd9a
DN
6691{
6692 int i;
6693 int blocks_split = 0;
6694 int last_bb = last_basic_block;
6695 bool check_last_block = false;
6696
24bd1a0b 6697 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6698 return 0;
6699
6700 if (! blocks)
6701 check_last_block = true;
6702 else
6703 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6704
6705 /* In the last basic block, before epilogue generation, there will be
6706 a fallthru edge to EXIT. Special care is required if the last insn
6707 of the last basic block is a call because make_edge folds duplicate
6708 edges, which would result in the fallthru edge also being marked
6709 fake, which would result in the fallthru edge being removed by
6710 remove_fake_edges, which would result in an invalid CFG.
6711
6712 Moreover, we can't elide the outgoing fake edge, since the block
6713 profiler needs to take this into account in order to solve the minimal
6714 spanning tree in the case that the call doesn't return.
6715
6716 Handle this by adding a dummy instruction in a new last basic block. */
6717 if (check_last_block)
6718 {
6719 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
726a989a
RB
6720 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6721 gimple t = NULL;
6722
6723 if (!gsi_end_p (gsi))
6724 t = gsi_stmt (gsi);
6de9cd9a 6725
6a60530d 6726 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6727 {
6728 edge e;
6729
9ff3d2de
JL
6730 e = find_edge (bb, EXIT_BLOCK_PTR);
6731 if (e)
6732 {
726a989a
RB
6733 gsi_insert_on_edge (e, gimple_build_nop ());
6734 gsi_commit_edge_inserts ();
9ff3d2de 6735 }
6de9cd9a
DN
6736 }
6737 }
6738
6739 /* Now add fake edges to the function exit for any non constant
6740 calls since there is no way that we can determine if they will
6741 return or not... */
6742 for (i = 0; i < last_bb; i++)
6743 {
6744 basic_block bb = BASIC_BLOCK (i);
726a989a
RB
6745 gimple_stmt_iterator gsi;
6746 gimple stmt, last_stmt;
6de9cd9a
DN
6747
6748 if (!bb)
6749 continue;
6750
6751 if (blocks && !TEST_BIT (blocks, i))
6752 continue;
6753
726a989a
RB
6754 gsi = gsi_last_bb (bb);
6755 if (!gsi_end_p (gsi))
6de9cd9a 6756 {
726a989a 6757 last_stmt = gsi_stmt (gsi);
6de9cd9a
DN
6758 do
6759 {
726a989a 6760 stmt = gsi_stmt (gsi);
6de9cd9a
DN
6761 if (need_fake_edge_p (stmt))
6762 {
6763 edge e;
726a989a 6764
6de9cd9a
DN
6765 /* The handling above of the final block before the
6766 epilogue should be enough to verify that there is
6767 no edge to the exit block in CFG already.
6768 Calling make_edge in such case would cause us to
6769 mark that edge as fake and remove it later. */
6770#ifdef ENABLE_CHECKING
6771 if (stmt == last_stmt)
628f6a4e 6772 {
9ff3d2de
JL
6773 e = find_edge (bb, EXIT_BLOCK_PTR);
6774 gcc_assert (e == NULL);
628f6a4e 6775 }
6de9cd9a
DN
6776#endif
6777
6778 /* Note that the following may create a new basic block
6779 and renumber the existing basic blocks. */
6780 if (stmt != last_stmt)
6781 {
6782 e = split_block (bb, stmt);
6783 if (e)
6784 blocks_split++;
6785 }
6786 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6787 }
726a989a 6788 gsi_prev (&gsi);
6de9cd9a 6789 }
726a989a 6790 while (!gsi_end_p (gsi));
6de9cd9a
DN
6791 }
6792 }
6793
6794 if (blocks_split)
6795 verify_flow_info ();
6796
6797 return blocks_split;
6798}
6799
4f6c2131
EB
6800/* Purge dead abnormal call edges from basic block BB. */
6801
6802bool
726a989a 6803gimple_purge_dead_abnormal_call_edges (basic_block bb)
4f6c2131 6804{
726a989a 6805 bool changed = gimple_purge_dead_eh_edges (bb);
4f6c2131 6806
e3b5732b 6807 if (cfun->has_nonlocal_label)
4f6c2131 6808 {
726a989a 6809 gimple stmt = last_stmt (bb);
4f6c2131
EB
6810 edge_iterator ei;
6811 edge e;
6812
726a989a 6813 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
4f6c2131
EB
6814 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6815 {
6816 if (e->flags & EDGE_ABNORMAL)
6817 {
6818 remove_edge (e);
6819 changed = true;
6820 }
6821 else
6822 ei_next (&ei);
6823 }
6824
726a989a 6825 /* See gimple_purge_dead_eh_edges below. */
4f6c2131
EB
6826 if (changed)
6827 free_dominance_info (CDI_DOMINATORS);
6828 }
6829
6830 return changed;
6831}
6832
672987e8
ZD
6833/* Removes edge E and all the blocks dominated by it, and updates dominance
6834 information. The IL in E->src needs to be updated separately.
6835 If dominance info is not available, only the edge E is removed.*/
6836
6837void
6838remove_edge_and_dominated_blocks (edge e)
6839{
6840 VEC (basic_block, heap) *bbs_to_remove = NULL;
6841 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6842 bitmap df, df_idom;
6843 edge f;
6844 edge_iterator ei;
6845 bool none_removed = false;
6846 unsigned i;
6847 basic_block bb, dbb;
6848 bitmap_iterator bi;
6849
2b28c07a 6850 if (!dom_info_available_p (CDI_DOMINATORS))
672987e8
ZD
6851 {
6852 remove_edge (e);
6853 return;
6854 }
6855
6856 /* No updating is needed for edges to exit. */
6857 if (e->dest == EXIT_BLOCK_PTR)
6858 {
6859 if (cfgcleanup_altered_bbs)
6860 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6861 remove_edge (e);
6862 return;
6863 }
6864
6865 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6866 that is not dominated by E->dest, then this set is empty. Otherwise,
6867 all the basic blocks dominated by E->dest are removed.
6868
6869 Also, to DF_IDOM we store the immediate dominators of the blocks in
6870 the dominance frontier of E (i.e., of the successors of the
6871 removed blocks, if there are any, and of E->dest otherwise). */
6872 FOR_EACH_EDGE (f, ei, e->dest->preds)
6873 {
6874 if (f == e)
6875 continue;
6876
6877 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6878 {
6879 none_removed = true;
6880 break;
6881 }
6882 }
6883
6884 df = BITMAP_ALLOC (NULL);
6885 df_idom = BITMAP_ALLOC (NULL);
6886
6887 if (none_removed)
6888 bitmap_set_bit (df_idom,
6889 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6890 else
6891 {
438c239d 6892 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
672987e8
ZD
6893 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6894 {
6895 FOR_EACH_EDGE (f, ei, bb->succs)
6896 {
6897 if (f->dest != EXIT_BLOCK_PTR)
6898 bitmap_set_bit (df, f->dest->index);
6899 }
6900 }
6901 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6902 bitmap_clear_bit (df, bb->index);
6903
6904 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6905 {
6906 bb = BASIC_BLOCK (i);
6907 bitmap_set_bit (df_idom,
6908 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6909 }
6910 }
6911
6912 if (cfgcleanup_altered_bbs)
6913 {
6914 /* Record the set of the altered basic blocks. */
6915 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6916 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6917 }
6918
6919 /* Remove E and the cancelled blocks. */
6920 if (none_removed)
6921 remove_edge (e);
6922 else
6923 {
6924 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6925 delete_basic_block (bb);
6926 }
6927
6928 /* Update the dominance information. The immediate dominator may change only
6929 for blocks whose immediate dominator belongs to DF_IDOM:
6930
6931 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6932 removal. Let Z the arbitrary block such that idom(Z) = Y and
6933 Z dominates X after the removal. Before removal, there exists a path P
6934 from Y to X that avoids Z. Let F be the last edge on P that is
6935 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6936 dominates W, and because of P, Z does not dominate W), and W belongs to
6937 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6938 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6939 {
6940 bb = BASIC_BLOCK (i);
6941 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6942 dbb;
6943 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6944 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6945 }
6946
66f97d31 6947 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
672987e8
ZD
6948
6949 BITMAP_FREE (df);
6950 BITMAP_FREE (df_idom);
6951 VEC_free (basic_block, heap, bbs_to_remove);
6952 VEC_free (basic_block, heap, bbs_to_fix_dom);
6953}
6954
4f6c2131
EB
6955/* Purge dead EH edges from basic block BB. */
6956
1eaba2f2 6957bool
726a989a 6958gimple_purge_dead_eh_edges (basic_block bb)
1eaba2f2
RH
6959{
6960 bool changed = false;
628f6a4e
BE
6961 edge e;
6962 edge_iterator ei;
726a989a 6963 gimple stmt = last_stmt (bb);
1eaba2f2 6964
726a989a 6965 if (stmt && stmt_can_throw_internal (stmt))
1eaba2f2
RH
6966 return false;
6967
628f6a4e 6968 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6969 {
1eaba2f2
RH
6970 if (e->flags & EDGE_EH)
6971 {
672987e8 6972 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6973 changed = true;
6974 }
628f6a4e
BE
6975 else
6976 ei_next (&ei);
1eaba2f2
RH
6977 }
6978
6979 return changed;
6980}
6981
6982bool
726a989a 6983gimple_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6984{
6985 bool changed = false;
3cd8c58a 6986 unsigned i;
87c476a2 6987 bitmap_iterator bi;
1eaba2f2 6988
87c476a2
ZD
6989 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6990 {
833ee764
JJ
6991 basic_block bb = BASIC_BLOCK (i);
6992
6993 /* Earlier gimple_purge_dead_eh_edges could have removed
6994 this basic block already. */
6995 gcc_assert (bb || changed);
6996 if (bb != NULL)
6997 changed |= gimple_purge_dead_eh_edges (bb);
87c476a2 6998 }
1eaba2f2
RH
6999
7000 return changed;
7001}
6de9cd9a 7002
a100ac1e
KH
7003/* This function is called whenever a new edge is created or
7004 redirected. */
7005
7006static void
726a989a 7007gimple_execute_on_growing_pred (edge e)
a100ac1e
KH
7008{
7009 basic_block bb = e->dest;
7010
7011 if (phi_nodes (bb))
7012 reserve_phi_args_for_new_edge (bb);
7013}
7014
e51546f8
KH
7015/* This function is called immediately before edge E is removed from
7016 the edge vector E->dest->preds. */
7017
7018static void
726a989a 7019gimple_execute_on_shrinking_pred (edge e)
e51546f8
KH
7020{
7021 if (phi_nodes (e->dest))
7022 remove_phi_args (e);
7023}
7024
1cb7dfc3
MH
7025/*---------------------------------------------------------------------------
7026 Helper functions for Loop versioning
7027 ---------------------------------------------------------------------------*/
7028
7029/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7030 of 'first'. Both of them are dominated by 'new_head' basic block. When
7031 'new_head' was created by 'second's incoming edge it received phi arguments
7032 on the edge by split_edge(). Later, additional edge 'e' was created to
6531d1be
BF
7033 connect 'new_head' and 'first'. Now this routine adds phi args on this
7034 additional edge 'e' that new_head to second edge received as part of edge
726a989a 7035 splitting. */
1cb7dfc3
MH
7036
7037static void
726a989a
RB
7038gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7039 basic_block new_head, edge e)
1cb7dfc3 7040{
726a989a
RB
7041 gimple phi1, phi2;
7042 gimple_stmt_iterator psi1, psi2;
7043 tree def;
d0e12fc6
KH
7044 edge e2 = find_edge (new_head, second);
7045
7046 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7047 edge, we should always have an edge from NEW_HEAD to SECOND. */
7048 gcc_assert (e2 != NULL);
1cb7dfc3
MH
7049
7050   /* Walk the phi nodes of basic block 'second' and add the corresponding
7051      phi args to edge 'e' of the 'first' head.  PHI args are always in the correct order.  */
7052
726a989a
RB
7053 for (psi2 = gsi_start_phis (second),
7054 psi1 = gsi_start_phis (first);
7055 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7056 gsi_next (&psi2), gsi_next (&psi1))
1cb7dfc3 7057 {
726a989a
RB
7058 phi1 = gsi_stmt (psi1);
7059 phi2 = gsi_stmt (psi2);
7060 def = PHI_ARG_DEF (phi2, e2->dest_idx);
d0e12fc6 7061 add_phi_arg (phi1, def, e);
1cb7dfc3
MH
7062 }
7063}
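/* A minimal sketch of the underlying pattern (an illustration, not part of
   this file): to give a newly added incoming edge E of a block the same PHI
   argument that the block already receives on another incoming edge E2, read
   the value with PHI_ARG_DEF and attach it with add_phi_arg, just as the
   loop above does for every PHI node of 'second'.

     tree def = PHI_ARG_DEF (phi, e2->dest_idx);
     add_phi_arg (phi, def, e);
*/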
7064
726a989a 7065
6531d1be
BF
7066/* Adds an if-else statement to COND_BB with condition COND_EXPR.
7067   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
1cb7dfc3 7068   the destination of the ELSE part.  */
726a989a 7069
1cb7dfc3 7070static void
726a989a
RB
7071gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7072 basic_block second_head ATTRIBUTE_UNUSED,
7073 basic_block cond_bb, void *cond_e)
1cb7dfc3 7074{
726a989a
RB
7075 gimple_stmt_iterator gsi;
7076 gimple new_cond_expr;
1cb7dfc3
MH
7077 tree cond_expr = (tree) cond_e;
7078 edge e0;
7079
7080   /* Build the new conditional expression.  */
726a989a
RB
7081 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7082 NULL_TREE, NULL_TREE);
1cb7dfc3 7083
6531d1be 7084   /* Add the new condition to COND_BB.  */
726a989a
RB
7085 gsi = gsi_last_bb (cond_bb);
7086 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7087
1cb7dfc3
MH
7088 /* Adjust edges appropriately to connect new head with first head
7089 as well as second head. */
7090 e0 = single_succ_edge (cond_bb);
7091 e0->flags &= ~EDGE_FALLTHRU;
7092 e0->flags |= EDGE_FALSE_VALUE;
7093}
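/* A minimal caller-side sketch (an illustration; 'iv' and 'bound' are
   hypothetical operands supplied by whatever pass requests the versioning):
   COND_E is an ordinary boolean tree condition, for example a bound check
   built with build2, which this hook then turns into a GIMPLE_COND.

     tree cond = build2 (LT_EXPR, boolean_type_node, iv, bound);
*/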
7094
726a989a
RB
7095struct cfg_hooks gimple_cfg_hooks = {
7096 "gimple",
7097 gimple_verify_flow_info,
7098 gimple_dump_bb, /* dump_bb */
6de9cd9a 7099 create_bb, /* create_basic_block */
726a989a
RB
7100 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7101 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7102 gimple_can_remove_branch_p, /* can_remove_branch_p */
6de9cd9a 7103 remove_bb, /* delete_basic_block */
726a989a
RB
7104 gimple_split_block, /* split_block */
7105 gimple_move_block_after, /* move_block_after */
7106 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7107 gimple_merge_blocks, /* merge_blocks */
7108 gimple_predict_edge, /* predict_edge */
7109 gimple_predicted_by_p, /* predicted_by_p */
7110 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7111 gimple_duplicate_bb, /* duplicate_block */
7112 gimple_split_edge, /* split_edge */
7113   gimple_make_forwarder_block,  /* make_forwarder_block */
6de9cd9a 7114 NULL, /* tidy_fallthru_edge */
726a989a
RB
7115 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7116 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7117 gimple_flow_call_edges_add, /* flow_call_edges_add */
7118 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7119 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7120 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7121 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7122   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
1cb7dfc3 7123 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6531d1be 7124 flush_pending_stmts /* flush_pending_stmts */
6de9cd9a
DN
7125};
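/* A minimal dispatch sketch (an illustration, not part of this file):
   generic CFG routines such as split_edge and split_block consult the
   active cfg_hooks table, so code working on GIMPLE first installs the
   table above; gimple_register_cfg_hooks is assumed to be the usual
   cfghooks entry point that does so.

     gimple_register_cfg_hooks ();
     basic_block new_bb = split_edge (e);

   The first call points cfg_hooks at gimple_cfg_hooks; the second then
   reaches gimple_split_edge through the table.
*/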
7126
7127
7128/* Split all critical edges. */
7129
c2924966 7130static unsigned int
6de9cd9a
DN
7131split_critical_edges (void)
7132{
7133 basic_block bb;
7134 edge e;
628f6a4e 7135 edge_iterator ei;
6de9cd9a 7136
d6be0d7f
JL
7137 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7138 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7139 mappings around the calls to split_edge. */
7140 start_recording_case_labels ();
6de9cd9a
DN
7141 FOR_ALL_BB (bb)
7142 {
628f6a4e 7143 FOR_EACH_EDGE (e, ei, bb->succs)
496a4ef5
JH
7144 {
7145 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6de9cd9a 7146 split_edge (e);
496a4ef5
JH
7147          /* PRE inserts statements on edges and expects that, because
7148             split_critical_edges was run beforehand, committing those edge
7149             insertions will not split any more edges.  In addition to critical
7150             edges we must therefore also split edges whose destination has
7151             PHI nodes or multiple predecessors and whose source ends with a
7152             control flow statement, such as RESX.  Go ahead and split them
7153             too.  This matches the logic in gimple_find_edge_insert_loc.  */
7154 else if ((!single_pred_p (e->dest)
7155 || phi_nodes (e->dest)
7156 || e->dest == EXIT_BLOCK_PTR)
7157 && e->src != ENTRY_BLOCK_PTR
7158 && !(e->flags & EDGE_ABNORMAL))
7159 {
7160 gimple_stmt_iterator gsi;
7161
7162 gsi = gsi_last_bb (e->src);
7163 if (!gsi_end_p (gsi)
7164 && stmt_ends_bb_p (gsi_stmt (gsi))
7165 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7166 split_edge (e);
7167 }
7168 }
6de9cd9a 7169 }
d6be0d7f 7170 end_recording_case_labels ();
c2924966 7171 return 0;
6de9cd9a
DN
7172}
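/* A minimal sketch of why this matters (an illustration, not part of this
   file): an edge is critical when its source has several successors and its
   destination has several predecessors, so a statement cannot be placed on
   it without creating a new block.  With such edges pre-split, a later pass
   can queue statements on edges and commit them without perturbing the CFG.

     gsi_insert_on_edge (e, stmt);
     gsi_commit_edge_inserts ();
*/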
7173
8ddbbcae 7174struct gimple_opt_pass pass_split_crit_edges =
6de9cd9a 7175{
8ddbbcae
JH
7176 {
7177 GIMPLE_PASS,
5d44aeed 7178 "crited", /* name */
6de9cd9a
DN
7179 NULL, /* gate */
7180 split_critical_edges, /* execute */
7181 NULL, /* sub */
7182 NULL, /* next */
7183 0, /* static_pass_number */
7184 TV_TREE_SPLIT_EDGES, /* tv_id */
7185 PROP_cfg, /* properties required */
7186 PROP_no_crit_edges, /* properties_provided */
7187 0, /* properties_destroyed */
7188 0, /* todo_flags_start */
9187e02d 7189 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
8ddbbcae 7190 }
6de9cd9a 7191};
26277d41 7192
26277d41 7193
726a989a 7194/* Build a ternary operation and gimplify it. Emit code before GSI.
26277d41
PB
7195 Return the gimple_val holding the result. */
7196
7197tree
726a989a 7198gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7199 tree type, tree a, tree b, tree c)
7200{
7201 tree ret;
db3927fb 7202 location_t loc = gimple_location (gsi_stmt (*gsi));
26277d41 7203
db3927fb 7204 ret = fold_build3_loc (loc, code, type, a, b, c);
26277d41
PB
7205 STRIP_NOPS (ret);
7206
726a989a
RB
7207 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7208 GSI_SAME_STMT);
26277d41
PB
7209}
7210
726a989a 7211/* Build a binary operation and gimplify it. Emit code before GSI.
26277d41
PB
7212 Return the gimple_val holding the result. */
7213
7214tree
726a989a 7215gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7216 tree type, tree a, tree b)
7217{
7218 tree ret;
7219
db3927fb 7220 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
26277d41
PB
7221 STRIP_NOPS (ret);
7222
726a989a
RB
7223 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7224 GSI_SAME_STMT);
26277d41
PB
7225}
7226
726a989a 7227/* Build a unary operation and gimplify it. Emit code before GSI.
26277d41
PB
7228 Return the gimple_val holding the result. */
7229
7230tree
726a989a 7231gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
26277d41
PB
7232 tree a)
7233{
7234 tree ret;
7235
db3927fb 7236 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
26277d41
PB
7237 STRIP_NOPS (ret);
7238
726a989a
RB
7239 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7240 GSI_SAME_STMT);
26277d41
PB
7241}
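/* A minimal usage sketch (an illustration; 'stmt', 'a', 'b', 'c' and 'type'
   stand for whatever operands the caller already has): a lowering pass
   positioned at STMT can materialize (a + b) * c as gimple values emitted
   just before it.

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     tree sum  = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression, gimplifies it, inserts the resulting
   statements before GSI and returns a gimple_val holding the result.
*/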
7242
7243
6de9cd9a
DN
7244\f
7245/* Emit return warnings. */
7246
c2924966 7247static unsigned int
6de9cd9a
DN
7248execute_warn_function_return (void)
7249{
9506ac2b 7250 source_location location;
726a989a 7251 gimple last;
6de9cd9a 7252 edge e;
628f6a4e 7253 edge_iterator ei;
6de9cd9a 7254
6de9cd9a
DN
7255 /* If we have a path to EXIT, then we do return. */
7256 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 7257 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 7258 {
9506ac2b 7259 location = UNKNOWN_LOCATION;
628f6a4e 7260 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7261 {
7262 last = last_stmt (e->src);
726a989a
RB
7263 if (gimple_code (last) == GIMPLE_RETURN
7264 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
6de9cd9a
DN
7265 break;
7266 }
9506ac2b
PB
7267 if (location == UNKNOWN_LOCATION)
7268 location = cfun->function_end_locus;
fab922b1 7269 warning_at (location, 0, "%<noreturn%> function does return");
6de9cd9a
DN
7270 }
7271
7272 /* If we see "return;" in some basic block, then we do reach the end
7273 without returning a value. */
7274 else if (warn_return_type
089efaa4 7275 && !TREE_NO_WARNING (cfun->decl)
628f6a4e 7276 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
7277 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7278 {
628f6a4e 7279 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a 7280 {
726a989a
RB
7281 gimple last = last_stmt (e->src);
7282 if (gimple_code (last) == GIMPLE_RETURN
7283 && gimple_return_retval (last) == NULL
7284 && !gimple_no_warning_p (last))
6de9cd9a 7285 {
726a989a 7286 location = gimple_location (last);
9506ac2b
PB
7287 if (location == UNKNOWN_LOCATION)
7288 location = cfun->function_end_locus;
aa14403d 7289 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
089efaa4 7290 TREE_NO_WARNING (cfun->decl) = 1;
6de9cd9a
DN
7291 break;
7292 }
7293 }
7294 }
c2924966 7295 return 0;
6de9cd9a
DN
7296}
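/* Two minimal C examples of code that triggers these warnings (an
   illustration, not part of this file):

     int f (int x) { if (x) return 1; }
       -> with -Wreturn-type: "control reaches end of non-void function"

     void g (void) __attribute__ ((noreturn));
     void g (void) { }
       -> "'noreturn' function does return"
*/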
7297
7298
7299/* Given a basic block B which ends with a conditional and has
7300 precisely two successors, determine which of the edges is taken if
7301 the conditional is true and which is taken if the conditional is
7302 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7303
7304void
7305extract_true_false_edges_from_block (basic_block b,
7306 edge *true_edge,
7307 edge *false_edge)
7308{
628f6a4e 7309 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7310
7311 if (e->flags & EDGE_TRUE_VALUE)
7312 {
7313 *true_edge = e;
628f6a4e 7314 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7315 }
7316 else
7317 {
7318 *false_edge = e;
628f6a4e 7319 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7320 }
7321}
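/* A minimal usage sketch (an illustration, not part of this file): for a
   block BB that ends in a GIMPLE_COND with exactly two successors, recover
   which edge is taken when the condition is true and which when it is false.

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
*/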
7322
8ddbbcae 7323struct gimple_opt_pass pass_warn_function_return =
6de9cd9a 7324{
8ddbbcae
JH
7325 {
7326 GIMPLE_PASS,
6de9cd9a
DN
7327 NULL, /* name */
7328 NULL, /* gate */
7329 execute_warn_function_return, /* execute */
7330 NULL, /* sub */
7331 NULL, /* next */
7332 0, /* static_pass_number */
7072a650 7333 TV_NONE, /* tv_id */
00bfee6f 7334 PROP_cfg, /* properties_required */
6de9cd9a
DN
7335 0, /* properties_provided */
7336 0, /* properties_destroyed */
7337 0, /* todo_flags_start */
8ddbbcae
JH
7338 0 /* todo_flags_finish */
7339 }
6de9cd9a 7340};
aa313ed4
JH
7341
7342/* Emit noreturn warnings. */
7343
c2924966 7344static unsigned int
aa313ed4
JH
7345execute_warn_function_noreturn (void)
7346{
7347 if (warn_missing_noreturn
7348 && !TREE_THIS_VOLATILE (cfun->decl)
7349 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7350 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
c5d75364
MLI
7351 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7352 "function might be possible candidate "
7353 "for attribute %<noreturn%>");
c2924966 7354 return 0;
aa313ed4
JH
7355}
7356
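/* A minimal C example of a -Wmissing-noreturn candidate (an illustration,
   not part of this file): every path ends in a call to the noreturn
   function exit, so the function's exit block has no predecessors.

     #include <stdlib.h>
     static void die (void) { exit (1); }
       -> "function might be possible candidate for attribute 'noreturn'"
*/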
8ddbbcae 7357struct gimple_opt_pass pass_warn_function_noreturn =
aa313ed4 7358{
8ddbbcae
JH
7359 {
7360 GIMPLE_PASS,
aa313ed4
JH
7361 NULL, /* name */
7362 NULL, /* gate */
7363 execute_warn_function_noreturn, /* execute */
7364 NULL, /* sub */
7365 NULL, /* next */
7366 0, /* static_pass_number */
7072a650 7367 TV_NONE, /* tv_id */
aa313ed4
JH
7368 PROP_cfg, /* properties_required */
7369 0, /* properties_provided */
7370 0, /* properties_destroyed */
7371 0, /* todo_flags_start */
8ddbbcae
JH
7372 0 /* todo_flags_finish */
7373 }
aa313ed4 7374};
a406865a
RG
7375
7376
7377/* Walk a gimplified function and warn about calls whose return value is
7378   ignored and whose callee is declared with attribute ((warn_unused_result)).
7379   This runs before inlining, so we need not worry about inlined call sites.  */
7380
7381static void
7382do_warn_unused_result (gimple_seq seq)
7383{
7384 tree fdecl, ftype;
7385 gimple_stmt_iterator i;
7386
7387 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7388 {
7389 gimple g = gsi_stmt (i);
7390
7391 switch (gimple_code (g))
7392 {
7393 case GIMPLE_BIND:
7394 do_warn_unused_result (gimple_bind_body (g));
7395 break;
7396 case GIMPLE_TRY:
7397 do_warn_unused_result (gimple_try_eval (g));
7398 do_warn_unused_result (gimple_try_cleanup (g));
7399 break;
7400 case GIMPLE_CATCH:
7401 do_warn_unused_result (gimple_catch_handler (g));
7402 break;
7403 case GIMPLE_EH_FILTER:
7404 do_warn_unused_result (gimple_eh_filter_failure (g));
7405 break;
7406
7407 case GIMPLE_CALL:
7408 if (gimple_call_lhs (g))
7409 break;
7410
7411 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7412 LHS. All calls whose value is ignored should be
7413 represented like this. Look for the attribute. */
7414 fdecl = gimple_call_fndecl (g);
7415 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7416
7417 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7418 {
7419 location_t loc = gimple_location (g);
7420
7421 if (fdecl)
7422 warning_at (loc, OPT_Wunused_result,
7423 "ignoring return value of %qD, "
7424 "declared with attribute warn_unused_result",
7425 fdecl);
7426 else
7427 warning_at (loc, OPT_Wunused_result,
7428 "ignoring return value of function "
7429 "declared with attribute warn_unused_result");
7430 }
7431 break;
7432
7433 default:
7434 /* Not a container, not a call, or a call whose value is used. */
7435 break;
7436 }
7437 }
7438}
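/* A minimal C example that triggers the warning (an illustration, not part
   of this file):

     __attribute__ ((warn_unused_result)) int must_check (void);
     void f (void) { must_check (); }

   The naked call has no LHS after gimplification and its callee's type
   carries the attribute, so the walk above reports "ignoring return value
   of 'must_check', declared with attribute warn_unused_result".
*/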
7439
7440static unsigned int
7441run_warn_unused_result (void)
7442{
7443 do_warn_unused_result (gimple_body (current_function_decl));
7444 return 0;
7445}
7446
7447static bool
7448gate_warn_unused_result (void)
7449{
7450 return flag_warn_unused_result;
7451}
7452
7453struct gimple_opt_pass pass_warn_unused_result =
7454{
7455 {
7456 GIMPLE_PASS,
7457 "warn_unused_result", /* name */
7458 gate_warn_unused_result, /* gate */
7459 run_warn_unused_result, /* execute */
7460 NULL, /* sub */
7461 NULL, /* next */
7462 0, /* static_pass_number */
7463 TV_NONE, /* tv_id */
7464 PROP_gimple_any, /* properties_required */
7465 0, /* properties_provided */
7466 0, /* properties_destroyed */
7467 0, /* todo_flags_start */
7468 0, /* todo_flags_finish */
7469 }
7470};
7471