]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-cfg.c
acinclude.m4 (LIBGOMP_CHECK_SYNC_BUILTINS): Remove set but not unused bar variable.
[thirdparty/gcc.git] / gcc / tree-cfg.c
CommitLineData
6de9cd9a 1/* Control flow functions for trees.
7c028163
AO
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
9dcd6f09 10the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
6de9cd9a
DN
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
42759f1e 45#include "cfglayout.h"
9af0df6b 46#include "tree-ssa-propagate.h"
6946b3f7 47#include "value-prof.h"
4437b50d 48#include "pointer-set.h"
917948d3 49#include "tree-inline.h"
6de9cd9a
DN
50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
54/* Local declarations. */
55
56/* Initial capacity for the basic block array. */
57static const int initial_cfg_capacity = 20;
58
d6be0d7f
JL
59/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
726a989a 62 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
92b6dff3 63
d6be0d7f
JL
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
92b6dff3 66
d6be0d7f
JL
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
71
15814ba0 72static struct pointer_map_t *edge_to_cases;
92b6dff3 73
6de9cd9a
DN
74/* CFG statistics. */
75struct cfg_stats_d
76{
77 long num_merged_labels;
78};
79
80static struct cfg_stats_d cfg_stats;
81
82/* Nonzero if we found a computed goto while building basic blocks. */
83static bool found_computed_goto;
84
6c52e687
CC
85/* Hash table to store last discriminator assigned for each locus. */
86struct locus_discrim_map
87{
88 location_t locus;
89 int discriminator;
90};
91static htab_t discriminator_per_locus;
92
6de9cd9a 93/* Basic blocks and flowgraphs. */
726a989a 94static void make_blocks (gimple_seq);
6de9cd9a 95static void factor_computed_gotos (void);
6de9cd9a
DN
96
97/* Edges. */
98static void make_edges (void);
6de9cd9a 99static void make_cond_expr_edges (basic_block);
726a989a 100static void make_gimple_switch_edges (basic_block);
6de9cd9a 101static void make_goto_expr_edges (basic_block);
1c384bf1 102static void make_gimple_asm_edges (basic_block);
6c52e687
CC
103static unsigned int locus_map_hash (const void *);
104static int locus_map_eq (const void *, const void *);
105static void assign_discriminator (location_t, basic_block);
726a989a
RB
106static edge gimple_redirect_edge_and_branch (edge, basic_block);
107static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
c2924966 108static unsigned int split_critical_edges (void);
6de9cd9a
DN
109
110/* Various helpers. */
726a989a
RB
111static inline bool stmt_starts_bb_p (gimple, gimple);
112static int gimple_verify_flow_info (void);
113static void gimple_make_forwarder_block (edge);
114static void gimple_cfg2vcg (FILE *);
6c52e687 115static gimple first_non_label_stmt (basic_block);
6de9cd9a
DN
116
117/* Flowgraph optimization and cleanup. */
726a989a
RB
118static void gimple_merge_blocks (basic_block, basic_block);
119static bool gimple_can_merge_blocks_p (basic_block, basic_block);
6de9cd9a 120static void remove_bb (basic_block);
be477406 121static edge find_taken_edge_computed_goto (basic_block, tree);
6de9cd9a
DN
122static edge find_taken_edge_cond_expr (basic_block, tree);
123static edge find_taken_edge_switch_expr (basic_block, tree);
726a989a 124static tree find_case_label_for_value (gimple, tree);
6de9cd9a 125
a930a4ef 126void
9defb1fe 127init_empty_tree_cfg_for_function (struct function *fn)
a930a4ef
JH
128{
129 /* Initialize the basic block array. */
9defb1fe
DN
130 init_flow (fn);
131 profile_status_for_function (fn) = PROFILE_ABSENT;
132 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
133 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
134 basic_block_info_for_function (fn)
135 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
136 VEC_safe_grow_cleared (basic_block, gc,
137 basic_block_info_for_function (fn),
a590ac65 138 initial_cfg_capacity);
a930a4ef
JH
139
140 /* Build a mapping of labels to their associated blocks. */
9defb1fe
DN
141 label_to_block_map_for_function (fn)
142 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
143 VEC_safe_grow_cleared (basic_block, gc,
144 label_to_block_map_for_function (fn),
a590ac65 145 initial_cfg_capacity);
a930a4ef 146
b8698a0f 147 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
9defb1fe 148 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
b8698a0f 149 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
9defb1fe
DN
150 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
151
152 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
153 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
154 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
155 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
156}
157
158void
159init_empty_tree_cfg (void)
160{
161 init_empty_tree_cfg_for_function (cfun);
a930a4ef 162}
6de9cd9a
DN
163
164/*---------------------------------------------------------------------------
165 Create basic blocks
166---------------------------------------------------------------------------*/
167
726a989a 168/* Entry point to the CFG builder for trees. SEQ is the sequence of
6de9cd9a
DN
169 statements to be added to the flowgraph. */
170
171static void
726a989a 172build_gimple_cfg (gimple_seq seq)
6de9cd9a 173{
726a989a
RB
174 /* Register specific gimple functions. */
175 gimple_register_cfg_hooks ();
6de9cd9a 176
6de9cd9a
DN
177 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
178
a930a4ef 179 init_empty_tree_cfg ();
6de9cd9a
DN
180
181 found_computed_goto = 0;
726a989a 182 make_blocks (seq);
6de9cd9a
DN
183
184 /* Computed gotos are hell to deal with, especially if there are
185 lots of them with a large number of destinations. So we factor
186 them to a common computed goto location before we build the
187 edge list. After we convert back to normal form, we will un-factor
188 the computed gotos since factoring introduces an unwanted jump. */
189 if (found_computed_goto)
190 factor_computed_gotos ();
191
f0b698c1 192 /* Make sure there is always at least one block, even if it's empty. */
24bd1a0b 193 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
194 create_empty_bb (ENTRY_BLOCK_PTR);
195
6de9cd9a 196 /* Adjust the size of the array. */
68f9b844 197 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
a590ac65 198 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
6de9cd9a 199
f667741c
SB
200 /* To speed up statement iterator walks, we first purge dead labels. */
201 cleanup_dead_labels ();
202
203 /* Group case nodes to reduce the number of edges.
204 We do this after cleaning up dead labels because otherwise we miss
205 a lot of obvious case merging opportunities. */
206 group_case_labels ();
207
6de9cd9a 208 /* Create the edges of the flowgraph. */
6c52e687
CC
209 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
210 free);
6de9cd9a 211 make_edges ();
8b11009b 212 cleanup_dead_labels ();
6c52e687 213 htab_delete (discriminator_per_locus);
6de9cd9a
DN
214
215 /* Debugging dumps. */
216
217 /* Write the flowgraph to a VCG file. */
218 {
219 int local_dump_flags;
10d22567
ZD
220 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
221 if (vcg_file)
6de9cd9a 222 {
726a989a 223 gimple_cfg2vcg (vcg_file);
10d22567 224 dump_end (TDI_vcg, vcg_file);
6de9cd9a
DN
225 }
226 }
227
81cfbbc2
JH
228#ifdef ENABLE_CHECKING
229 verify_stmts ();
230#endif
6de9cd9a
DN
231}
232
c2924966 233static unsigned int
6de9cd9a
DN
234execute_build_cfg (void)
235{
39ecc018
JH
236 gimple_seq body = gimple_body (current_function_decl);
237
238 build_gimple_cfg (body);
239 gimple_set_body (current_function_decl, NULL);
cff7525f
JH
240 if (dump_file && (dump_flags & TDF_DETAILS))
241 {
242 fprintf (dump_file, "Scope blocks:\n");
243 dump_scope_blocks (dump_file, dump_flags);
244 }
c2924966 245 return 0;
6de9cd9a
DN
246}
247
8ddbbcae 248struct gimple_opt_pass pass_build_cfg =
6de9cd9a 249{
8ddbbcae
JH
250 {
251 GIMPLE_PASS,
6de9cd9a
DN
252 "cfg", /* name */
253 NULL, /* gate */
254 execute_build_cfg, /* execute */
255 NULL, /* sub */
256 NULL, /* next */
257 0, /* static_pass_number */
258 TV_TREE_CFG, /* tv_id */
726a989a 259 PROP_gimple_leh, /* properties_required */
6de9cd9a
DN
260 PROP_cfg, /* properties_provided */
261 0, /* properties_destroyed */
262 0, /* todo_flags_start */
11b08ee9
RG
263 TODO_verify_stmts | TODO_cleanup_cfg
264 | TODO_dump_func /* todo_flags_finish */
8ddbbcae 265 }
6de9cd9a
DN
266};
267
726a989a
RB
268
269/* Return true if T is a computed goto. */
270
271static bool
272computed_goto_p (gimple t)
273{
274 return (gimple_code (t) == GIMPLE_GOTO
275 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
276}
277
278
6531d1be 279/* Search the CFG for any computed gotos. If found, factor them to a
6de9cd9a 280 common computed goto site. Also record the location of that site so
6531d1be 281 that we can un-factor the gotos after we have converted back to
6de9cd9a
DN
282 normal form. */
283
284static void
285factor_computed_gotos (void)
286{
287 basic_block bb;
288 tree factored_label_decl = NULL;
289 tree var = NULL;
726a989a
RB
290 gimple factored_computed_goto_label = NULL;
291 gimple factored_computed_goto = NULL;
6de9cd9a
DN
292
293 /* We know there are one or more computed gotos in this function.
294 Examine the last statement in each basic block to see if the block
295 ends with a computed goto. */
6531d1be 296
6de9cd9a
DN
297 FOR_EACH_BB (bb)
298 {
726a989a
RB
299 gimple_stmt_iterator gsi = gsi_last_bb (bb);
300 gimple last;
6de9cd9a 301
726a989a 302 if (gsi_end_p (gsi))
6de9cd9a 303 continue;
726a989a
RB
304
305 last = gsi_stmt (gsi);
6de9cd9a
DN
306
307 /* Ignore the computed goto we create when we factor the original
308 computed gotos. */
309 if (last == factored_computed_goto)
310 continue;
311
312 /* If the last statement is a computed goto, factor it. */
313 if (computed_goto_p (last))
314 {
726a989a 315 gimple assignment;
6de9cd9a
DN
316
317 /* The first time we find a computed goto we need to create
318 the factored goto block and the variable each original
319 computed goto will use for their goto destination. */
726a989a 320 if (!factored_computed_goto)
6de9cd9a
DN
321 {
322 basic_block new_bb = create_empty_bb (bb);
726a989a 323 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
6de9cd9a
DN
324
325 /* Create the destination of the factored goto. Each original
326 computed goto will put its desired destination into this
327 variable and jump to the label we create immediately
328 below. */
329 var = create_tmp_var (ptr_type_node, "gotovar");
330
331 /* Build a label for the new block which will contain the
332 factored computed goto. */
c2255bc4 333 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 334 factored_computed_goto_label
726a989a
RB
335 = gimple_build_label (factored_label_decl);
336 gsi_insert_after (&new_gsi, factored_computed_goto_label,
337 GSI_NEW_STMT);
6de9cd9a
DN
338
339 /* Build our new computed goto. */
726a989a
RB
340 factored_computed_goto = gimple_build_goto (var);
341 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
6de9cd9a
DN
342 }
343
344 /* Copy the original computed goto's destination into VAR. */
726a989a
RB
345 assignment = gimple_build_assign (var, gimple_goto_dest (last));
346 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
6de9cd9a
DN
347
348 /* And re-vector the computed goto to the new destination. */
726a989a 349 gimple_goto_set_dest (last, factored_label_decl);
6de9cd9a
DN
350 }
351 }
352}
353
354
726a989a 355/* Build a flowgraph for the sequence of stmts SEQ. */
6de9cd9a
DN
356
357static void
726a989a 358make_blocks (gimple_seq seq)
6de9cd9a 359{
726a989a
RB
360 gimple_stmt_iterator i = gsi_start (seq);
361 gimple stmt = NULL;
6de9cd9a 362 bool start_new_block = true;
726a989a 363 bool first_stmt_of_seq = true;
6de9cd9a
DN
364 basic_block bb = ENTRY_BLOCK_PTR;
365
726a989a 366 while (!gsi_end_p (i))
6de9cd9a 367 {
726a989a 368 gimple prev_stmt;
6de9cd9a
DN
369
370 prev_stmt = stmt;
726a989a 371 stmt = gsi_stmt (i);
6de9cd9a
DN
372
373 /* If the statement starts a new basic block or if we have determined
374 in a previous pass that we need to create a new block for STMT, do
375 so now. */
376 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
377 {
726a989a
RB
378 if (!first_stmt_of_seq)
379 seq = gsi_split_seq_before (&i);
380 bb = create_basic_block (seq, NULL, bb);
6de9cd9a
DN
381 start_new_block = false;
382 }
383
384 /* Now add STMT to BB and create the subgraphs for special statement
385 codes. */
726a989a 386 gimple_set_bb (stmt, bb);
6de9cd9a
DN
387
388 if (computed_goto_p (stmt))
389 found_computed_goto = true;
390
391 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
392 next iteration. */
393 if (stmt_ends_bb_p (stmt))
54634841
RG
394 {
395 /* If the stmt can make abnormal goto use a new temporary
396 for the assignment to the LHS. This makes sure the old value
397 of the LHS is available on the abnormal edge. Otherwise
398 we will end up with overlapping life-ranges for abnormal
399 SSA names. */
400 if (gimple_has_lhs (stmt)
401 && stmt_can_make_abnormal_goto (stmt)
402 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
403 {
404 tree lhs = gimple_get_lhs (stmt);
405 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
406 gimple s = gimple_build_assign (lhs, tmp);
407 gimple_set_location (s, gimple_location (stmt));
408 gimple_set_block (s, gimple_block (stmt));
409 gimple_set_lhs (stmt, tmp);
410 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
411 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
412 DECL_GIMPLE_REG_P (tmp) = 1;
413 gsi_insert_after (&i, s, GSI_SAME_STMT);
414 }
415 start_new_block = true;
416 }
6de9cd9a 417
726a989a
RB
418 gsi_next (&i);
419 first_stmt_of_seq = false;
6de9cd9a
DN
420 }
421}
422
423
424/* Create and return a new empty basic block after bb AFTER. */
425
426static basic_block
427create_bb (void *h, void *e, basic_block after)
428{
429 basic_block bb;
430
1e128c5f 431 gcc_assert (!e);
6de9cd9a 432
27fd69fa
KH
433 /* Create and initialize a new basic block. Since alloc_block uses
434 ggc_alloc_cleared to allocate a basic block, we do not have to
435 clear the newly allocated basic block here. */
6de9cd9a 436 bb = alloc_block ();
6de9cd9a
DN
437
438 bb->index = last_basic_block;
439 bb->flags = BB_NEW;
726a989a
RB
440 bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
441 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
6de9cd9a
DN
442
443 /* Add the new block to the linked list of blocks. */
444 link_block (bb, after);
445
446 /* Grow the basic block array if needed. */
68f9b844 447 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
6de9cd9a
DN
448 {
449 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
a590ac65 450 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
6de9cd9a
DN
451 }
452
453 /* Add the newly created block to the array. */
68f9b844 454 SET_BASIC_BLOCK (last_basic_block, bb);
6de9cd9a 455
6de9cd9a
DN
456 n_basic_blocks++;
457 last_basic_block++;
458
6de9cd9a
DN
459 return bb;
460}
461
462
463/*---------------------------------------------------------------------------
464 Edge creation
465---------------------------------------------------------------------------*/
466
fca01525
KH
467/* Fold COND_EXPR_COND of each COND_EXPR. */
468
e21aff8a 469void
fca01525
KH
470fold_cond_expr_cond (void)
471{
472 basic_block bb;
473
474 FOR_EACH_BB (bb)
475 {
726a989a 476 gimple stmt = last_stmt (bb);
fca01525 477
726a989a 478 if (stmt && gimple_code (stmt) == GIMPLE_COND)
fca01525 479 {
db3927fb 480 location_t loc = gimple_location (stmt);
6ac01510
ILT
481 tree cond;
482 bool zerop, onep;
483
484 fold_defer_overflow_warnings ();
db3927fb 485 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
726a989a
RB
486 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
487 if (cond)
488 {
489 zerop = integer_zerop (cond);
490 onep = integer_onep (cond);
491 }
492 else
493 zerop = onep = false;
494
e233ac97 495 fold_undefer_overflow_warnings (zerop || onep,
4df28528 496 stmt,
6ac01510
ILT
497 WARN_STRICT_OVERFLOW_CONDITIONAL);
498 if (zerop)
726a989a 499 gimple_cond_make_false (stmt);
6ac01510 500 else if (onep)
726a989a 501 gimple_cond_make_true (stmt);
fca01525
KH
502 }
503 }
504}
505
6de9cd9a
DN
506/* Join all the blocks in the flowgraph. */
507
508static void
509make_edges (void)
510{
511 basic_block bb;
bed575d5 512 struct omp_region *cur_region = NULL;
6de9cd9a
DN
513
514 /* Create an edge from entry to the first block with executable
515 statements in it. */
24bd1a0b 516 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
6de9cd9a 517
adb35797 518 /* Traverse the basic block array placing edges. */
6de9cd9a
DN
519 FOR_EACH_BB (bb)
520 {
726a989a 521 gimple last = last_stmt (bb);
56e84019 522 bool fallthru;
6de9cd9a 523
56e84019 524 if (last)
6de9cd9a 525 {
726a989a 526 enum gimple_code code = gimple_code (last);
bed575d5 527 switch (code)
56e84019 528 {
726a989a 529 case GIMPLE_GOTO:
56e84019
RH
530 make_goto_expr_edges (bb);
531 fallthru = false;
532 break;
726a989a 533 case GIMPLE_RETURN:
56e84019
RH
534 make_edge (bb, EXIT_BLOCK_PTR, 0);
535 fallthru = false;
536 break;
726a989a 537 case GIMPLE_COND:
56e84019
RH
538 make_cond_expr_edges (bb);
539 fallthru = false;
540 break;
726a989a
RB
541 case GIMPLE_SWITCH:
542 make_gimple_switch_edges (bb);
56e84019
RH
543 fallthru = false;
544 break;
726a989a 545 case GIMPLE_RESX:
56e84019
RH
546 make_eh_edges (last);
547 fallthru = false;
548 break;
1d65f45c
RH
549 case GIMPLE_EH_DISPATCH:
550 fallthru = make_eh_dispatch_edges (last);
551 break;
56e84019 552
726a989a 553 case GIMPLE_CALL:
56e84019
RH
554 /* If this function receives a nonlocal goto, then we need to
555 make edges from this call site to all the nonlocal goto
556 handlers. */
726a989a 557 if (stmt_can_make_abnormal_goto (last))
4f6c2131 558 make_abnormal_goto_edges (bb, true);
6de9cd9a 559
56e84019
RH
560 /* If this statement has reachable exception handlers, then
561 create abnormal edges to them. */
562 make_eh_edges (last);
563
564 /* Some calls are known not to return. */
726a989a 565 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
56e84019
RH
566 break;
567
726a989a
RB
568 case GIMPLE_ASSIGN:
569 /* A GIMPLE_ASSIGN may throw internally and thus be considered
570 control-altering. */
56e84019 571 if (is_ctrl_altering_stmt (last))
1d65f45c 572 make_eh_edges (last);
56e84019
RH
573 fallthru = true;
574 break;
575
1c384bf1
RH
576 case GIMPLE_ASM:
577 make_gimple_asm_edges (bb);
578 fallthru = true;
579 break;
580
726a989a
RB
581 case GIMPLE_OMP_PARALLEL:
582 case GIMPLE_OMP_TASK:
583 case GIMPLE_OMP_FOR:
584 case GIMPLE_OMP_SINGLE:
585 case GIMPLE_OMP_MASTER:
586 case GIMPLE_OMP_ORDERED:
587 case GIMPLE_OMP_CRITICAL:
588 case GIMPLE_OMP_SECTION:
bed575d5 589 cur_region = new_omp_region (bb, code, cur_region);
56e84019
RH
590 fallthru = true;
591 break;
592
726a989a 593 case GIMPLE_OMP_SECTIONS:
bed575d5 594 cur_region = new_omp_region (bb, code, cur_region);
e5c95afe
ZD
595 fallthru = true;
596 break;
597
726a989a 598 case GIMPLE_OMP_SECTIONS_SWITCH:
7e2df4a1 599 fallthru = false;
777f7f9a
RH
600 break;
601
726a989a
RB
602 case GIMPLE_OMP_ATOMIC_LOAD:
603 case GIMPLE_OMP_ATOMIC_STORE:
a509ebb5
RL
604 fallthru = true;
605 break;
606
726a989a
RB
607 case GIMPLE_OMP_RETURN:
608 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
609 somewhere other than the next block. This will be
610 created later. */
bed575d5 611 cur_region->exit = bb;
726a989a 612 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
bed575d5
RS
613 cur_region = cur_region->outer;
614 break;
615
726a989a 616 case GIMPLE_OMP_CONTINUE:
bed575d5
RS
617 cur_region->cont = bb;
618 switch (cur_region->type)
619 {
726a989a
RB
620 case GIMPLE_OMP_FOR:
621 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
622 succs edges as abnormal to prevent splitting
623 them. */
135a171d 624 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
e5c95afe 625 /* Make the loopback edge. */
135a171d
JJ
626 make_edge (bb, single_succ (cur_region->entry),
627 EDGE_ABNORMAL);
628
726a989a
RB
629 /* Create an edge from GIMPLE_OMP_FOR to exit, which
630 corresponds to the case that the body of the loop
631 is not executed at all. */
135a171d
JJ
632 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
633 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
634 fallthru = false;
bed575d5
RS
635 break;
636
726a989a 637 case GIMPLE_OMP_SECTIONS:
bed575d5 638 /* Wire up the edges into and out of the nested sections. */
bed575d5 639 {
e5c95afe
ZD
640 basic_block switch_bb = single_succ (cur_region->entry);
641
bed575d5
RS
642 struct omp_region *i;
643 for (i = cur_region->inner; i ; i = i->next)
644 {
726a989a 645 gcc_assert (i->type == GIMPLE_OMP_SECTION);
e5c95afe 646 make_edge (switch_bb, i->entry, 0);
bed575d5
RS
647 make_edge (i->exit, bb, EDGE_FALLTHRU);
648 }
e5c95afe
ZD
649
650 /* Make the loopback edge to the block with
726a989a 651 GIMPLE_OMP_SECTIONS_SWITCH. */
e5c95afe
ZD
652 make_edge (bb, switch_bb, 0);
653
654 /* Make the edge from the switch to exit. */
655 make_edge (switch_bb, bb->next_bb, 0);
656 fallthru = false;
bed575d5
RS
657 }
658 break;
6531d1be 659
bed575d5
RS
660 default:
661 gcc_unreachable ();
662 }
bed575d5
RS
663 break;
664
56e84019
RH
665 default:
666 gcc_assert (!stmt_ends_bb_p (last));
667 fallthru = true;
668 }
6de9cd9a 669 }
56e84019
RH
670 else
671 fallthru = true;
6de9cd9a 672
56e84019 673 if (fallthru)
6c52e687
CC
674 {
675 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
676 if (last)
677 assign_discriminator (gimple_location (last), bb->next_bb);
678 }
6de9cd9a
DN
679 }
680
bed575d5
RS
681 if (root_omp_region)
682 free_omp_regions ();
683
fca01525
KH
684 /* Fold COND_EXPR_COND of each COND_EXPR. */
685 fold_cond_expr_cond ();
6de9cd9a
DN
686}
687
6c52e687
CC
688/* Trivial hash function for a location_t. ITEM is a pointer to
689 a hash table entry that maps a location_t to a discriminator. */
690
691static unsigned int
692locus_map_hash (const void *item)
693{
694 return ((const struct locus_discrim_map *) item)->locus;
695}
696
697/* Equality function for the locus-to-discriminator map. VA and VB
698 point to the two hash table entries to compare. */
699
700static int
701locus_map_eq (const void *va, const void *vb)
702{
703 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
704 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
705 return a->locus == b->locus;
706}
707
708/* Find the next available discriminator value for LOCUS. The
709 discriminator distinguishes among several basic blocks that
710 share a common locus, allowing for more accurate sample-based
711 profiling. */
712
713static int
714next_discriminator_for_locus (location_t locus)
715{
716 struct locus_discrim_map item;
717 struct locus_discrim_map **slot;
718
719 item.locus = locus;
720 item.discriminator = 0;
721 slot = (struct locus_discrim_map **)
722 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
723 (hashval_t) locus, INSERT);
724 gcc_assert (slot);
725 if (*slot == HTAB_EMPTY_ENTRY)
726 {
727 *slot = XNEW (struct locus_discrim_map);
728 gcc_assert (*slot);
729 (*slot)->locus = locus;
730 (*slot)->discriminator = 0;
731 }
732 (*slot)->discriminator++;
733 return (*slot)->discriminator;
734}
735
736/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
737
738static bool
739same_line_p (location_t locus1, location_t locus2)
740{
741 expanded_location from, to;
742
743 if (locus1 == locus2)
744 return true;
745
746 from = expand_location (locus1);
747 to = expand_location (locus2);
748
749 if (from.line != to.line)
750 return false;
751 if (from.file == to.file)
752 return true;
753 return (from.file != NULL
754 && to.file != NULL
755 && strcmp (from.file, to.file) == 0);
756}
757
758/* Assign a unique discriminator value to block BB if it begins at the same
759 LOCUS as its predecessor block. */
760
761static void
762assign_discriminator (location_t locus, basic_block bb)
763{
cbea518e 764 gimple first_in_to_bb, last_in_to_bb;
6c52e687
CC
765
766 if (locus == 0 || bb->discriminator != 0)
767 return;
768
cbea518e
CC
769 first_in_to_bb = first_non_label_stmt (bb);
770 last_in_to_bb = last_stmt (bb);
2a2869d6
CC
771 if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
772 || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
6c52e687
CC
773 bb->discriminator = next_discriminator_for_locus (locus);
774}
6de9cd9a 775
726a989a 776/* Create the edges for a GIMPLE_COND starting at block BB. */
6de9cd9a
DN
777
778static void
779make_cond_expr_edges (basic_block bb)
780{
726a989a
RB
781 gimple entry = last_stmt (bb);
782 gimple then_stmt, else_stmt;
6de9cd9a
DN
783 basic_block then_bb, else_bb;
784 tree then_label, else_label;
d783b2a2 785 edge e;
6c52e687 786 location_t entry_locus;
6de9cd9a 787
1e128c5f 788 gcc_assert (entry);
726a989a 789 gcc_assert (gimple_code (entry) == GIMPLE_COND);
6de9cd9a 790
6c52e687
CC
791 entry_locus = gimple_location (entry);
792
6de9cd9a 793 /* Entry basic blocks for each component. */
726a989a
RB
794 then_label = gimple_cond_true_label (entry);
795 else_label = gimple_cond_false_label (entry);
6de9cd9a
DN
796 then_bb = label_to_block (then_label);
797 else_bb = label_to_block (else_label);
726a989a
RB
798 then_stmt = first_stmt (then_bb);
799 else_stmt = first_stmt (else_bb);
6de9cd9a 800
d783b2a2 801 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
6c52e687 802 assign_discriminator (entry_locus, then_bb);
726a989a 803 e->goto_locus = gimple_location (then_stmt);
cc2a64dd
JJ
804 if (e->goto_locus)
805 e->goto_block = gimple_block (then_stmt);
d783b2a2
JH
806 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
807 if (e)
7241571e 808 {
6c52e687 809 assign_discriminator (entry_locus, else_bb);
7241571e 810 e->goto_locus = gimple_location (else_stmt);
cc2a64dd
JJ
811 if (e->goto_locus)
812 e->goto_block = gimple_block (else_stmt);
7241571e 813 }
a9b77cd1 814
726a989a
RB
815 /* We do not need the labels anymore. */
816 gimple_cond_set_true_label (entry, NULL_TREE);
817 gimple_cond_set_false_label (entry, NULL_TREE);
6de9cd9a
DN
818}
819
92b6dff3 820
d6be0d7f
JL
821/* Called for each element in the hash table (P) as we delete the
822 edge to cases hash table.
823
6531d1be 824 Clear all the TREE_CHAINs to prevent problems with copying of
d6be0d7f
JL
825 SWITCH_EXPRs and structure sharing rules, then free the hash table
826 element. */
827
15814ba0 828static bool
ac7d7749 829edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
15814ba0 830 void *data ATTRIBUTE_UNUSED)
d6be0d7f 831{
d6be0d7f
JL
832 tree t, next;
833
15814ba0 834 for (t = (tree) *value; t; t = next)
d6be0d7f
JL
835 {
836 next = TREE_CHAIN (t);
837 TREE_CHAIN (t) = NULL;
838 }
15814ba0
PB
839
840 *value = NULL;
841 return false;
d6be0d7f
JL
842}
843
844/* Start recording information mapping edges to case labels. */
845
c9784e6d 846void
d6be0d7f
JL
847start_recording_case_labels (void)
848{
849 gcc_assert (edge_to_cases == NULL);
15814ba0 850 edge_to_cases = pointer_map_create ();
d6be0d7f
JL
851}
852
853/* Return nonzero if we are recording information for case labels. */
854
855static bool
856recording_case_labels_p (void)
857{
858 return (edge_to_cases != NULL);
859}
860
861/* Stop recording information mapping edges to case labels and
862 remove any information we have recorded. */
c9784e6d 863void
d6be0d7f
JL
864end_recording_case_labels (void)
865{
15814ba0
PB
866 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
867 pointer_map_destroy (edge_to_cases);
d6be0d7f
JL
868 edge_to_cases = NULL;
869}
870
d6be0d7f
JL
871/* If we are inside a {start,end}_recording_cases block, then return
872 a chain of CASE_LABEL_EXPRs from T which reference E.
873
874 Otherwise return NULL. */
92b6dff3
JL
875
876static tree
726a989a 877get_cases_for_edge (edge e, gimple t)
92b6dff3 878{
92b6dff3 879 void **slot;
d6be0d7f 880 size_t i, n;
92b6dff3 881
d6be0d7f
JL
882 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
883 chains available. Return NULL so the caller can detect this case. */
884 if (!recording_case_labels_p ())
885 return NULL;
6531d1be 886
15814ba0 887 slot = pointer_map_contains (edge_to_cases, e);
92b6dff3 888 if (slot)
15814ba0 889 return (tree) *slot;
92b6dff3 890
d6be0d7f
JL
891 /* If we did not find E in the hash table, then this must be the first
892 time we have been queried for information about E & T. Add all the
893 elements from T to the hash table then perform the query again. */
92b6dff3 894
726a989a 895 n = gimple_switch_num_labels (t);
92b6dff3
JL
896 for (i = 0; i < n; i++)
897 {
726a989a 898 tree elt = gimple_switch_label (t, i);
15814ba0 899 tree lab = CASE_LABEL (elt);
d6be0d7f 900 basic_block label_bb = label_to_block (lab);
15814ba0
PB
901 edge this_edge = find_edge (e->src, label_bb);
902
903 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
904 a new chain. */
905 slot = pointer_map_insert (edge_to_cases, this_edge);
906 TREE_CHAIN (elt) = (tree) *slot;
907 *slot = elt;
92b6dff3 908 }
15814ba0
PB
909
910 return (tree) *pointer_map_contains (edge_to_cases, e);
92b6dff3 911}
6de9cd9a 912
726a989a 913/* Create the edges for a GIMPLE_SWITCH starting at block BB. */
6de9cd9a
DN
914
915static void
726a989a 916make_gimple_switch_edges (basic_block bb)
6de9cd9a 917{
726a989a 918 gimple entry = last_stmt (bb);
6c52e687 919 location_t entry_locus;
6de9cd9a 920 size_t i, n;
6de9cd9a 921
6c52e687
CC
922 entry_locus = gimple_location (entry);
923
726a989a 924 n = gimple_switch_num_labels (entry);
6de9cd9a
DN
925
926 for (i = 0; i < n; ++i)
927 {
726a989a 928 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
6de9cd9a 929 basic_block label_bb = label_to_block (lab);
d6be0d7f 930 make_edge (bb, label_bb, 0);
6c52e687 931 assign_discriminator (entry_locus, label_bb);
6de9cd9a
DN
932 }
933}
934
935
936/* Return the basic block holding label DEST. */
937
938basic_block
997de8ed 939label_to_block_fn (struct function *ifun, tree dest)
6de9cd9a 940{
242229bb
JH
941 int uid = LABEL_DECL_UID (dest);
942
f0b698c1
KH
943 /* We would die hard when faced by an undefined label. Emit a label to
944 the very first basic block. This will hopefully make even the dataflow
242229bb
JH
945 and undefined variable warnings quite right. */
946 if ((errorcount || sorrycount) && uid < 0)
947 {
726a989a
RB
948 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
949 gimple stmt;
242229bb 950
726a989a
RB
951 stmt = gimple_build_label (dest);
952 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
242229bb
JH
953 uid = LABEL_DECL_UID (dest);
954 }
e597f337
KH
955 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
956 <= (unsigned int) uid)
98f464e0 957 return NULL;
e597f337 958 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
6de9cd9a
DN
959}
960
4f6c2131
EB
961/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
962 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
963
964void
965make_abnormal_goto_edges (basic_block bb, bool for_call)
966{
967 basic_block target_bb;
726a989a 968 gimple_stmt_iterator gsi;
4f6c2131
EB
969
970 FOR_EACH_BB (target_bb)
726a989a 971 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
4f6c2131 972 {
726a989a
RB
973 gimple label_stmt = gsi_stmt (gsi);
974 tree target;
4f6c2131 975
726a989a 976 if (gimple_code (label_stmt) != GIMPLE_LABEL)
4f6c2131
EB
977 break;
978
726a989a 979 target = gimple_label_label (label_stmt);
4f6c2131
EB
980
981 /* Make an edge to every label block that has been marked as a
982 potential target for a computed goto or a non-local goto. */
983 if ((FORCED_LABEL (target) && !for_call)
984 || (DECL_NONLOCAL (target) && for_call))
985 {
986 make_edge (bb, target_bb, EDGE_ABNORMAL);
987 break;
988 }
989 }
990}
991
6de9cd9a
DN
992/* Create edges for a goto statement at block BB. */
993
994static void
995make_goto_expr_edges (basic_block bb)
996{
726a989a
RB
997 gimple_stmt_iterator last = gsi_last_bb (bb);
998 gimple goto_t = gsi_stmt (last);
6de9cd9a 999
4f6c2131
EB
1000 /* A simple GOTO creates normal edges. */
1001 if (simple_goto_p (goto_t))
6de9cd9a 1002 {
726a989a 1003 tree dest = gimple_goto_dest (goto_t);
6c52e687
CC
1004 basic_block label_bb = label_to_block (dest);
1005 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
726a989a 1006 e->goto_locus = gimple_location (goto_t);
6c52e687 1007 assign_discriminator (e->goto_locus, label_bb);
cc2a64dd
JJ
1008 if (e->goto_locus)
1009 e->goto_block = gimple_block (goto_t);
726a989a 1010 gsi_remove (&last, true);
4f6c2131 1011 return;
6de9cd9a
DN
1012 }
1013
4f6c2131
EB
1014 /* A computed GOTO creates abnormal edges. */
1015 make_abnormal_goto_edges (bb, false);
6de9cd9a
DN
1016}
1017
1c384bf1
RH
1018/* Create edges for an asm statement with labels at block BB. */
1019
1020static void
1021make_gimple_asm_edges (basic_block bb)
1022{
1023 gimple stmt = last_stmt (bb);
1024 location_t stmt_loc = gimple_location (stmt);
1025 int i, n = gimple_asm_nlabels (stmt);
1026
1027 for (i = 0; i < n; ++i)
1028 {
1029 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1030 basic_block label_bb = label_to_block (label);
1031 make_edge (bb, label_bb, 0);
1032 assign_discriminator (stmt_loc, label_bb);
1033 }
1034}
6de9cd9a
DN
1035
1036/*---------------------------------------------------------------------------
1037 Flowgraph analysis
1038---------------------------------------------------------------------------*/
1039
f698d217
SB
1040/* Cleanup useless labels in basic blocks. This is something we wish
1041 to do early because it allows us to group case labels before creating
1042 the edges for the CFG, and it speeds up block statement iterators in
1043 all passes later on.
8b11009b
ZD
1044 We rerun this pass after CFG is created, to get rid of the labels that
1045 are no longer referenced. After then we do not run it any more, since
1046 (almost) no new labels should be created. */
f698d217
SB
1047
1048/* A map from basic block index to the leading label of that block. */
8b11009b
ZD
1049static struct label_record
1050{
1051 /* The label. */
1052 tree label;
1053
1054 /* True if the label is referenced from somewhere. */
1055 bool used;
1056} *label_for_bb;
f698d217 1057
242229bb 1058/* Given LABEL return the first label in the same basic block. */
726a989a 1059
242229bb
JH
1060static tree
1061main_block_label (tree label)
1062{
1063 basic_block bb = label_to_block (label);
8b11009b 1064 tree main_label = label_for_bb[bb->index].label;
242229bb
JH
1065
1066 /* label_to_block possibly inserted undefined label into the chain. */
8b11009b
ZD
1067 if (!main_label)
1068 {
1069 label_for_bb[bb->index].label = label;
1070 main_label = label;
1071 }
1072
1073 label_for_bb[bb->index].used = true;
1074 return main_label;
242229bb
JH
1075}
1076
1d65f45c
RH
1077/* Clean up redundant labels within the exception tree. */
1078
1079static void
1080cleanup_dead_labels_eh (void)
1081{
1082 eh_landing_pad lp;
1083 eh_region r;
1084 tree lab;
1085 int i;
1086
1087 if (cfun->eh == NULL)
1088 return;
1089
1090 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1091 if (lp && lp->post_landing_pad)
1092 {
1093 lab = main_block_label (lp->post_landing_pad);
1094 if (lab != lp->post_landing_pad)
1095 {
1096 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1097 EH_LANDING_PAD_NR (lab) = lp->index;
1098 }
1099 }
1100
1101 FOR_ALL_EH_REGION (r)
1102 switch (r->type)
1103 {
1104 case ERT_CLEANUP:
1105 case ERT_MUST_NOT_THROW:
1106 break;
1107
1108 case ERT_TRY:
1109 {
1110 eh_catch c;
1111 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1112 {
1113 lab = c->label;
1114 if (lab)
1115 c->label = main_block_label (lab);
1116 }
1117 }
1118 break;
1119
1120 case ERT_ALLOWED_EXCEPTIONS:
1121 lab = r->u.allowed.label;
1122 if (lab)
1123 r->u.allowed.label = main_block_label (lab);
1124 break;
1125 }
1126}
1127
1128
b986ebf3 1129/* Cleanup redundant labels. This is a three-step process:
f698d217
SB
1130 1) Find the leading label for each block.
1131 2) Redirect all references to labels to the leading labels.
1132 3) Cleanup all useless labels. */
6de9cd9a 1133
165b54c3 1134void
6de9cd9a
DN
1135cleanup_dead_labels (void)
1136{
1137 basic_block bb;
8b11009b 1138 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
1139
1140 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 1141 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
1142 FOR_EACH_BB (bb)
1143 {
726a989a 1144 gimple_stmt_iterator i;
6de9cd9a 1145
726a989a 1146 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
6de9cd9a 1147 {
726a989a
RB
1148 tree label;
1149 gimple stmt = gsi_stmt (i);
6de9cd9a 1150
726a989a 1151 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1152 break;
1153
726a989a 1154 label = gimple_label_label (stmt);
6de9cd9a
DN
1155
1156 /* If we have not yet seen a label for the current block,
1157 remember this one and see if there are more labels. */
8b11009b 1158 if (!label_for_bb[bb->index].label)
6de9cd9a 1159 {
8b11009b 1160 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1161 continue;
1162 }
1163
1164 /* If we did see a label for the current block already, but it
1165 is an artificially created label, replace it if the current
1166 label is a user defined label. */
8b11009b
ZD
1167 if (!DECL_ARTIFICIAL (label)
1168 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 1169 {
8b11009b 1170 label_for_bb[bb->index].label = label;
6de9cd9a
DN
1171 break;
1172 }
1173 }
1174 }
1175
f698d217
SB
1176 /* Now redirect all jumps/branches to the selected label.
1177 First do so for each block ending in a control statement. */
6de9cd9a
DN
1178 FOR_EACH_BB (bb)
1179 {
726a989a 1180 gimple stmt = last_stmt (bb);
6de9cd9a
DN
1181 if (!stmt)
1182 continue;
1183
726a989a 1184 switch (gimple_code (stmt))
6de9cd9a 1185 {
726a989a 1186 case GIMPLE_COND:
6de9cd9a 1187 {
726a989a
RB
1188 tree true_label = gimple_cond_true_label (stmt);
1189 tree false_label = gimple_cond_false_label (stmt);
6de9cd9a 1190
726a989a
RB
1191 if (true_label)
1192 gimple_cond_set_true_label (stmt, main_block_label (true_label));
1193 if (false_label)
1194 gimple_cond_set_false_label (stmt, main_block_label (false_label));
6de9cd9a
DN
1195 break;
1196 }
6531d1be 1197
726a989a 1198 case GIMPLE_SWITCH:
6de9cd9a 1199 {
726a989a 1200 size_t i, n = gimple_switch_num_labels (stmt);
6531d1be 1201
6de9cd9a
DN
1202 /* Replace all destination labels. */
1203 for (i = 0; i < n; ++i)
92b6dff3 1204 {
726a989a
RB
1205 tree case_label = gimple_switch_label (stmt, i);
1206 tree label = main_block_label (CASE_LABEL (case_label));
1207 CASE_LABEL (case_label) = label;
92b6dff3 1208 }
6de9cd9a
DN
1209 break;
1210 }
1211
1c384bf1
RH
1212 case GIMPLE_ASM:
1213 {
1214 int i, n = gimple_asm_nlabels (stmt);
1215
1216 for (i = 0; i < n; ++i)
1217 {
1218 tree cons = gimple_asm_label_op (stmt, i);
1219 tree label = main_block_label (TREE_VALUE (cons));
1220 TREE_VALUE (cons) = label;
1221 }
1222 break;
1223 }
1224
726a989a 1225 /* We have to handle gotos until they're removed, and we don't
f667741c 1226 remove them until after we've created the CFG edges. */
726a989a
RB
1227 case GIMPLE_GOTO:
1228 if (!computed_goto_p (stmt))
242229bb 1229 {
726a989a
RB
1230 tree new_dest = main_block_label (gimple_goto_dest (stmt));
1231 gimple_goto_set_dest (stmt, new_dest);
242229bb 1232 }
1c384bf1 1233 break;
f667741c 1234
6de9cd9a
DN
1235 default:
1236 break;
1237 }
1238 }
1239
1d65f45c
RH
1240 /* Do the same for the exception region tree labels. */
1241 cleanup_dead_labels_eh ();
f698d217 1242
6de9cd9a 1243 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1244 can be the target of non-local gotos and labels which have their
1245 address taken are preserved. */
6de9cd9a
DN
1246 FOR_EACH_BB (bb)
1247 {
726a989a 1248 gimple_stmt_iterator i;
8b11009b 1249 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1250
8b11009b 1251 if (!label_for_this_bb)
6de9cd9a
DN
1252 continue;
1253
8b11009b
ZD
1254 /* If the main label of the block is unused, we may still remove it. */
1255 if (!label_for_bb[bb->index].used)
1256 label_for_this_bb = NULL;
1257
726a989a 1258 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
6de9cd9a 1259 {
726a989a
RB
1260 tree label;
1261 gimple stmt = gsi_stmt (i);
6de9cd9a 1262
726a989a 1263 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
1264 break;
1265
726a989a 1266 label = gimple_label_label (stmt);
6de9cd9a
DN
1267
1268 if (label == label_for_this_bb
726a989a 1269 || !DECL_ARTIFICIAL (label)
cea0f4f1
AP
1270 || DECL_NONLOCAL (label)
1271 || FORCED_LABEL (label))
726a989a 1272 gsi_next (&i);
6de9cd9a 1273 else
726a989a 1274 gsi_remove (&i, true);
6de9cd9a
DN
1275 }
1276 }
1277
1278 free (label_for_bb);
1279}
1280
f667741c
SB
1281/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1282 and scan the sorted vector of cases. Combine the ones jumping to the
1283 same label.
1284 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1285
165b54c3 1286void
f667741c
SB
1287group_case_labels (void)
1288{
1289 basic_block bb;
1290
1291 FOR_EACH_BB (bb)
1292 {
726a989a
RB
1293 gimple stmt = last_stmt (bb);
1294 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
f667741c 1295 {
726a989a 1296 int old_size = gimple_switch_num_labels (stmt);
f667741c 1297 int i, j, new_size = old_size;
b7814a18
RG
1298 tree default_case = NULL_TREE;
1299 tree default_label = NULL_TREE;
726a989a 1300 bool has_default;
29c4d22b 1301
726a989a 1302 /* The default label is always the first case in a switch
b7814a18 1303 statement after gimplification if it was not optimized
726a989a
RB
1304 away */
1305 if (!CASE_LOW (gimple_switch_default_label (stmt))
1306 && !CASE_HIGH (gimple_switch_default_label (stmt)))
b7814a18 1307 {
726a989a 1308 default_case = gimple_switch_default_label (stmt);
b7814a18 1309 default_label = CASE_LABEL (default_case);
726a989a 1310 has_default = true;
b7814a18 1311 }
726a989a
RB
1312 else
1313 has_default = false;
f667741c 1314
b7814a18 1315 /* Look for possible opportunities to merge cases. */
726a989a
RB
1316 if (has_default)
1317 i = 1;
1318 else
1319 i = 0;
b7814a18 1320 while (i < old_size)
f667741c 1321 {
ed9cef22 1322 tree base_case, base_label, base_high;
726a989a 1323 base_case = gimple_switch_label (stmt, i);
f667741c 1324
1e128c5f 1325 gcc_assert (base_case);
f667741c 1326 base_label = CASE_LABEL (base_case);
31e9eea2
SB
1327
1328 /* Discard cases that have the same destination as the
1329 default case. */
1330 if (base_label == default_label)
1331 {
726a989a 1332 gimple_switch_set_label (stmt, i, NULL_TREE);
31e9eea2 1333 i++;
29c4d22b 1334 new_size--;
31e9eea2
SB
1335 continue;
1336 }
1337
726a989a
RB
1338 base_high = CASE_HIGH (base_case)
1339 ? CASE_HIGH (base_case)
1340 : CASE_LOW (base_case);
d717e500 1341 i++;
726a989a 1342
f667741c
SB
1343 /* Try to merge case labels. Break out when we reach the end
1344 of the label vector or when we cannot merge the next case
1345 label with the current one. */
b7814a18 1346 while (i < old_size)
f667741c 1347 {
726a989a 1348 tree merge_case = gimple_switch_label (stmt, i);
f667741c
SB
1349 tree merge_label = CASE_LABEL (merge_case);
1350 tree t = int_const_binop (PLUS_EXPR, base_high,
1351 integer_one_node, 1);
1352
1353 /* Merge the cases if they jump to the same place,
1354 and their ranges are consecutive. */
1355 if (merge_label == base_label
1356 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1357 {
1358 base_high = CASE_HIGH (merge_case) ?
1359 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1360 CASE_HIGH (base_case) = base_high;
726a989a 1361 gimple_switch_set_label (stmt, i, NULL_TREE);
f667741c 1362 new_size--;
d717e500 1363 i++;
f667741c
SB
1364 }
1365 else
1366 break;
1367 }
1368 }
1369
1370 /* Compress the case labels in the label vector, and adjust the
1371 length of the vector. */
1372 for (i = 0, j = 0; i < new_size; i++)
1373 {
726a989a 1374 while (! gimple_switch_label (stmt, j))
f667741c 1375 j++;
726a989a
RB
1376 gimple_switch_set_label (stmt, i,
1377 gimple_switch_label (stmt, j++));
f667741c 1378 }
726a989a
RB
1379
1380 gcc_assert (new_size <= old_size);
1381 gimple_switch_set_num_labels (stmt, new_size);
f667741c
SB
1382 }
1383 }
1384}
6de9cd9a
DN
1385
1386/* Checks whether we can merge block B into block A. */
1387
1388static bool
726a989a 1389gimple_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1390{
726a989a
RB
1391 gimple stmt;
1392 gimple_stmt_iterator gsi;
1393 gimple_seq phis;
6de9cd9a 1394
c5cbcccf 1395 if (!single_succ_p (a))
6de9cd9a
DN
1396 return false;
1397
496a4ef5 1398 if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
6de9cd9a
DN
1399 return false;
1400
c5cbcccf 1401 if (single_succ (a) != b)
6de9cd9a
DN
1402 return false;
1403
c5cbcccf 1404 if (!single_pred_p (b))
6de9cd9a
DN
1405 return false;
1406
26e75214
KH
1407 if (b == EXIT_BLOCK_PTR)
1408 return false;
6531d1be 1409
6de9cd9a
DN
1410 /* If A ends by a statement causing exceptions or something similar, we
1411 cannot merge the blocks. */
726a989a 1412 stmt = last_stmt (a);
6de9cd9a
DN
1413 if (stmt && stmt_ends_bb_p (stmt))
1414 return false;
1415
1416 /* Do not allow a block with only a non-local label to be merged. */
726a989a
RB
1417 if (stmt
1418 && gimple_code (stmt) == GIMPLE_LABEL
1419 && DECL_NONLOCAL (gimple_label_label (stmt)))
6de9cd9a
DN
1420 return false;
1421
8e7c5fdd
RH
1422 /* Examine the labels at the beginning of B. */
1423 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1424 {
1425 tree lab;
1426 stmt = gsi_stmt (gsi);
1427 if (gimple_code (stmt) != GIMPLE_LABEL)
1428 break;
1429 lab = gimple_label_label (stmt);
1430
1431 /* Do not remove user labels. */
1432 if (!DECL_ARTIFICIAL (lab))
1433 return false;
1434 }
1435
1436 /* Protect the loop latches. */
1437 if (current_loops && b->loop_father->latch == b)
1438 return false;
1439
38965eb2 1440 /* It must be possible to eliminate all phi nodes in B. If ssa form
610b64e2
RG
1441 is not up-to-date and a name-mapping is registered, we cannot eliminate
1442 any phis. Symbols marked for renaming are never a problem though. */
726a989a 1443 phis = phi_nodes (b);
610b64e2
RG
1444 if (!gimple_seq_empty_p (phis)
1445 && name_mappings_registered_p ())
1446 return false;
6de9cd9a 1447
6de9cd9a
DN
1448 return true;
1449}
1450
b5b8b0ac
AO
1451/* Return true if the var whose chain of uses starts at PTR has no
1452 nondebug uses. */
1453bool
1454has_zero_uses_1 (const ssa_use_operand_t *head)
1455{
1456 const ssa_use_operand_t *ptr;
1457
1458 for (ptr = head->next; ptr != head; ptr = ptr->next)
1459 if (!is_gimple_debug (USE_STMT (ptr)))
1460 return false;
1461
1462 return true;
1463}
1464
1465/* Return true if the var whose chain of uses starts at PTR has a
1466 single nondebug use. Set USE_P and STMT to that single nondebug
1467 use, if so, or to NULL otherwise. */
1468bool
1469single_imm_use_1 (const ssa_use_operand_t *head,
1470 use_operand_p *use_p, gimple *stmt)
1471{
1472 ssa_use_operand_t *ptr, *single_use = 0;
1473
1474 for (ptr = head->next; ptr != head; ptr = ptr->next)
1475 if (!is_gimple_debug (USE_STMT (ptr)))
1476 {
1477 if (single_use)
1478 {
1479 single_use = NULL;
1480 break;
1481 }
1482 single_use = ptr;
1483 }
1484
1485 if (use_p)
1486 *use_p = single_use;
1487
1488 if (stmt)
1489 *stmt = single_use ? single_use->loc.stmt : NULL;
1490
1491 return !!single_use;
1492}
1493
38965eb2
ZD
1494/* Replaces all uses of NAME by VAL. */
1495
684aaf29 1496void
38965eb2
ZD
1497replace_uses_by (tree name, tree val)
1498{
1499 imm_use_iterator imm_iter;
1500 use_operand_p use;
726a989a 1501 gimple stmt;
38965eb2 1502 edge e;
38965eb2 1503
6c00f606 1504 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1505 {
6c00f606
AM
1506 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1507 {
1508 replace_exp (use, val);
38965eb2 1509
726a989a 1510 if (gimple_code (stmt) == GIMPLE_PHI)
38965eb2 1511 {
726a989a 1512 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
6c00f606
AM
1513 if (e->flags & EDGE_ABNORMAL)
1514 {
1515 /* This can only occur for virtual operands, since
1516 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1517 would prevent replacement. */
1518 gcc_assert (!is_gimple_reg (name));
1519 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1520 }
38965eb2
ZD
1521 }
1522 }
cfaab3a9 1523
726a989a 1524 if (gimple_code (stmt) != GIMPLE_PHI)
6c00f606 1525 {
726a989a 1526 size_t i;
9af0df6b 1527
6c00f606 1528 fold_stmt_inplace (stmt);
672987e8 1529 if (cfgcleanup_altered_bbs)
726a989a 1530 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
cfaab3a9 1531
cff4e50d 1532 /* FIXME. This should go in update_stmt. */
726a989a
RB
1533 for (i = 0; i < gimple_num_ops (stmt); i++)
1534 {
1535 tree op = gimple_op (stmt, i);
1536 /* Operands may be empty here. For example, the labels
1537 of a GIMPLE_COND are nulled out following the creation
1538 of the corresponding CFG edges. */
1539 if (op && TREE_CODE (op) == ADDR_EXPR)
1540 recompute_tree_invariant_for_addr_expr (op);
1541 }
9af0df6b 1542
6c00f606 1543 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cff4e50d 1544 update_stmt (stmt);
6c00f606 1545 }
38965eb2 1546 }
6531d1be 1547
40b448ef 1548 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1549
1550 /* Also update the trees stored in loop structures. */
1551 if (current_loops)
1552 {
1553 struct loop *loop;
42fd6772 1554 loop_iterator li;
d5ab5675 1555
42fd6772 1556 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1557 {
42fd6772 1558 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1559 }
1560 }
38965eb2 1561}
6de9cd9a
DN
1562
1563/* Merge block B into block A. */
1564
1565static void
726a989a 1566gimple_merge_blocks (basic_block a, basic_block b)
6de9cd9a 1567{
726a989a
RB
1568 gimple_stmt_iterator last, gsi, psi;
1569 gimple_seq phis = phi_nodes (b);
6de9cd9a
DN
1570
1571 if (dump_file)
1572 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1573
c4f548b8
DN
1574 /* Remove all single-valued PHI nodes from block B of the form
1575 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
726a989a
RB
1576 gsi = gsi_last_bb (a);
1577 for (psi = gsi_start (phis); !gsi_end_p (psi); )
38965eb2 1578 {
726a989a
RB
1579 gimple phi = gsi_stmt (psi);
1580 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1581 gimple copy;
1582 bool may_replace_uses = !is_gimple_reg (def)
1583 || may_propagate_copy (def, use);
d7f0e25c 1584
7c8eb293
ZD
1585 /* In case we maintain loop closed ssa form, do not propagate arguments
1586 of loop exit phi nodes. */
d7f0e25c 1587 if (current_loops
f87000d0 1588 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1589 && is_gimple_reg (def)
1590 && TREE_CODE (use) == SSA_NAME
1591 && a->loop_father != b->loop_father)
1592 may_replace_uses = false;
1593
1594 if (!may_replace_uses)
38965eb2
ZD
1595 {
1596 gcc_assert (is_gimple_reg (def));
1597
128a79fb 1598 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1599 with ordering of phi nodes. This is because A is the single
1600 predecessor of B, therefore results of the phi nodes cannot
1601 appear as arguments of the phi nodes. */
726a989a
RB
1602 copy = gimple_build_assign (def, use);
1603 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1604 remove_phi_node (&psi, false);
38965eb2
ZD
1605 }
1606 else
611021e1 1607 {
d0f76c4b
RG
1608 /* If we deal with a PHI for virtual operands, we can simply
1609 propagate these without fussing with folding or updating
1610 the stmt. */
1611 if (!is_gimple_reg (def))
1612 {
1613 imm_use_iterator iter;
1614 use_operand_p use_p;
726a989a 1615 gimple stmt;
d0f76c4b
RG
1616
1617 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1618 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1619 SET_USE (use_p, use);
7686fb74
RG
1620
1621 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1622 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
d0f76c4b
RG
1623 }
1624 else
1625 replace_uses_by (def, use);
726a989a
RB
1626
1627 remove_phi_node (&psi, true);
611021e1 1628 }
38965eb2
ZD
1629 }
1630
6de9cd9a
DN
1631 /* Ensure that B follows A. */
1632 move_block_after (b, a);
1633
c5cbcccf 1634 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1635 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a 1636
726a989a
RB
1637 /* Remove labels from B and set gimple_bb to A for other statements. */
1638 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
6de9cd9a 1639 {
1d65f45c
RH
1640 gimple stmt = gsi_stmt (gsi);
1641 if (gimple_code (stmt) == GIMPLE_LABEL)
be477406 1642 {
1d65f45c
RH
1643 tree label = gimple_label_label (stmt);
1644 int lp_nr;
726a989a
RB
1645
1646 gsi_remove (&gsi, false);
be477406 1647
be477406
JL
1648 /* Now that we can thread computed gotos, we might have
1649 a situation where we have a forced label in block B
1650 However, the label at the start of block B might still be
1651 used in other ways (think about the runtime checking for
1652 Fortran assigned gotos). So we can not just delete the
1653 label. Instead we move the label to the start of block A. */
1d65f45c 1654 if (FORCED_LABEL (label))
be477406 1655 {
726a989a 1656 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1d65f45c
RH
1657 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1658 }
1659
1660 lp_nr = EH_LANDING_PAD_NR (label);
1661 if (lp_nr)
1662 {
1663 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1664 lp->post_landing_pad = NULL;
be477406
JL
1665 }
1666 }
6de9cd9a
DN
1667 else
1668 {
1d65f45c 1669 gimple_set_bb (stmt, a);
726a989a 1670 gsi_next (&gsi);
6de9cd9a
DN
1671 }
1672 }
1673
726a989a
RB
1674 /* Merge the sequences. */
1675 last = gsi_last_bb (a);
1676 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1677 set_bb_seq (b, NULL);
672987e8
ZD
1678
1679 if (cfgcleanup_altered_bbs)
1680 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1681}
1682
1683
bc23502b 1684/* Return the one of two successors of BB that is not reachable by a
2cd713a0 1685 complex edge, if there is one. Else, return BB. We use
bc23502b
PB
1686 this in optimizations that use post-dominators for their heuristics,
1687 to catch the cases in C++ where function calls are involved. */
6531d1be 1688
bc23502b 1689basic_block
6531d1be 1690single_noncomplex_succ (basic_block bb)
bc23502b
PB
1691{
1692 edge e0, e1;
1693 if (EDGE_COUNT (bb->succs) != 2)
1694 return bb;
6531d1be 1695
bc23502b
PB
1696 e0 = EDGE_SUCC (bb, 0);
1697 e1 = EDGE_SUCC (bb, 1);
1698 if (e0->flags & EDGE_COMPLEX)
1699 return e1->dest;
1700 if (e1->flags & EDGE_COMPLEX)
1701 return e0->dest;
6531d1be 1702
bc23502b 1703 return bb;
6531d1be 1704}
bc23502b 1705
6de9cd9a
DN
1706/* T is CALL_EXPR. Set current_function_calls_* flags. */
1707
1708void
726a989a 1709notice_special_calls (gimple call)
6de9cd9a 1710{
726a989a 1711 int flags = gimple_call_flags (call);
6de9cd9a
DN
1712
1713 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 1714 cfun->calls_alloca = true;
6de9cd9a 1715 if (flags & ECF_RETURNS_TWICE)
e3b5732b 1716 cfun->calls_setjmp = true;
6de9cd9a
DN
1717}
1718
1719
1720/* Clear flags set by notice_special_calls. Used by dead code removal
1721 to update the flags. */
1722
1723void
1724clear_special_calls (void)
1725{
e3b5732b
JH
1726 cfun->calls_alloca = false;
1727 cfun->calls_setjmp = false;
6de9cd9a
DN
1728}
1729
6de9cd9a
DN
1730/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1731
1732static void
1733remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1734{
6de9cd9a
DN
1735 /* Since this block is no longer reachable, we can just delete all
1736 of its PHI nodes. */
81b822d5 1737 remove_phi_nodes (bb);
6de9cd9a
DN
1738
1739 /* Remove edges to BB's successors. */
628f6a4e 1740 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 1741 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
1742}
1743
1744
1745/* Remove statements of basic block BB. */
1746
1747static void
1748remove_bb (basic_block bb)
1749{
726a989a 1750 gimple_stmt_iterator i;
6de9cd9a
DN
1751
1752 if (dump_file)
1753 {
1754 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1755 if (dump_flags & TDF_DETAILS)
1756 {
1757 dump_bb (bb, dump_file, 0);
1758 fprintf (dump_file, "\n");
1759 }
1760 }
1761
2b271002
ZD
1762 if (current_loops)
1763 {
1764 struct loop *loop = bb->loop_father;
1765
598ec7bd
ZD
1766 /* If a loop gets removed, clean up the information associated
1767 with it. */
2b271002
ZD
1768 if (loop->latch == bb
1769 || loop->header == bb)
598ec7bd 1770 free_numbers_of_iterations_estimates_loop (loop);
2b271002
ZD
1771 }
1772
6de9cd9a 1773 /* Remove all the instructions in the block. */
726a989a 1774 if (bb_seq (bb) != NULL)
6de9cd9a 1775 {
b5b8b0ac
AO
1776 /* Walk backwards so as to get a chance to substitute all
1777 released DEFs into debug stmts. See
1778 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1779 details. */
1780 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
77568960 1781 {
726a989a
RB
1782 gimple stmt = gsi_stmt (i);
1783 if (gimple_code (stmt) == GIMPLE_LABEL
1784 && (FORCED_LABEL (gimple_label_label (stmt))
1785 || DECL_NONLOCAL (gimple_label_label (stmt))))
7506e1cb
ZD
1786 {
1787 basic_block new_bb;
726a989a 1788 gimple_stmt_iterator new_gsi;
7506e1cb
ZD
1789
1790 /* A non-reachable non-local label may still be referenced.
1791 But it no longer needs to carry the extra semantics of
1792 non-locality. */
726a989a 1793 if (DECL_NONLOCAL (gimple_label_label (stmt)))
7506e1cb 1794 {
726a989a
RB
1795 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1796 FORCED_LABEL (gimple_label_label (stmt)) = 1;
7506e1cb 1797 }
bb1ecfe8 1798
7506e1cb 1799 new_bb = bb->prev_bb;
726a989a
RB
1800 new_gsi = gsi_start_bb (new_bb);
1801 gsi_remove (&i, false);
1802 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
7506e1cb
ZD
1803 }
1804 else
bb1ecfe8 1805 {
7506e1cb
ZD
1806 /* Release SSA definitions if we are in SSA. Note that we
1807 may be called when not in SSA. For example,
1808 final_cleanup calls this function via
1809 cleanup_tree_cfg. */
1810 if (gimple_in_ssa_p (cfun))
1811 release_defs (stmt);
1812
726a989a 1813 gsi_remove (&i, true);
bb1ecfe8 1814 }
6531d1be 1815
b5b8b0ac
AO
1816 if (gsi_end_p (i))
1817 i = gsi_last_bb (bb);
1818 else
1819 gsi_prev (&i);
43e05e45 1820 }
6de9cd9a
DN
1821 }
1822
6de9cd9a 1823 remove_phi_nodes_and_edges_for_unreachable_block (bb);
726a989a 1824 bb->il.gimple = NULL;
6de9cd9a
DN
1825}
1826
6de9cd9a 1827
35920270
KH
1828/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1829 predicate VAL, return the edge that will be taken out of the block.
1830 If VAL does not match a unique edge, NULL is returned. */
6de9cd9a
DN
1831
1832edge
1833find_taken_edge (basic_block bb, tree val)
1834{
726a989a 1835 gimple stmt;
6de9cd9a
DN
1836
1837 stmt = last_stmt (bb);
1838
1e128c5f
GB
1839 gcc_assert (stmt);
1840 gcc_assert (is_ctrl_stmt (stmt));
6de9cd9a 1841
726a989a
RB
1842 if (val == NULL)
1843 return NULL;
1844
1845 if (!is_gimple_min_invariant (val))
6de9cd9a
DN
1846 return NULL;
1847
726a989a 1848 if (gimple_code (stmt) == GIMPLE_COND)
6de9cd9a
DN
1849 return find_taken_edge_cond_expr (bb, val);
1850
726a989a 1851 if (gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a
DN
1852 return find_taken_edge_switch_expr (bb, val);
1853
be477406 1854 if (computed_goto_p (stmt))
1799efef
JL
1855 {
1856 /* Only optimize if the argument is a label, if the argument is
1857 not a label then we can not construct a proper CFG.
1858
1859 It may be the case that we only need to allow the LABEL_REF to
1860 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1861 appear inside a LABEL_EXPR just to be safe. */
1862 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1863 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1864 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1865 return NULL;
1866 }
be477406 1867
35920270 1868 gcc_unreachable ();
6de9cd9a
DN
1869}
1870
be477406
JL
1871/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1872 statement, determine which of the outgoing edges will be taken out of the
1873 block. Return NULL if either edge may be taken. */
1874
1875static edge
1876find_taken_edge_computed_goto (basic_block bb, tree val)
1877{
1878 basic_block dest;
1879 edge e = NULL;
1880
1881 dest = label_to_block (val);
1882 if (dest)
1883 {
1884 e = find_edge (bb, dest);
1885 gcc_assert (e != NULL);
1886 }
1887
1888 return e;
1889}
6de9cd9a
DN
1890
1891/* Given a constant value VAL and the entry block BB to a COND_EXPR
1892 statement, determine which of the two edges will be taken out of the
1893 block. Return NULL if either edge may be taken. */
1894
1895static edge
1896find_taken_edge_cond_expr (basic_block bb, tree val)
1897{
1898 edge true_edge, false_edge;
1899
1900 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 1901
f1b19062 1902 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 1903 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
1904}
1905
fca01525 1906/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
1907 statement, determine which edge will be taken out of the block. Return
1908 NULL if any edge may be taken. */
1909
1910static edge
1911find_taken_edge_switch_expr (basic_block bb, tree val)
1912{
6de9cd9a
DN
1913 basic_block dest_bb;
1914 edge e;
726a989a
RB
1915 gimple switch_stmt;
1916 tree taken_case;
6de9cd9a 1917
726a989a
RB
1918 switch_stmt = last_stmt (bb);
1919 taken_case = find_case_label_for_value (switch_stmt, val);
6de9cd9a
DN
1920 dest_bb = label_to_block (CASE_LABEL (taken_case));
1921
1922 e = find_edge (bb, dest_bb);
1e128c5f 1923 gcc_assert (e);
6de9cd9a
DN
1924 return e;
1925}
1926
1927
726a989a 1928/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
f667741c
SB
1929 We can make optimal use here of the fact that the case labels are
1930 sorted: We can do a binary search for a case matching VAL. */
6de9cd9a
DN
1931
1932static tree
726a989a 1933find_case_label_for_value (gimple switch_stmt, tree val)
6de9cd9a 1934{
726a989a
RB
1935 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1936 tree default_case = gimple_switch_default_label (switch_stmt);
6de9cd9a 1937
726a989a 1938 for (low = 0, high = n; high - low > 1; )
6de9cd9a 1939 {
f667741c 1940 size_t i = (high + low) / 2;
726a989a 1941 tree t = gimple_switch_label (switch_stmt, i);
f667741c
SB
1942 int cmp;
1943
1944 /* Cache the result of comparing CASE_LOW and val. */
1945 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 1946
f667741c
SB
1947 if (cmp > 0)
1948 high = i;
1949 else
1950 low = i;
1951
1952 if (CASE_HIGH (t) == NULL)
6de9cd9a 1953 {
f667741c
SB
1954 /* A singe-valued case label. */
1955 if (cmp == 0)
6de9cd9a
DN
1956 return t;
1957 }
1958 else
1959 {
1960 /* A case range. We can only handle integer ranges. */
f667741c 1961 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
1962 return t;
1963 }
1964 }
1965
6de9cd9a
DN
1966 return default_case;
1967}
1968
1969
6de9cd9a
DN
1970/* Dump a basic block on stderr. */
1971
1972void
726a989a 1973gimple_debug_bb (basic_block bb)
6de9cd9a 1974{
726a989a 1975 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
1976}
1977
1978
1979/* Dump basic block with index N on stderr. */
1980
1981basic_block
726a989a 1982gimple_debug_bb_n (int n)
6de9cd9a 1983{
726a989a 1984 gimple_debug_bb (BASIC_BLOCK (n));
6de9cd9a 1985 return BASIC_BLOCK (n);
6531d1be 1986}
6de9cd9a
DN
1987
1988
1989/* Dump the CFG on stderr.
1990
1991 FLAGS are the same used by the tree dumping functions
6531d1be 1992 (see TDF_* in tree-pass.h). */
6de9cd9a
DN
1993
1994void
726a989a 1995gimple_debug_cfg (int flags)
6de9cd9a 1996{
726a989a 1997 gimple_dump_cfg (stderr, flags);
6de9cd9a
DN
1998}
1999
2000
2001/* Dump the program showing basic block boundaries on the given FILE.
2002
2003 FLAGS are the same used by the tree dumping functions (see TDF_* in
2004 tree.h). */
2005
2006void
726a989a 2007gimple_dump_cfg (FILE *file, int flags)
6de9cd9a
DN
2008{
2009 if (flags & TDF_DETAILS)
2010 {
2011 const char *funcname
673fda6b 2012 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2013
2014 fputc ('\n', file);
2015 fprintf (file, ";; Function %s\n\n", funcname);
2016 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2017 n_basic_blocks, n_edges, last_basic_block);
2018
2019 brief_dump_cfg (file);
2020 fprintf (file, "\n");
2021 }
2022
2023 if (flags & TDF_STATS)
2024 dump_cfg_stats (file);
2025
2026 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2027}
2028
2029
2030/* Dump CFG statistics on FILE. */
2031
2032void
2033dump_cfg_stats (FILE *file)
2034{
2035 static long max_num_merged_labels = 0;
2036 unsigned long size, total = 0;
7b0cab99 2037 long num_edges;
6de9cd9a
DN
2038 basic_block bb;
2039 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2040 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
cac50d94 2041 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
6de9cd9a
DN
2042 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2043 const char *funcname
673fda6b 2044 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2045
2046
2047 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2048
2049 fprintf (file, "---------------------------------------------------------\n");
2050 fprintf (file, fmt_str, "", " Number of ", "Memory");
2051 fprintf (file, fmt_str, "", " instances ", "used ");
2052 fprintf (file, "---------------------------------------------------------\n");
2053
2054 size = n_basic_blocks * sizeof (struct basic_block_def);
2055 total += size;
f7fda749
RH
2056 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2057 SCALE (size), LABEL (size));
6de9cd9a 2058
7b0cab99 2059 num_edges = 0;
6de9cd9a 2060 FOR_EACH_BB (bb)
7b0cab99
JH
2061 num_edges += EDGE_COUNT (bb->succs);
2062 size = num_edges * sizeof (struct edge_def);
6de9cd9a 2063 total += size;
cac50d94 2064 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
6de9cd9a 2065
6de9cd9a
DN
2066 fprintf (file, "---------------------------------------------------------\n");
2067 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2068 LABEL (total));
2069 fprintf (file, "---------------------------------------------------------\n");
2070 fprintf (file, "\n");
2071
2072 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2073 max_num_merged_labels = cfg_stats.num_merged_labels;
2074
2075 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2076 cfg_stats.num_merged_labels, max_num_merged_labels);
2077
2078 fprintf (file, "\n");
2079}
2080
2081
2082/* Dump CFG statistics on stderr. Keep extern so that it's always
2083 linked in the final executable. */
2084
2085void
2086debug_cfg_stats (void)
2087{
2088 dump_cfg_stats (stderr);
2089}
2090
2091
2092/* Dump the flowgraph to a .vcg FILE. */
2093
2094static void
726a989a 2095gimple_cfg2vcg (FILE *file)
6de9cd9a
DN
2096{
2097 edge e;
628f6a4e 2098 edge_iterator ei;
6de9cd9a
DN
2099 basic_block bb;
2100 const char *funcname
673fda6b 2101 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2102
2103 /* Write the file header. */
2104 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2105 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2106 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2107
2108 /* Write blocks and edges. */
628f6a4e 2109 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2110 {
2111 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2112 e->dest->index);
2113
2114 if (e->flags & EDGE_FAKE)
2115 fprintf (file, " linestyle: dotted priority: 10");
2116 else
2117 fprintf (file, " linestyle: solid priority: 100");
2118
2119 fprintf (file, " }\n");
2120 }
2121 fputc ('\n', file);
2122
2123 FOR_EACH_BB (bb)
2124 {
726a989a 2125 enum gimple_code head_code, end_code;
6de9cd9a
DN
2126 const char *head_name, *end_name;
2127 int head_line = 0;
2128 int end_line = 0;
726a989a
RB
2129 gimple first = first_stmt (bb);
2130 gimple last = last_stmt (bb);
6de9cd9a
DN
2131
2132 if (first)
2133 {
726a989a
RB
2134 head_code = gimple_code (first);
2135 head_name = gimple_code_name[head_code];
6de9cd9a
DN
2136 head_line = get_lineno (first);
2137 }
2138 else
2139 head_name = "no-statement";
2140
2141 if (last)
2142 {
726a989a
RB
2143 end_code = gimple_code (last);
2144 end_name = gimple_code_name[end_code];
6de9cd9a
DN
2145 end_line = get_lineno (last);
2146 }
2147 else
2148 end_name = "no-statement";
2149
2150 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2151 bb->index, bb->index, head_name, head_line, end_name,
2152 end_line);
2153
628f6a4e 2154 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2155 {
2156 if (e->dest == EXIT_BLOCK_PTR)
2157 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2158 else
2159 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2160
2161 if (e->flags & EDGE_FAKE)
2162 fprintf (file, " priority: 10 linestyle: dotted");
2163 else
2164 fprintf (file, " priority: 100 linestyle: solid");
2165
2166 fprintf (file, " }\n");
2167 }
2168
2169 if (bb->next_bb != EXIT_BLOCK_PTR)
2170 fputc ('\n', file);
2171 }
2172
2173 fputs ("}\n\n", file);
2174}
2175
2176
2177
2178/*---------------------------------------------------------------------------
2179 Miscellaneous helpers
2180---------------------------------------------------------------------------*/
2181
2182/* Return true if T represents a stmt that always transfers control. */
2183
2184bool
726a989a 2185is_ctrl_stmt (gimple t)
6de9cd9a 2186{
40a32862
RH
2187 switch (gimple_code (t))
2188 {
2189 case GIMPLE_COND:
2190 case GIMPLE_SWITCH:
2191 case GIMPLE_GOTO:
2192 case GIMPLE_RETURN:
2193 case GIMPLE_RESX:
2194 return true;
2195 default:
2196 return false;
2197 }
6de9cd9a
DN
2198}
2199
2200
2201/* Return true if T is a statement that may alter the flow of control
2202 (e.g., a call to a non-returning function). */
2203
2204bool
726a989a 2205is_ctrl_altering_stmt (gimple t)
6de9cd9a 2206{
1e128c5f 2207 gcc_assert (t);
726a989a 2208
8b9db065 2209 switch (gimple_code (t))
6de9cd9a 2210 {
8b9db065
RH
2211 case GIMPLE_CALL:
2212 {
2213 int flags = gimple_call_flags (t);
726a989a 2214
8b9db065
RH
2215 /* A non-pure/const call alters flow control if the current
2216 function has nonlocal labels. */
2217 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2218 return true;
6de9cd9a 2219
8b9db065 2220 /* A call also alters control flow if it does not return. */
5de8da9b 2221 if (flags & ECF_NORETURN)
8b9db065
RH
2222 return true;
2223 }
2224 break;
6de9cd9a 2225
1d65f45c
RH
2226 case GIMPLE_EH_DISPATCH:
2227 /* EH_DISPATCH branches to the individual catch handlers at
2228 this level of a try or allowed-exceptions region. It can
2229 fallthru to the next statement as well. */
2230 return true;
2231
1c384bf1
RH
2232 case GIMPLE_ASM:
2233 if (gimple_asm_nlabels (t) > 0)
2234 return true;
2235 break;
2236
8b9db065
RH
2237 CASE_GIMPLE_OMP:
2238 /* OpenMP directives alter control flow. */
2239 return true;
2240
2241 default:
2242 break;
2243 }
50674e96 2244
6de9cd9a 2245 /* If a statement can throw, it alters control flow. */
726a989a 2246 return stmt_can_throw_internal (t);
6de9cd9a
DN
2247}
2248
2249
4f6c2131 2250/* Return true if T is a simple local goto. */
6de9cd9a
DN
2251
2252bool
726a989a 2253simple_goto_p (gimple t)
6de9cd9a 2254{
726a989a
RB
2255 return (gimple_code (t) == GIMPLE_GOTO
2256 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
4f6c2131
EB
2257}
2258
2259
2260/* Return true if T can make an abnormal transfer of control flow.
2261 Transfers of control flow associated with EH are excluded. */
2262
2263bool
726a989a 2264stmt_can_make_abnormal_goto (gimple t)
4f6c2131
EB
2265{
2266 if (computed_goto_p (t))
2267 return true;
726a989a
RB
2268 if (is_gimple_call (t))
2269 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
4f6c2131 2270 return false;
6de9cd9a
DN
2271}
2272
2273
726a989a
RB
2274/* Return true if STMT should start a new basic block. PREV_STMT is
2275 the statement preceding STMT. It is used when STMT is a label or a
2276 case label. Labels should only start a new basic block if their
2277 previous statement wasn't a label. Otherwise, sequence of labels
2278 would generate unnecessary basic blocks that only contain a single
2279 label. */
6de9cd9a
DN
2280
2281static inline bool
726a989a 2282stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
6de9cd9a 2283{
726a989a 2284 if (stmt == NULL)
6de9cd9a
DN
2285 return false;
2286
726a989a
RB
2287 /* Labels start a new basic block only if the preceding statement
2288 wasn't a label of the same type. This prevents the creation of
2289 consecutive blocks that have nothing but a single label. */
2290 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
2291 {
2292 /* Nonlocal and computed GOTO targets always start a new block. */
726a989a
RB
2293 if (DECL_NONLOCAL (gimple_label_label (stmt))
2294 || FORCED_LABEL (gimple_label_label (stmt)))
6de9cd9a
DN
2295 return true;
2296
726a989a 2297 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
6de9cd9a 2298 {
726a989a 2299 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
6de9cd9a
DN
2300 return true;
2301
2302 cfg_stats.num_merged_labels++;
2303 return false;
2304 }
2305 else
2306 return true;
2307 }
2308
2309 return false;
2310}
2311
2312
2313/* Return true if T should end a basic block. */
2314
2315bool
726a989a 2316stmt_ends_bb_p (gimple t)
6de9cd9a
DN
2317{
2318 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2319}
2320
726a989a 2321/* Remove block annotations and other data structures. */
6de9cd9a
DN
2322
2323void
242229bb 2324delete_tree_cfg_annotations (void)
6de9cd9a 2325{
6de9cd9a 2326 label_to_block_map = NULL;
6de9cd9a
DN
2327}
2328
2329
2330/* Return the first statement in basic block BB. */
2331
726a989a 2332gimple
6de9cd9a
DN
2333first_stmt (basic_block bb)
2334{
726a989a 2335 gimple_stmt_iterator i = gsi_start_bb (bb);
b5b8b0ac
AO
2336 gimple stmt = NULL;
2337
2338 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2339 {
2340 gsi_next (&i);
2341 stmt = NULL;
2342 }
2343 return stmt;
6de9cd9a
DN
2344}
2345
6c52e687
CC
2346/* Return the first non-label statement in basic block BB. */
2347
2348static gimple
2349first_non_label_stmt (basic_block bb)
2350{
2351 gimple_stmt_iterator i = gsi_start_bb (bb);
2352 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2353 gsi_next (&i);
2354 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2355}
2356
6de9cd9a
DN
2357/* Return the last statement in basic block BB. */
2358
726a989a 2359gimple
6de9cd9a
DN
2360last_stmt (basic_block bb)
2361{
b5b8b0ac
AO
2362 gimple_stmt_iterator i = gsi_last_bb (bb);
2363 gimple stmt = NULL;
2364
2365 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2366 {
2367 gsi_prev (&i);
2368 stmt = NULL;
2369 }
2370 return stmt;
6de9cd9a
DN
2371}
2372
6de9cd9a
DN
2373/* Return the last statement of an otherwise empty block. Return NULL
2374 if the block is totally empty, or if it contains more than one
2375 statement. */
2376
726a989a 2377gimple
6de9cd9a
DN
2378last_and_only_stmt (basic_block bb)
2379{
b5b8b0ac 2380 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
726a989a 2381 gimple last, prev;
6de9cd9a 2382
726a989a
RB
2383 if (gsi_end_p (i))
2384 return NULL;
6de9cd9a 2385
726a989a 2386 last = gsi_stmt (i);
b5b8b0ac 2387 gsi_prev_nondebug (&i);
726a989a 2388 if (gsi_end_p (i))
6de9cd9a
DN
2389 return last;
2390
2391 /* Empty statements should no longer appear in the instruction stream.
2392 Everything that might have appeared before should be deleted by
726a989a 2393 remove_useless_stmts, and the optimizers should just gsi_remove
6de9cd9a
DN
2394 instead of smashing with build_empty_stmt.
2395
2396 Thus the only thing that should appear here in a block containing
2397 one executable statement is a label. */
726a989a
RB
2398 prev = gsi_stmt (i);
2399 if (gimple_code (prev) == GIMPLE_LABEL)
6de9cd9a
DN
2400 return last;
2401 else
726a989a 2402 return NULL;
82b85a85 2403}
6de9cd9a 2404
4f7db7f7
KH
2405/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2406
2407static void
2408reinstall_phi_args (edge new_edge, edge old_edge)
2409{
ea7e6d5a
AH
2410 edge_var_map_vector v;
2411 edge_var_map *vm;
2412 int i;
726a989a 2413 gimple_stmt_iterator phis;
b8698a0f 2414
ea7e6d5a
AH
2415 v = redirect_edge_var_map_vector (old_edge);
2416 if (!v)
4f7db7f7 2417 return;
b8698a0f 2418
726a989a
RB
2419 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2420 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2421 i++, gsi_next (&phis))
4f7db7f7 2422 {
726a989a 2423 gimple phi = gsi_stmt (phis);
ea7e6d5a
AH
2424 tree result = redirect_edge_var_map_result (vm);
2425 tree arg = redirect_edge_var_map_def (vm);
b8698a0f 2426
726a989a 2427 gcc_assert (result == gimple_phi_result (phi));
b8698a0f 2428
f5045c96 2429 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
4f7db7f7 2430 }
b8698a0f 2431
ea7e6d5a 2432 redirect_edge_var_map_clear (old_edge);
4f7db7f7
KH
2433}
2434
2a8a8292 2435/* Returns the basic block after which the new basic block created
b9a66240
ZD
2436 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2437 near its "logical" location. This is of most help to humans looking
2438 at debugging dumps. */
2439
2440static basic_block
2441split_edge_bb_loc (edge edge_in)
2442{
2443 basic_block dest = edge_in->dest;
88e24a5a 2444 basic_block dest_prev = dest->prev_bb;
b9a66240 2445
88e24a5a
RH
2446 if (dest_prev)
2447 {
2448 edge e = find_edge (dest_prev, dest);
2449 if (e && !(e->flags & EDGE_COMPLEX))
2450 return edge_in->src;
2451 }
2452 return dest_prev;
b9a66240
ZD
2453}
2454
6de9cd9a
DN
2455/* Split a (typically critical) edge EDGE_IN. Return the new block.
2456 Abort on abnormal edges. */
2457
2458static basic_block
726a989a 2459gimple_split_edge (edge edge_in)
6de9cd9a 2460{
4741d956 2461 basic_block new_bb, after_bb, dest;
6de9cd9a 2462 edge new_edge, e;
6de9cd9a
DN
2463
2464 /* Abnormal edges cannot be split. */
1e128c5f 2465 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a 2466
6de9cd9a
DN
2467 dest = edge_in->dest;
2468
b9a66240 2469 after_bb = split_edge_bb_loc (edge_in);
6de9cd9a
DN
2470
2471 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
2472 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2473 new_bb->count = edge_in->count;
6de9cd9a 2474 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
2475 new_edge->probability = REG_BR_PROB_BASE;
2476 new_edge->count = edge_in->count;
6de9cd9a 2477
1e128c5f 2478 e = redirect_edge_and_branch (edge_in, new_bb);
c7b852c8 2479 gcc_assert (e == edge_in);
4f7db7f7 2480 reinstall_phi_args (new_edge, e);
6de9cd9a
DN
2481
2482 return new_bb;
2483}
2484
6de9cd9a 2485/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
2486 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2487 inside a PHI node. */
6de9cd9a
DN
2488
2489static tree
2fbe90f2 2490verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
2491{
2492 tree t = *tp, x;
2493
2494 if (TYPE_P (t))
2495 *walk_subtrees = 0;
6531d1be 2496
e8ca4159 2497 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 2498#define CHECK_OP(N, MSG) \
e8ca4159 2499 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 2500 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
2501
2502 switch (TREE_CODE (t))
2503 {
2504 case SSA_NAME:
2505 if (SSA_NAME_IN_FREE_LIST (t))
2506 {
2507 error ("SSA name in freelist but still referenced");
2508 return *tp;
2509 }
2510 break;
2511
26de0bcb
AP
2512 case INDIRECT_REF:
2513 x = TREE_OPERAND (t, 0);
2514 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2515 {
2516 error ("Indirect reference's operand is not a register or a constant.");
2517 return x;
2518 }
2519 break;
2520
0bca51f0
DN
2521 case ASSERT_EXPR:
2522 x = fold (ASSERT_EXPR_COND (t));
2523 if (x == boolean_false_node)
2524 {
2525 error ("ASSERT_EXPR with an always-false condition");
2526 return *tp;
2527 }
2528 break;
2529
6de9cd9a 2530 case MODIFY_EXPR:
26de0bcb 2531 error ("MODIFY_EXPR not expected while having tuples.");
e57fcb68 2532 return *tp;
6de9cd9a
DN
2533
2534 case ADDR_EXPR:
81fc3052 2535 {
81fc3052
DB
2536 bool old_constant;
2537 bool old_side_effects;
81fc3052
DB
2538 bool new_constant;
2539 bool new_side_effects;
2540
51eed280
PB
2541 gcc_assert (is_gimple_address (t));
2542
81fc3052
DB
2543 old_constant = TREE_CONSTANT (t);
2544 old_side_effects = TREE_SIDE_EFFECTS (t);
2545
127203ac 2546 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
2547 new_side_effects = TREE_SIDE_EFFECTS (t);
2548 new_constant = TREE_CONSTANT (t);
2549
81fc3052
DB
2550 if (old_constant != new_constant)
2551 {
2552 error ("constant not recomputed when ADDR_EXPR changed");
2553 return t;
2554 }
2555 if (old_side_effects != new_side_effects)
2556 {
2557 error ("side effects not recomputed when ADDR_EXPR changed");
2558 return t;
2559 }
2560
2561 /* Skip any references (they will be checked when we recurse down the
2562 tree) and ensure that any variable used as a prefix is marked
2563 addressable. */
2564 for (x = TREE_OPERAND (t, 0);
2565 handled_component_p (x);
2566 x = TREE_OPERAND (x, 0))
2567 ;
2568
5006671f
RG
2569 if (!(TREE_CODE (x) == VAR_DECL
2570 || TREE_CODE (x) == PARM_DECL
2571 || TREE_CODE (x) == RESULT_DECL))
81fc3052
DB
2572 return NULL;
2573 if (!TREE_ADDRESSABLE (x))
2574 {
2575 error ("address taken, but ADDRESSABLE bit not set");
2576 return x;
2577 }
ba4d8f9d
RG
2578 if (DECL_GIMPLE_REG_P (x))
2579 {
2580 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2581 return x;
2582 }
bdb69bee 2583
81fc3052
DB
2584 break;
2585 }
6de9cd9a
DN
2586
2587 case COND_EXPR:
a6234684 2588 x = COND_EXPR_COND (t);
d40055ab 2589 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 2590 {
d40055ab 2591 error ("non-integral used in condition");
6de9cd9a
DN
2592 return x;
2593 }
9c691961
AP
2594 if (!is_gimple_condexpr (x))
2595 {
ab532386 2596 error ("invalid conditional operand");
9c691961
AP
2597 return x;
2598 }
6de9cd9a
DN
2599 break;
2600
a134e5f3
TB
2601 case NON_LVALUE_EXPR:
2602 gcc_unreachable ();
2603
1043771b 2604 CASE_CONVERT:
6de9cd9a 2605 case FIX_TRUNC_EXPR:
6de9cd9a
DN
2606 case FLOAT_EXPR:
2607 case NEGATE_EXPR:
2608 case ABS_EXPR:
2609 case BIT_NOT_EXPR:
6de9cd9a 2610 case TRUTH_NOT_EXPR:
ab532386 2611 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
2612 break;
2613
2614 case REALPART_EXPR:
2615 case IMAGPART_EXPR:
2fbe90f2
RK
2616 case COMPONENT_REF:
2617 case ARRAY_REF:
2618 case ARRAY_RANGE_REF:
2619 case BIT_FIELD_REF:
2620 case VIEW_CONVERT_EXPR:
2621 /* We have a nest of references. Verify that each of the operands
2622 that determine where to reference is either a constant or a variable,
2623 verify that the base is valid, and then show we've already checked
2624 the subtrees. */
afe84921 2625 while (handled_component_p (t))
2fbe90f2
RK
2626 {
2627 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 2628 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
2629 else if (TREE_CODE (t) == ARRAY_REF
2630 || TREE_CODE (t) == ARRAY_RANGE_REF)
2631 {
ab532386 2632 CHECK_OP (1, "invalid array index");
2fbe90f2 2633 if (TREE_OPERAND (t, 2))
ab532386 2634 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 2635 if (TREE_OPERAND (t, 3))
ab532386 2636 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
2637 }
2638 else if (TREE_CODE (t) == BIT_FIELD_REF)
2639 {
e55f42fb
RG
2640 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2641 || !host_integerp (TREE_OPERAND (t, 2), 1))
2642 {
2643 error ("invalid position or size operand to BIT_FIELD_REF");
2644 return t;
2645 }
fc0f49f3
RG
2646 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2647 && (TYPE_PRECISION (TREE_TYPE (t))
2648 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2649 {
2650 error ("integral result type precision does not match "
2651 "field size of BIT_FIELD_REF");
2652 return t;
2653 }
2654 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2655 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2656 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2657 {
2658 error ("mode precision of non-integral result does not "
2659 "match field size of BIT_FIELD_REF");
2660 return t;
2661 }
2fbe90f2
RK
2662 }
2663
2664 t = TREE_OPERAND (t, 0);
2665 }
2666
bb0c55f6 2667 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 2668 {
ab532386 2669 error ("invalid reference prefix");
2fbe90f2
RK
2670 return t;
2671 }
2672 *walk_subtrees = 0;
6de9cd9a 2673 break;
5be014d5
AP
2674 case PLUS_EXPR:
2675 case MINUS_EXPR:
2676 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2677 POINTER_PLUS_EXPR. */
2678 if (POINTER_TYPE_P (TREE_TYPE (t)))
2679 {
2680 error ("invalid operand to plus/minus, type is a pointer");
2681 return t;
2682 }
2683 CHECK_OP (0, "invalid operand to binary operator");
2684 CHECK_OP (1, "invalid operand to binary operator");
2685 break;
6de9cd9a 2686
5be014d5
AP
2687 case POINTER_PLUS_EXPR:
2688 /* Check to make sure the first operand is a pointer or reference type. */
2689 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2690 {
2691 error ("invalid operand to pointer plus, first operand is not a pointer");
2692 return t;
2693 }
2694 /* Check to make sure the second operand is an integer with type of
2695 sizetype. */
36618b93
RG
2696 if (!useless_type_conversion_p (sizetype,
2697 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
2698 {
2699 error ("invalid operand to pointer plus, second operand is not an "
2700 "integer with type of sizetype.");
2701 return t;
2702 }
2703 /* FALLTHROUGH */
6de9cd9a
DN
2704 case LT_EXPR:
2705 case LE_EXPR:
2706 case GT_EXPR:
2707 case GE_EXPR:
2708 case EQ_EXPR:
2709 case NE_EXPR:
2710 case UNORDERED_EXPR:
2711 case ORDERED_EXPR:
2712 case UNLT_EXPR:
2713 case UNLE_EXPR:
2714 case UNGT_EXPR:
2715 case UNGE_EXPR:
2716 case UNEQ_EXPR:
d1a7edaf 2717 case LTGT_EXPR:
6de9cd9a
DN
2718 case MULT_EXPR:
2719 case TRUNC_DIV_EXPR:
2720 case CEIL_DIV_EXPR:
2721 case FLOOR_DIV_EXPR:
2722 case ROUND_DIV_EXPR:
2723 case TRUNC_MOD_EXPR:
2724 case CEIL_MOD_EXPR:
2725 case FLOOR_MOD_EXPR:
2726 case ROUND_MOD_EXPR:
2727 case RDIV_EXPR:
2728 case EXACT_DIV_EXPR:
2729 case MIN_EXPR:
2730 case MAX_EXPR:
2731 case LSHIFT_EXPR:
2732 case RSHIFT_EXPR:
2733 case LROTATE_EXPR:
2734 case RROTATE_EXPR:
2735 case BIT_IOR_EXPR:
2736 case BIT_XOR_EXPR:
2737 case BIT_AND_EXPR:
ab532386
JM
2738 CHECK_OP (0, "invalid operand to binary operator");
2739 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
2740 break;
2741
84816907
JM
2742 case CONSTRUCTOR:
2743 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2744 *walk_subtrees = 0;
2745 break;
2746
6de9cd9a
DN
2747 default:
2748 break;
2749 }
2750 return NULL;
2fbe90f2
RK
2751
2752#undef CHECK_OP
6de9cd9a
DN
2753}
2754
7e98624c
RG
2755
2756/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2757 Returns true if there is an error, otherwise false. */
2758
2759static bool
726a989a 2760verify_types_in_gimple_min_lval (tree expr)
7e98624c
RG
2761{
2762 tree op;
2763
2764 if (is_gimple_id (expr))
2765 return false;
2766
9f509004
RG
2767 if (!INDIRECT_REF_P (expr)
2768 && TREE_CODE (expr) != TARGET_MEM_REF)
7e98624c
RG
2769 {
2770 error ("invalid expression for min lvalue");
2771 return true;
2772 }
2773
9f509004
RG
2774 /* TARGET_MEM_REFs are strange beasts. */
2775 if (TREE_CODE (expr) == TARGET_MEM_REF)
2776 return false;
2777
7e98624c
RG
2778 op = TREE_OPERAND (expr, 0);
2779 if (!is_gimple_val (op))
2780 {
2781 error ("invalid operand in indirect reference");
2782 debug_generic_stmt (op);
2783 return true;
2784 }
2785 if (!useless_type_conversion_p (TREE_TYPE (expr),
2786 TREE_TYPE (TREE_TYPE (op))))
2787 {
2788 error ("type mismatch in indirect reference");
2789 debug_generic_stmt (TREE_TYPE (expr));
2790 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2791 return true;
2792 }
2793
2794 return false;
2795}
2796
3a19701a
RG
2797/* Verify if EXPR is a valid GIMPLE reference expression. If
2798 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
7e98624c
RG
2799 if there is an error, otherwise false. */
2800
2801static bool
3a19701a 2802verify_types_in_gimple_reference (tree expr, bool require_lvalue)
7e98624c
RG
2803{
2804 while (handled_component_p (expr))
2805 {
2806 tree op = TREE_OPERAND (expr, 0);
2807
2808 if (TREE_CODE (expr) == ARRAY_REF
2809 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2810 {
2811 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2812 || (TREE_OPERAND (expr, 2)
2813 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2814 || (TREE_OPERAND (expr, 3)
2815 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2816 {
2817 error ("invalid operands to array reference");
2818 debug_generic_stmt (expr);
2819 return true;
2820 }
2821 }
2822
2823 /* Verify if the reference array element types are compatible. */
2824 if (TREE_CODE (expr) == ARRAY_REF
2825 && !useless_type_conversion_p (TREE_TYPE (expr),
2826 TREE_TYPE (TREE_TYPE (op))))
2827 {
2828 error ("type mismatch in array reference");
2829 debug_generic_stmt (TREE_TYPE (expr));
2830 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2831 return true;
2832 }
2833 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2834 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2835 TREE_TYPE (TREE_TYPE (op))))
2836 {
2837 error ("type mismatch in array range reference");
2838 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2839 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2840 return true;
2841 }
2842
2843 if ((TREE_CODE (expr) == REALPART_EXPR
2844 || TREE_CODE (expr) == IMAGPART_EXPR)
2845 && !useless_type_conversion_p (TREE_TYPE (expr),
2846 TREE_TYPE (TREE_TYPE (op))))
2847 {
2848 error ("type mismatch in real/imagpart reference");
2849 debug_generic_stmt (TREE_TYPE (expr));
2850 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2851 return true;
2852 }
2853
2854 if (TREE_CODE (expr) == COMPONENT_REF
2855 && !useless_type_conversion_p (TREE_TYPE (expr),
2856 TREE_TYPE (TREE_OPERAND (expr, 1))))
2857 {
2858 error ("type mismatch in component reference");
2859 debug_generic_stmt (TREE_TYPE (expr));
2860 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2861 return true;
2862 }
2863
cc12e760
MJ
2864 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2865 {
2866 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2867 that their operand is not an SSA name or an invariant when
2868 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2869 bug). Otherwise there is nothing to verify, gross mismatches at
2870 most invoke undefined behavior. */
2871 if (require_lvalue
2872 && (TREE_CODE (op) == SSA_NAME
2873 || is_gimple_min_invariant (op)))
2874 {
2875 error ("Conversion of an SSA_NAME on the left hand side.");
2876 debug_generic_stmt (expr);
2877 return true;
2878 }
2879 else if (!handled_component_p (op))
2880 return false;
2881 }
7e98624c
RG
2882
2883 expr = op;
2884 }
2885
3a19701a
RG
2886 return ((require_lvalue || !is_gimple_min_invariant (expr))
2887 && verify_types_in_gimple_min_lval (expr));
7e98624c
RG
2888}
2889
20dcff2a
RG
2890/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2891 list of pointer-to types that is trivially convertible to DEST. */
2892
2893static bool
2894one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2895{
2896 tree src;
2897
2898 if (!TYPE_POINTER_TO (src_obj))
2899 return true;
2900
2901 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2902 if (useless_type_conversion_p (dest, src))
2903 return true;
2904
2905 return false;
2906}
2907
726a989a
RB
2908/* Return true if TYPE1 is a fixed-point type and if conversions to and
2909 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2910
2911static bool
2912valid_fixed_convert_types_p (tree type1, tree type2)
2913{
2914 return (FIXED_POINT_TYPE_P (type1)
2915 && (INTEGRAL_TYPE_P (type2)
2916 || SCALAR_FLOAT_TYPE_P (type2)
2917 || FIXED_POINT_TYPE_P (type2)));
2918}
2919
726a989a
RB
2920/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2921 is a problem, otherwise false. */
2922
2923static bool
b59d3976 2924verify_gimple_call (gimple stmt)
726a989a 2925{
b59d3976
RG
2926 tree fn = gimple_call_fn (stmt);
2927 tree fntype;
f68a75df
RG
2928 unsigned i;
2929
2930 if (TREE_CODE (fn) != OBJ_TYPE_REF
2931 && !is_gimple_val (fn))
2932 {
2933 error ("invalid function in gimple call");
2934 debug_generic_stmt (fn);
2935 return true;
2936 }
726a989a 2937
b59d3976
RG
2938 if (!POINTER_TYPE_P (TREE_TYPE (fn))
2939 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2940 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2941 {
2942 error ("non-function in gimple call");
2943 return true;
2944 }
726a989a 2945
b59d3976 2946 if (gimple_call_lhs (stmt)
cc12e760
MJ
2947 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2948 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
b59d3976
RG
2949 {
2950 error ("invalid LHS in gimple call");
2951 return true;
2952 }
726a989a 2953
5de8da9b
AO
2954 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
2955 {
2956 error ("LHS in noreturn call");
2957 return true;
2958 }
2959
b59d3976
RG
2960 fntype = TREE_TYPE (TREE_TYPE (fn));
2961 if (gimple_call_lhs (stmt)
2962 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
2963 TREE_TYPE (fntype))
2964 /* ??? At least C++ misses conversions at assignments from
2965 void * call results.
2966 ??? Java is completely off. Especially with functions
2967 returning java.lang.Object.
2968 For now simply allow arbitrary pointer type conversions. */
2969 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
2970 && POINTER_TYPE_P (TREE_TYPE (fntype))))
2971 {
2972 error ("invalid conversion in gimple call");
2973 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
2974 debug_generic_stmt (TREE_TYPE (fntype));
2975 return true;
2976 }
726a989a 2977
f68a75df
RG
2978 if (gimple_call_chain (stmt)
2979 && !is_gimple_val (gimple_call_chain (stmt)))
2980 {
2981 error ("invalid static chain in gimple call");
2982 debug_generic_stmt (gimple_call_chain (stmt));
2983 return true;
2984 }
2985
fe663f4e 2986 /* If there is a static chain argument, this should not be an indirect
9f62cb92 2987 call, and the decl should have DECL_STATIC_CHAIN set. */
fe663f4e
RH
2988 if (gimple_call_chain (stmt))
2989 {
2990 if (TREE_CODE (fn) != ADDR_EXPR
2991 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
2992 {
2993 error ("static chain in indirect gimple call");
2994 return true;
2995 }
2996 fn = TREE_OPERAND (fn, 0);
2997
9f62cb92 2998 if (!DECL_STATIC_CHAIN (fn))
fe663f4e
RH
2999 {
3000 error ("static chain with function that doesn't use one");
3001 return true;
3002 }
3003 }
3004
b59d3976
RG
3005 /* ??? The C frontend passes unpromoted arguments in case it
3006 didn't see a function declaration before the call. So for now
f68a75df 3007 leave the call arguments mostly unverified. Once we gimplify
b59d3976 3008 unit-at-a-time we have a chance to fix this. */
726a989a 3009
f68a75df
RG
3010 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3011 {
3012 tree arg = gimple_call_arg (stmt, i);
3013 if (!is_gimple_operand (arg))
3014 {
3015 error ("invalid argument to gimple call");
3016 debug_generic_expr (arg);
3017 }
3018 }
3019
b59d3976 3020 return false;
726a989a
RB
3021}
3022
b59d3976
RG
3023/* Verifies the gimple comparison with the result type TYPE and
3024 the operands OP0 and OP1. */
17d23165
RS
3025
3026static bool
b59d3976 3027verify_gimple_comparison (tree type, tree op0, tree op1)
17d23165 3028{
b59d3976
RG
3029 tree op0_type = TREE_TYPE (op0);
3030 tree op1_type = TREE_TYPE (op1);
726a989a 3031
b59d3976
RG
3032 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3033 {
3034 error ("invalid operands in gimple comparison");
3035 return true;
3036 }
17d23165 3037
b59d3976
RG
3038 /* For comparisons we do not have the operations type as the
3039 effective type the comparison is carried out in. Instead
3040 we require that either the first operand is trivially
3041 convertible into the second, or the other way around.
3042 The resulting type of a comparison may be any integral type.
3043 Because we special-case pointers to void we allow
3044 comparisons of pointers with the same mode as well. */
3045 if ((!useless_type_conversion_p (op0_type, op1_type)
3046 && !useless_type_conversion_p (op1_type, op0_type)
3047 && (!POINTER_TYPE_P (op0_type)
3048 || !POINTER_TYPE_P (op1_type)
3049 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3050 || !INTEGRAL_TYPE_P (type))
3051 {
3052 error ("type mismatch in comparison expression");
3053 debug_generic_expr (type);
3054 debug_generic_expr (op0_type);
3055 debug_generic_expr (op1_type);
3056 return true;
3057 }
3058
3059 return false;
3060}
726a989a 3061
9f509004
RG
3062/* Verify a gimple assignment statement STMT with an unary rhs.
3063 Returns true if anything is wrong. */
7e98624c
RG
3064
3065static bool
9f509004 3066verify_gimple_assign_unary (gimple stmt)
7e98624c 3067{
726a989a
RB
3068 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3069 tree lhs = gimple_assign_lhs (stmt);
726a989a 3070 tree lhs_type = TREE_TYPE (lhs);
9f509004 3071 tree rhs1 = gimple_assign_rhs1 (stmt);
726a989a 3072 tree rhs1_type = TREE_TYPE (rhs1);
7e98624c 3073
9f509004
RG
3074 if (!is_gimple_reg (lhs)
3075 && !(optimize == 0
3076 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3077 {
3078 error ("non-register as LHS of unary operation");
3079 return true;
3080 }
3081
3082 if (!is_gimple_val (rhs1))
3083 {
3084 error ("invalid operand in unary operation");
3085 return true;
3086 }
3087
3088 /* First handle conversions. */
726a989a 3089 switch (rhs_code)
7e98624c 3090 {
1043771b 3091 CASE_CONVERT:
7e98624c 3092 {
7e98624c 3093 /* Allow conversions between integral types and pointers only if
9f509004
RG
3094 there is no sign or zero extension involved.
3095 For targets were the precision of sizetype doesn't match that
3096 of pointers we need to allow arbitrary conversions from and
3097 to sizetype. */
3098 if ((POINTER_TYPE_P (lhs_type)
3099 && INTEGRAL_TYPE_P (rhs1_type)
3100 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3101 || rhs1_type == sizetype))
3102 || (POINTER_TYPE_P (rhs1_type)
3103 && INTEGRAL_TYPE_P (lhs_type)
3104 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3105 || lhs_type == sizetype)))
7e98624c
RG
3106 return false;
3107
3108 /* Allow conversion from integer to offset type and vice versa. */
726a989a
RB
3109 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3110 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3111 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3112 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
7e98624c
RG
3113 return false;
3114
3115 /* Otherwise assert we are converting between types of the
3116 same kind. */
726a989a 3117 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
7e98624c
RG
3118 {
3119 error ("invalid types in nop conversion");
726a989a
RB
3120 debug_generic_expr (lhs_type);
3121 debug_generic_expr (rhs1_type);
7e98624c
RG
3122 return true;
3123 }
3124
3125 return false;
3126 }
3127
09e881c9
BE
3128 case ADDR_SPACE_CONVERT_EXPR:
3129 {
3130 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3131 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3132 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3133 {
3134 error ("invalid types in address space conversion");
3135 debug_generic_expr (lhs_type);
3136 debug_generic_expr (rhs1_type);
3137 return true;
3138 }
3139
3140 return false;
3141 }
3142
17d23165
RS
3143 case FIXED_CONVERT_EXPR:
3144 {
726a989a
RB
3145 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3146 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
17d23165
RS
3147 {
3148 error ("invalid types in fixed-point conversion");
726a989a
RB
3149 debug_generic_expr (lhs_type);
3150 debug_generic_expr (rhs1_type);
17d23165
RS
3151 return true;
3152 }
3153
3154 return false;
3155 }
3156
7e98624c
RG
3157 case FLOAT_EXPR:
3158 {
726a989a 3159 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
7e98624c
RG
3160 {
3161 error ("invalid types in conversion to floating point");
726a989a
RB
3162 debug_generic_expr (lhs_type);
3163 debug_generic_expr (rhs1_type);
7e98624c
RG
3164 return true;
3165 }
726a989a 3166
7e98624c
RG
3167 return false;
3168 }
3169
3170 case FIX_TRUNC_EXPR:
3171 {
726a989a 3172 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
7e98624c
RG
3173 {
3174 error ("invalid types in conversion to integer");
726a989a
RB
3175 debug_generic_expr (lhs_type);
3176 debug_generic_expr (rhs1_type);
7e98624c
RG
3177 return true;
3178 }
726a989a 3179
7e98624c
RG
3180 return false;
3181 }
3182
587aa063
RG
3183 case VEC_UNPACK_HI_EXPR:
3184 case VEC_UNPACK_LO_EXPR:
3185 case REDUC_MAX_EXPR:
3186 case REDUC_MIN_EXPR:
3187 case REDUC_PLUS_EXPR:
3188 case VEC_UNPACK_FLOAT_HI_EXPR:
3189 case VEC_UNPACK_FLOAT_LO_EXPR:
3190 /* FIXME. */
3191 return false;
9f509004 3192
587aa063 3193 case TRUTH_NOT_EXPR:
9f509004
RG
3194 case NEGATE_EXPR:
3195 case ABS_EXPR:
3196 case BIT_NOT_EXPR:
3197 case PAREN_EXPR:
3198 case NON_LVALUE_EXPR:
3199 case CONJ_EXPR:
9f509004
RG
3200 break;
3201
3202 default:
3203 gcc_unreachable ();
3204 }
3205
3206 /* For the remaining codes assert there is no conversion involved. */
3207 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3208 {
3209 error ("non-trivial conversion in unary operation");
3210 debug_generic_expr (lhs_type);
3211 debug_generic_expr (rhs1_type);
3212 return true;
3213 }
3214
3215 return false;
3216}
3217
3218/* Verify a gimple assignment statement STMT with a binary rhs.
3219 Returns true if anything is wrong. */
3220
3221static bool
3222verify_gimple_assign_binary (gimple stmt)
3223{
3224 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3225 tree lhs = gimple_assign_lhs (stmt);
3226 tree lhs_type = TREE_TYPE (lhs);
3227 tree rhs1 = gimple_assign_rhs1 (stmt);
3228 tree rhs1_type = TREE_TYPE (rhs1);
3229 tree rhs2 = gimple_assign_rhs2 (stmt);
3230 tree rhs2_type = TREE_TYPE (rhs2);
3231
3232 if (!is_gimple_reg (lhs)
3233 && !(optimize == 0
3234 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3235 {
3236 error ("non-register as LHS of binary operation");
3237 return true;
3238 }
726a989a 3239
9f509004
RG
3240 if (!is_gimple_val (rhs1)
3241 || !is_gimple_val (rhs2))
3242 {
3243 error ("invalid operands in binary operation");
3244 return true;
3245 }
3246
3247 /* First handle operations that involve different types. */
3248 switch (rhs_code)
3249 {
3250 case COMPLEX_EXPR:
3251 {
3252 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3253 || !(INTEGRAL_TYPE_P (rhs1_type)
726a989a 3254 || SCALAR_FLOAT_TYPE_P (rhs1_type))
9f509004 3255 || !(INTEGRAL_TYPE_P (rhs2_type)
726a989a 3256 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
7e98624c
RG
3257 {
3258 error ("type mismatch in complex expression");
726a989a
RB
3259 debug_generic_expr (lhs_type);
3260 debug_generic_expr (rhs1_type);
3261 debug_generic_expr (rhs2_type);
7e98624c
RG
3262 return true;
3263 }
726a989a 3264
7e98624c
RG
3265 return false;
3266 }
3267
7e98624c
RG
3268 case LSHIFT_EXPR:
3269 case RSHIFT_EXPR:
3270 case LROTATE_EXPR:
3271 case RROTATE_EXPR:
3272 {
587aa063
RG
3273 /* Shifts and rotates are ok on integral types, fixed point
3274 types and integer vector types. */
3275 if ((!INTEGRAL_TYPE_P (rhs1_type)
3276 && !FIXED_POINT_TYPE_P (rhs1_type)
3277 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
bf8e3b77 3278 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
587aa063
RG
3279 || (!INTEGRAL_TYPE_P (rhs2_type)
3280 /* Vector shifts of vectors are also ok. */
3281 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
bf8e3b77 3282 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
587aa063 3283 && TREE_CODE (rhs2_type) == VECTOR_TYPE
bf8e3b77 3284 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
726a989a 3285 || !useless_type_conversion_p (lhs_type, rhs1_type))
7e98624c
RG
3286 {
3287 error ("type mismatch in shift expression");
726a989a
RB
3288 debug_generic_expr (lhs_type);
3289 debug_generic_expr (rhs1_type);
3290 debug_generic_expr (rhs2_type);
7e98624c
RG
3291 return true;
3292 }
726a989a 3293
7e98624c
RG
3294 return false;
3295 }
3296
9f509004
RG
3297 case VEC_LSHIFT_EXPR:
3298 case VEC_RSHIFT_EXPR:
7e98624c 3299 {
9f509004 3300 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
0009b473 3301 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
1fe479fd
RG
3302 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3303 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
9f509004
RG
3304 || (!INTEGRAL_TYPE_P (rhs2_type)
3305 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3306 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3307 || !useless_type_conversion_p (lhs_type, rhs1_type))
7e98624c 3308 {
9f509004
RG
3309 error ("type mismatch in vector shift expression");
3310 debug_generic_expr (lhs_type);
3311 debug_generic_expr (rhs1_type);
3312 debug_generic_expr (rhs2_type);
7e98624c
RG
3313 return true;
3314 }
1fe479fd
RG
3315 /* For shifting a vector of floating point components we
3316 only allow shifting by a constant multiple of the element size. */
3317 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3318 && (TREE_CODE (rhs2) != INTEGER_CST
3319 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3320 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3321 {
3322 error ("non-element sized vector shift of floating point vector");
3323 return true;
3324 }
726a989a 3325
9f509004 3326 return false;
7e98624c
RG
3327 }
3328
646bea10
RG
3329 case PLUS_EXPR:
3330 {
3331 /* We use regular PLUS_EXPR for vectors.
3332 ??? This just makes the checker happy and may not be what is
3333 intended. */
3334 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3335 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3336 {
3337 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3338 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3339 {
3340 error ("invalid non-vector operands to vector valued plus");
3341 return true;
3342 }
3343 lhs_type = TREE_TYPE (lhs_type);
3344 rhs1_type = TREE_TYPE (rhs1_type);
3345 rhs2_type = TREE_TYPE (rhs2_type);
3346 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3347 the pointer to 2nd place. */
3348 if (POINTER_TYPE_P (rhs2_type))
3349 {
3350 tree tem = rhs1_type;
3351 rhs1_type = rhs2_type;
3352 rhs2_type = tem;
3353 }
3354 goto do_pointer_plus_expr_check;
3355 }
3356 }
3357 /* Fallthru. */
3358 case MINUS_EXPR:
3359 {
3360 if (POINTER_TYPE_P (lhs_type)
3361 || POINTER_TYPE_P (rhs1_type)
3362 || POINTER_TYPE_P (rhs2_type))
3363 {
3364 error ("invalid (pointer) operands to plus/minus");
3365 return true;
3366 }
3367
3368 /* Continue with generic binary expression handling. */
3369 break;
3370 }
3371
7e98624c
RG
3372 case POINTER_PLUS_EXPR:
3373 {
646bea10 3374do_pointer_plus_expr_check:
726a989a
RB
3375 if (!POINTER_TYPE_P (rhs1_type)
3376 || !useless_type_conversion_p (lhs_type, rhs1_type)
3377 || !useless_type_conversion_p (sizetype, rhs2_type))
7e98624c
RG
3378 {
3379 error ("type mismatch in pointer plus expression");
726a989a
RB
3380 debug_generic_stmt (lhs_type);
3381 debug_generic_stmt (rhs1_type);
3382 debug_generic_stmt (rhs2_type);
7e98624c
RG
3383 return true;
3384 }
7e98624c 3385
726a989a 3386 return false;
b8698a0f 3387 }
7e98624c 3388
7e98624c
RG
3389 case TRUTH_ANDIF_EXPR:
3390 case TRUTH_ORIF_EXPR:
2893f753
RAE
3391 gcc_unreachable ();
3392
7e98624c
RG
3393 case TRUTH_AND_EXPR:
3394 case TRUTH_OR_EXPR:
3395 case TRUTH_XOR_EXPR:
3396 {
7e98624c 3397 /* We allow any kind of integral typed argument and result. */
726a989a
RB
3398 if (!INTEGRAL_TYPE_P (rhs1_type)
3399 || !INTEGRAL_TYPE_P (rhs2_type)
3400 || !INTEGRAL_TYPE_P (lhs_type))
7e98624c
RG
3401 {
3402 error ("type mismatch in binary truth expression");
726a989a
RB
3403 debug_generic_expr (lhs_type);
3404 debug_generic_expr (rhs1_type);
3405 debug_generic_expr (rhs2_type);
7e98624c
RG
3406 return true;
3407 }
3408
3409 return false;
3410 }
3411
9f509004
RG
3412 case LT_EXPR:
3413 case LE_EXPR:
3414 case GT_EXPR:
3415 case GE_EXPR:
3416 case EQ_EXPR:
3417 case NE_EXPR:
3418 case UNORDERED_EXPR:
3419 case ORDERED_EXPR:
3420 case UNLT_EXPR:
3421 case UNLE_EXPR:
3422 case UNGT_EXPR:
3423 case UNGE_EXPR:
3424 case UNEQ_EXPR:
3425 case LTGT_EXPR:
3426 /* Comparisons are also binary, but the result type is not
3427 connected to the operand types. */
3428 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
7e98624c 3429
587aa063
RG
3430 case WIDEN_SUM_EXPR:
3431 case WIDEN_MULT_EXPR:
3432 case VEC_WIDEN_MULT_HI_EXPR:
3433 case VEC_WIDEN_MULT_LO_EXPR:
3434 case VEC_PACK_TRUNC_EXPR:
3435 case VEC_PACK_SAT_EXPR:
3436 case VEC_PACK_FIX_TRUNC_EXPR:
3437 case VEC_EXTRACT_EVEN_EXPR:
3438 case VEC_EXTRACT_ODD_EXPR:
3439 case VEC_INTERLEAVE_HIGH_EXPR:
3440 case VEC_INTERLEAVE_LOW_EXPR:
3441 /* FIXME. */
3442 return false;
3443
9f509004
RG
3444 case MULT_EXPR:
3445 case TRUNC_DIV_EXPR:
3446 case CEIL_DIV_EXPR:
3447 case FLOOR_DIV_EXPR:
3448 case ROUND_DIV_EXPR:
3449 case TRUNC_MOD_EXPR:
3450 case CEIL_MOD_EXPR:
3451 case FLOOR_MOD_EXPR:
3452 case ROUND_MOD_EXPR:
3453 case RDIV_EXPR:
3454 case EXACT_DIV_EXPR:
3455 case MIN_EXPR:
3456 case MAX_EXPR:
3457 case BIT_IOR_EXPR:
3458 case BIT_XOR_EXPR:
3459 case BIT_AND_EXPR:
9f509004
RG
3460 /* Continue with generic binary expression handling. */
3461 break;
7e98624c 3462
9f509004
RG
3463 default:
3464 gcc_unreachable ();
3465 }
b691d4b0 3466
9f509004
RG
3467 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3468 || !useless_type_conversion_p (lhs_type, rhs2_type))
3469 {
3470 error ("type mismatch in binary expression");
3471 debug_generic_stmt (lhs_type);
3472 debug_generic_stmt (rhs1_type);
3473 debug_generic_stmt (rhs2_type);
3474 return true;
3475 }
3476
3477 return false;
3478}
3479
3480/* Verify a gimple assignment statement STMT with a single rhs.
3481 Returns true if anything is wrong. */
3482
3483static bool
3484verify_gimple_assign_single (gimple stmt)
3485{
3486 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3487 tree lhs = gimple_assign_lhs (stmt);
3488 tree lhs_type = TREE_TYPE (lhs);
3489 tree rhs1 = gimple_assign_rhs1 (stmt);
3490 tree rhs1_type = TREE_TYPE (rhs1);
3491 bool res = false;
3492
3493 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3494 {
3495 error ("non-trivial conversion at assignment");
3496 debug_generic_expr (lhs_type);
3497 debug_generic_expr (rhs1_type);
3498 return true;
7e98624c
RG
3499 }
3500
9f509004 3501 if (handled_component_p (lhs))
3a19701a 3502 res |= verify_types_in_gimple_reference (lhs, true);
9f509004
RG
3503
3504 /* Special codes we cannot handle via their class. */
3505 switch (rhs_code)
7e98624c 3506 {
9f509004
RG
3507 case ADDR_EXPR:
3508 {
3509 tree op = TREE_OPERAND (rhs1, 0);
3510 if (!is_gimple_addressable (op))
3511 {
3512 error ("invalid operand in unary expression");
3513 return true;
3514 }
f5e85907 3515
6b8b9e42
RG
3516 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3517 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3518 TREE_TYPE (op)))
9f509004
RG
3519 {
3520 error ("type mismatch in address expression");
6b8b9e42
RG
3521 debug_generic_stmt (TREE_TYPE (rhs1));
3522 debug_generic_stmt (TREE_TYPE (op));
9f509004
RG
3523 return true;
3524 }
3525
3a19701a 3526 return verify_types_in_gimple_reference (op, true);
9f509004
RG
3527 }
3528
3529 /* tcc_reference */
3530 case COMPONENT_REF:
3531 case BIT_FIELD_REF:
3532 case INDIRECT_REF:
3533 case ALIGN_INDIRECT_REF:
3534 case MISALIGNED_INDIRECT_REF:
3535 case ARRAY_REF:
3536 case ARRAY_RANGE_REF:
3537 case VIEW_CONVERT_EXPR:
3538 case REALPART_EXPR:
3539 case IMAGPART_EXPR:
3540 case TARGET_MEM_REF:
3541 if (!is_gimple_reg (lhs)
3542 && is_gimple_reg_type (TREE_TYPE (lhs)))
f5e85907 3543 {
9f509004
RG
3544 error ("invalid rhs for gimple memory store");
3545 debug_generic_stmt (lhs);
3546 debug_generic_stmt (rhs1);
726a989a
RB
3547 return true;
3548 }
3a19701a 3549 return res || verify_types_in_gimple_reference (rhs1, false);
7e98624c 3550
9f509004
RG
3551 /* tcc_constant */
3552 case SSA_NAME:
3553 case INTEGER_CST:
3554 case REAL_CST:
3555 case FIXED_CST:
3556 case COMPLEX_CST:
3557 case VECTOR_CST:
3558 case STRING_CST:
3559 return res;
3560
3561 /* tcc_declaration */
3562 case CONST_DECL:
3563 return res;
3564 case VAR_DECL:
3565 case PARM_DECL:
3566 if (!is_gimple_reg (lhs)
3567 && !is_gimple_reg (rhs1)
3568 && is_gimple_reg_type (TREE_TYPE (lhs)))
2f9864e6 3569 {
9f509004
RG
3570 error ("invalid rhs for gimple memory store");
3571 debug_generic_stmt (lhs);
3572 debug_generic_stmt (rhs1);
2f9864e6
RG
3573 return true;
3574 }
9f509004 3575 return res;
7e98624c 3576
9f509004
RG
3577 case COND_EXPR:
3578 case CONSTRUCTOR:
3579 case OBJ_TYPE_REF:
3580 case ASSERT_EXPR:
3581 case WITH_SIZE_EXPR:
9f509004
RG
3582 case POLYNOMIAL_CHREC:
3583 case DOT_PROD_EXPR:
3584 case VEC_COND_EXPR:
3585 case REALIGN_LOAD_EXPR:
3586 /* FIXME. */
3587 return res;
7e98624c 3588
726a989a 3589 default:;
7e98624c
RG
3590 }
3591
9f509004 3592 return res;
7e98624c
RG
3593}
3594
9f509004
RG
3595/* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3596 is a problem, otherwise false. */
3597
3598static bool
3599verify_gimple_assign (gimple stmt)
3600{
3601 switch (gimple_assign_rhs_class (stmt))
3602 {
3603 case GIMPLE_SINGLE_RHS:
3604 return verify_gimple_assign_single (stmt);
3605
3606 case GIMPLE_UNARY_RHS:
3607 return verify_gimple_assign_unary (stmt);
3608
3609 case GIMPLE_BINARY_RHS:
3610 return verify_gimple_assign_binary (stmt);
3611
3612 default:
3613 gcc_unreachable ();
3614 }
3615}
726a989a
RB
3616
3617/* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3618 is a problem, otherwise false. */
7e98624c
RG
3619
3620static bool
b59d3976 3621verify_gimple_return (gimple stmt)
7e98624c 3622{
726a989a 3623 tree op = gimple_return_retval (stmt);
b59d3976 3624 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
726a989a 3625
b59d3976
RG
3626 /* We cannot test for present return values as we do not fix up missing
3627 return values from the original source. */
726a989a
RB
3628 if (op == NULL)
3629 return false;
b8698a0f 3630
b59d3976
RG
3631 if (!is_gimple_val (op)
3632 && TREE_CODE (op) != RESULT_DECL)
3633 {
3634 error ("invalid operand in return statement");
3635 debug_generic_stmt (op);
3636 return true;
3637 }
3638
3639 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3640 /* ??? With C++ we can have the situation that the result
3641 decl is a reference type while the return type is an aggregate. */
3642 && !(TREE_CODE (op) == RESULT_DECL
3643 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3644 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3645 {
3646 error ("invalid conversion in return statement");
3647 debug_generic_stmt (restype);
3648 debug_generic_stmt (TREE_TYPE (op));
3649 return true;
3650 }
3651
3652 return false;
726a989a 3653}
7e98624c 3654
7e98624c 3655
b59d3976
RG
3656/* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3657 is a problem, otherwise false. */
3658
3659static bool
3660verify_gimple_goto (gimple stmt)
3661{
3662 tree dest = gimple_goto_dest (stmt);
3663
3664 /* ??? We have two canonical forms of direct goto destinations, a
3665 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3666 if (TREE_CODE (dest) != LABEL_DECL
3667 && (!is_gimple_val (dest)
3668 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3669 {
3670 error ("goto destination is neither a label nor a pointer");
3671 return true;
3672 }
3673
3674 return false;
3675}
3676
726a989a
RB
3677/* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3678 is a problem, otherwise false. */
3679
3680static bool
b59d3976 3681verify_gimple_switch (gimple stmt)
726a989a
RB
3682{
3683 if (!is_gimple_val (gimple_switch_index (stmt)))
7e98624c 3684 {
726a989a 3685 error ("invalid operand to switch statement");
b59d3976 3686 debug_generic_stmt (gimple_switch_index (stmt));
7e98624c
RG
3687 return true;
3688 }
3689
726a989a
RB
3690 return false;
3691}
7e98624c 3692
7e98624c 3693
726a989a
RB
3694/* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3695 and false otherwise. */
7e98624c 3696
726a989a 3697static bool
b59d3976 3698verify_gimple_phi (gimple stmt)
726a989a 3699{
b59d3976
RG
3700 tree type = TREE_TYPE (gimple_phi_result (stmt));
3701 unsigned i;
7e98624c 3702
7648edc4 3703 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
b59d3976
RG
3704 {
3705 error ("Invalid PHI result");
3706 return true;
3707 }
7e98624c 3708
726a989a 3709 for (i = 0; i < gimple_phi_num_args (stmt); i++)
b59d3976
RG
3710 {
3711 tree arg = gimple_phi_arg_def (stmt, i);
9f509004
RG
3712 if ((is_gimple_reg (gimple_phi_result (stmt))
3713 && !is_gimple_val (arg))
3714 || (!is_gimple_reg (gimple_phi_result (stmt))
3715 && !is_gimple_addressable (arg)))
b59d3976
RG
3716 {
3717 error ("Invalid PHI argument");
3718 debug_generic_stmt (arg);
3719 return true;
3720 }
3721 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3722 {
587aa063 3723 error ("Incompatible types in PHI argument %u", i);
b59d3976
RG
3724 debug_generic_stmt (type);
3725 debug_generic_stmt (TREE_TYPE (arg));
3726 return true;
3727 }
3728 }
726a989a 3729
7e98624c
RG
3730 return false;
3731}
3732
726a989a 3733
b5b8b0ac
AO
3734/* Verify a gimple debug statement STMT.
3735 Returns true if anything is wrong. */
3736
3737static bool
3738verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3739{
3740 /* There isn't much that could be wrong in a gimple debug stmt. A
3741 gimple debug bind stmt, for example, maps a tree, that's usually
3742 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3743 component or member of an aggregate type, to another tree, that
3744 can be an arbitrary expression. These stmts expand into debug
3745 insns, and are converted to debug notes by var-tracking.c. */
3746 return false;
3747}
3748
3749
7e98624c
RG
3750/* Verify the GIMPLE statement STMT. Returns true if there is an
3751 error, otherwise false. */
3752
3753static bool
726a989a 3754verify_types_in_gimple_stmt (gimple stmt)
7e98624c 3755{
726a989a 3756 switch (gimple_code (stmt))
7e98624c 3757 {
726a989a 3758 case GIMPLE_ASSIGN:
9f509004 3759 return verify_gimple_assign (stmt);
7e98624c 3760
726a989a
RB
3761 case GIMPLE_LABEL:
3762 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
7e98624c 3763
726a989a 3764 case GIMPLE_CALL:
b59d3976 3765 return verify_gimple_call (stmt);
7e98624c 3766
726a989a 3767 case GIMPLE_COND:
f68a75df
RG
3768 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3769 {
3770 error ("invalid comparison code in gimple cond");
3771 return true;
3772 }
3773 if (!(!gimple_cond_true_label (stmt)
3774 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3775 || !(!gimple_cond_false_label (stmt)
3776 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3777 {
3778 error ("invalid labels in gimple cond");
3779 return true;
3780 }
3781
b59d3976
RG
3782 return verify_gimple_comparison (boolean_type_node,
3783 gimple_cond_lhs (stmt),
3784 gimple_cond_rhs (stmt));
7e98624c 3785
726a989a 3786 case GIMPLE_GOTO:
b59d3976 3787 return verify_gimple_goto (stmt);
7e98624c 3788
726a989a 3789 case GIMPLE_SWITCH:
b59d3976 3790 return verify_gimple_switch (stmt);
7e98624c 3791
726a989a 3792 case GIMPLE_RETURN:
b59d3976 3793 return verify_gimple_return (stmt);
7e98624c 3794
726a989a 3795 case GIMPLE_ASM:
7e98624c
RG
3796 return false;
3797
726a989a 3798 case GIMPLE_PHI:
b59d3976
RG
3799 return verify_gimple_phi (stmt);
3800
3801 /* Tuples that do not have tree operands. */
3802 case GIMPLE_NOP:
b59d3976 3803 case GIMPLE_PREDICT:
1d65f45c
RH
3804 case GIMPLE_RESX:
3805 case GIMPLE_EH_DISPATCH:
40742b42 3806 case GIMPLE_EH_MUST_NOT_THROW:
b59d3976 3807 return false;
726a989a 3808
8b9db065
RH
3809 CASE_GIMPLE_OMP:
3810 /* OpenMP directives are validated by the FE and never operated
3811 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3812 non-gimple expressions when the main index variable has had
3813 its address taken. This does not affect the loop itself
3814 because the header of an GIMPLE_OMP_FOR is merely used to determine
3815 how to setup the parallel iteration. */
3816 return false;
3817
b5b8b0ac
AO
3818 case GIMPLE_DEBUG:
3819 return verify_gimple_debug (stmt);
3820
7e98624c
RG
3821 default:
3822 gcc_unreachable ();
3823 }
3824}
3825
726a989a 3826/* Verify the GIMPLE statements inside the sequence STMTS. */
7e98624c 3827
7dc83ebc 3828static bool
726a989a 3829verify_types_in_gimple_seq_2 (gimple_seq stmts)
7e98624c 3830{
726a989a 3831 gimple_stmt_iterator ittr;
7dc83ebc 3832 bool err = false;
7e98624c 3833
726a989a 3834 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
7e98624c 3835 {
726a989a 3836 gimple stmt = gsi_stmt (ittr);
7e98624c 3837
726a989a
RB
3838 switch (gimple_code (stmt))
3839 {
b59d3976
RG
3840 case GIMPLE_BIND:
3841 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3842 break;
3843
3844 case GIMPLE_TRY:
3845 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3846 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3847 break;
3848
3849 case GIMPLE_EH_FILTER:
3850 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3851 break;
3852
3853 case GIMPLE_CATCH:
3854 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3855 break;
7e98624c
RG
3856
3857 default:
7dc83ebc 3858 {
726a989a 3859 bool err2 = verify_types_in_gimple_stmt (stmt);
7dc83ebc 3860 if (err2)
726a989a 3861 debug_gimple_stmt (stmt);
7dc83ebc
RG
3862 err |= err2;
3863 }
7e98624c
RG
3864 }
3865 }
7dc83ebc
RG
3866
3867 return err;
3868}
3869
3870
3871/* Verify the GIMPLE statements inside the statement list STMTS. */
3872
3873void
726a989a 3874verify_types_in_gimple_seq (gimple_seq stmts)
7dc83ebc 3875{
726a989a 3876 if (verify_types_in_gimple_seq_2 (stmts))
7dc83ebc 3877 internal_error ("verify_gimple failed");
7e98624c
RG
3878}
3879
6de9cd9a
DN
3880
3881/* Verify STMT, return true if STMT is not in GIMPLE form.
3882 TODO: Implement type checking. */
3883
3884static bool
726a989a 3885verify_stmt (gimple_stmt_iterator *gsi)
6de9cd9a
DN
3886{
3887 tree addr;
726a989a
RB
3888 struct walk_stmt_info wi;
3889 bool last_in_block = gsi_one_before_end_p (*gsi);
3890 gimple stmt = gsi_stmt (*gsi);
1d65f45c 3891 int lp_nr;
6de9cd9a 3892
726a989a 3893 if (is_gimple_omp (stmt))
50674e96
DN
3894 {
3895 /* OpenMP directives are validated by the FE and never operated
726a989a 3896 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
50674e96
DN
3897 non-gimple expressions when the main index variable has had
3898 its address taken. This does not affect the loop itself
726a989a 3899 because the header of an GIMPLE_OMP_FOR is merely used to determine
50674e96
DN
3900 how to setup the parallel iteration. */
3901 return false;
3902 }
3903
726a989a
RB
3904 /* FIXME. The C frontend passes unpromoted arguments in case it
3905 didn't see a function declaration before the call. */
3906 if (is_gimple_call (stmt))
6de9cd9a 3907 {
7c9577be 3908 tree decl;
726a989a 3909
7c9577be
RG
3910 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
3911 {
3912 error ("invalid function in call statement");
3913 return true;
3914 }
3915
3916 decl = gimple_call_fndecl (stmt);
3917 if (decl
3918 && TREE_CODE (decl) == FUNCTION_DECL
726a989a
RB
3919 && DECL_LOOPING_CONST_OR_PURE_P (decl)
3920 && (!DECL_PURE_P (decl))
3921 && (!TREE_READONLY (decl)))
3922 {
3923 error ("invalid pure const state for function");
3924 return true;
3925 }
6de9cd9a
DN
3926 }
3927
b5b8b0ac
AO
3928 if (is_gimple_debug (stmt))
3929 return false;
3930
726a989a
RB
3931 memset (&wi, 0, sizeof (wi));
3932 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
6de9cd9a
DN
3933 if (addr)
3934 {
726a989a 3935 debug_generic_expr (addr);
c2255bc4 3936 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
726a989a 3937 debug_gimple_stmt (stmt);
6de9cd9a
DN
3938 return true;
3939 }
3940
1eaba2f2
RH
3941 /* If the statement is marked as part of an EH region, then it is
3942 expected that the statement could throw. Verify that when we
3943 have optimizations that simplify statements such that we prove
3944 that they cannot throw, that we update other data structures
3945 to match. */
1d65f45c
RH
3946 lp_nr = lookup_stmt_eh_lp (stmt);
3947 if (lp_nr != 0)
1eaba2f2 3948 {
1d65f45c 3949 if (!stmt_could_throw_p (stmt))
1eaba2f2 3950 {
1d65f45c
RH
3951 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
3952 and they are updated on statements only after fixup_cfg
3953 is executed at beggining of expansion stage. */
3954 if (cgraph_state != CGRAPH_STATE_IPA_SSA)
3955 {
3956 error ("statement marked for throw, but doesn%'t");
3957 goto fail;
3958 }
1eaba2f2 3959 }
1d65f45c 3960 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
1eaba2f2 3961 {
ab532386 3962 error ("statement marked for throw in middle of block");
1eaba2f2
RH
3963 goto fail;
3964 }
3965 }
3966
6de9cd9a 3967 return false;
1eaba2f2
RH
3968
3969 fail:
726a989a 3970 debug_gimple_stmt (stmt);
1eaba2f2 3971 return true;
6de9cd9a
DN
3972}
3973
3974
3975/* Return true when the T can be shared. */
3976
d7f09764 3977bool
6de9cd9a
DN
3978tree_node_can_be_shared (tree t)
3979{
6615c446 3980 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 3981 || is_gimple_min_invariant (t)
5e23162d 3982 || TREE_CODE (t) == SSA_NAME
953ff289
DN
3983 || t == error_mark_node
3984 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
3985 return true;
3986
92b6dff3
JL
3987 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3988 return true;
3989
44de5aeb 3990 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
3991 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3992 || TREE_CODE (t) == COMPONENT_REF
3993 || TREE_CODE (t) == REALPART_EXPR
3994 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
3995 t = TREE_OPERAND (t, 0);
3996
3997 if (DECL_P (t))
3998 return true;
3999
4000 return false;
4001}
4002
4003
726a989a 4004/* Called via walk_gimple_stmt. Verify tree sharing. */
6de9cd9a
DN
4005
4006static tree
726a989a 4007verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 4008{
726a989a
RB
4009 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4010 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
6de9cd9a
DN
4011
4012 if (tree_node_can_be_shared (*tp))
4013 {
4014 *walk_subtrees = false;
4015 return NULL;
4016 }
4017
4437b50d
JH
4018 if (pointer_set_insert (visited, *tp))
4019 return *tp;
6de9cd9a
DN
4020
4021 return NULL;
4022}
4023
4024
4437b50d
JH
4025static bool eh_error_found;
4026static int
4027verify_eh_throw_stmt_node (void **slot, void *data)
4028{
4029 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4030 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4031
4032 if (!pointer_set_contains (visited, node->stmt))
4033 {
4034 error ("Dead STMT in EH table");
726a989a 4035 debug_gimple_stmt (node->stmt);
4437b50d
JH
4036 eh_error_found = true;
4037 }
c13edb67 4038 return 1;
4437b50d
JH
4039}
4040
726a989a
RB
4041
4042/* Verify the GIMPLE statements in every basic block. */
6de9cd9a
DN
4043
4044void
4045verify_stmts (void)
4046{
4047 basic_block bb;
726a989a 4048 gimple_stmt_iterator gsi;
6de9cd9a 4049 bool err = false;
4437b50d 4050 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a 4051 tree addr;
726a989a 4052 struct walk_stmt_info wi;
6de9cd9a
DN
4053
4054 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4055 visited = pointer_set_create ();
4056 visited_stmts = pointer_set_create ();
6de9cd9a 4057
726a989a
RB
4058 memset (&wi, 0, sizeof (wi));
4059 wi.info = (void *) visited;
4060
6de9cd9a
DN
4061 FOR_EACH_BB (bb)
4062 {
726a989a
RB
4063 gimple phi;
4064 size_t i;
6de9cd9a 4065
726a989a 4066 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4067 {
726a989a 4068 phi = gsi_stmt (gsi);
4437b50d 4069 pointer_set_insert (visited_stmts, phi);
726a989a 4070 if (gimple_bb (phi) != bb)
8de1fc1b 4071 {
726a989a 4072 error ("gimple_bb (phi) is set to a wrong basic block");
8de1fc1b
KH
4073 err |= true;
4074 }
4075
726a989a 4076 for (i = 0; i < gimple_phi_num_args (phi); i++)
6de9cd9a 4077 {
726a989a 4078 tree t = gimple_phi_arg_def (phi, i);
6de9cd9a
DN
4079 tree addr;
4080
e9705dc5
AO
4081 if (!t)
4082 {
4083 error ("missing PHI def");
726a989a 4084 debug_gimple_stmt (phi);
e9705dc5
AO
4085 err |= true;
4086 continue;
4087 }
6de9cd9a
DN
4088 /* Addressable variables do have SSA_NAMEs but they
4089 are not considered gimple values. */
e9705dc5
AO
4090 else if (TREE_CODE (t) != SSA_NAME
4091 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4092 && !is_gimple_min_invariant (t))
6de9cd9a 4093 {
726a989a
RB
4094 error ("PHI argument is not a GIMPLE value");
4095 debug_gimple_stmt (phi);
4096 debug_generic_expr (t);
6de9cd9a
DN
4097 err |= true;
4098 }
4099
4437b50d 4100 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4101 if (addr)
4102 {
ab532386 4103 error ("incorrect sharing of tree nodes");
726a989a
RB
4104 debug_gimple_stmt (phi);
4105 debug_generic_expr (addr);
6de9cd9a
DN
4106 err |= true;
4107 }
4108 }
211ca15c
RG
4109
4110#ifdef ENABLE_TYPES_CHECKING
4111 if (verify_gimple_phi (phi))
4112 {
4113 debug_gimple_stmt (phi);
4114 err |= true;
4115 }
4116#endif
6de9cd9a
DN
4117 }
4118
726a989a 4119 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
6de9cd9a 4120 {
726a989a
RB
4121 gimple stmt = gsi_stmt (gsi);
4122
4123 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4124 || gimple_code (stmt) == GIMPLE_BIND)
4125 {
4126 error ("invalid GIMPLE statement");
4127 debug_gimple_stmt (stmt);
4128 err |= true;
4129 }
8de1fc1b 4130
4437b50d 4131 pointer_set_insert (visited_stmts, stmt);
07beea0d 4132
726a989a 4133 if (gimple_bb (stmt) != bb)
8de1fc1b 4134 {
726a989a 4135 error ("gimple_bb (stmt) is set to a wrong basic block");
2cd713a0 4136 debug_gimple_stmt (stmt);
8de1fc1b
KH
4137 err |= true;
4138 }
4139
726a989a
RB
4140 if (gimple_code (stmt) == GIMPLE_LABEL)
4141 {
4142 tree decl = gimple_label_label (stmt);
4143 int uid = LABEL_DECL_UID (decl);
4144
4145 if (uid == -1
4146 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4147 {
1d65f45c 4148 error ("incorrect entry in label_to_block_map");
726a989a
RB
4149 err |= true;
4150 }
1d65f45c
RH
4151
4152 uid = EH_LANDING_PAD_NR (decl);
4153 if (uid)
4154 {
4155 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4156 if (decl != lp->post_landing_pad)
4157 {
4158 error ("incorrect setting of landing pad number");
4159 err |= true;
4160 }
4161 }
726a989a
RB
4162 }
4163
4164 err |= verify_stmt (&gsi);
211ca15c
RG
4165
4166#ifdef ENABLE_TYPES_CHECKING
4167 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4168 {
4169 debug_gimple_stmt (stmt);
4170 err |= true;
4171 }
4172#endif
726a989a 4173 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
6de9cd9a
DN
4174 if (addr)
4175 {
ab532386 4176 error ("incorrect sharing of tree nodes");
726a989a
RB
4177 debug_gimple_stmt (stmt);
4178 debug_generic_expr (addr);
6de9cd9a
DN
4179 err |= true;
4180 }
726a989a 4181 gsi_next (&gsi);
6de9cd9a
DN
4182 }
4183 }
726a989a 4184
4437b50d
JH
4185 eh_error_found = false;
4186 if (get_eh_throw_stmt_table (cfun))
4187 htab_traverse (get_eh_throw_stmt_table (cfun),
4188 verify_eh_throw_stmt_node,
4189 visited_stmts);
6de9cd9a 4190
4437b50d 4191 if (err | eh_error_found)
ab532386 4192 internal_error ("verify_stmts failed");
6de9cd9a 4193
4437b50d
JH
4194 pointer_set_destroy (visited);
4195 pointer_set_destroy (visited_stmts);
6946b3f7 4196 verify_histograms ();
6de9cd9a
DN
4197 timevar_pop (TV_TREE_STMT_VERIFY);
4198}
4199
4200
4201/* Verifies that the flow information is OK. */
4202
4203static int
726a989a 4204gimple_verify_flow_info (void)
6de9cd9a
DN
4205{
4206 int err = 0;
4207 basic_block bb;
726a989a
RB
4208 gimple_stmt_iterator gsi;
4209 gimple stmt;
6de9cd9a 4210 edge e;
628f6a4e 4211 edge_iterator ei;
6de9cd9a 4212
726a989a 4213 if (ENTRY_BLOCK_PTR->il.gimple)
6de9cd9a 4214 {
7506e1cb 4215 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4216 err = 1;
4217 }
4218
726a989a 4219 if (EXIT_BLOCK_PTR->il.gimple)
6de9cd9a 4220 {
7506e1cb 4221 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4222 err = 1;
4223 }
4224
628f6a4e 4225 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4226 if (e->flags & EDGE_FALLTHRU)
4227 {
ab532386 4228 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4229 err = 1;
4230 }
4231
4232 FOR_EACH_BB (bb)
4233 {
4234 bool found_ctrl_stmt = false;
4235
726a989a 4236 stmt = NULL;
548414c6 4237
6de9cd9a 4238 /* Skip labels on the start of basic block. */
726a989a 4239 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4240 {
726a989a
RB
4241 tree label;
4242 gimple prev_stmt = stmt;
548414c6 4243
726a989a 4244 stmt = gsi_stmt (gsi);
548414c6 4245
726a989a 4246 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a
DN
4247 break;
4248
726a989a
RB
4249 label = gimple_label_label (stmt);
4250 if (prev_stmt && DECL_NONLOCAL (label))
548414c6 4251 {
953ff289 4252 error ("nonlocal label ");
726a989a 4253 print_generic_expr (stderr, label, 0);
953ff289
DN
4254 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4255 bb->index);
548414c6
KH
4256 err = 1;
4257 }
4258
1197e789
RG
4259 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4260 {
4261 error ("EH landing pad label ");
4262 print_generic_expr (stderr, label, 0);
4263 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4264 bb->index);
4265 err = 1;
4266 }
4267
726a989a 4268 if (label_to_block (label) != bb)
6de9cd9a 4269 {
953ff289 4270 error ("label ");
726a989a 4271 print_generic_expr (stderr, label, 0);
953ff289
DN
4272 fprintf (stderr, " to block does not match in bb %d",
4273 bb->index);
6de9cd9a
DN
4274 err = 1;
4275 }
4276
726a989a 4277 if (decl_function_context (label) != current_function_decl)
6de9cd9a 4278 {
953ff289 4279 error ("label ");
726a989a 4280 print_generic_expr (stderr, label, 0);
953ff289
DN
4281 fprintf (stderr, " has incorrect context in bb %d",
4282 bb->index);
6de9cd9a
DN
4283 err = 1;
4284 }
4285 }
4286
4287 /* Verify that body of basic block BB is free of control flow. */
726a989a 4288 for (; !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4289 {
726a989a 4290 gimple stmt = gsi_stmt (gsi);
6de9cd9a
DN
4291
4292 if (found_ctrl_stmt)
4293 {
ab532386 4294 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4295 bb->index);
4296 err = 1;
4297 }
4298
4299 if (stmt_ends_bb_p (stmt))
4300 found_ctrl_stmt = true;
4301
726a989a 4302 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a 4303 {
953ff289 4304 error ("label ");
726a989a 4305 print_generic_expr (stderr, gimple_label_label (stmt), 0);
953ff289 4306 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4307 err = 1;
4308 }
4309 }
953ff289 4310
726a989a
RB
4311 gsi = gsi_last_bb (bb);
4312 if (gsi_end_p (gsi))
6de9cd9a
DN
4313 continue;
4314
726a989a 4315 stmt = gsi_stmt (gsi);
6de9cd9a 4316
1d65f45c
RH
4317 if (gimple_code (stmt) == GIMPLE_LABEL)
4318 continue;
4319
cc7220fd
JH
4320 err |= verify_eh_edges (stmt);
4321
6de9cd9a
DN
4322 if (is_ctrl_stmt (stmt))
4323 {
628f6a4e 4324 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4325 if (e->flags & EDGE_FALLTHRU)
4326 {
ab532386 4327 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4328 bb->index);
4329 err = 1;
4330 }
4331 }
4332
726a989a 4333 if (gimple_code (stmt) != GIMPLE_COND)
36b24193
ZD
4334 {
4335 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4336 after anything else but if statement. */
4337 FOR_EACH_EDGE (e, ei, bb->succs)
4338 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4339 {
726a989a 4340 error ("true/false edge after a non-GIMPLE_COND in bb %d",
36b24193
ZD
4341 bb->index);
4342 err = 1;
4343 }
4344 }
4345
726a989a 4346 switch (gimple_code (stmt))
6de9cd9a 4347 {
726a989a 4348 case GIMPLE_COND:
6de9cd9a
DN
4349 {
4350 edge true_edge;
4351 edge false_edge;
b8698a0f 4352
6de9cd9a
DN
4353 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4354
726a989a
RB
4355 if (!true_edge
4356 || !false_edge
6de9cd9a
DN
4357 || !(true_edge->flags & EDGE_TRUE_VALUE)
4358 || !(false_edge->flags & EDGE_FALSE_VALUE)
4359 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4360 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4361 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4362 {
ab532386 4363 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4364 bb->index);
4365 err = 1;
4366 }
6de9cd9a
DN
4367 }
4368 break;
4369
726a989a 4370 case GIMPLE_GOTO:
6de9cd9a
DN
4371 if (simple_goto_p (stmt))
4372 {
ab532386 4373 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4374 err = 1;
6de9cd9a
DN
4375 }
4376 else
4377 {
6531d1be 4378 /* FIXME. We should double check that the labels in the
6de9cd9a 4379 destination blocks have their address taken. */
628f6a4e 4380 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4381 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4382 | EDGE_FALSE_VALUE))
4383 || !(e->flags & EDGE_ABNORMAL))
4384 {
ab532386 4385 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4386 bb->index);
4387 err = 1;
4388 }
4389 }
4390 break;
4391
726a989a 4392 case GIMPLE_RETURN:
c5cbcccf
ZD
4393 if (!single_succ_p (bb)
4394 || (single_succ_edge (bb)->flags
4395 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4396 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4397 {
ab532386 4398 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4399 err = 1;
4400 }
c5cbcccf 4401 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4402 {
ab532386 4403 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4404 bb->index);
4405 err = 1;
4406 }
4407 break;
4408
726a989a 4409 case GIMPLE_SWITCH:
6de9cd9a 4410 {
7853504d 4411 tree prev;
6de9cd9a
DN
4412 edge e;
4413 size_t i, n;
6de9cd9a 4414
726a989a 4415 n = gimple_switch_num_labels (stmt);
6de9cd9a
DN
4416
4417 /* Mark all the destination basic blocks. */
4418 for (i = 0; i < n; ++i)
4419 {
726a989a 4420 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a 4421 basic_block label_bb = label_to_block (lab);
1e128c5f 4422 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4423 label_bb->aux = (void *)1;
4424 }
4425
7853504d 4426 /* Verify that the case labels are sorted. */
726a989a 4427 prev = gimple_switch_label (stmt, 0);
b7814a18 4428 for (i = 1; i < n; ++i)
7853504d 4429 {
726a989a
RB
4430 tree c = gimple_switch_label (stmt, i);
4431 if (!CASE_LOW (c))
7853504d 4432 {
726a989a
RB
4433 error ("found default case not at the start of "
4434 "case vector");
4435 err = 1;
7853504d
SB
4436 continue;
4437 }
726a989a
RB
4438 if (CASE_LOW (prev)
4439 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
7853504d 4440 {
953ff289 4441 error ("case labels not sorted: ");
7853504d
SB
4442 print_generic_expr (stderr, prev, 0);
4443 fprintf (stderr," is greater than ");
4444 print_generic_expr (stderr, c, 0);
4445 fprintf (stderr," but comes before it.\n");
4446 err = 1;
4447 }
4448 prev = c;
4449 }
b7814a18
RG
4450 /* VRP will remove the default case if it can prove it will
4451 never be executed. So do not verify there always exists
4452 a default case here. */
7853504d 4453
628f6a4e 4454 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4455 {
4456 if (!e->dest->aux)
4457 {
ab532386 4458 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4459 bb->index, e->dest->index);
4460 err = 1;
4461 }
726a989a 4462
6de9cd9a
DN
4463 e->dest->aux = (void *)2;
4464 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4465 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4466 {
ab532386 4467 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4468 bb->index);
4469 err = 1;
4470 }
4471 }
4472
4473 /* Check that we have all of them. */
4474 for (i = 0; i < n; ++i)
4475 {
726a989a 4476 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
6de9cd9a
DN
4477 basic_block label_bb = label_to_block (lab);
4478
4479 if (label_bb->aux != (void *)2)
4480 {
726a989a 4481 error ("missing edge %i->%i", bb->index, label_bb->index);
6de9cd9a
DN
4482 err = 1;
4483 }
4484 }
4485
628f6a4e 4486 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4487 e->dest->aux = (void *)0;
4488 }
1d65f45c
RH
4489 break;
4490
4491 case GIMPLE_EH_DISPATCH:
4492 err |= verify_eh_dispatch_edge (stmt);
4493 break;
6de9cd9a 4494
1d65f45c
RH
4495 default:
4496 break;
6de9cd9a
DN
4497 }
4498 }
4499
2b28c07a 4500 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4501 verify_dominators (CDI_DOMINATORS);
4502
4503 return err;
4504}
4505
4506
f0b698c1 4507/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4508 by edge FALLTHRU. */
4509
4510static void
726a989a 4511gimple_make_forwarder_block (edge fallthru)
6de9cd9a
DN
4512{
4513 edge e;
628f6a4e 4514 edge_iterator ei;
6de9cd9a 4515 basic_block dummy, bb;
726a989a
RB
4516 tree var;
4517 gimple_stmt_iterator gsi;
6de9cd9a
DN
4518
4519 dummy = fallthru->src;
4520 bb = fallthru->dest;
4521
c5cbcccf 4522 if (single_pred_p (bb))
6de9cd9a
DN
4523 return;
4524
cfaab3a9 4525 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4526 start of BB. */
726a989a 4527 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4528 {
726a989a 4529 gimple phi, new_phi;
b8698a0f 4530
726a989a
RB
4531 phi = gsi_stmt (gsi);
4532 var = gimple_phi_result (phi);
6de9cd9a
DN
4533 new_phi = create_phi_node (var, bb);
4534 SSA_NAME_DEF_STMT (var) = new_phi;
726a989a 4535 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
b8698a0f 4536 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
f5045c96 4537 UNKNOWN_LOCATION);
6de9cd9a
DN
4538 }
4539
6de9cd9a 4540 /* Add the arguments we have stored on edges. */
628f6a4e 4541 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4542 {
4543 if (e == fallthru)
4544 continue;
4545
71882046 4546 flush_pending_stmts (e);
6de9cd9a
DN
4547 }
4548}
4549
4550
6de9cd9a
DN
4551/* Return a non-special label in the head of basic block BLOCK.
4552 Create one if it doesn't exist. */
4553
d7621d3c 4554tree
726a989a 4555gimple_block_label (basic_block bb)
6de9cd9a 4556{
726a989a 4557 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6de9cd9a 4558 bool first = true;
726a989a
RB
4559 tree label;
4560 gimple stmt;
6de9cd9a 4561
726a989a 4562 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6de9cd9a 4563 {
726a989a
RB
4564 stmt = gsi_stmt (i);
4565 if (gimple_code (stmt) != GIMPLE_LABEL)
6de9cd9a 4566 break;
726a989a 4567 label = gimple_label_label (stmt);
6de9cd9a
DN
4568 if (!DECL_NONLOCAL (label))
4569 {
4570 if (!first)
726a989a 4571 gsi_move_before (&i, &s);
6de9cd9a
DN
4572 return label;
4573 }
4574 }
4575
c2255bc4 4576 label = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
4577 stmt = gimple_build_label (label);
4578 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6de9cd9a
DN
4579 return label;
4580}
4581
4582
4583/* Attempt to perform edge redirection by replacing a possibly complex
4584 jump instruction by a goto or by removing the jump completely.
4585 This can apply only if all edges now point to the same block. The
4586 parameters and return values are equivalent to
4587 redirect_edge_and_branch. */
4588
4589static edge
726a989a 4590gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6de9cd9a
DN
4591{
4592 basic_block src = e->src;
726a989a
RB
4593 gimple_stmt_iterator i;
4594 gimple stmt;
6de9cd9a 4595
07b43a87
KH
4596 /* We can replace or remove a complex jump only when we have exactly
4597 two edges. */
4598 if (EDGE_COUNT (src->succs) != 2
4599 /* Verify that all targets will be TARGET. Specifically, the
4600 edge that is not E must also go to TARGET. */
4601 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4602 return NULL;
4603
726a989a
RB
4604 i = gsi_last_bb (src);
4605 if (gsi_end_p (i))
6de9cd9a 4606 return NULL;
6de9cd9a 4607
726a989a
RB
4608 stmt = gsi_stmt (i);
4609
4610 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6de9cd9a 4611 {
726a989a 4612 gsi_remove (&i, true);
6de9cd9a
DN
4613 e = ssa_redirect_edge (e, target);
4614 e->flags = EDGE_FALLTHRU;
4615 return e;
4616 }
4617
4618 return NULL;
4619}
4620
4621
4622/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4623 edge representing the redirected branch. */
4624
4625static edge
726a989a 4626gimple_redirect_edge_and_branch (edge e, basic_block dest)
6de9cd9a
DN
4627{
4628 basic_block bb = e->src;
726a989a 4629 gimple_stmt_iterator gsi;
6de9cd9a 4630 edge ret;
726a989a 4631 gimple stmt;
6de9cd9a 4632
4f6c2131 4633 if (e->flags & EDGE_ABNORMAL)
6de9cd9a
DN
4634 return NULL;
4635
6de9cd9a
DN
4636 if (e->dest == dest)
4637 return NULL;
4638
a3710436
JH
4639 if (e->flags & EDGE_EH)
4640 return redirect_eh_edge (e, dest);
4641
1c061be5
RH
4642 if (e->src != ENTRY_BLOCK_PTR)
4643 {
4644 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4645 if (ret)
4646 return ret;
4647 }
4648
726a989a
RB
4649 gsi = gsi_last_bb (bb);
4650 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6de9cd9a 4651
d130ae11 4652 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6de9cd9a 4653 {
726a989a 4654 case GIMPLE_COND:
a9b77cd1 4655 /* For COND_EXPR, we only need to redirect the edge. */
6de9cd9a
DN
4656 break;
4657
726a989a 4658 case GIMPLE_GOTO:
6de9cd9a
DN
4659 /* No non-abnormal edges should lead from a non-simple goto, and
4660 simple ones should be represented implicitly. */
1e128c5f 4661 gcc_unreachable ();
6de9cd9a 4662
726a989a 4663 case GIMPLE_SWITCH:
6de9cd9a 4664 {
726a989a 4665 tree label = gimple_block_label (dest);
d6be0d7f 4666 tree cases = get_cases_for_edge (e, stmt);
6de9cd9a 4667
d6be0d7f
JL
4668 /* If we have a list of cases associated with E, then use it
4669 as it's a lot faster than walking the entire case vector. */
4670 if (cases)
6de9cd9a 4671 {
4edbbd3f 4672 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4673 tree last, first;
4674
4675 first = cases;
4676 while (cases)
4677 {
4678 last = cases;
4679 CASE_LABEL (cases) = label;
4680 cases = TREE_CHAIN (cases);
4681 }
4682
4683 /* If there was already an edge in the CFG, then we need
4684 to move all the cases associated with E to E2. */
4685 if (e2)
4686 {
4687 tree cases2 = get_cases_for_edge (e2, stmt);
4688
4689 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4690 TREE_CHAIN (cases2) = first;
4691 }
6de9cd9a 4692 }
92b6dff3
JL
4693 else
4694 {
726a989a 4695 size_t i, n = gimple_switch_num_labels (stmt);
d6be0d7f
JL
4696
4697 for (i = 0; i < n; i++)
4698 {
726a989a 4699 tree elt = gimple_switch_label (stmt, i);
d6be0d7f
JL
4700 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4701 CASE_LABEL (elt) = label;
4702 }
92b6dff3 4703 }
1c384bf1
RH
4704 }
4705 break;
d6be0d7f 4706
1c384bf1
RH
4707 case GIMPLE_ASM:
4708 {
4709 int i, n = gimple_asm_nlabels (stmt);
f8981d1c 4710 tree label = NULL;
1c384bf1
RH
4711
4712 for (i = 0; i < n; ++i)
4713 {
4714 tree cons = gimple_asm_label_op (stmt, i);
4715 if (label_to_block (TREE_VALUE (cons)) == e->dest)
f8981d1c
AO
4716 {
4717 if (!label)
4718 label = gimple_block_label (dest);
4719 TREE_VALUE (cons) = label;
4720 }
1c384bf1 4721 }
f8981d1c
AO
4722
4723 /* If we didn't find any label matching the former edge in the
4724 asm labels, we must be redirecting the fallthrough
4725 edge. */
4726 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6de9cd9a 4727 }
1c384bf1 4728 break;
6de9cd9a 4729
726a989a
RB
4730 case GIMPLE_RETURN:
4731 gsi_remove (&gsi, true);
6de9cd9a
DN
4732 e->flags |= EDGE_FALLTHRU;
4733 break;
4734
726a989a
RB
4735 case GIMPLE_OMP_RETURN:
4736 case GIMPLE_OMP_CONTINUE:
4737 case GIMPLE_OMP_SECTIONS_SWITCH:
4738 case GIMPLE_OMP_FOR:
e5c95afe
ZD
4739 /* The edges from OMP constructs can be simply redirected. */
4740 break;
4741
1d65f45c
RH
4742 case GIMPLE_EH_DISPATCH:
4743 if (!(e->flags & EDGE_FALLTHRU))
4744 redirect_eh_dispatch_edge (stmt, e, dest);
4745 break;
4746
6de9cd9a
DN
4747 default:
4748 /* Otherwise it must be a fallthru edge, and we don't need to
4749 do anything besides redirecting it. */
1e128c5f 4750 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
4751 break;
4752 }
4753
4754 /* Update/insert PHI nodes as necessary. */
4755
4756 /* Now update the edges in the CFG. */
4757 e = ssa_redirect_edge (e, dest);
4758
4759 return e;
4760}
4761
14fa2cc0
ZD
4762/* Returns true if it is possible to remove edge E by redirecting
4763 it to the destination of the other edge from E->src. */
4764
4765static bool
726a989a 4766gimple_can_remove_branch_p (const_edge e)
14fa2cc0 4767{
496a4ef5 4768 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
14fa2cc0
ZD
4769 return false;
4770
4771 return true;
4772}
6de9cd9a
DN
4773
4774/* Simple wrapper, as we can always redirect fallthru edges. */
4775
4776static basic_block
726a989a 4777gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6de9cd9a 4778{
726a989a 4779 e = gimple_redirect_edge_and_branch (e, dest);
1e128c5f 4780 gcc_assert (e);
6de9cd9a
DN
4781
4782 return NULL;
4783}
4784
4785
4786/* Splits basic block BB after statement STMT (but at least after the
4787 labels). If STMT is NULL, BB is split just after the labels. */
4788
4789static basic_block
726a989a 4790gimple_split_block (basic_block bb, void *stmt)
6de9cd9a 4791{
726a989a
RB
4792 gimple_stmt_iterator gsi;
4793 gimple_stmt_iterator gsi_tgt;
4794 gimple act;
4795 gimple_seq list;
6de9cd9a
DN
4796 basic_block new_bb;
4797 edge e;
628f6a4e 4798 edge_iterator ei;
6de9cd9a
DN
4799
4800 new_bb = create_empty_bb (bb);
4801
4802 /* Redirect the outgoing edges. */
628f6a4e
BE
4803 new_bb->succs = bb->succs;
4804 bb->succs = NULL;
4805 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
4806 e->src = new_bb;
4807
726a989a 4808 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
6de9cd9a
DN
4809 stmt = NULL;
4810
726a989a
RB
4811 /* Move everything from GSI to the new basic block. */
4812 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4813 {
726a989a
RB
4814 act = gsi_stmt (gsi);
4815 if (gimple_code (act) == GIMPLE_LABEL)
6de9cd9a
DN
4816 continue;
4817
4818 if (!stmt)
4819 break;
4820
4821 if (stmt == act)
4822 {
726a989a 4823 gsi_next (&gsi);
6de9cd9a
DN
4824 break;
4825 }
4826 }
4827
726a989a 4828 if (gsi_end_p (gsi))
597ae074
JH
4829 return new_bb;
4830
4831 /* Split the statement list - avoid re-creating new containers as this
b8698a0f 4832 brings ugly quadratic memory consumption in the inliner.
597ae074
JH
4833 (We are still quadratic since we need to update stmt BB pointers,
4834 sadly.) */
726a989a
RB
4835 list = gsi_split_seq_before (&gsi);
4836 set_bb_seq (new_bb, list);
4837 for (gsi_tgt = gsi_start (list);
4838 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4839 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6de9cd9a
DN
4840
4841 return new_bb;
4842}
4843
4844
4845/* Moves basic block BB after block AFTER. */
4846
4847static bool
726a989a 4848gimple_move_block_after (basic_block bb, basic_block after)
6de9cd9a
DN
4849{
4850 if (bb->prev_bb == after)
4851 return true;
4852
4853 unlink_block (bb);
4854 link_block (bb, after);
4855
4856 return true;
4857}
4858
4859
4860/* Return true if basic_block can be duplicated. */
4861
4862static bool
ca89096d 4863gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6de9cd9a
DN
4864{
4865 return true;
4866}
4867
6de9cd9a
DN
4868/* Create a duplicate of the basic block BB. NOTE: This does not
4869 preserve SSA form. */
4870
4871static basic_block
726a989a 4872gimple_duplicate_bb (basic_block bb)
6de9cd9a
DN
4873{
4874 basic_block new_bb;
726a989a
RB
4875 gimple_stmt_iterator gsi, gsi_tgt;
4876 gimple_seq phis = phi_nodes (bb);
4877 gimple phi, stmt, copy;
6de9cd9a
DN
4878
4879 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 4880
84d65814
DN
4881 /* Copy the PHI nodes. We ignore PHI node arguments here because
4882 the incoming edges have not been setup yet. */
726a989a 4883 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
b0382c67 4884 {
726a989a
RB
4885 phi = gsi_stmt (gsi);
4886 copy = create_phi_node (gimple_phi_result (phi), new_bb);
4887 create_new_def_for (gimple_phi_result (copy), copy,
4888 gimple_phi_result_ptr (copy));
b0382c67 4889 }
84d65814 4890
726a989a
RB
4891 gsi_tgt = gsi_start_bb (new_bb);
4892 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4893 {
84d65814
DN
4894 def_operand_p def_p;
4895 ssa_op_iter op_iter;
6de9cd9a 4896
726a989a
RB
4897 stmt = gsi_stmt (gsi);
4898 if (gimple_code (stmt) == GIMPLE_LABEL)
6de9cd9a
DN
4899 continue;
4900
84d65814
DN
4901 /* Create a new copy of STMT and duplicate STMT's virtual
4902 operands. */
726a989a
RB
4903 copy = gimple_copy (stmt);
4904 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
1d65f45c
RH
4905
4906 maybe_duplicate_eh_stmt (copy, stmt);
6946b3f7 4907 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
84d65814
DN
4908
4909 /* Create new names for all the definitions created by COPY and
4910 add replacement mappings for each new name. */
4911 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4912 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6de9cd9a
DN
4913 }
4914
4915 return new_bb;
4916}
4917
5f40b3cb
ZD
4918/* Adds phi node arguments for edge E_COPY after basic block duplication. */
4919
4920static void
4921add_phi_args_after_copy_edge (edge e_copy)
4922{
4923 basic_block bb, bb_copy = e_copy->src, dest;
4924 edge e;
4925 edge_iterator ei;
726a989a
RB
4926 gimple phi, phi_copy;
4927 tree def;
4928 gimple_stmt_iterator psi, psi_copy;
5f40b3cb 4929
726a989a 4930 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5f40b3cb
ZD
4931 return;
4932
4933 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
4934
4935 if (e_copy->dest->flags & BB_DUPLICATED)
4936 dest = get_bb_original (e_copy->dest);
4937 else
4938 dest = e_copy->dest;
4939
4940 e = find_edge (bb, dest);
4941 if (!e)
4942 {
4943 /* During loop unrolling the target of the latch edge is copied.
4944 In this case we are not looking for edge to dest, but to
4945 duplicated block whose original was dest. */
4946 FOR_EACH_EDGE (e, ei, bb->succs)
4947 {
4948 if ((e->dest->flags & BB_DUPLICATED)
4949 && get_bb_original (e->dest) == dest)
4950 break;
4951 }
4952
4953 gcc_assert (e != NULL);
4954 }
4955
726a989a
RB
4956 for (psi = gsi_start_phis (e->dest),
4957 psi_copy = gsi_start_phis (e_copy->dest);
4958 !gsi_end_p (psi);
4959 gsi_next (&psi), gsi_next (&psi_copy))
5f40b3cb 4960 {
726a989a
RB
4961 phi = gsi_stmt (psi);
4962 phi_copy = gsi_stmt (psi_copy);
5f40b3cb 4963 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
b8698a0f 4964 add_phi_arg (phi_copy, def, e_copy,
f5045c96 4965 gimple_phi_arg_location_from_edge (phi, e));
5f40b3cb
ZD
4966 }
4967}
4968
84d65814 4969
42759f1e
ZD
4970/* Basic block BB_COPY was created by code duplication. Add phi node
4971 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 4972 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
4973
4974void
4975add_phi_args_after_copy_bb (basic_block bb_copy)
4976{
5f40b3cb 4977 edge e_copy;
726a989a 4978 edge_iterator ei;
42759f1e 4979
628f6a4e 4980 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 4981 {
5f40b3cb 4982 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
4983 }
4984}
4985
4986/* Blocks in REGION_COPY array of length N_REGION were created by
4987 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
4988 going from these blocks. If E_COPY is not NULL, also add
4989 phi node arguments for its destination.*/
42759f1e
ZD
4990
4991void
5f40b3cb
ZD
4992add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
4993 edge e_copy)
42759f1e
ZD
4994{
4995 unsigned i;
4996
4997 for (i = 0; i < n_region; i++)
6580ee77 4998 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
4999
5000 for (i = 0; i < n_region; i++)
5001 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5002 if (e_copy)
5003 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5004
5005 for (i = 0; i < n_region; i++)
6580ee77 5006 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5007}
5008
42759f1e
ZD
5009/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5010 important exit edge EXIT. By important we mean that no SSA name defined
5011 inside region is live over the other exit edges of the region. All entry
5012 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5013 to the duplicate of the region. SSA form, dominance and loop information
5014 is updated. The new basic blocks are stored to REGION_COPY in the same
5015 order as they had in REGION, provided that REGION_COPY is not NULL.
5016 The function returns false if it is unable to copy the region,
5017 true otherwise. */
5018
5019bool
726a989a 5020gimple_duplicate_sese_region (edge entry, edge exit,
42759f1e
ZD
5021 basic_block *region, unsigned n_region,
5022 basic_block *region_copy)
5023{
66f97d31 5024 unsigned i;
42759f1e
ZD
5025 bool free_region_copy = false, copying_header = false;
5026 struct loop *loop = entry->dest->loop_father;
5027 edge exit_copy;
66f97d31 5028 VEC (basic_block, heap) *doms;
42759f1e 5029 edge redirected;
09bac500
JH
5030 int total_freq = 0, entry_freq = 0;
5031 gcov_type total_count = 0, entry_count = 0;
42759f1e
ZD
5032
5033 if (!can_copy_bbs_p (region, n_region))
5034 return false;
5035
5036 /* Some sanity checking. Note that we do not check for all possible
5037 missuses of the functions. I.e. if you ask to copy something weird,
5038 it will work, but the state of structures probably will not be
5039 correct. */
42759f1e
ZD
5040 for (i = 0; i < n_region; i++)
5041 {
5042 /* We do not handle subloops, i.e. all the blocks must belong to the
5043 same loop. */
5044 if (region[i]->loop_father != loop)
5045 return false;
5046
5047 if (region[i] != entry->dest
5048 && region[i] == loop->header)
5049 return false;
5050 }
5051
561e8a90 5052 set_loop_copy (loop, loop);
42759f1e
ZD
5053
5054 /* In case the function is used for loop header copying (which is the primary
5055 use), ensure that EXIT and its copy will be new latch and entry edges. */
5056 if (loop->header == entry->dest)
5057 {
5058 copying_header = true;
561e8a90 5059 set_loop_copy (loop, loop_outer (loop));
42759f1e
ZD
5060
5061 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5062 return false;
5063
5064 for (i = 0; i < n_region; i++)
5065 if (region[i] != exit->src
5066 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5067 return false;
5068 }
5069
5070 if (!region_copy)
5071 {
858904db 5072 region_copy = XNEWVEC (basic_block, n_region);
42759f1e
ZD
5073 free_region_copy = true;
5074 }
5075
5006671f 5076 gcc_assert (!need_ssa_update_p (cfun));
42759f1e 5077
5deaef19 5078 /* Record blocks outside the region that are dominated by something
42759f1e 5079 inside. */
66f97d31 5080 doms = NULL;
6580ee77
JH
5081 initialize_original_copy_tables ();
5082
66f97d31 5083 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
42759f1e 5084
09bac500
JH
5085 if (entry->dest->count)
5086 {
5087 total_count = entry->dest->count;
5088 entry_count = entry->count;
5089 /* Fix up corner cases, to avoid division by zero or creation of negative
5090 frequencies. */
5091 if (entry_count > total_count)
5092 entry_count = total_count;
5093 }
5094 else
5095 {
5096 total_freq = entry->dest->frequency;
5097 entry_freq = EDGE_FREQUENCY (entry);
5098 /* Fix up corner cases, to avoid division by zero or creation of negative
5099 frequencies. */
5100 if (total_freq == 0)
5101 total_freq = 1;
5102 else if (entry_freq > total_freq)
5103 entry_freq = total_freq;
5104 }
5deaef19 5105
b9a66240
ZD
5106 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5107 split_edge_bb_loc (entry));
09bac500
JH
5108 if (total_count)
5109 {
5110 scale_bbs_frequencies_gcov_type (region, n_region,
5111 total_count - entry_count,
5112 total_count);
5113 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6531d1be 5114 total_count);
09bac500
JH
5115 }
5116 else
5117 {
5118 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5119 total_freq);
5120 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5121 }
42759f1e
ZD
5122
5123 if (copying_header)
5124 {
5125 loop->header = exit->dest;
5126 loop->latch = exit->src;
5127 }
5128
5129 /* Redirect the entry and add the phi node arguments. */
6580ee77 5130 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
42759f1e 5131 gcc_assert (redirected != NULL);
71882046 5132 flush_pending_stmts (entry);
42759f1e
ZD
5133
5134 /* Concerning updating of dominators: We must recount dominators
84d65814
DN
5135 for entry block and its copy. Anything that is outside of the
5136 region, but was dominated by something inside needs recounting as
5137 well. */
42759f1e 5138 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
66f97d31
ZD
5139 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5140 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5f40b3cb 5141 VEC_free (basic_block, heap, doms);
42759f1e 5142
84d65814 5143 /* Add the other PHI node arguments. */
5f40b3cb
ZD
5144 add_phi_args_after_copy (region_copy, n_region, NULL);
5145
5146 /* Update the SSA web. */
5147 update_ssa (TODO_update_ssa);
5148
5149 if (free_region_copy)
5150 free (region_copy);
5151
5152 free_original_copy_tables ();
5153 return true;
5154}
5155
5156/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5157 are stored to REGION_COPY in the same order in that they appear
5158 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5159 the region, EXIT an exit from it. The condition guarding EXIT
5160 is moved to ENTRY. Returns true if duplication succeeds, false
5161 otherwise.
5162
b8698a0f
L
5163 For example,
5164
5f40b3cb
ZD
5165 some_code;
5166 if (cond)
5167 A;
5168 else
5169 B;
5170
5171 is transformed to
5172
5173 if (cond)
5174 {
5175 some_code;
5176 A;
5177 }
5178 else
5179 {
5180 some_code;
5181 B;
5182 }
5183*/
5184
5185bool
726a989a
RB
5186gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5187 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5188 basic_block *region_copy ATTRIBUTE_UNUSED)
5f40b3cb
ZD
5189{
5190 unsigned i;
5191 bool free_region_copy = false;
5192 struct loop *loop = exit->dest->loop_father;
5193 struct loop *orig_loop = entry->dest->loop_father;
5194 basic_block switch_bb, entry_bb, nentry_bb;
5195 VEC (basic_block, heap) *doms;
5196 int total_freq = 0, exit_freq = 0;
5197 gcov_type total_count = 0, exit_count = 0;
5198 edge exits[2], nexits[2], e;
48710229 5199 gimple_stmt_iterator gsi,gsi1;
726a989a 5200 gimple cond_stmt;
8adfe01d 5201 edge sorig, snew;
48710229 5202 basic_block exit_bb;
8adfe01d 5203 basic_block iters_bb;
48710229 5204 tree new_rhs;
8adfe01d
RL
5205 gimple_stmt_iterator psi;
5206 gimple phi;
5207 tree def;
5f40b3cb
ZD
5208
5209 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5210 exits[0] = exit;
5211 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5212
5213 if (!can_copy_bbs_p (region, n_region))
5214 return false;
5215
5f40b3cb
ZD
5216 initialize_original_copy_tables ();
5217 set_loop_copy (orig_loop, loop);
48710229 5218 duplicate_subloops (orig_loop, loop);
5f40b3cb
ZD
5219
5220 if (!region_copy)
5221 {
5222 region_copy = XNEWVEC (basic_block, n_region);
5223 free_region_copy = true;
5224 }
5225
5006671f 5226 gcc_assert (!need_ssa_update_p (cfun));
5f40b3cb
ZD
5227
5228 /* Record blocks outside the region that are dominated by something
5229 inside. */
5230 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5231
5232 if (exit->src->count)
5233 {
5234 total_count = exit->src->count;
5235 exit_count = exit->count;
5236 /* Fix up corner cases, to avoid division by zero or creation of negative
5237 frequencies. */
5238 if (exit_count > total_count)
5239 exit_count = total_count;
5240 }
5241 else
5242 {
5243 total_freq = exit->src->frequency;
5244 exit_freq = EDGE_FREQUENCY (exit);
5245 /* Fix up corner cases, to avoid division by zero or creation of negative
5246 frequencies. */
5247 if (total_freq == 0)
5248 total_freq = 1;
5249 if (exit_freq > total_freq)
5250 exit_freq = total_freq;
5251 }
5252
5253 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5254 split_edge_bb_loc (exit));
5255 if (total_count)
5256 {
5257 scale_bbs_frequencies_gcov_type (region, n_region,
5258 total_count - exit_count,
5259 total_count);
5260 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5261 total_count);
5262 }
5263 else
5264 {
5265 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5266 total_freq);
5267 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5268 }
5269
5270 /* Create the switch block, and put the exit condition to it. */
5271 entry_bb = entry->dest;
5272 nentry_bb = get_bb_copy (entry_bb);
5273 if (!last_stmt (entry->src)
5274 || !stmt_ends_bb_p (last_stmt (entry->src)))
5275 switch_bb = entry->src;
5276 else
5277 switch_bb = split_edge (entry);
5278 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5279
726a989a
RB
5280 gsi = gsi_last_bb (switch_bb);
5281 cond_stmt = last_stmt (exit->src);
5282 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5283 cond_stmt = gimple_copy (cond_stmt);
b8698a0f
L
5284
5285 /* If the block consisting of the exit condition has the latch as
5286 successor, then the body of the loop is executed before
5287 the exit condition is tested. In such case, moving the
5288 condition to the entry, causes that the loop will iterate
5289 one less iteration (which is the wanted outcome, since we
5290 peel out the last iteration). If the body is executed after
5291 the condition, moving the condition to the entry requires
48710229
RL
5292 decrementing one iteration. */
5293 if (exits[1]->dest == orig_loop->latch)
5294 new_rhs = gimple_cond_rhs (cond_stmt);
5295 else
5296 {
5297 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
b8698a0f 5298 gimple_cond_rhs (cond_stmt),
48710229
RL
5299 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
5300
5301 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5302 {
5303 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5304 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
6d8752c4 5305 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
48710229 5306 break;
b8698a0f 5307
48710229
RL
5308 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5309 NULL_TREE,false,GSI_CONTINUE_LINKING);
5310 }
b8698a0f
L
5311 }
5312 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
726a989a 5313 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
726a989a 5314 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5f40b3cb
ZD
5315
5316 sorig = single_succ_edge (switch_bb);
5317 sorig->flags = exits[1]->flags;
5318 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5319
5320 /* Register the new edge from SWITCH_BB in loop exit lists. */
5321 rescan_loop_exit (snew, true, false);
5322
5323 /* Add the PHI node arguments. */
5324 add_phi_args_after_copy (region_copy, n_region, snew);
b8698a0f 5325
5f40b3cb
ZD
5326 /* Get rid of now superfluous conditions and associated edges (and phi node
5327 arguments). */
48710229 5328 exit_bb = exit->dest;
b8698a0f 5329
5f40b3cb 5330 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
726a989a 5331 PENDING_STMT (e) = NULL;
b8698a0f 5332
8adfe01d
RL
5333 /* The latch of ORIG_LOOP was copied, and so was the backedge
5334 to the original header. We redirect this backedge to EXIT_BB. */
48710229 5335 for (i = 0; i < n_region; i++)
8adfe01d
RL
5336 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5337 {
5338 gcc_assert (single_succ_edge (region_copy[i]));
5339 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5340 PENDING_STMT (e) = NULL;
5341 for (psi = gsi_start_phis (exit_bb);
5342 !gsi_end_p (psi);
5343 gsi_next (&psi))
5344 {
5345 phi = gsi_stmt (psi);
5346 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5347 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5348 }
5349 }
5350 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5351 PENDING_STMT (e) = NULL;
5352
5f40b3cb
ZD
5353 /* Anything that is outside of the region, but was dominated by something
5354 inside needs to update dominance info. */
5355 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5356 VEC_free (basic_block, heap, doms);
84d65814
DN
5357 /* Update the SSA web. */
5358 update_ssa (TODO_update_ssa);
6d8752c4 5359
42759f1e
ZD
5360 if (free_region_copy)
5361 free (region_copy);
6d8752c4 5362
6580ee77 5363 free_original_copy_tables ();
42759f1e
ZD
5364 return true;
5365}
6de9cd9a 5366
50674e96
DN
5367/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5368 adding blocks when the dominator traversal reaches EXIT. This
5369 function silently assumes that ENTRY strictly dominates EXIT. */
5370
9f9f72aa 5371void
50674e96
DN
5372gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5373 VEC(basic_block,heap) **bbs_p)
5374{
5375 basic_block son;
5376
5377 for (son = first_dom_son (CDI_DOMINATORS, entry);
5378 son;
5379 son = next_dom_son (CDI_DOMINATORS, son))
5380 {
5381 VEC_safe_push (basic_block, heap, *bbs_p, son);
5382 if (son != exit)
5383 gather_blocks_in_sese_region (son, exit, bbs_p);
5384 }
5385}
5386
917948d3
ZD
5387/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5388 The duplicates are recorded in VARS_MAP. */
5389
5390static void
5391replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5392 tree to_context)
5393{
5394 tree t = *tp, new_t;
5395 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5396 void **loc;
5397
5398 if (DECL_CONTEXT (t) == to_context)
5399 return;
5400
5401 loc = pointer_map_contains (vars_map, t);
5402
5403 if (!loc)
5404 {
5405 loc = pointer_map_insert (vars_map, t);
5406
5407 if (SSA_VAR_P (t))
5408 {
5409 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
cb91fab0 5410 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
917948d3
ZD
5411 }
5412 else
5413 {
5414 gcc_assert (TREE_CODE (t) == CONST_DECL);
5415 new_t = copy_node (t);
5416 }
5417 DECL_CONTEXT (new_t) = to_context;
5418
5419 *loc = new_t;
5420 }
5421 else
3d9a9f94 5422 new_t = (tree) *loc;
917948d3
ZD
5423
5424 *tp = new_t;
5425}
5426
726a989a 5427
917948d3
ZD
5428/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5429 VARS_MAP maps old ssa names and var_decls to the new ones. */
5430
5431static tree
5432replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5433 tree to_context)
5434{
5435 void **loc;
5436 tree new_name, decl = SSA_NAME_VAR (name);
5437
5438 gcc_assert (is_gimple_reg (name));
5439
5440 loc = pointer_map_contains (vars_map, name);
5441
5442 if (!loc)
5443 {
5444 replace_by_duplicate_decl (&decl, vars_map, to_context);
5445
5446 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5447 if (gimple_in_ssa_p (cfun))
5448 add_referenced_var (decl);
5449
5450 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5451 if (SSA_NAME_IS_DEFAULT_DEF (name))
5452 set_default_def (decl, new_name);
5453 pop_cfun ();
5454
5455 loc = pointer_map_insert (vars_map, name);
5456 *loc = new_name;
5457 }
5458 else
3d9a9f94 5459 new_name = (tree) *loc;
917948d3
ZD
5460
5461 return new_name;
5462}
50674e96
DN
5463
5464struct move_stmt_d
5465{
b357f682
JJ
5466 tree orig_block;
5467 tree new_block;
50674e96
DN
5468 tree from_context;
5469 tree to_context;
917948d3 5470 struct pointer_map_t *vars_map;
fad41cd7 5471 htab_t new_label_map;
1d65f45c 5472 struct pointer_map_t *eh_map;
50674e96
DN
5473 bool remap_decls_p;
5474};
5475
5476/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
b357f682
JJ
5477 contained in *TP if it has been ORIG_BLOCK previously and change the
5478 DECL_CONTEXT of every local variable referenced in *TP. */
50674e96
DN
5479
5480static tree
726a989a 5481move_stmt_op (tree *tp, int *walk_subtrees, void *data)
50674e96 5482{
726a989a
RB
5483 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5484 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
fad41cd7 5485 tree t = *tp;
50674e96 5486
726a989a
RB
5487 if (EXPR_P (t))
5488 /* We should never have TREE_BLOCK set on non-statements. */
5489 gcc_assert (!TREE_BLOCK (t));
fad41cd7 5490
917948d3 5491 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
50674e96 5492 {
917948d3
ZD
5493 if (TREE_CODE (t) == SSA_NAME)
5494 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5495 else if (TREE_CODE (t) == LABEL_DECL)
fad41cd7
RH
5496 {
5497 if (p->new_label_map)
5498 {
5499 struct tree_map in, *out;
fc8600f9 5500 in.base.from = t;
3d9a9f94
KG
5501 out = (struct tree_map *)
5502 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
fad41cd7
RH
5503 if (out)
5504 *tp = t = out->to;
5505 }
50674e96 5506
fad41cd7
RH
5507 DECL_CONTEXT (t) = p->to_context;
5508 }
5509 else if (p->remap_decls_p)
50674e96 5510 {
917948d3
ZD
5511 /* Replace T with its duplicate. T should no longer appear in the
5512 parent function, so this looks wasteful; however, it may appear
5513 in referenced_vars, and more importantly, as virtual operands of
5514 statements, and in alias lists of other variables. It would be
5515 quite difficult to expunge it from all those places. ??? It might
5516 suffice to do this for addressable variables. */
5517 if ((TREE_CODE (t) == VAR_DECL
5518 && !is_global_var (t))
5519 || TREE_CODE (t) == CONST_DECL)
5520 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
b8698a0f 5521
917948d3
ZD
5522 if (SSA_VAR_P (t)
5523 && gimple_in_ssa_p (cfun))
fad41cd7 5524 {
917948d3
ZD
5525 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5526 add_referenced_var (*tp);
5527 pop_cfun ();
fad41cd7 5528 }
50674e96 5529 }
917948d3 5530 *walk_subtrees = 0;
50674e96 5531 }
fad41cd7
RH
5532 else if (TYPE_P (t))
5533 *walk_subtrees = 0;
50674e96
DN
5534
5535 return NULL_TREE;
5536}
5537
1d65f45c
RH
5538/* Helper for move_stmt_r. Given an EH region number for the source
5539 function, map that to the duplicate EH regio number in the dest. */
5540
5541static int
5542move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5543{
5544 eh_region old_r, new_r;
5545 void **slot;
5546
5547 old_r = get_eh_region_from_number (old_nr);
5548 slot = pointer_map_contains (p->eh_map, old_r);
5549 new_r = (eh_region) *slot;
5550
5551 return new_r->index;
5552}
5553
5554/* Similar, but operate on INTEGER_CSTs. */
5555
5556static tree
5557move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5558{
5559 int old_nr, new_nr;
5560
5561 old_nr = tree_low_cst (old_t_nr, 0);
5562 new_nr = move_stmt_eh_region_nr (old_nr, p);
5563
5564 return build_int_cst (NULL, new_nr);
5565}
5566
726a989a
RB
5567/* Like move_stmt_op, but for gimple statements.
5568
5569 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5570 contained in the current statement in *GSI_P and change the
5571 DECL_CONTEXT of every local variable referenced in the current
5572 statement. */
5573
5574static tree
5575move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5576 struct walk_stmt_info *wi)
5577{
5578 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5579 gimple stmt = gsi_stmt (*gsi_p);
5580 tree block = gimple_block (stmt);
5581
5582 if (p->orig_block == NULL_TREE
5583 || block == p->orig_block
5584 || block == NULL_TREE)
5585 gimple_set_block (stmt, p->new_block);
5586#ifdef ENABLE_CHECKING
5587 else if (block != p->new_block)
5588 {
5589 while (block && block != p->orig_block)
5590 block = BLOCK_SUPERCONTEXT (block);
5591 gcc_assert (block);
5592 }
5593#endif
5594
1d65f45c 5595 switch (gimple_code (stmt))
726a989a 5596 {
1d65f45c
RH
5597 case GIMPLE_CALL:
5598 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5599 {
5600 tree r, fndecl = gimple_call_fndecl (stmt);
5601 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5602 switch (DECL_FUNCTION_CODE (fndecl))
5603 {
5604 case BUILT_IN_EH_COPY_VALUES:
5605 r = gimple_call_arg (stmt, 1);
5606 r = move_stmt_eh_region_tree_nr (r, p);
5607 gimple_call_set_arg (stmt, 1, r);
5608 /* FALLTHRU */
5609
5610 case BUILT_IN_EH_POINTER:
5611 case BUILT_IN_EH_FILTER:
5612 r = gimple_call_arg (stmt, 0);
5613 r = move_stmt_eh_region_tree_nr (r, p);
5614 gimple_call_set_arg (stmt, 0, r);
5615 break;
726a989a 5616
1d65f45c
RH
5617 default:
5618 break;
5619 }
5620 }
5621 break;
5622
5623 case GIMPLE_RESX:
5624 {
5625 int r = gimple_resx_region (stmt);
5626 r = move_stmt_eh_region_nr (r, p);
5627 gimple_resx_set_region (stmt, r);
5628 }
5629 break;
726a989a 5630
1d65f45c
RH
5631 case GIMPLE_EH_DISPATCH:
5632 {
5633 int r = gimple_eh_dispatch_region (stmt);
5634 r = move_stmt_eh_region_nr (r, p);
5635 gimple_eh_dispatch_set_region (stmt, r);
5636 }
5637 break;
5638
5639 case GIMPLE_OMP_RETURN:
5640 case GIMPLE_OMP_CONTINUE:
5641 break;
5642 default:
5643 if (is_gimple_omp (stmt))
5644 {
5645 /* Do not remap variables inside OMP directives. Variables
5646 referenced in clauses and directive header belong to the
5647 parent function and should not be moved into the child
5648 function. */
5649 bool save_remap_decls_p = p->remap_decls_p;
5650 p->remap_decls_p = false;
5651 *handled_ops_p = true;
5652
5653 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5654 move_stmt_op, wi);
5655
5656 p->remap_decls_p = save_remap_decls_p;
5657 }
5658 break;
726a989a
RB
5659 }
5660
5661 return NULL_TREE;
5662}
5663
917948d3
ZD
5664/* Marks virtual operands of all statements in basic blocks BBS for
5665 renaming. */
5666
dea61d92
SP
5667void
5668mark_virtual_ops_in_bb (basic_block bb)
917948d3 5669{
726a989a 5670 gimple_stmt_iterator gsi;
dea61d92 5671
726a989a
RB
5672 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5673 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92 5674
726a989a
RB
5675 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5676 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
dea61d92
SP
5677}
5678
50674e96
DN
5679/* Move basic block BB from function CFUN to function DEST_FN. The
5680 block is moved out of the original linked list and placed after
5681 block AFTER in the new list. Also, the block is removed from the
5682 original array of blocks and placed in DEST_FN's array of blocks.
5683 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5684 updated to reflect the moved edges.
6531d1be 5685
917948d3
ZD
5686 The local variables are remapped to new instances, VARS_MAP is used
5687 to record the mapping. */
50674e96
DN
5688
5689static void
5690move_block_to_fn (struct function *dest_cfun, basic_block bb,
5691 basic_block after, bool update_edge_count_p,
1d65f45c 5692 struct move_stmt_d *d)
50674e96
DN
5693{
5694 struct control_flow_graph *cfg;
5695 edge_iterator ei;
5696 edge e;
726a989a 5697 gimple_stmt_iterator si;
728b26bb 5698 unsigned old_len, new_len;
50674e96 5699
3722506a
ZD
5700 /* Remove BB from dominance structures. */
5701 delete_from_dominance_info (CDI_DOMINATORS, bb);
5f40b3cb
ZD
5702 if (current_loops)
5703 remove_bb_from_loops (bb);
3722506a 5704
50674e96
DN
5705 /* Link BB to the new linked list. */
5706 move_block_after (bb, after);
5707
5708 /* Update the edge count in the corresponding flowgraphs. */
5709 if (update_edge_count_p)
5710 FOR_EACH_EDGE (e, ei, bb->succs)
5711 {
5712 cfun->cfg->x_n_edges--;
5713 dest_cfun->cfg->x_n_edges++;
5714 }
5715
5716 /* Remove BB from the original basic block array. */
5717 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5718 cfun->cfg->x_n_basic_blocks--;
5719
5720 /* Grow DEST_CFUN's basic block array if needed. */
5721 cfg = dest_cfun->cfg;
5722 cfg->x_n_basic_blocks++;
3722506a
ZD
5723 if (bb->index >= cfg->x_last_basic_block)
5724 cfg->x_last_basic_block = bb->index + 1;
50674e96 5725
728b26bb
DN
5726 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5727 if ((unsigned) cfg->x_last_basic_block >= old_len)
50674e96 5728 {
728b26bb 5729 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
a590ac65
KH
5730 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5731 new_len);
50674e96
DN
5732 }
5733
5734 VEC_replace (basic_block, cfg->x_basic_block_info,
e0310afb 5735 bb->index, bb);
50674e96 5736
917948d3 5737 /* Remap the variables in phi nodes. */
726a989a 5738 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
917948d3 5739 {
726a989a 5740 gimple phi = gsi_stmt (si);
917948d3
ZD
5741 use_operand_p use;
5742 tree op = PHI_RESULT (phi);
5743 ssa_op_iter oi;
5744
5745 if (!is_gimple_reg (op))
5f40b3cb
ZD
5746 {
5747 /* Remove the phi nodes for virtual operands (alias analysis will be
5748 run for the new function, anyway). */
726a989a 5749 remove_phi_node (&si, true);
5f40b3cb
ZD
5750 continue;
5751 }
917948d3 5752
b357f682
JJ
5753 SET_PHI_RESULT (phi,
5754 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3
ZD
5755 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5756 {
5757 op = USE_FROM_PTR (use);
5758 if (TREE_CODE (op) == SSA_NAME)
b357f682 5759 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3 5760 }
726a989a
RB
5761
5762 gsi_next (&si);
917948d3
ZD
5763 }
5764
726a989a 5765 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
50674e96 5766 {
726a989a 5767 gimple stmt = gsi_stmt (si);
726a989a 5768 struct walk_stmt_info wi;
50674e96 5769
726a989a
RB
5770 memset (&wi, 0, sizeof (wi));
5771 wi.info = d;
5772 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
50674e96 5773
726a989a 5774 if (gimple_code (stmt) == GIMPLE_LABEL)
50674e96 5775 {
726a989a 5776 tree label = gimple_label_label (stmt);
50674e96
DN
5777 int uid = LABEL_DECL_UID (label);
5778
5779 gcc_assert (uid > -1);
5780
5781 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5782 if (old_len <= (unsigned) uid)
5783 {
5006671f 5784 new_len = 3 * uid / 2 + 1;
a590ac65
KH
5785 VEC_safe_grow_cleared (basic_block, gc,
5786 cfg->x_label_to_block_map, new_len);
50674e96
DN
5787 }
5788
5789 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5790 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5791
5792 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5793
cb91fab0
JH
5794 if (uid >= dest_cfun->cfg->last_label_uid)
5795 dest_cfun->cfg->last_label_uid = uid + 1;
50674e96 5796 }
fad41cd7 5797
1d65f45c
RH
5798 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5799 remove_stmt_from_eh_lp_fn (cfun, stmt);
5800
5801 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5802 gimple_remove_stmt_histograms (cfun, stmt);
917948d3 5803
5f40b3cb
ZD
5804 /* We cannot leave any operands allocated from the operand caches of
5805 the current function. */
5806 free_stmt_operands (stmt);
5807 push_cfun (dest_cfun);
917948d3 5808 update_stmt (stmt);
5f40b3cb 5809 pop_cfun ();
fad41cd7 5810 }
7241571e
JJ
5811
5812 FOR_EACH_EDGE (e, ei, bb->succs)
5813 if (e->goto_locus)
5814 {
5815 tree block = e->goto_block;
5816 if (d->orig_block == NULL_TREE
5817 || block == d->orig_block)
5818 e->goto_block = d->new_block;
5819#ifdef ENABLE_CHECKING
5820 else if (block != d->new_block)
5821 {
5822 while (block && block != d->orig_block)
5823 block = BLOCK_SUPERCONTEXT (block);
5824 gcc_assert (block);
5825 }
5826#endif
5827 }
fad41cd7
RH
5828}
5829
5830/* Examine the statements in BB (which is in SRC_CFUN); find and return
5831 the outermost EH region. Use REGION as the incoming base EH region. */
5832
1d65f45c 5833static eh_region
fad41cd7 5834find_outermost_region_in_block (struct function *src_cfun,
1d65f45c 5835 basic_block bb, eh_region region)
fad41cd7 5836{
726a989a 5837 gimple_stmt_iterator si;
6531d1be 5838
726a989a 5839 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
fad41cd7 5840 {
726a989a 5841 gimple stmt = gsi_stmt (si);
1d65f45c
RH
5842 eh_region stmt_region;
5843 int lp_nr;
1799e5d5 5844
1d65f45c
RH
5845 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5846 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5847 if (stmt_region)
7e2df4a1 5848 {
1d65f45c 5849 if (region == NULL)
7e2df4a1
JJ
5850 region = stmt_region;
5851 else if (stmt_region != region)
5852 {
5853 region = eh_region_outermost (src_cfun, stmt_region, region);
1d65f45c 5854 gcc_assert (region != NULL);
7e2df4a1
JJ
5855 }
5856 }
50674e96 5857 }
fad41cd7
RH
5858
5859 return region;
50674e96
DN
5860}
5861
fad41cd7
RH
5862static tree
5863new_label_mapper (tree decl, void *data)
5864{
5865 htab_t hash = (htab_t) data;
5866 struct tree_map *m;
5867 void **slot;
5868
5869 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5870
3d9a9f94 5871 m = XNEW (struct tree_map);
fad41cd7 5872 m->hash = DECL_UID (decl);
fc8600f9 5873 m->base.from = decl;
c2255bc4 5874 m->to = create_artificial_label (UNKNOWN_LOCATION);
fad41cd7 5875 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
cb91fab0
JH
5876 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5877 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
5878
5879 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5880 gcc_assert (*slot == NULL);
5881
5882 *slot = m;
5883
5884 return m->to;
5885}
50674e96 5886
b357f682
JJ
5887/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5888 subblocks. */
5889
5890static void
5891replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5892 tree to_context)
5893{
5894 tree *tp, t;
5895
5896 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5897 {
5898 t = *tp;
e1e2bac4
JJ
5899 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5900 continue;
b357f682
JJ
5901 replace_by_duplicate_decl (&t, vars_map, to_context);
5902 if (t != *tp)
5903 {
5904 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5905 {
5906 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5907 DECL_HAS_VALUE_EXPR_P (t) = 1;
5908 }
5909 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5910 *tp = t;
5911 }
5912 }
5913
5914 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5915 replace_block_vars_by_duplicates (block, vars_map, to_context);
5916}
5917
50674e96
DN
5918/* Move a single-entry, single-exit region delimited by ENTRY_BB and
5919 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5920 single basic block in the original CFG and the new basic block is
5921 returned. DEST_CFUN must not have a CFG yet.
5922
5923 Note that the region need not be a pure SESE region. Blocks inside
5924 the region may contain calls to abort/exit. The only restriction
5925 is that ENTRY_BB should be the only entry point and it must
5926 dominate EXIT_BB.
5927
b357f682
JJ
5928 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5929 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
5930 to the new function.
5931
50674e96
DN
5932 All local variables referenced in the region are assumed to be in
5933 the corresponding BLOCK_VARS and unexpanded variable lists
5934 associated with DEST_CFUN. */
5935
5936basic_block
5937move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
b357f682 5938 basic_block exit_bb, tree orig_block)
50674e96 5939{
917948d3
ZD
5940 VEC(basic_block,heap) *bbs, *dom_bbs;
5941 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5942 basic_block after, bb, *entry_pred, *exit_succ, abb;
5943 struct function *saved_cfun = cfun;
1d65f45c 5944 int *entry_flag, *exit_flag;
917948d3 5945 unsigned *entry_prob, *exit_prob;
50674e96
DN
5946 unsigned i, num_entry_edges, num_exit_edges;
5947 edge e;
5948 edge_iterator ei;
fad41cd7 5949 htab_t new_label_map;
1d65f45c 5950 struct pointer_map_t *vars_map, *eh_map;
5f40b3cb 5951 struct loop *loop = entry_bb->loop_father;
b357f682 5952 struct move_stmt_d d;
50674e96
DN
5953
5954 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5955 region. */
5956 gcc_assert (entry_bb != exit_bb
2aee3e57
JJ
5957 && (!exit_bb
5958 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
50674e96 5959
917948d3
ZD
5960 /* Collect all the blocks in the region. Manually add ENTRY_BB
5961 because it won't be added by dfs_enumerate_from. */
50674e96
DN
5962 bbs = NULL;
5963 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5964 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5965
917948d3
ZD
5966 /* The blocks that used to be dominated by something in BBS will now be
5967 dominated by the new block. */
5968 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5969 VEC_address (basic_block, bbs),
5970 VEC_length (basic_block, bbs));
5971
50674e96
DN
5972 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5973 the predecessor edges to ENTRY_BB and the successor edges to
5974 EXIT_BB so that we can re-attach them to the new basic block that
5975 will replace the region. */
5976 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5977 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5978 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
917948d3 5979 entry_prob = XNEWVEC (unsigned, num_entry_edges);
50674e96
DN
5980 i = 0;
5981 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5982 {
917948d3 5983 entry_prob[i] = e->probability;
50674e96
DN
5984 entry_flag[i] = e->flags;
5985 entry_pred[i++] = e->src;
5986 remove_edge (e);
5987 }
5988
2aee3e57 5989 if (exit_bb)
50674e96 5990 {
2aee3e57
JJ
5991 num_exit_edges = EDGE_COUNT (exit_bb->succs);
5992 exit_succ = (basic_block *) xcalloc (num_exit_edges,
5993 sizeof (basic_block));
5994 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
917948d3 5995 exit_prob = XNEWVEC (unsigned, num_exit_edges);
2aee3e57
JJ
5996 i = 0;
5997 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
5998 {
917948d3 5999 exit_prob[i] = e->probability;
2aee3e57
JJ
6000 exit_flag[i] = e->flags;
6001 exit_succ[i++] = e->dest;
6002 remove_edge (e);
6003 }
6004 }
6005 else
6006 {
6007 num_exit_edges = 0;
6008 exit_succ = NULL;
6009 exit_flag = NULL;
917948d3 6010 exit_prob = NULL;
50674e96
DN
6011 }
6012
6013 /* Switch context to the child function to initialize DEST_FN's CFG. */
6014 gcc_assert (dest_cfun->cfg == NULL);
917948d3 6015 push_cfun (dest_cfun);
fad41cd7 6016
50674e96 6017 init_empty_tree_cfg ();
fad41cd7
RH
6018
6019 /* Initialize EH information for the new function. */
1d65f45c 6020 eh_map = NULL;
fad41cd7
RH
6021 new_label_map = NULL;
6022 if (saved_cfun->eh)
6023 {
1d65f45c 6024 eh_region region = NULL;
fad41cd7
RH
6025
6026 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6027 region = find_outermost_region_in_block (saved_cfun, bb, region);
6028
6029 init_eh_for_function ();
1d65f45c 6030 if (region != NULL)
fad41cd7
RH
6031 {
6032 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
1d65f45c
RH
6033 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6034 new_label_mapper, new_label_map);
fad41cd7
RH
6035 }
6036 }
6037
917948d3
ZD
6038 pop_cfun ();
6039
50674e96
DN
6040 /* Move blocks from BBS into DEST_CFUN. */
6041 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6042 after = dest_cfun->cfg->x_entry_block_ptr;
917948d3 6043 vars_map = pointer_map_create ();
b357f682
JJ
6044
6045 memset (&d, 0, sizeof (d));
1d65f45c
RH
6046 d.orig_block = orig_block;
6047 d.new_block = DECL_INITIAL (dest_cfun->decl);
b357f682
JJ
6048 d.from_context = cfun->decl;
6049 d.to_context = dest_cfun->decl;
1d65f45c 6050 d.vars_map = vars_map;
b357f682 6051 d.new_label_map = new_label_map;
1d65f45c 6052 d.eh_map = eh_map;
b357f682 6053 d.remap_decls_p = true;
b357f682 6054
50674e96
DN
6055 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6056 {
6057 /* No need to update edge counts on the last block. It has
6058 already been updated earlier when we detached the region from
6059 the original CFG. */
1d65f45c 6060 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
50674e96
DN
6061 after = bb;
6062 }
6063
b357f682
JJ
6064 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6065 if (orig_block)
6066 {
6067 tree block;
6068 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6069 == NULL_TREE);
6070 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6071 = BLOCK_SUBBLOCKS (orig_block);
6072 for (block = BLOCK_SUBBLOCKS (orig_block);
6073 block; block = BLOCK_CHAIN (block))
6074 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6075 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6076 }
6077
6078 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6079 vars_map, dest_cfun->decl);
6080
fad41cd7
RH
6081 if (new_label_map)
6082 htab_delete (new_label_map);
1d65f45c
RH
6083 if (eh_map)
6084 pointer_map_destroy (eh_map);
917948d3 6085 pointer_map_destroy (vars_map);
50674e96
DN
6086
6087 /* Rewire the entry and exit blocks. The successor to the entry
6088 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6089 the child function. Similarly, the predecessor of DEST_FN's
6090 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6091 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6092 various CFG manipulation function get to the right CFG.
6093
6094 FIXME, this is silly. The CFG ought to become a parameter to
6095 these helpers. */
917948d3 6096 push_cfun (dest_cfun);
50674e96 6097 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
2aee3e57
JJ
6098 if (exit_bb)
6099 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
917948d3 6100 pop_cfun ();
50674e96
DN
6101
6102 /* Back in the original function, the SESE region has disappeared,
6103 create a new basic block in its place. */
6104 bb = create_empty_bb (entry_pred[0]);
5f40b3cb
ZD
6105 if (current_loops)
6106 add_bb_to_loop (bb, loop);
50674e96 6107 for (i = 0; i < num_entry_edges; i++)
917948d3
ZD
6108 {
6109 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6110 e->probability = entry_prob[i];
6111 }
50674e96
DN
6112
6113 for (i = 0; i < num_exit_edges; i++)
917948d3
ZD
6114 {
6115 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6116 e->probability = exit_prob[i];
6117 }
6118
6119 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6120 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6121 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6122 VEC_free (basic_block, heap, dom_bbs);
50674e96 6123
2aee3e57
JJ
6124 if (exit_bb)
6125 {
917948d3 6126 free (exit_prob);
2aee3e57
JJ
6127 free (exit_flag);
6128 free (exit_succ);
6129 }
917948d3 6130 free (entry_prob);
50674e96
DN
6131 free (entry_flag);
6132 free (entry_pred);
50674e96
DN
6133 VEC_free (basic_block, heap, bbs);
6134
6135 return bb;
6136}
6137
84d65814 6138
726a989a
RB
6139/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
6140 */
6de9cd9a
DN
6141
6142void
6143dump_function_to_file (tree fn, FILE *file, int flags)
6144{
6145 tree arg, vars, var;
459ffad3 6146 struct function *dsf;
6de9cd9a
DN
6147 bool ignore_topmost_bind = false, any_var = false;
6148 basic_block bb;
6149 tree chain;
6531d1be 6150
673fda6b 6151 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
6152
6153 arg = DECL_ARGUMENTS (fn);
6154 while (arg)
6155 {
2f9ea521
RG
6156 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6157 fprintf (file, " ");
6de9cd9a 6158 print_generic_expr (file, arg, dump_flags);
3e894af1
KZ
6159 if (flags & TDF_VERBOSE)
6160 print_node (file, "", arg, 4);
6de9cd9a
DN
6161 if (TREE_CHAIN (arg))
6162 fprintf (file, ", ");
6163 arg = TREE_CHAIN (arg);
6164 }
6165 fprintf (file, ")\n");
6166
3e894af1
KZ
6167 if (flags & TDF_VERBOSE)
6168 print_node (file, "", fn, 2);
6169
459ffad3 6170 dsf = DECL_STRUCT_FUNCTION (fn);
feb4e5ba 6171 if (dsf && (flags & TDF_EH))
459ffad3
EB
6172 dump_eh_tree (file, dsf);
6173
39ecc018 6174 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6de9cd9a
DN
6175 {
6176 dump_node (fn, TDF_SLIM | flags, file);
6177 return;
6178 }
6179
953ff289 6180 /* Switch CFUN to point to FN. */
db2960f4 6181 push_cfun (DECL_STRUCT_FUNCTION (fn));
953ff289 6182
6de9cd9a
DN
6183 /* When GIMPLE is lowered, the variables are no longer available in
6184 BIND_EXPRs, so display them separately. */
cb91fab0 6185 if (cfun && cfun->decl == fn && cfun->local_decls)
6de9cd9a
DN
6186 {
6187 ignore_topmost_bind = true;
6188
6189 fprintf (file, "{\n");
cb91fab0 6190 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6de9cd9a
DN
6191 {
6192 var = TREE_VALUE (vars);
6193
6194 print_generic_decl (file, var, flags);
3e894af1
KZ
6195 if (flags & TDF_VERBOSE)
6196 print_node (file, "", var, 4);
6de9cd9a
DN
6197 fprintf (file, "\n");
6198
6199 any_var = true;
6200 }
6201 }
6202
32a87d45 6203 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6de9cd9a 6204 {
726a989a 6205 /* If the CFG has been built, emit a CFG-based dump. */
878f99d2 6206 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
6207 if (!ignore_topmost_bind)
6208 fprintf (file, "{\n");
6209
6210 if (any_var && n_basic_blocks)
6211 fprintf (file, "\n");
6212
6213 FOR_EACH_BB (bb)
726a989a 6214 gimple_dump_bb (bb, file, 2, flags);
6531d1be 6215
6de9cd9a 6216 fprintf (file, "}\n");
878f99d2 6217 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a 6218 }
726a989a
RB
6219 else if (DECL_SAVED_TREE (fn) == NULL)
6220 {
6221 /* The function is now in GIMPLE form but the CFG has not been
6222 built yet. Emit the single sequence of GIMPLE statements
6223 that make up its body. */
6224 gimple_seq body = gimple_body (fn);
6225
6226 if (gimple_seq_first_stmt (body)
6227 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6228 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6229 print_gimple_seq (file, body, 0, flags);
6230 else
6231 {
6232 if (!ignore_topmost_bind)
6233 fprintf (file, "{\n");
6234
6235 if (any_var)
6236 fprintf (file, "\n");
6237
6238 print_gimple_seq (file, body, 2, flags);
6239 fprintf (file, "}\n");
6240 }
6241 }
6de9cd9a
DN
6242 else
6243 {
6244 int indent;
6245
6246 /* Make a tree based dump. */
6247 chain = DECL_SAVED_TREE (fn);
6248
953ff289 6249 if (chain && TREE_CODE (chain) == BIND_EXPR)
6de9cd9a
DN
6250 {
6251 if (ignore_topmost_bind)
6252 {
6253 chain = BIND_EXPR_BODY (chain);
6254 indent = 2;
6255 }
6256 else
6257 indent = 0;
6258 }
6259 else
6260 {
6261 if (!ignore_topmost_bind)
6262 fprintf (file, "{\n");
6263 indent = 2;
6264 }
6265
6266 if (any_var)
6267 fprintf (file, "\n");
6268
6269 print_generic_stmt_indented (file, chain, flags, indent);
6270 if (ignore_topmost_bind)
6271 fprintf (file, "}\n");
6272 }
6273
6274 fprintf (file, "\n\n");
953ff289
DN
6275
6276 /* Restore CFUN. */
db2960f4 6277 pop_cfun ();
953ff289
DN
6278}
6279
6280
6281/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6282
6283void
6284debug_function (tree fn, int flags)
6285{
6286 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6287}
6288
6289
d7770457 6290/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6291
6292static void
628f6a4e 6293print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6294{
628f6a4e
BE
6295 edge e;
6296 edge_iterator ei;
6297
6298 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6299 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6300}
6301
6302
d7770457 6303/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6304
6305static void
628f6a4e 6306print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6307{
628f6a4e
BE
6308 edge e;
6309 edge_iterator ei;
6310
6311 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6312 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6313}
6314
0c8efed8
SP
6315/* Print to FILE the basic block BB following the VERBOSITY level. */
6316
b8698a0f 6317void
0c8efed8
SP
6318print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6319{
6320 char *s_indent = (char *) alloca ((size_t) indent + 1);
6321 memset ((void *) s_indent, ' ', (size_t) indent);
6322 s_indent[indent] = '\0';
6323
6324 /* Print basic_block's header. */
6325 if (verbosity >= 2)
6326 {
6327 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6328 print_pred_bbs (file, bb);
6329 fprintf (file, "}, succs = {");
6330 print_succ_bbs (file, bb);
6331 fprintf (file, "})\n");
6332 }
6333
6334 /* Print basic_block's body. */
6335 if (verbosity >= 3)
6336 {
6337 fprintf (file, "%s {\n", s_indent);
726a989a 6338 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
0c8efed8
SP
6339 fprintf (file, "%s }\n", s_indent);
6340 }
6341}
6342
6343static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6344
0c8efed8
SP
6345/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6346 VERBOSITY level this outputs the contents of the loop, or just its
6347 structure. */
6de9cd9a
DN
6348
6349static void
0c8efed8 6350print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6351{
6352 char *s_indent;
6353 basic_block bb;
6531d1be 6354
6de9cd9a
DN
6355 if (loop == NULL)
6356 return;
6357
6358 s_indent = (char *) alloca ((size_t) indent + 1);
6359 memset ((void *) s_indent, ' ', (size_t) indent);
6360 s_indent[indent] = '\0';
6361
0c8efed8 6362 /* Print loop's header. */
b8698a0f 6363 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
0c8efed8
SP
6364 loop->num, loop->header->index, loop->latch->index);
6365 fprintf (file, ", niter = ");
6366 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6367
0c8efed8
SP
6368 if (loop->any_upper_bound)
6369 {
6370 fprintf (file, ", upper_bound = ");
6371 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6372 }
6531d1be 6373
0c8efed8
SP
6374 if (loop->any_estimate)
6375 {
6376 fprintf (file, ", estimate = ");
6377 dump_double_int (file, loop->nb_iterations_estimate, true);
6378 }
6379 fprintf (file, ")\n");
6380
6381 /* Print loop's body. */
6382 if (verbosity >= 1)
6383 {
6384 fprintf (file, "%s{\n", s_indent);
6385 FOR_EACH_BB (bb)
6386 if (bb->loop_father == loop)
6387 print_loops_bb (file, bb, indent, verbosity);
6388
6389 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6390 fprintf (file, "%s}\n", s_indent);
6391 }
6de9cd9a
DN
6392}
6393
0c8efed8
SP
6394/* Print the LOOP and its sibling loops on FILE, indented INDENT
6395 spaces. Following VERBOSITY level this outputs the contents of the
6396 loop, or just its structure. */
6397
6398static void
6399print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6400{
6401 if (loop == NULL)
6402 return;
6403
6404 print_loop (file, loop, indent, verbosity);
6405 print_loop_and_siblings (file, loop->next, indent, verbosity);
6406}
6de9cd9a
DN
6407
6408/* Follow a CFG edge from the entry point of the program, and on entry
6409 of a loop, pretty print the loop structure on FILE. */
6410
6531d1be 6411void
0c8efed8 6412print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6413{
6414 basic_block bb;
6531d1be 6415
f8bf9252 6416 bb = ENTRY_BLOCK_PTR;
6de9cd9a 6417 if (bb && bb->loop_father)
0c8efed8 6418 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6419}
6420
6421
0c8efed8
SP
6422/* Debugging loops structure at tree level, at some VERBOSITY level. */
6423
6424void
6425debug_loops (int verbosity)
6426{
6427 print_loops (stderr, verbosity);
6428}
6429
6430/* Print on stderr the code of LOOP, at some VERBOSITY level. */
6de9cd9a 6431
6531d1be 6432void
0c8efed8 6433debug_loop (struct loop *loop, int verbosity)
6de9cd9a 6434{
0c8efed8 6435 print_loop (stderr, loop, 0, verbosity);
6de9cd9a
DN
6436}
6437
0c8efed8
SP
6438/* Print on stderr the code of loop number NUM, at some VERBOSITY
6439 level. */
6440
6441void
6442debug_loop_num (unsigned num, int verbosity)
6443{
6444 debug_loop (get_loop (num), verbosity);
6445}
6de9cd9a
DN
6446
6447/* Return true if BB ends with a call, possibly followed by some
6448 instructions that must stay with the call. Return false,
6449 otherwise. */
6450
6451static bool
726a989a 6452gimple_block_ends_with_call_p (basic_block bb)
6de9cd9a 6453{
b5b8b0ac 6454 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
726a989a 6455 return is_gimple_call (gsi_stmt (gsi));
6de9cd9a
DN
6456}
6457
6458
6459/* Return true if BB ends with a conditional branch. Return false,
6460 otherwise. */
6461
6462static bool
726a989a 6463gimple_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6464{
726a989a
RB
6465 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6466 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6de9cd9a
DN
6467}
6468
6469
6470/* Return true if we need to add fake edge to exit at statement T.
726a989a 6471 Helper function for gimple_flow_call_edges_add. */
6de9cd9a
DN
6472
6473static bool
726a989a 6474need_fake_edge_p (gimple t)
6de9cd9a 6475{
726a989a
RB
6476 tree fndecl = NULL_TREE;
6477 int call_flags = 0;
6de9cd9a
DN
6478
6479 /* NORETURN and LONGJMP calls already have an edge to exit.
321cf1f2 6480 CONST and PURE calls do not need one.
6de9cd9a
DN
6481 We don't currently check for CONST and PURE here, although
6482 it would be a good idea, because those attributes are
6483 figured out from the RTL in mark_constant_function, and
6484 the counter incrementation code from -fprofile-arcs
6485 leads to different results from -fbranch-probabilities. */
726a989a 6486 if (is_gimple_call (t))
23ef6d21 6487 {
726a989a
RB
6488 fndecl = gimple_call_fndecl (t);
6489 call_flags = gimple_call_flags (t);
23ef6d21
BE
6490 }
6491
726a989a
RB
6492 if (is_gimple_call (t)
6493 && fndecl
6494 && DECL_BUILT_IN (fndecl)
23ef6d21 6495 && (call_flags & ECF_NOTHROW)
3cfa762b
RG
6496 && !(call_flags & ECF_RETURNS_TWICE)
6497 /* fork() doesn't really return twice, but the effect of
6498 wrapping it in __gcov_fork() which calls __gcov_flush()
6499 and clears the counters before forking has the same
6500 effect as returning twice. Force a fake edge. */
6501 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6502 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6503 return false;
23ef6d21 6504
726a989a
RB
6505 if (is_gimple_call (t)
6506 && !(call_flags & ECF_NORETURN))
6de9cd9a
DN
6507 return true;
6508
e0c68ce9 6509 if (gimple_code (t) == GIMPLE_ASM
726a989a 6510 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6de9cd9a
DN
6511 return true;
6512
6513 return false;
6514}
6515
6516
6517/* Add fake edges to the function exit for any non constant and non
6518 noreturn calls, volatile inline assembly in the bitmap of blocks
6519 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6520 the number of blocks that were split.
6521
6522 The goal is to expose cases in which entering a basic block does
6523 not imply that all subsequent instructions must be executed. */
6524
6525static int
726a989a 6526gimple_flow_call_edges_add (sbitmap blocks)
6de9cd9a
DN
6527{
6528 int i;
6529 int blocks_split = 0;
6530 int last_bb = last_basic_block;
6531 bool check_last_block = false;
6532
24bd1a0b 6533 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6534 return 0;
6535
6536 if (! blocks)
6537 check_last_block = true;
6538 else
6539 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6540
6541 /* In the last basic block, before epilogue generation, there will be
6542 a fallthru edge to EXIT. Special care is required if the last insn
6543 of the last basic block is a call because make_edge folds duplicate
6544 edges, which would result in the fallthru edge also being marked
6545 fake, which would result in the fallthru edge being removed by
6546 remove_fake_edges, which would result in an invalid CFG.
6547
6548 Moreover, we can't elide the outgoing fake edge, since the block
6549 profiler needs to take this into account in order to solve the minimal
6550 spanning tree in the case that the call doesn't return.
6551
6552 Handle this by adding a dummy instruction in a new last basic block. */
6553 if (check_last_block)
6554 {
6555 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
726a989a
RB
6556 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6557 gimple t = NULL;
6558
6559 if (!gsi_end_p (gsi))
6560 t = gsi_stmt (gsi);
6de9cd9a 6561
6a60530d 6562 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6563 {
6564 edge e;
6565
9ff3d2de
JL
6566 e = find_edge (bb, EXIT_BLOCK_PTR);
6567 if (e)
6568 {
726a989a
RB
6569 gsi_insert_on_edge (e, gimple_build_nop ());
6570 gsi_commit_edge_inserts ();
9ff3d2de 6571 }
6de9cd9a
DN
6572 }
6573 }
6574
6575 /* Now add fake edges to the function exit for any non constant
6576 calls since there is no way that we can determine if they will
6577 return or not... */
6578 for (i = 0; i < last_bb; i++)
6579 {
6580 basic_block bb = BASIC_BLOCK (i);
726a989a
RB
6581 gimple_stmt_iterator gsi;
6582 gimple stmt, last_stmt;
6de9cd9a
DN
6583
6584 if (!bb)
6585 continue;
6586
6587 if (blocks && !TEST_BIT (blocks, i))
6588 continue;
6589
726a989a
RB
6590 gsi = gsi_last_bb (bb);
6591 if (!gsi_end_p (gsi))
6de9cd9a 6592 {
726a989a 6593 last_stmt = gsi_stmt (gsi);
6de9cd9a
DN
6594 do
6595 {
726a989a 6596 stmt = gsi_stmt (gsi);
6de9cd9a
DN
6597 if (need_fake_edge_p (stmt))
6598 {
6599 edge e;
726a989a 6600
6de9cd9a
DN
6601 /* The handling above of the final block before the
6602 epilogue should be enough to verify that there is
6603 no edge to the exit block in CFG already.
6604 Calling make_edge in such case would cause us to
6605 mark that edge as fake and remove it later. */
6606#ifdef ENABLE_CHECKING
6607 if (stmt == last_stmt)
628f6a4e 6608 {
9ff3d2de
JL
6609 e = find_edge (bb, EXIT_BLOCK_PTR);
6610 gcc_assert (e == NULL);
628f6a4e 6611 }
6de9cd9a
DN
6612#endif
6613
6614 /* Note that the following may create a new basic block
6615 and renumber the existing basic blocks. */
6616 if (stmt != last_stmt)
6617 {
6618 e = split_block (bb, stmt);
6619 if (e)
6620 blocks_split++;
6621 }
6622 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6623 }
726a989a 6624 gsi_prev (&gsi);
6de9cd9a 6625 }
726a989a 6626 while (!gsi_end_p (gsi));
6de9cd9a
DN
6627 }
6628 }
6629
6630 if (blocks_split)
6631 verify_flow_info ();
6632
6633 return blocks_split;
6634}
6635
4f6c2131
EB
6636/* Purge dead abnormal call edges from basic block BB. */
6637
6638bool
726a989a 6639gimple_purge_dead_abnormal_call_edges (basic_block bb)
4f6c2131 6640{
726a989a 6641 bool changed = gimple_purge_dead_eh_edges (bb);
4f6c2131 6642
e3b5732b 6643 if (cfun->has_nonlocal_label)
4f6c2131 6644 {
726a989a 6645 gimple stmt = last_stmt (bb);
4f6c2131
EB
6646 edge_iterator ei;
6647 edge e;
6648
726a989a 6649 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
4f6c2131
EB
6650 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6651 {
6652 if (e->flags & EDGE_ABNORMAL)
6653 {
6654 remove_edge (e);
6655 changed = true;
6656 }
6657 else
6658 ei_next (&ei);
6659 }
6660
726a989a 6661 /* See gimple_purge_dead_eh_edges below. */
4f6c2131
EB
6662 if (changed)
6663 free_dominance_info (CDI_DOMINATORS);
6664 }
6665
6666 return changed;
6667}
6668
672987e8
ZD
6669/* Removes edge E and all the blocks dominated by it, and updates dominance
6670 information. The IL in E->src needs to be updated separately.
6671 If dominance info is not available, only the edge E is removed.*/
6672
6673void
6674remove_edge_and_dominated_blocks (edge e)
6675{
6676 VEC (basic_block, heap) *bbs_to_remove = NULL;
6677 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6678 bitmap df, df_idom;
6679 edge f;
6680 edge_iterator ei;
6681 bool none_removed = false;
6682 unsigned i;
6683 basic_block bb, dbb;
6684 bitmap_iterator bi;
6685
2b28c07a 6686 if (!dom_info_available_p (CDI_DOMINATORS))
672987e8
ZD
6687 {
6688 remove_edge (e);
6689 return;
6690 }
6691
6692 /* No updating is needed for edges to exit. */
6693 if (e->dest == EXIT_BLOCK_PTR)
6694 {
6695 if (cfgcleanup_altered_bbs)
6696 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6697 remove_edge (e);
6698 return;
6699 }
6700
6701 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6702 that is not dominated by E->dest, then this set is empty. Otherwise,
6703 all the basic blocks dominated by E->dest are removed.
6704
6705 Also, to DF_IDOM we store the immediate dominators of the blocks in
6706 the dominance frontier of E (i.e., of the successors of the
6707 removed blocks, if there are any, and of E->dest otherwise). */
6708 FOR_EACH_EDGE (f, ei, e->dest->preds)
6709 {
6710 if (f == e)
6711 continue;
6712
6713 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6714 {
6715 none_removed = true;
6716 break;
6717 }
6718 }
6719
6720 df = BITMAP_ALLOC (NULL);
6721 df_idom = BITMAP_ALLOC (NULL);
6722
6723 if (none_removed)
6724 bitmap_set_bit (df_idom,
6725 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6726 else
6727 {
438c239d 6728 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
672987e8
ZD
6729 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6730 {
6731 FOR_EACH_EDGE (f, ei, bb->succs)
6732 {
6733 if (f->dest != EXIT_BLOCK_PTR)
6734 bitmap_set_bit (df, f->dest->index);
6735 }
6736 }
6737 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6738 bitmap_clear_bit (df, bb->index);
6739
6740 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6741 {
6742 bb = BASIC_BLOCK (i);
6743 bitmap_set_bit (df_idom,
6744 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6745 }
6746 }
6747
6748 if (cfgcleanup_altered_bbs)
6749 {
6750 /* Record the set of the altered basic blocks. */
6751 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6752 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6753 }
6754
6755 /* Remove E and the cancelled blocks. */
6756 if (none_removed)
6757 remove_edge (e);
6758 else
6759 {
b5b8b0ac
AO
6760 /* Walk backwards so as to get a chance to substitute all
6761 released DEFs into debug stmts. See
6762 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6763 details. */
6764 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6765 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
672987e8
ZD
6766 }
6767
6768 /* Update the dominance information. The immediate dominator may change only
6769 for blocks whose immediate dominator belongs to DF_IDOM:
b8698a0f 6770
672987e8
ZD
6771 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6772 removal. Let Z the arbitrary block such that idom(Z) = Y and
6773 Z dominates X after the removal. Before removal, there exists a path P
6774 from Y to X that avoids Z. Let F be the last edge on P that is
6775 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6776 dominates W, and because of P, Z does not dominate W), and W belongs to
b8698a0f 6777 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
672987e8
ZD
6778 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6779 {
6780 bb = BASIC_BLOCK (i);
6781 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6782 dbb;
6783 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6784 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6785 }
6786
66f97d31 6787 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
672987e8
ZD
6788
6789 BITMAP_FREE (df);
6790 BITMAP_FREE (df_idom);
6791 VEC_free (basic_block, heap, bbs_to_remove);
6792 VEC_free (basic_block, heap, bbs_to_fix_dom);
6793}
6794
4f6c2131
EB
6795/* Purge dead EH edges from basic block BB. */
6796
1eaba2f2 6797bool
726a989a 6798gimple_purge_dead_eh_edges (basic_block bb)
1eaba2f2
RH
6799{
6800 bool changed = false;
628f6a4e
BE
6801 edge e;
6802 edge_iterator ei;
726a989a 6803 gimple stmt = last_stmt (bb);
1eaba2f2 6804
726a989a 6805 if (stmt && stmt_can_throw_internal (stmt))
1eaba2f2
RH
6806 return false;
6807
628f6a4e 6808 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6809 {
1eaba2f2
RH
6810 if (e->flags & EDGE_EH)
6811 {
672987e8 6812 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6813 changed = true;
6814 }
628f6a4e
BE
6815 else
6816 ei_next (&ei);
1eaba2f2
RH
6817 }
6818
6819 return changed;
6820}
6821
6822bool
726a989a 6823gimple_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6824{
6825 bool changed = false;
3cd8c58a 6826 unsigned i;
87c476a2 6827 bitmap_iterator bi;
1eaba2f2 6828
87c476a2
ZD
6829 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6830 {
833ee764
JJ
6831 basic_block bb = BASIC_BLOCK (i);
6832
6833 /* Earlier gimple_purge_dead_eh_edges could have removed
6834 this basic block already. */
6835 gcc_assert (bb || changed);
6836 if (bb != NULL)
6837 changed |= gimple_purge_dead_eh_edges (bb);
87c476a2 6838 }
1eaba2f2
RH
6839
6840 return changed;
6841}
6de9cd9a 6842
a100ac1e
KH
6843/* This function is called whenever a new edge is created or
6844 redirected. */
6845
6846static void
726a989a 6847gimple_execute_on_growing_pred (edge e)
a100ac1e
KH
6848{
6849 basic_block bb = e->dest;
6850
8eacd016 6851 if (!gimple_seq_empty_p (phi_nodes (bb)))
a100ac1e
KH
6852 reserve_phi_args_for_new_edge (bb);
6853}
6854
e51546f8
KH
6855/* This function is called immediately before edge E is removed from
6856 the edge vector E->dest->preds. */
6857
6858static void
726a989a 6859gimple_execute_on_shrinking_pred (edge e)
e51546f8 6860{
8eacd016 6861 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
e51546f8
KH
6862 remove_phi_args (e);
6863}
6864
1cb7dfc3
MH
6865/*---------------------------------------------------------------------------
6866 Helper functions for Loop versioning
6867 ---------------------------------------------------------------------------*/
6868
6869/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6870 of 'first'. Both of them are dominated by 'new_head' basic block. When
6871 'new_head' was created by 'second's incoming edge it received phi arguments
6872 on the edge by split_edge(). Later, additional edge 'e' was created to
6531d1be
BF
6873 connect 'new_head' and 'first'. Now this routine adds phi args on this
6874 additional edge 'e' that new_head to second edge received as part of edge
726a989a 6875 splitting. */
1cb7dfc3
MH
6876
6877static void
726a989a
RB
6878gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6879 basic_block new_head, edge e)
1cb7dfc3 6880{
726a989a
RB
6881 gimple phi1, phi2;
6882 gimple_stmt_iterator psi1, psi2;
6883 tree def;
d0e12fc6
KH
6884 edge e2 = find_edge (new_head, second);
6885
6886 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6887 edge, we should always have an edge from NEW_HEAD to SECOND. */
6888 gcc_assert (e2 != NULL);
1cb7dfc3
MH
6889
6890 /* Browse all 'second' basic block phi nodes and add phi args to
6891 edge 'e' for 'first' head. PHI args are always in correct order. */
6892
726a989a
RB
6893 for (psi2 = gsi_start_phis (second),
6894 psi1 = gsi_start_phis (first);
6895 !gsi_end_p (psi2) && !gsi_end_p (psi1);
6896 gsi_next (&psi2), gsi_next (&psi1))
1cb7dfc3 6897 {
726a989a
RB
6898 phi1 = gsi_stmt (psi1);
6899 phi2 = gsi_stmt (psi2);
6900 def = PHI_ARG_DEF (phi2, e2->dest_idx);
f5045c96 6901 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
1cb7dfc3
MH
6902 }
6903}
6904
726a989a 6905
6531d1be
BF
6906/* Adds a if else statement to COND_BB with condition COND_EXPR.
6907 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
1cb7dfc3 6908 the destination of the ELSE part. */
726a989a 6909
1cb7dfc3 6910static void
726a989a
RB
6911gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6912 basic_block second_head ATTRIBUTE_UNUSED,
6913 basic_block cond_bb, void *cond_e)
1cb7dfc3 6914{
726a989a
RB
6915 gimple_stmt_iterator gsi;
6916 gimple new_cond_expr;
1cb7dfc3
MH
6917 tree cond_expr = (tree) cond_e;
6918 edge e0;
6919
6920 /* Build new conditional expr */
726a989a
RB
6921 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6922 NULL_TREE, NULL_TREE);
1cb7dfc3 6923
6531d1be 6924 /* Add new cond in cond_bb. */
726a989a
RB
6925 gsi = gsi_last_bb (cond_bb);
6926 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6927
1cb7dfc3
MH
6928 /* Adjust edges appropriately to connect new head with first head
6929 as well as second head. */
6930 e0 = single_succ_edge (cond_bb);
6931 e0->flags &= ~EDGE_FALLTHRU;
6932 e0->flags |= EDGE_FALSE_VALUE;
6933}
6934
726a989a
RB
6935struct cfg_hooks gimple_cfg_hooks = {
6936 "gimple",
6937 gimple_verify_flow_info,
6938 gimple_dump_bb, /* dump_bb */
6de9cd9a 6939 create_bb, /* create_basic_block */
726a989a
RB
6940 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
6941 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
6942 gimple_can_remove_branch_p, /* can_remove_branch_p */
6de9cd9a 6943 remove_bb, /* delete_basic_block */
726a989a
RB
6944 gimple_split_block, /* split_block */
6945 gimple_move_block_after, /* move_block_after */
6946 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
6947 gimple_merge_blocks, /* merge_blocks */
6948 gimple_predict_edge, /* predict_edge */
6949 gimple_predicted_by_p, /* predicted_by_p */
6950 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
6951 gimple_duplicate_bb, /* duplicate_block */
6952 gimple_split_edge, /* split_edge */
6953 gimple_make_forwarder_block, /* make_forward_block */
6de9cd9a 6954 NULL, /* tidy_fallthru_edge */
726a989a
RB
6955 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
6956 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
6957 gimple_flow_call_edges_add, /* flow_call_edges_add */
6958 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
6959 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6960 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6961 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6962 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
1cb7dfc3 6963 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6531d1be 6964 flush_pending_stmts /* flush_pending_stmts */
6de9cd9a
DN
6965};
6966
6967
6968/* Split all critical edges. */
6969
c2924966 6970static unsigned int
6de9cd9a
DN
6971split_critical_edges (void)
6972{
6973 basic_block bb;
6974 edge e;
628f6a4e 6975 edge_iterator ei;
6de9cd9a 6976
d6be0d7f
JL
6977 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6978 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6979 mappings around the calls to split_edge. */
6980 start_recording_case_labels ();
6de9cd9a
DN
6981 FOR_ALL_BB (bb)
6982 {
628f6a4e 6983 FOR_EACH_EDGE (e, ei, bb->succs)
496a4ef5
JH
6984 {
6985 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6de9cd9a 6986 split_edge (e);
b8698a0f 6987 /* PRE inserts statements to edges and expects that
496a4ef5
JH
6988 since split_critical_edges was done beforehand, committing edge
6989 insertions will not split more edges. In addition to critical
6990 edges we must split edges that have multiple successors and
b8698a0f 6991 end by control flow statements, such as RESX.
496a4ef5
JH
6992 Go ahead and split them too. This matches the logic in
6993 gimple_find_edge_insert_loc. */
6994 else if ((!single_pred_p (e->dest)
671f9f30 6995 || !gimple_seq_empty_p (phi_nodes (e->dest))
496a4ef5
JH
6996 || e->dest == EXIT_BLOCK_PTR)
6997 && e->src != ENTRY_BLOCK_PTR
6998 && !(e->flags & EDGE_ABNORMAL))
6999 {
7000 gimple_stmt_iterator gsi;
7001
7002 gsi = gsi_last_bb (e->src);
7003 if (!gsi_end_p (gsi)
7004 && stmt_ends_bb_p (gsi_stmt (gsi))
7005 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7006 split_edge (e);
7007 }
7008 }
6de9cd9a 7009 }
d6be0d7f 7010 end_recording_case_labels ();
c2924966 7011 return 0;
6de9cd9a
DN
7012}
7013
8ddbbcae 7014struct gimple_opt_pass pass_split_crit_edges =
6de9cd9a 7015{
8ddbbcae
JH
7016 {
7017 GIMPLE_PASS,
5d44aeed 7018 "crited", /* name */
6de9cd9a
DN
7019 NULL, /* gate */
7020 split_critical_edges, /* execute */
7021 NULL, /* sub */
7022 NULL, /* next */
7023 0, /* static_pass_number */
7024 TV_TREE_SPLIT_EDGES, /* tv_id */
7025 PROP_cfg, /* properties required */
7026 PROP_no_crit_edges, /* properties_provided */
7027 0, /* properties_destroyed */
7028 0, /* todo_flags_start */
9187e02d 7029 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
8ddbbcae 7030 }
6de9cd9a 7031};
26277d41 7032
26277d41 7033
726a989a 7034/* Build a ternary operation and gimplify it. Emit code before GSI.
26277d41
PB
7035 Return the gimple_val holding the result. */
7036
7037tree
726a989a 7038gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7039 tree type, tree a, tree b, tree c)
7040{
7041 tree ret;
db3927fb 7042 location_t loc = gimple_location (gsi_stmt (*gsi));
26277d41 7043
db3927fb 7044 ret = fold_build3_loc (loc, code, type, a, b, c);
26277d41
PB
7045 STRIP_NOPS (ret);
7046
726a989a
RB
7047 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7048 GSI_SAME_STMT);
26277d41
PB
7049}
7050
726a989a 7051/* Build a binary operation and gimplify it. Emit code before GSI.
26277d41
PB
7052 Return the gimple_val holding the result. */
7053
7054tree
726a989a 7055gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
26277d41
PB
7056 tree type, tree a, tree b)
7057{
7058 tree ret;
7059
db3927fb 7060 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
26277d41
PB
7061 STRIP_NOPS (ret);
7062
726a989a
RB
7063 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7064 GSI_SAME_STMT);
26277d41
PB
7065}
7066
726a989a 7067/* Build a unary operation and gimplify it. Emit code before GSI.
26277d41
PB
7068 Return the gimple_val holding the result. */
7069
7070tree
726a989a 7071gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
26277d41
PB
7072 tree a)
7073{
7074 tree ret;
7075
db3927fb 7076 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
26277d41
PB
7077 STRIP_NOPS (ret);
7078
726a989a
RB
7079 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7080 GSI_SAME_STMT);
26277d41
PB
7081}
7082
7083
6de9cd9a
DN
7084\f
7085/* Emit return warnings. */
7086
c2924966 7087static unsigned int
6de9cd9a
DN
7088execute_warn_function_return (void)
7089{
9506ac2b 7090 source_location location;
726a989a 7091 gimple last;
6de9cd9a 7092 edge e;
628f6a4e 7093 edge_iterator ei;
6de9cd9a 7094
6de9cd9a
DN
7095 /* If we have a path to EXIT, then we do return. */
7096 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 7097 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 7098 {
9506ac2b 7099 location = UNKNOWN_LOCATION;
628f6a4e 7100 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7101 {
7102 last = last_stmt (e->src);
726a989a
RB
7103 if (gimple_code (last) == GIMPLE_RETURN
7104 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
6de9cd9a
DN
7105 break;
7106 }
9506ac2b
PB
7107 if (location == UNKNOWN_LOCATION)
7108 location = cfun->function_end_locus;
fab922b1 7109 warning_at (location, 0, "%<noreturn%> function does return");
6de9cd9a
DN
7110 }
7111
7112 /* If we see "return;" in some basic block, then we do reach the end
7113 without returning a value. */
7114 else if (warn_return_type
089efaa4 7115 && !TREE_NO_WARNING (cfun->decl)
628f6a4e 7116 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
7117 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7118 {
628f6a4e 7119 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a 7120 {
726a989a
RB
7121 gimple last = last_stmt (e->src);
7122 if (gimple_code (last) == GIMPLE_RETURN
7123 && gimple_return_retval (last) == NULL
7124 && !gimple_no_warning_p (last))
6de9cd9a 7125 {
726a989a 7126 location = gimple_location (last);
9506ac2b
PB
7127 if (location == UNKNOWN_LOCATION)
7128 location = cfun->function_end_locus;
aa14403d 7129 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
089efaa4 7130 TREE_NO_WARNING (cfun->decl) = 1;
6de9cd9a
DN
7131 break;
7132 }
7133 }
7134 }
c2924966 7135 return 0;
6de9cd9a
DN
7136}
7137
7138
7139/* Given a basic block B which ends with a conditional and has
7140 precisely two successors, determine which of the edges is taken if
7141 the conditional is true and which is taken if the conditional is
7142 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7143
7144void
7145extract_true_false_edges_from_block (basic_block b,
7146 edge *true_edge,
7147 edge *false_edge)
7148{
628f6a4e 7149 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7150
7151 if (e->flags & EDGE_TRUE_VALUE)
7152 {
7153 *true_edge = e;
628f6a4e 7154 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7155 }
7156 else
7157 {
7158 *false_edge = e;
628f6a4e 7159 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7160 }
7161}
7162
8ddbbcae 7163struct gimple_opt_pass pass_warn_function_return =
6de9cd9a 7164{
8ddbbcae
JH
7165 {
7166 GIMPLE_PASS,
e0a42b0f 7167 "*warn_function_return", /* name */
6de9cd9a
DN
7168 NULL, /* gate */
7169 execute_warn_function_return, /* execute */
7170 NULL, /* sub */
7171 NULL, /* next */
7172 0, /* static_pass_number */
7072a650 7173 TV_NONE, /* tv_id */
00bfee6f 7174 PROP_cfg, /* properties_required */
6de9cd9a
DN
7175 0, /* properties_provided */
7176 0, /* properties_destroyed */
7177 0, /* todo_flags_start */
8ddbbcae
JH
7178 0 /* todo_flags_finish */
7179 }
6de9cd9a 7180};
aa313ed4
JH
7181
7182/* Emit noreturn warnings. */
7183
c2924966 7184static unsigned int
aa313ed4
JH
7185execute_warn_function_noreturn (void)
7186{
7187 if (warn_missing_noreturn
7188 && !TREE_THIS_VOLATILE (cfun->decl)
7189 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7190 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
c5d75364
MLI
7191 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7192 "function might be possible candidate "
7193 "for attribute %<noreturn%>");
c2924966 7194 return 0;
aa313ed4
JH
7195}
7196
8ddbbcae 7197struct gimple_opt_pass pass_warn_function_noreturn =
aa313ed4 7198{
8ddbbcae
JH
7199 {
7200 GIMPLE_PASS,
e0a42b0f 7201 "*warn_function_noreturn", /* name */
aa313ed4
JH
7202 NULL, /* gate */
7203 execute_warn_function_noreturn, /* execute */
7204 NULL, /* sub */
7205 NULL, /* next */
7206 0, /* static_pass_number */
7072a650 7207 TV_NONE, /* tv_id */
aa313ed4
JH
7208 PROP_cfg, /* properties_required */
7209 0, /* properties_provided */
7210 0, /* properties_destroyed */
7211 0, /* todo_flags_start */
8ddbbcae
JH
7212 0 /* todo_flags_finish */
7213 }
aa313ed4 7214};
a406865a
RG
7215
7216
7217/* Walk a gimplified function and warn for functions whose return value is
7218 ignored and attribute((warn_unused_result)) is set. This is done before
7219 inlining, so we don't have to worry about that. */
7220
7221static void
7222do_warn_unused_result (gimple_seq seq)
7223{
7224 tree fdecl, ftype;
7225 gimple_stmt_iterator i;
7226
7227 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7228 {
7229 gimple g = gsi_stmt (i);
7230
7231 switch (gimple_code (g))
7232 {
7233 case GIMPLE_BIND:
7234 do_warn_unused_result (gimple_bind_body (g));
7235 break;
7236 case GIMPLE_TRY:
7237 do_warn_unused_result (gimple_try_eval (g));
7238 do_warn_unused_result (gimple_try_cleanup (g));
7239 break;
7240 case GIMPLE_CATCH:
7241 do_warn_unused_result (gimple_catch_handler (g));
7242 break;
7243 case GIMPLE_EH_FILTER:
7244 do_warn_unused_result (gimple_eh_filter_failure (g));
7245 break;
7246
7247 case GIMPLE_CALL:
7248 if (gimple_call_lhs (g))
7249 break;
7250
7251 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7252 LHS. All calls whose value is ignored should be
7253 represented like this. Look for the attribute. */
7254 fdecl = gimple_call_fndecl (g);
7255 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7256
7257 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7258 {
7259 location_t loc = gimple_location (g);
7260
7261 if (fdecl)
7262 warning_at (loc, OPT_Wunused_result,
7263 "ignoring return value of %qD, "
7264 "declared with attribute warn_unused_result",
7265 fdecl);
7266 else
7267 warning_at (loc, OPT_Wunused_result,
7268 "ignoring return value of function "
7269 "declared with attribute warn_unused_result");
7270 }
7271 break;
7272
7273 default:
7274 /* Not a container, not a call, or a call whose value is used. */
7275 break;
7276 }
7277 }
7278}
7279
7280static unsigned int
7281run_warn_unused_result (void)
7282{
7283 do_warn_unused_result (gimple_body (current_function_decl));
7284 return 0;
7285}
7286
7287static bool
7288gate_warn_unused_result (void)
7289{
7290 return flag_warn_unused_result;
7291}
7292
7293struct gimple_opt_pass pass_warn_unused_result =
7294{
7295 {
7296 GIMPLE_PASS,
2329c6f5 7297 "*warn_unused_result", /* name */
a406865a
RG
7298 gate_warn_unused_result, /* gate */
7299 run_warn_unused_result, /* execute */
7300 NULL, /* sub */
7301 NULL, /* next */
7302 0, /* static_pass_number */
7303 TV_NONE, /* tv_id */
7304 PROP_gimple_any, /* properties_required */
7305 0, /* properties_provided */
7306 0, /* properties_destroyed */
7307 0, /* todo_flags_start */
7308 0, /* todo_flags_finish */
7309 }
7310};
7311