/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"  /* for can_copy_bbs_p and copy_bbs */
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;
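
/* Illustrative sketch (not part of the implementation): for

     switch (x) { case 1: case 2: goto L; default: goto M; }

   the one outgoing edge reaching the block that holds L would map to
   the chain 2 -> 1, linked through the CASE_CHAIN fields, so an edge
   redirection can update every affected CASE_LABEL_EXPR without
   rescanning the whole switch.  */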

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg  /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
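
/* As an illustrative sketch (names are hypothetical; the real temporary
   and label are created by create_tmp_var and create_artificial_label
   below), factoring turns

     goto *p;   ...   goto *q;

   into

     gotovar = p; goto factored;   ...   gotovar = q; goto factored;
     factored: goto *gotovar;

   so that only the single factored block has edges to every possible
   destination label.  */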

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
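          /* Illustrative sketch (hypothetical GIMPLE): if foo () may do a
             nonlocal goto, "x = foo ();" becomes "tmp = foo (); x = tmp;"
             so the old value of X is still live on the abnormal edge;
             TMP stands for the temporary created below.  */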
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after basic block AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), 0);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
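
/* Sketch of the intended numbering: if three distinct basic blocks all
   start at one source line, the first call for that locus returns 1,
   the next 2, and so on, so a sample-based profiler can attribute
   counts to the right block.  (Illustration only; the values simply
   increase in query order.)  */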

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined-variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                              Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it anymore,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */
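
/* For illustration (hypothetical labels): a block that starts with

     L1: L2: L3: x = 1;

   where only L1 is user-defined keeps L1 as its leading label; any
   references to L2 or L3 (gotos, switch cases, EH data) are redirected
   to L1, after which the unused artificial labels are deleted.  */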

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification, if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
                  ? CASE_HIGH (base_case)
                  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          double_int bhp1 = double_int_add (tree_to_double_int (base_high),
                                            double_int_one);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
                                     bhp1))
            {
              base_high = CASE_HIGH (merge_case)
                          ? CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH | EDGE_PRESERVE))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user-forced labels or, at -O0, any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of the two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a call statement.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
1952 /* Only optimize if the argument is a label; if the argument is
1953 not a label then we cannot construct a proper CFG.
1954
1955 It may be the case that we only need to allow the LABEL_REF to
1956 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1957 appear inside a LABEL_EXPR just to be safe. */
1958 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1959 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1960 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1961 return NULL;
1962 }
1963
1964 gcc_unreachable ();
1965 }
1966
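/* Illustrative example, added for exposition (the statement and SSA
   names below are made up, not from any real dump): if BB ends in

     if (x_1 != 0) goto <L1>; else goto <L2>;

   then find_taken_edge (bb, integer_zero_node) returns the edge to
   <L2>, find_taken_edge (bb, integer_one_node) the edge to <L1>, and
   any VAL that is not a gimple minimal invariant yields NULL.  */
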
1967 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1968 statement, determine which of the outgoing edges will be taken out of the
1969 block. Return NULL if any edge may be taken. */
1970
1971 static edge
1972 find_taken_edge_computed_goto (basic_block bb, tree val)
1973 {
1974 basic_block dest;
1975 edge e = NULL;
1976
1977 dest = label_to_block (val);
1978 if (dest)
1979 {
1980 e = find_edge (bb, dest);
1981 gcc_assert (e != NULL);
1982 }
1983
1984 return e;
1985 }
1986
1987 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1988 statement, determine which of the two edges will be taken out of the
1989 block. Return NULL if either edge may be taken. */
1990
1991 static edge
1992 find_taken_edge_cond_expr (basic_block bb, tree val)
1993 {
1994 edge true_edge, false_edge;
1995
1996 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1997
1998 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1999 return (integer_zerop (val) ? false_edge : true_edge);
2000 }
2001
2002 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2003 statement, determine which edge will be taken out of the block. Return
2004 NULL if any edge may be taken. */
2005
2006 static edge
2007 find_taken_edge_switch_expr (basic_block bb, tree val)
2008 {
2009 basic_block dest_bb;
2010 edge e;
2011 gimple switch_stmt;
2012 tree taken_case;
2013
2014 switch_stmt = last_stmt (bb);
2015 taken_case = find_case_label_for_value (switch_stmt, val);
2016 dest_bb = label_to_block (CASE_LABEL (taken_case));
2017
2018 e = find_edge (bb, dest_bb);
2019 gcc_assert (e);
2020 return e;
2021 }
2022
2023
2024 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2025 We can make optimal use here of the fact that the case labels are
2026 sorted: we can do a binary search for a case matching VAL. */
2027
2028 static tree
2029 find_case_label_for_value (gimple switch_stmt, tree val)
2030 {
2031 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2032 tree default_case = gimple_switch_default_label (switch_stmt);
2033
2034 for (low = 0, high = n; high - low > 1; )
2035 {
2036 size_t i = (high + low) / 2;
2037 tree t = gimple_switch_label (switch_stmt, i);
2038 int cmp;
2039
2040 /* Cache the result of comparing CASE_LOW and VAL. */
2041 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2042
2043 if (cmp > 0)
2044 high = i;
2045 else
2046 low = i;
2047
2048 if (CASE_HIGH (t) == NULL)
2049 {
2050 /* A single-valued case label. */
2051 if (cmp == 0)
2052 return t;
2053 }
2054 else
2055 {
2056 /* A case range. We can only handle integer ranges. */
2057 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2058 return t;
2059 }
2060 }
2061
2062 return default_case;
2063 }
2064
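/* Worked example, added for exposition (the labels are made up): for a
   switch whose sorted labels are

     case 1:  case 5:  case 10 ... 20:  default:

   a query with VAL == 12 repeatedly halves [low, high) on the CASE_LOW
   comparison until it reaches the "case 10 ... 20" label, which is
   returned because CASE_LOW <= 12 and CASE_HIGH >= 12; VAL == 7 falls
   between labels, so the default case is returned.  */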
2065
2066 /* Dump a basic block on stderr. */
2067
2068 void
2069 gimple_debug_bb (basic_block bb)
2070 {
2071 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2072 }
2073
2074
2075 /* Dump basic block with index N on stderr. */
2076
2077 basic_block
2078 gimple_debug_bb_n (int n)
2079 {
2080 gimple_debug_bb (BASIC_BLOCK (n));
2081 return BASIC_BLOCK (n);
2082 }
2083
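/* These helpers are mainly meant for interactive use; a typical
   (illustrative) session from a debugger would be

     (gdb) call gimple_debug_bb_n (3)

   which dumps basic block 3 of the current function to stderr,
   including virtual operands and memory symbols.  */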
2084
2085 /* Dump the CFG on stderr.
2086
2087 FLAGS are the same as those used by the tree dumping functions
2088 (see TDF_* in tree-pass.h). */
2089
2090 void
2091 gimple_debug_cfg (int flags)
2092 {
2093 gimple_dump_cfg (stderr, flags);
2094 }
2095
2096
2097 /* Dump the program showing basic block boundaries on the given FILE.
2098
2099 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2100 tree.h). */
2101
2102 void
2103 gimple_dump_cfg (FILE *file, int flags)
2104 {
2105 if (flags & TDF_DETAILS)
2106 {
2107 dump_function_header (file, current_function_decl, flags);
2108 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2109 n_basic_blocks, n_edges, last_basic_block);
2110
2111 brief_dump_cfg (file);
2112 fprintf (file, "\n");
2113 }
2114
2115 if (flags & TDF_STATS)
2116 dump_cfg_stats (file);
2117
2118 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2119 }
2120
2121
2122 /* Dump CFG statistics on FILE. */
2123
2124 void
2125 dump_cfg_stats (FILE *file)
2126 {
2127 static long max_num_merged_labels = 0;
2128 unsigned long size, total = 0;
2129 long num_edges;
2130 basic_block bb;
2131 const char * const fmt_str = "%-30s%-13s%12s\n";
2132 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2133 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2134 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2135 const char *funcname
2136 = lang_hooks.decl_printable_name (current_function_decl, 2);
2137
2138
2139 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2140
2141 fprintf (file, "---------------------------------------------------------\n");
2142 fprintf (file, fmt_str, "", " Number of ", "Memory");
2143 fprintf (file, fmt_str, "", " instances ", "used ");
2144 fprintf (file, "---------------------------------------------------------\n");
2145
2146 size = n_basic_blocks * sizeof (struct basic_block_def);
2147 total += size;
2148 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2149 SCALE (size), LABEL (size));
2150
2151 num_edges = 0;
2152 FOR_EACH_BB (bb)
2153 num_edges += EDGE_COUNT (bb->succs);
2154 size = num_edges * sizeof (struct edge_def);
2155 total += size;
2156 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2157
2158 fprintf (file, "---------------------------------------------------------\n");
2159 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2160 LABEL (total));
2161 fprintf (file, "---------------------------------------------------------\n");
2162 fprintf (file, "\n");
2163
2164 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2165 max_num_merged_labels = cfg_stats.num_merged_labels;
2166
2167 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2168 cfg_stats.num_merged_labels, max_num_merged_labels);
2169
2170 fprintf (file, "\n");
2171 }
2172
2173
2174 /* Dump CFG statistics on stderr. Keep extern so that it's always
2175 linked in the final executable. */
2176
2177 DEBUG_FUNCTION void
2178 debug_cfg_stats (void)
2179 {
2180 dump_cfg_stats (stderr);
2181 }
2182
2183
2184 /* Dump the flowgraph to a .vcg FILE. */
2185
2186 static void
2187 gimple_cfg2vcg (FILE *file)
2188 {
2189 edge e;
2190 edge_iterator ei;
2191 basic_block bb;
2192 const char *funcname
2193 = lang_hooks.decl_printable_name (current_function_decl, 2);
2194
2195 /* Write the file header. */
2196 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2197 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2198 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2199
2200 /* Write blocks and edges. */
2201 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2202 {
2203 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2204 e->dest->index);
2205
2206 if (e->flags & EDGE_FAKE)
2207 fprintf (file, " linestyle: dotted priority: 10");
2208 else
2209 fprintf (file, " linestyle: solid priority: 100");
2210
2211 fprintf (file, " }\n");
2212 }
2213 fputc ('\n', file);
2214
2215 FOR_EACH_BB (bb)
2216 {
2217 enum gimple_code head_code, end_code;
2218 const char *head_name, *end_name;
2219 int head_line = 0;
2220 int end_line = 0;
2221 gimple first = first_stmt (bb);
2222 gimple last = last_stmt (bb);
2223
2224 if (first)
2225 {
2226 head_code = gimple_code (first);
2227 head_name = gimple_code_name[head_code];
2228 head_line = get_lineno (first);
2229 }
2230 else
2231 head_name = "no-statement";
2232
2233 if (last)
2234 {
2235 end_code = gimple_code (last);
2236 end_name = gimple_code_name[end_code];
2237 end_line = get_lineno (last);
2238 }
2239 else
2240 end_name = "no-statement";
2241
2242 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2243 bb->index, bb->index, head_name, head_line, end_name,
2244 end_line);
2245
2246 FOR_EACH_EDGE (e, ei, bb->succs)
2247 {
2248 if (e->dest == EXIT_BLOCK_PTR)
2249 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2250 else
2251 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2252
2253 if (e->flags & EDGE_FAKE)
2254 fprintf (file, " priority: 10 linestyle: dotted");
2255 else
2256 fprintf (file, " priority: 100 linestyle: solid");
2257
2258 fprintf (file, " }\n");
2259 }
2260
2261 if (bb->next_bb != EXIT_BLOCK_PTR)
2262 fputc ('\n', file);
2263 }
2264
2265 fputs ("}\n\n", file);
2266 }
2267
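/* The emitted text is plain VCG; for a trivial two-block function the
   output looks roughly like this (illustrative sketch only, the code
   names and line numbers are made up):

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }
     node: { title: "2" label: "#2\ngimple_label (10)\ngimple_return (12)"}
     edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
     }  */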
2268
2269
2270 /*---------------------------------------------------------------------------
2271 Miscellaneous helpers
2272 ---------------------------------------------------------------------------*/
2273
2274 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2275 flow. Transfers of control flow associated with EH are excluded. */
2276
2277 static bool
2278 call_can_make_abnormal_goto (gimple t)
2279 {
2280 /* If the function has no non-local labels, then a call cannot make an
2281 abnormal transfer of control. */
2282 if (!cfun->has_nonlocal_label)
2283 return false;
2284
2285 /* Likewise if the call has no side effects. */
2286 if (!gimple_has_side_effects (t))
2287 return false;
2288
2289 /* Likewise if the called function is leaf. */
2290 if (gimple_call_flags (t) & ECF_LEAF)
2291 return false;
2292
2293 return true;
2294 }
2295
2296
2297 /* Return true if T can make an abnormal transfer of control flow.
2298 Transfers of control flow associated with EH are excluded. */
2299
2300 bool
2301 stmt_can_make_abnormal_goto (gimple t)
2302 {
2303 if (computed_goto_p (t))
2304 return true;
2305 if (is_gimple_call (t))
2306 return call_can_make_abnormal_goto (t);
2307 return false;
2308 }
2309
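/* Illustrative example (the callee name is made up): in a function
   that contains a non-local label, a plain call such as

     foo ();

   answers true here, since foo might return via a non-local goto.  It
   answers false if foo is known to be free of side effects or carries
   ECF_LEAF; a computed "goto *p_1;" always answers true.  */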
2310
2311 /* Return true if T represents a stmt that always transfers control. */
2312
2313 bool
2314 is_ctrl_stmt (gimple t)
2315 {
2316 switch (gimple_code (t))
2317 {
2318 case GIMPLE_COND:
2319 case GIMPLE_SWITCH:
2320 case GIMPLE_GOTO:
2321 case GIMPLE_RETURN:
2322 case GIMPLE_RESX:
2323 return true;
2324 default:
2325 return false;
2326 }
2327 }
2328
2329
2330 /* Return true if T is a statement that may alter the flow of control
2331 (e.g., a call to a non-returning function). */
2332
2333 bool
2334 is_ctrl_altering_stmt (gimple t)
2335 {
2336 gcc_assert (t);
2337
2338 switch (gimple_code (t))
2339 {
2340 case GIMPLE_CALL:
2341 {
2342 int flags = gimple_call_flags (t);
2343
2344 /* A call alters control flow if it can make an abnormal goto. */
2345 if (call_can_make_abnormal_goto (t))
2346 return true;
2347
2348 /* A call also alters control flow if it does not return. */
2349 if (flags & ECF_NORETURN)
2350 return true;
2351
2352 /* TM ending statements have backedges out of the transaction.
2353 Return true so we split the basic block containing them.
2354 Note that the TM_BUILTIN test is merely an optimization. */
2355 if ((flags & ECF_TM_BUILTIN)
2356 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2357 return true;
2358
2359 /* A BUILT_IN_RETURN call is the same as a return statement. */
2360 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2361 return true;
2362 }
2363 break;
2364
2365 case GIMPLE_EH_DISPATCH:
2366 /* EH_DISPATCH branches to the individual catch handlers at
2367 this level of a try or allowed-exceptions region. It can
2368 fallthru to the next statement as well. */
2369 return true;
2370
2371 case GIMPLE_ASM:
2372 if (gimple_asm_nlabels (t) > 0)
2373 return true;
2374 break;
2375
2376 CASE_GIMPLE_OMP:
2377 /* OpenMP directives alter control flow. */
2378 return true;
2379
2380 case GIMPLE_TRANSACTION:
2381 /* A transaction start alters control flow. */
2382 return true;
2383
2384 default:
2385 break;
2386 }
2387
2388 /* If a statement can throw, it alters control flow. */
2389 return stmt_can_throw_internal (t);
2390 }
2391
2392
2393 /* Return true if T is a simple local goto. */
2394
2395 bool
2396 simple_goto_p (gimple t)
2397 {
2398 return (gimple_code (t) == GIMPLE_GOTO
2399 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2400 }
2401
2402
2403 /* Return true if STMT should start a new basic block. PREV_STMT is
2404 the statement preceding STMT. It is used when STMT is a label or a
2405 case label. Labels should only start a new basic block if their
2406 previous statement wasn't a label. Otherwise, sequences of labels
2407 would generate unnecessary basic blocks that only contain a single
2408 label. */
2409
2410 static inline bool
2411 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2412 {
2413 if (stmt == NULL)
2414 return false;
2415
2416 /* Labels start a new basic block only if the preceding statement
2417 wasn't a label of the same type. This prevents the creation of
2418 consecutive blocks that have nothing but a single label. */
2419 if (gimple_code (stmt) == GIMPLE_LABEL)
2420 {
2421 /* Nonlocal and computed GOTO targets always start a new block. */
2422 if (DECL_NONLOCAL (gimple_label_label (stmt))
2423 || FORCED_LABEL (gimple_label_label (stmt)))
2424 return true;
2425
2426 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2427 {
2428 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2429 return true;
2430
2431 cfg_stats.num_merged_labels++;
2432 return false;
2433 }
2434 else
2435 return true;
2436 }
2437
2438 return false;
2439 }
2440
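/* Illustrative example (labels made up): in the sequence

     L1:
     L2:
       x_1 = ...;

   only L1 opens a new basic block; L2 is coalesced into it and counted
   in cfg_stats.num_merged_labels.  L2 would still force a new block if
   it were a non-local or forced label, or if L1 were non-local.  */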
2441
2442 /* Return true if T should end a basic block. */
2443
2444 bool
2445 stmt_ends_bb_p (gimple t)
2446 {
2447 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2448 }
2449
2450 /* Remove block annotations and other data structures. */
2451
2452 void
2453 delete_tree_cfg_annotations (void)
2454 {
2455 label_to_block_map = NULL;
2456 }
2457
2458
2459 /* Return the first statement in basic block BB. */
2460
2461 gimple
2462 first_stmt (basic_block bb)
2463 {
2464 gimple_stmt_iterator i = gsi_start_bb (bb);
2465 gimple stmt = NULL;
2466
2467 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2468 {
2469 gsi_next (&i);
2470 stmt = NULL;
2471 }
2472 return stmt;
2473 }
2474
2475 /* Return the first non-label statement in basic block BB. */
2476
2477 static gimple
2478 first_non_label_stmt (basic_block bb)
2479 {
2480 gimple_stmt_iterator i = gsi_start_bb (bb);
2481 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2482 gsi_next (&i);
2483 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2484 }
2485
2486 /* Return the last statement in basic block BB. */
2487
2488 gimple
2489 last_stmt (basic_block bb)
2490 {
2491 gimple_stmt_iterator i = gsi_last_bb (bb);
2492 gimple stmt = NULL;
2493
2494 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2495 {
2496 gsi_prev (&i);
2497 stmt = NULL;
2498 }
2499 return stmt;
2500 }
2501
2502 /* Return the last statement of an otherwise empty block. Return NULL
2503 if the block is totally empty, or if it contains more than one
2504 statement. */
2505
2506 gimple
2507 last_and_only_stmt (basic_block bb)
2508 {
2509 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2510 gimple last, prev;
2511
2512 if (gsi_end_p (i))
2513 return NULL;
2514
2515 last = gsi_stmt (i);
2516 gsi_prev_nondebug (&i);
2517 if (gsi_end_p (i))
2518 return last;
2519
2520 /* Empty statements should no longer appear in the instruction stream.
2521 Everything that might have appeared before should be deleted by
2522 remove_useless_stmts, and the optimizers should just gsi_remove
2523 instead of smashing with build_empty_stmt.
2524
2525 Thus the only thing that should appear here in a block containing
2526 one executable statement is a label. */
2527 prev = gsi_stmt (i);
2528 if (gimple_code (prev) == GIMPLE_LABEL)
2529 return last;
2530 else
2531 return NULL;
2532 }
2533
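/* Illustrative example (statements made up): for a block containing

     <L3>:
     # DEBUG d => x_1
     return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN: the nondebug iterators
   skip the debug statement, and the only other statement is a label,
   which is explicitly allowed.  */
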
2534 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2535
2536 static void
2537 reinstall_phi_args (edge new_edge, edge old_edge)
2538 {
2539 edge_var_map_vector v;
2540 edge_var_map *vm;
2541 int i;
2542 gimple_stmt_iterator phis;
2543
2544 v = redirect_edge_var_map_vector (old_edge);
2545 if (!v)
2546 return;
2547
2548 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2549 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2550 i++, gsi_next (&phis))
2551 {
2552 gimple phi = gsi_stmt (phis);
2553 tree result = redirect_edge_var_map_result (vm);
2554 tree arg = redirect_edge_var_map_def (vm);
2555
2556 gcc_assert (result == gimple_phi_result (phi));
2557
2558 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2559 }
2560
2561 redirect_edge_var_map_clear (old_edge);
2562 }
2563
2564 /* Returns the basic block after which the new basic block created
2565 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2566 near its "logical" location. This is of most help to humans looking
2567 at debugging dumps. */
2568
2569 static basic_block
2570 split_edge_bb_loc (edge edge_in)
2571 {
2572 basic_block dest = edge_in->dest;
2573 basic_block dest_prev = dest->prev_bb;
2574
2575 if (dest_prev)
2576 {
2577 edge e = find_edge (dest_prev, dest);
2578 if (e && !(e->flags & EDGE_COMPLEX))
2579 return edge_in->src;
2580 }
2581 return dest_prev;
2582 }
2583
2584 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2585 Abort on abnormal edges. */
2586
2587 static basic_block
2588 gimple_split_edge (edge edge_in)
2589 {
2590 basic_block new_bb, after_bb, dest;
2591 edge new_edge, e;
2592
2593 /* Abnormal edges cannot be split. */
2594 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2595
2596 dest = edge_in->dest;
2597
2598 after_bb = split_edge_bb_loc (edge_in);
2599
2600 new_bb = create_empty_bb (after_bb);
2601 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2602 new_bb->count = edge_in->count;
2603 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2604 new_edge->probability = REG_BR_PROB_BASE;
2605 new_edge->count = edge_in->count;
2606
2607 e = redirect_edge_and_branch (edge_in, new_bb);
2608 gcc_assert (e == edge_in);
2609 reinstall_phi_args (new_edge, e);
2610
2611 return new_bb;
2612 }
2613
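/* Sketch of the effect (illustrative block numbers): splitting a
   critical edge BB3 -> BB5 produces

     BB3 -> NEW -> BB5

   where NEW is an empty block, the original edge is redirected to NEW,
   the NEW -> BB5 edge is a fallthru carrying the old edge's count, and
   PHI arguments in BB5 formerly associated with the old edge are
   reinstalled on the new edge by reinstall_phi_args.  */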
2614
2615 /* Verify properties of the address expression T with base object BASE. */
2616
2617 static tree
2618 verify_address (tree t, tree base)
2619 {
2620 bool old_constant;
2621 bool old_side_effects;
2622 bool new_constant;
2623 bool new_side_effects;
2624
2625 old_constant = TREE_CONSTANT (t);
2626 old_side_effects = TREE_SIDE_EFFECTS (t);
2627
2628 recompute_tree_invariant_for_addr_expr (t);
2629 new_side_effects = TREE_SIDE_EFFECTS (t);
2630 new_constant = TREE_CONSTANT (t);
2631
2632 if (old_constant != new_constant)
2633 {
2634 error ("constant not recomputed when ADDR_EXPR changed");
2635 return t;
2636 }
2637 if (old_side_effects != new_side_effects)
2638 {
2639 error ("side effects not recomputed when ADDR_EXPR changed");
2640 return t;
2641 }
2642
2643 if (!(TREE_CODE (base) == VAR_DECL
2644 || TREE_CODE (base) == PARM_DECL
2645 || TREE_CODE (base) == RESULT_DECL))
2646 return NULL_TREE;
2647
2648 if (DECL_GIMPLE_REG_P (base))
2649 {
2650 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2651 return base;
2652 }
2653
2654 return NULL_TREE;
2655 }
2656
2657 /* Callback for walk_tree; check that all elements with address taken are
2658 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2659 inside a PHI node. */
2660
2661 static tree
2662 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2663 {
2664 tree t = *tp, x;
2665
2666 if (TYPE_P (t))
2667 *walk_subtrees = 0;
2668
2669 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2670 #define CHECK_OP(N, MSG) \
2671 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2672 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2673
2674 switch (TREE_CODE (t))
2675 {
2676 case SSA_NAME:
2677 if (SSA_NAME_IN_FREE_LIST (t))
2678 {
2679 error ("SSA name in freelist but still referenced");
2680 return *tp;
2681 }
2682 break;
2683
2684 case INDIRECT_REF:
2685 error ("INDIRECT_REF in gimple IL");
2686 return t;
2687
2688 case MEM_REF:
2689 x = TREE_OPERAND (t, 0);
2690 if (!POINTER_TYPE_P (TREE_TYPE (x))
2691 || !is_gimple_mem_ref_addr (x))
2692 {
2693 error ("invalid first operand of MEM_REF");
2694 return x;
2695 }
2696 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2697 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2698 {
2699 error ("invalid offset operand of MEM_REF");
2700 return TREE_OPERAND (t, 1);
2701 }
2702 if (TREE_CODE (x) == ADDR_EXPR
2703 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2704 return x;
2705 *walk_subtrees = 0;
2706 break;
2707
2708 case ASSERT_EXPR:
2709 x = fold (ASSERT_EXPR_COND (t));
2710 if (x == boolean_false_node)
2711 {
2712 error ("ASSERT_EXPR with an always-false condition");
2713 return *tp;
2714 }
2715 break;
2716
2717 case MODIFY_EXPR:
2718 error ("MODIFY_EXPR not expected while having tuples");
2719 return *tp;
2720
2721 case ADDR_EXPR:
2722 {
2723 tree tem;
2724
2725 gcc_assert (is_gimple_address (t));
2726
2727 /* Skip any references (they will be checked when we recurse down the
2728 tree) and ensure that any variable used as a prefix is marked
2729 addressable. */
2730 for (x = TREE_OPERAND (t, 0);
2731 handled_component_p (x);
2732 x = TREE_OPERAND (x, 0))
2733 ;
2734
2735 if ((tem = verify_address (t, x)))
2736 return tem;
2737
2738 if (!(TREE_CODE (x) == VAR_DECL
2739 || TREE_CODE (x) == PARM_DECL
2740 || TREE_CODE (x) == RESULT_DECL))
2741 return NULL;
2742
2743 if (!TREE_ADDRESSABLE (x))
2744 {
2745 error ("address taken, but ADDRESSABLE bit not set");
2746 return x;
2747 }
2748
2749 break;
2750 }
2751
2752 case COND_EXPR:
2753 x = COND_EXPR_COND (t);
2754 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2755 {
2756 error ("non-integral used in condition");
2757 return x;
2758 }
2759 if (!is_gimple_condexpr (x))
2760 {
2761 error ("invalid conditional operand");
2762 return x;
2763 }
2764 break;
2765
2766 case NON_LVALUE_EXPR:
2767 case TRUTH_NOT_EXPR:
2768 gcc_unreachable ();
2769
2770 CASE_CONVERT:
2771 case FIX_TRUNC_EXPR:
2772 case FLOAT_EXPR:
2773 case NEGATE_EXPR:
2774 case ABS_EXPR:
2775 case BIT_NOT_EXPR:
2776 CHECK_OP (0, "invalid operand to unary operator");
2777 break;
2778
2779 case REALPART_EXPR:
2780 case IMAGPART_EXPR:
2781 case COMPONENT_REF:
2782 case ARRAY_REF:
2783 case ARRAY_RANGE_REF:
2784 case BIT_FIELD_REF:
2785 case VIEW_CONVERT_EXPR:
2786 /* We have a nest of references. Verify that each of the operands
2787 that determine where to reference is either a constant or a variable,
2788 verify that the base is valid, and then show we've already checked
2789 the subtrees. */
2790 while (handled_component_p (t))
2791 {
2792 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2793 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2794 else if (TREE_CODE (t) == ARRAY_REF
2795 || TREE_CODE (t) == ARRAY_RANGE_REF)
2796 {
2797 CHECK_OP (1, "invalid array index");
2798 if (TREE_OPERAND (t, 2))
2799 CHECK_OP (2, "invalid array lower bound");
2800 if (TREE_OPERAND (t, 3))
2801 CHECK_OP (3, "invalid array stride");
2802 }
2803 else if (TREE_CODE (t) == BIT_FIELD_REF)
2804 {
2805 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2806 || !host_integerp (TREE_OPERAND (t, 2), 1))
2807 {
2808 error ("invalid position or size operand to BIT_FIELD_REF");
2809 return t;
2810 }
2811 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2812 && (TYPE_PRECISION (TREE_TYPE (t))
2813 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2814 {
2815 error ("integral result type precision does not match "
2816 "field size of BIT_FIELD_REF");
2817 return t;
2818 }
2819 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2820 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2821 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2822 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2823 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2824 {
2825 error ("mode precision of non-integral result does not "
2826 "match field size of BIT_FIELD_REF");
2827 return t;
2828 }
2829 }
2830
2831 t = TREE_OPERAND (t, 0);
2832 }
2833
2834 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2835 {
2836 error ("invalid reference prefix");
2837 return t;
2838 }
2839 *walk_subtrees = 0;
2840 break;
2841 case PLUS_EXPR:
2842 case MINUS_EXPR:
2843 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2844 should be done using POINTER_PLUS_EXPR. */
2845 if (POINTER_TYPE_P (TREE_TYPE (t)))
2846 {
2847 error ("invalid operand to plus/minus, type is a pointer");
2848 return t;
2849 }
2850 CHECK_OP (0, "invalid operand to binary operator");
2851 CHECK_OP (1, "invalid operand to binary operator");
2852 break;
2853
2854 case POINTER_PLUS_EXPR:
2855 /* Check to make sure the first operand is a pointer or reference type. */
2856 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2857 {
2858 error ("invalid operand to pointer plus, first operand is not a pointer");
2859 return t;
2860 }
2861 /* Check to make sure the second operand is a ptrofftype. */
2862 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2863 {
2864 error ("invalid operand to pointer plus, second operand is not an "
2865 "integer type of appropriate width");
2866 return t;
2867 }
2868 /* FALLTHROUGH */
2869 case LT_EXPR:
2870 case LE_EXPR:
2871 case GT_EXPR:
2872 case GE_EXPR:
2873 case EQ_EXPR:
2874 case NE_EXPR:
2875 case UNORDERED_EXPR:
2876 case ORDERED_EXPR:
2877 case UNLT_EXPR:
2878 case UNLE_EXPR:
2879 case UNGT_EXPR:
2880 case UNGE_EXPR:
2881 case UNEQ_EXPR:
2882 case LTGT_EXPR:
2883 case MULT_EXPR:
2884 case TRUNC_DIV_EXPR:
2885 case CEIL_DIV_EXPR:
2886 case FLOOR_DIV_EXPR:
2887 case ROUND_DIV_EXPR:
2888 case TRUNC_MOD_EXPR:
2889 case CEIL_MOD_EXPR:
2890 case FLOOR_MOD_EXPR:
2891 case ROUND_MOD_EXPR:
2892 case RDIV_EXPR:
2893 case EXACT_DIV_EXPR:
2894 case MIN_EXPR:
2895 case MAX_EXPR:
2896 case LSHIFT_EXPR:
2897 case RSHIFT_EXPR:
2898 case LROTATE_EXPR:
2899 case RROTATE_EXPR:
2900 case BIT_IOR_EXPR:
2901 case BIT_XOR_EXPR:
2902 case BIT_AND_EXPR:
2903 CHECK_OP (0, "invalid operand to binary operator");
2904 CHECK_OP (1, "invalid operand to binary operator");
2905 break;
2906
2907 case CONSTRUCTOR:
2908 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2909 *walk_subtrees = 0;
2910 break;
2911
2912 case CASE_LABEL_EXPR:
2913 if (CASE_CHAIN (t))
2914 {
2915 error ("invalid CASE_CHAIN");
2916 return t;
2917 }
2918 break;
2919
2920 default:
2921 break;
2922 }
2923 return NULL;
2924
2925 #undef CHECK_OP
2926 }
2927
2928
2929 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2930 Returns true if there is an error, otherwise false. */
2931
2932 static bool
2933 verify_types_in_gimple_min_lval (tree expr)
2934 {
2935 tree op;
2936
2937 if (is_gimple_id (expr))
2938 return false;
2939
2940 if (TREE_CODE (expr) != TARGET_MEM_REF
2941 && TREE_CODE (expr) != MEM_REF)
2942 {
2943 error ("invalid expression for min lvalue");
2944 return true;
2945 }
2946
2947 /* TARGET_MEM_REFs are strange beasts. */
2948 if (TREE_CODE (expr) == TARGET_MEM_REF)
2949 return false;
2950
2951 op = TREE_OPERAND (expr, 0);
2952 if (!is_gimple_val (op))
2953 {
2954 error ("invalid operand in indirect reference");
2955 debug_generic_stmt (op);
2956 return true;
2957 }
2958 /* Memory references now generally can involve a value conversion. */
2959
2960 return false;
2961 }
2962
2963 /* Verify if EXPR is a valid GIMPLE reference expression. If
2964 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2965 if there is an error, otherwise false. */
2966
2967 static bool
2968 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2969 {
2970 while (handled_component_p (expr))
2971 {
2972 tree op = TREE_OPERAND (expr, 0);
2973
2974 if (TREE_CODE (expr) == ARRAY_REF
2975 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2976 {
2977 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2978 || (TREE_OPERAND (expr, 2)
2979 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2980 || (TREE_OPERAND (expr, 3)
2981 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2982 {
2983 error ("invalid operands to array reference");
2984 debug_generic_stmt (expr);
2985 return true;
2986 }
2987 }
2988
2989 /* Verify if the reference array element types are compatible. */
2990 if (TREE_CODE (expr) == ARRAY_REF
2991 && !useless_type_conversion_p (TREE_TYPE (expr),
2992 TREE_TYPE (TREE_TYPE (op))))
2993 {
2994 error ("type mismatch in array reference");
2995 debug_generic_stmt (TREE_TYPE (expr));
2996 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2997 return true;
2998 }
2999 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3000 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3001 TREE_TYPE (TREE_TYPE (op))))
3002 {
3003 error ("type mismatch in array range reference");
3004 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3005 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3006 return true;
3007 }
3008
3009 if ((TREE_CODE (expr) == REALPART_EXPR
3010 || TREE_CODE (expr) == IMAGPART_EXPR)
3011 && !useless_type_conversion_p (TREE_TYPE (expr),
3012 TREE_TYPE (TREE_TYPE (op))))
3013 {
3014 error ("type mismatch in real/imagpart reference");
3015 debug_generic_stmt (TREE_TYPE (expr));
3016 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3017 return true;
3018 }
3019
3020 if (TREE_CODE (expr) == COMPONENT_REF
3021 && !useless_type_conversion_p (TREE_TYPE (expr),
3022 TREE_TYPE (TREE_OPERAND (expr, 1))))
3023 {
3024 error ("type mismatch in component reference");
3025 debug_generic_stmt (TREE_TYPE (expr));
3026 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3027 return true;
3028 }
3029
3030 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3031 {
3032 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3033 that their operand is not an SSA name or an invariant when
3034 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3035 bug). Otherwise there is nothing to verify, gross mismatches at
3036 most invoke undefined behavior. */
3037 if (require_lvalue
3038 && (TREE_CODE (op) == SSA_NAME
3039 || is_gimple_min_invariant (op)))
3040 {
3041 error ("conversion of an SSA_NAME on the left hand side");
3042 debug_generic_stmt (expr);
3043 return true;
3044 }
3045 else if (TREE_CODE (op) == SSA_NAME
3046 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3047 {
3048 error ("conversion of register to a different size");
3049 debug_generic_stmt (expr);
3050 return true;
3051 }
3052 else if (!handled_component_p (op))
3053 return false;
3054 }
3055
3056 expr = op;
3057 }
3058
3059 if (TREE_CODE (expr) == MEM_REF)
3060 {
3061 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3062 {
3063 error ("invalid address operand in MEM_REF");
3064 debug_generic_stmt (expr);
3065 return true;
3066 }
3067 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3068 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3069 {
3070 error ("invalid offset operand in MEM_REF");
3071 debug_generic_stmt (expr);
3072 return true;
3073 }
3074 }
3075 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3076 {
3077 if (!TMR_BASE (expr)
3078 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3079 {
3080 error ("invalid address operand in TARGET_MEM_REF");
3081 return true;
3082 }
3083 if (!TMR_OFFSET (expr)
3084 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3085 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3086 {
3087 error ("invalid offset operand in TARGET_MEM_REF");
3088 debug_generic_stmt (expr);
3089 return true;
3090 }
3091 }
3092
3093 return ((require_lvalue || !is_gimple_min_invariant (expr))
3094 && verify_types_in_gimple_min_lval (expr));
3095 }
3096
3097 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3098 list of pointer-to types that is trivially convertible to DEST. */
3099
3100 static bool
3101 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3102 {
3103 tree src;
3104
3105 if (!TYPE_POINTER_TO (src_obj))
3106 return true;
3107
3108 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3109 if (useless_type_conversion_p (dest, src))
3110 return true;
3111
3112 return false;
3113 }
3114
3115 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3116 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3117
3118 static bool
3119 valid_fixed_convert_types_p (tree type1, tree type2)
3120 {
3121 return (FIXED_POINT_TYPE_P (type1)
3122 && (INTEGRAL_TYPE_P (type2)
3123 || SCALAR_FLOAT_TYPE_P (type2)
3124 || FIXED_POINT_TYPE_P (type2)));
3125 }
3126
3127 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3128 is a problem, otherwise false. */
3129
3130 static bool
3131 verify_gimple_call (gimple stmt)
3132 {
3133 tree fn = gimple_call_fn (stmt);
3134 tree fntype, fndecl;
3135 unsigned i;
3136
3137 if (gimple_call_internal_p (stmt))
3138 {
3139 if (fn)
3140 {
3141 error ("gimple call has two targets");
3142 debug_generic_stmt (fn);
3143 return true;
3144 }
3145 }
3146 else
3147 {
3148 if (!fn)
3149 {
3150 error ("gimple call has no target");
3151 return true;
3152 }
3153 }
3154
3155 if (fn && !is_gimple_call_addr (fn))
3156 {
3157 error ("invalid function in gimple call");
3158 debug_generic_stmt (fn);
3159 return true;
3160 }
3161
3162 if (fn
3163 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3164 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3165 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3166 {
3167 error ("non-function in gimple call");
3168 return true;
3169 }
3170
3171 fndecl = gimple_call_fndecl (stmt);
3172 if (fndecl
3173 && TREE_CODE (fndecl) == FUNCTION_DECL
3174 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3175 && !DECL_PURE_P (fndecl)
3176 && !TREE_READONLY (fndecl))
3177 {
3178 error ("invalid pure const state for function");
3179 return true;
3180 }
3181
3182 if (gimple_call_lhs (stmt)
3183 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3184 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3185 {
3186 error ("invalid LHS in gimple call");
3187 return true;
3188 }
3189
3190 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3191 {
3192 error ("LHS in noreturn call");
3193 return true;
3194 }
3195
3196 fntype = gimple_call_fntype (stmt);
3197 if (fntype
3198 && gimple_call_lhs (stmt)
3199 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3200 TREE_TYPE (fntype))
3201 /* ??? At least C++ misses conversions at assignments from
3202 void * call results.
3203 ??? Java is completely off. Especially with functions
3204 returning java.lang.Object.
3205 For now simply allow arbitrary pointer type conversions. */
3206 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3207 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3208 {
3209 error ("invalid conversion in gimple call");
3210 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3211 debug_generic_stmt (TREE_TYPE (fntype));
3212 return true;
3213 }
3214
3215 if (gimple_call_chain (stmt)
3216 && !is_gimple_val (gimple_call_chain (stmt)))
3217 {
3218 error ("invalid static chain in gimple call");
3219 debug_generic_stmt (gimple_call_chain (stmt));
3220 return true;
3221 }
3222
3223 /* If there is a static chain argument, this should not be an indirect
3224 call, and the decl should have DECL_STATIC_CHAIN set. */
3225 if (gimple_call_chain (stmt))
3226 {
3227 if (!gimple_call_fndecl (stmt))
3228 {
3229 error ("static chain in indirect gimple call");
3230 return true;
3231 }
3232 fn = TREE_OPERAND (fn, 0);
3233
3234 if (!DECL_STATIC_CHAIN (fn))
3235 {
3236 error ("static chain with function that doesn%'t use one");
3237 return true;
3238 }
3239 }
3240
3241 /* ??? The C frontend passes unpromoted arguments in case it
3242 didn't see a function declaration before the call. So for now
3243 leave the call arguments mostly unverified. Once we gimplify
3244 unit-at-a-time we have a chance to fix this. */
3245
3246 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3247 {
3248 tree arg = gimple_call_arg (stmt, i);
3249 if ((is_gimple_reg_type (TREE_TYPE (arg))
3250 && !is_gimple_val (arg))
3251 || (!is_gimple_reg_type (TREE_TYPE (arg))
3252 && !is_gimple_lvalue (arg)))
3253 {
3254 error ("invalid argument to gimple call");
3255 debug_generic_expr (arg);
3256 return true;
3257 }
3258 }
3259
3260 return false;
3261 }
3262
3263 /* Verifies the gimple comparison with the result type TYPE and
3264 the operands OP0 and OP1. */
3265
3266 static bool
3267 verify_gimple_comparison (tree type, tree op0, tree op1)
3268 {
3269 tree op0_type = TREE_TYPE (op0);
3270 tree op1_type = TREE_TYPE (op1);
3271
3272 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3273 {
3274 error ("invalid operands in gimple comparison");
3275 return true;
3276 }
3277
3278 /* For comparisons we do not have the operation's type as the
3279 effective type the comparison is carried out in. Instead
3280 we require that either the first operand is trivially
3281 convertible into the second, or the other way around.
3282 Because we special-case pointers to void we allow
3283 comparisons of pointers with the same mode as well. */
3284 if (!useless_type_conversion_p (op0_type, op1_type)
3285 && !useless_type_conversion_p (op1_type, op0_type)
3286 && (!POINTER_TYPE_P (op0_type)
3287 || !POINTER_TYPE_P (op1_type)
3288 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3289 {
3290 error ("mismatching comparison operand types");
3291 debug_generic_expr (op0_type);
3292 debug_generic_expr (op1_type);
3293 return true;
3294 }
3295
3296 /* The resulting type of a comparison may be an effective boolean type. */
3297 if (INTEGRAL_TYPE_P (type)
3298 && (TREE_CODE (type) == BOOLEAN_TYPE
3299 || TYPE_PRECISION (type) == 1))
3300 ;
3301 /* Or an integer vector type with the same size and element count
3302 as the comparison operand types. */
3303 else if (TREE_CODE (type) == VECTOR_TYPE
3304 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3305 {
3306 if (TREE_CODE (op0_type) != VECTOR_TYPE
3307 || TREE_CODE (op1_type) != VECTOR_TYPE)
3308 {
3309 error ("non-vector operands in vector comparison");
3310 debug_generic_expr (op0_type);
3311 debug_generic_expr (op1_type);
3312 return true;
3313 }
3314
3315 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3316 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3317 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3318 {
3319 error ("invalid vector comparison resulting type");
3320 debug_generic_expr (type);
3321 return true;
3322 }
3323 }
3324 else
3325 {
3326 error ("bogus comparison result type");
3327 debug_generic_expr (type);
3328 return true;
3329 }
3330
3331 return false;
3332 }
3333
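/* Illustrative examples (SSA names made up): for int x_1 and y_2, the
   comparison "b_3 = x_1 < y_2" may give b_3 a BOOLEAN_TYPE or a
   one-bit integral type; comparing two four-element vectors of 32-bit
   integers must produce a four-element vector of 32-bit integers.
   Comparing an int against a double fails the checks above as
   mismatching comparison operand types.  */
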
3334 /* Verify a gimple assignment statement STMT with a unary rhs.
3335 Returns true if anything is wrong. */
3336
3337 static bool
3338 verify_gimple_assign_unary (gimple stmt)
3339 {
3340 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3341 tree lhs = gimple_assign_lhs (stmt);
3342 tree lhs_type = TREE_TYPE (lhs);
3343 tree rhs1 = gimple_assign_rhs1 (stmt);
3344 tree rhs1_type = TREE_TYPE (rhs1);
3345
3346 if (!is_gimple_reg (lhs))
3347 {
3348 error ("non-register as LHS of unary operation");
3349 return true;
3350 }
3351
3352 if (!is_gimple_val (rhs1))
3353 {
3354 error ("invalid operand in unary operation");
3355 return true;
3356 }
3357
3358 /* First handle conversions. */
3359 switch (rhs_code)
3360 {
3361 CASE_CONVERT:
3362 {
3363 /* Allow conversions from pointer type to integral type only if
3364 there is no sign or zero extension involved.
3365 For targets where the precision of ptrofftype doesn't match that
3366 of pointers we need to allow arbitrary conversions to ptrofftype. */
3367 if ((POINTER_TYPE_P (lhs_type)
3368 && INTEGRAL_TYPE_P (rhs1_type))
3369 || (POINTER_TYPE_P (rhs1_type)
3370 && INTEGRAL_TYPE_P (lhs_type)
3371 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3372 || ptrofftype_p (sizetype))))
3373 return false;
3374
3375 /* Allow conversion from integral to offset type and vice versa. */
3376 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3377 && INTEGRAL_TYPE_P (rhs1_type))
3378 || (INTEGRAL_TYPE_P (lhs_type)
3379 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3380 return false;
3381
3382 /* Otherwise assert we are converting between types of the
3383 same kind. */
3384 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3385 {
3386 error ("invalid types in nop conversion");
3387 debug_generic_expr (lhs_type);
3388 debug_generic_expr (rhs1_type);
3389 return true;
3390 }
3391
3392 return false;
3393 }
3394
3395 case ADDR_SPACE_CONVERT_EXPR:
3396 {
3397 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3398 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3399 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3400 {
3401 error ("invalid types in address space conversion");
3402 debug_generic_expr (lhs_type);
3403 debug_generic_expr (rhs1_type);
3404 return true;
3405 }
3406
3407 return false;
3408 }
3409
3410 case FIXED_CONVERT_EXPR:
3411 {
3412 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3413 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3414 {
3415 error ("invalid types in fixed-point conversion");
3416 debug_generic_expr (lhs_type);
3417 debug_generic_expr (rhs1_type);
3418 return true;
3419 }
3420
3421 return false;
3422 }
3423
3424 case FLOAT_EXPR:
3425 {
3426 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3427 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3428 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3429 {
3430 error ("invalid types in conversion to floating point");
3431 debug_generic_expr (lhs_type);
3432 debug_generic_expr (rhs1_type);
3433 return true;
3434 }
3435
3436 return false;
3437 }
3438
3439 case FIX_TRUNC_EXPR:
3440 {
3441 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3442 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3443 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3444 {
3445 error ("invalid types in conversion to integer");
3446 debug_generic_expr (lhs_type);
3447 debug_generic_expr (rhs1_type);
3448 return true;
3449 }
3450
3451 return false;
3452 }
3453
3454 case VEC_UNPACK_HI_EXPR:
3455 case VEC_UNPACK_LO_EXPR:
3456 case REDUC_MAX_EXPR:
3457 case REDUC_MIN_EXPR:
3458 case REDUC_PLUS_EXPR:
3459 case VEC_UNPACK_FLOAT_HI_EXPR:
3460 case VEC_UNPACK_FLOAT_LO_EXPR:
3461 /* FIXME. */
3462 return false;
3463
3464 case NEGATE_EXPR:
3465 case ABS_EXPR:
3466 case BIT_NOT_EXPR:
3467 case PAREN_EXPR:
3468 case NON_LVALUE_EXPR:
3469 case CONJ_EXPR:
3470 break;
3471
3472 default:
3473 gcc_unreachable ();
3474 }
3475
3476 /* For the remaining codes assert there is no conversion involved. */
3477 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3478 {
3479 error ("non-trivial conversion in unary operation");
3480 debug_generic_expr (lhs_type);
3481 debug_generic_expr (rhs1_type);
3482 return true;
3483 }
3484
3485 return false;
3486 }
3487
3488 /* Verify a gimple assignment statement STMT with a binary rhs.
3489 Returns true if anything is wrong. */
3490
3491 static bool
3492 verify_gimple_assign_binary (gimple stmt)
3493 {
3494 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3495 tree lhs = gimple_assign_lhs (stmt);
3496 tree lhs_type = TREE_TYPE (lhs);
3497 tree rhs1 = gimple_assign_rhs1 (stmt);
3498 tree rhs1_type = TREE_TYPE (rhs1);
3499 tree rhs2 = gimple_assign_rhs2 (stmt);
3500 tree rhs2_type = TREE_TYPE (rhs2);
3501
3502 if (!is_gimple_reg (lhs))
3503 {
3504 error ("non-register as LHS of binary operation");
3505 return true;
3506 }
3507
3508 if (!is_gimple_val (rhs1)
3509 || !is_gimple_val (rhs2))
3510 {
3511 error ("invalid operands in binary operation");
3512 return true;
3513 }
3514
3515 /* First handle operations that involve different types. */
3516 switch (rhs_code)
3517 {
3518 case COMPLEX_EXPR:
3519 {
3520 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3521 || !(INTEGRAL_TYPE_P (rhs1_type)
3522 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3523 || !(INTEGRAL_TYPE_P (rhs2_type)
3524 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3525 {
3526 error ("type mismatch in complex expression");
3527 debug_generic_expr (lhs_type);
3528 debug_generic_expr (rhs1_type);
3529 debug_generic_expr (rhs2_type);
3530 return true;
3531 }
3532
3533 return false;
3534 }
3535
3536 case LSHIFT_EXPR:
3537 case RSHIFT_EXPR:
3538 case LROTATE_EXPR:
3539 case RROTATE_EXPR:
3540 {
3541 /* Shifts and rotates are OK on integral types, fixed-point
3542 types and integer vector types. */
3543 if ((!INTEGRAL_TYPE_P (rhs1_type)
3544 && !FIXED_POINT_TYPE_P (rhs1_type)
3545 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3546 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3547 || (!INTEGRAL_TYPE_P (rhs2_type)
3548 /* Vector shifts of vectors are also ok. */
3549 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3550 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3551 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3552 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3553 || !useless_type_conversion_p (lhs_type, rhs1_type))
3554 {
3555 error ("type mismatch in shift expression");
3556 debug_generic_expr (lhs_type);
3557 debug_generic_expr (rhs1_type);
3558 debug_generic_expr (rhs2_type);
3559 return true;
3560 }
3561
3562 return false;
3563 }
3564
3565 case VEC_LSHIFT_EXPR:
3566 case VEC_RSHIFT_EXPR:
3567 {
3568 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3569 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3570 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3571 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3572 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3573 || (!INTEGRAL_TYPE_P (rhs2_type)
3574 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3575 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3576 || !useless_type_conversion_p (lhs_type, rhs1_type))
3577 {
3578 error ("type mismatch in vector shift expression");
3579 debug_generic_expr (lhs_type);
3580 debug_generic_expr (rhs1_type);
3581 debug_generic_expr (rhs2_type);
3582 return true;
3583 }
3584 /* For shifting a vector of non-integral components we
3585 only allow shifting by a constant multiple of the element size. */
3586 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3587 && (TREE_CODE (rhs2) != INTEGER_CST
3588 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3589 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3590 {
3591 error ("non-element sized vector shift of floating point vector");
3592 return true;
3593 }
3594
3595 return false;
3596 }
3597
3598 case WIDEN_LSHIFT_EXPR:
3599 {
3600 if (!INTEGRAL_TYPE_P (lhs_type)
3601 || !INTEGRAL_TYPE_P (rhs1_type)
3602 || TREE_CODE (rhs2) != INTEGER_CST
3603 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3604 {
3605 error ("type mismatch in widening vector shift expression");
3606 debug_generic_expr (lhs_type);
3607 debug_generic_expr (rhs1_type);
3608 debug_generic_expr (rhs2_type);
3609 return true;
3610 }
3611
3612 return false;
3613 }
3614
3615 case VEC_WIDEN_LSHIFT_HI_EXPR:
3616 case VEC_WIDEN_LSHIFT_LO_EXPR:
3617 {
3618 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3619 || TREE_CODE (lhs_type) != VECTOR_TYPE
3620 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3621 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3622 || TREE_CODE (rhs2) != INTEGER_CST
3623 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3624 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3625 {
3626 error ("type mismatch in widening vector shift expression");
3627 debug_generic_expr (lhs_type);
3628 debug_generic_expr (rhs1_type);
3629 debug_generic_expr (rhs2_type);
3630 return true;
3631 }
3632
3633 return false;
3634 }
3635
3636 case PLUS_EXPR:
3637 case MINUS_EXPR:
3638 {
3639 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3640 ??? This just makes the checker happy and may not be what is
3641 intended. */
3642 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3643 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3644 {
3645 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3646 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3647 {
3648 error ("invalid non-vector operands to vector valued plus");
3649 return true;
3650 }
3651 lhs_type = TREE_TYPE (lhs_type);
3652 rhs1_type = TREE_TYPE (rhs1_type);
3653 rhs2_type = TREE_TYPE (rhs2_type);
3654 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3655 the pointer into the second operand. */
3656 if (POINTER_TYPE_P (rhs2_type))
3657 {
3658 tree tem = rhs1_type;
3659 rhs1_type = rhs2_type;
3660 rhs2_type = tem;
3661 }
3662 goto do_pointer_plus_expr_check;
3663 }
3664 if (POINTER_TYPE_P (lhs_type)
3665 || POINTER_TYPE_P (rhs1_type)
3666 || POINTER_TYPE_P (rhs2_type))
3667 {
3668 error ("invalid (pointer) operands to plus/minus");
3669 return true;
3670 }
3671
3672 /* Continue with generic binary expression handling. */
3673 break;
3674 }
3675
3676 case POINTER_PLUS_EXPR:
3677 {
3678 do_pointer_plus_expr_check:
3679 if (!POINTER_TYPE_P (rhs1_type)
3680 || !useless_type_conversion_p (lhs_type, rhs1_type)
3681 || !ptrofftype_p (rhs2_type))
3682 {
3683 error ("type mismatch in pointer plus expression");
3684 debug_generic_stmt (lhs_type);
3685 debug_generic_stmt (rhs1_type);
3686 debug_generic_stmt (rhs2_type);
3687 return true;
3688 }
3689
3690 return false;
3691 }
3692
3693 case TRUTH_ANDIF_EXPR:
3694 case TRUTH_ORIF_EXPR:
3695 case TRUTH_AND_EXPR:
3696 case TRUTH_OR_EXPR:
3697 case TRUTH_XOR_EXPR:
3698
3699 gcc_unreachable ();
3700
3701 case LT_EXPR:
3702 case LE_EXPR:
3703 case GT_EXPR:
3704 case GE_EXPR:
3705 case EQ_EXPR:
3706 case NE_EXPR:
3707 case UNORDERED_EXPR:
3708 case ORDERED_EXPR:
3709 case UNLT_EXPR:
3710 case UNLE_EXPR:
3711 case UNGT_EXPR:
3712 case UNGE_EXPR:
3713 case UNEQ_EXPR:
3714 case LTGT_EXPR:
3715 /* Comparisons are also binary, but the result type is not
3716 connected to the operand types. */
3717 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3718
3719 case WIDEN_MULT_EXPR:
3720 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3721 return true;
3722 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3723 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3724
3725 case WIDEN_SUM_EXPR:
3726 case VEC_WIDEN_MULT_HI_EXPR:
3727 case VEC_WIDEN_MULT_LO_EXPR:
3728 case VEC_PACK_TRUNC_EXPR:
3729 case VEC_PACK_SAT_EXPR:
3730 case VEC_PACK_FIX_TRUNC_EXPR:
3731 /* FIXME. */
3732 return false;
3733
3734 case MULT_EXPR:
3735 case TRUNC_DIV_EXPR:
3736 case CEIL_DIV_EXPR:
3737 case FLOOR_DIV_EXPR:
3738 case ROUND_DIV_EXPR:
3739 case TRUNC_MOD_EXPR:
3740 case CEIL_MOD_EXPR:
3741 case FLOOR_MOD_EXPR:
3742 case ROUND_MOD_EXPR:
3743 case RDIV_EXPR:
3744 case EXACT_DIV_EXPR:
3745 case MIN_EXPR:
3746 case MAX_EXPR:
3747 case BIT_IOR_EXPR:
3748 case BIT_XOR_EXPR:
3749 case BIT_AND_EXPR:
3750 /* Continue with generic binary expression handling. */
3751 break;
3752
3753 default:
3754 gcc_unreachable ();
3755 }
3756
3757 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3758 || !useless_type_conversion_p (lhs_type, rhs2_type))
3759 {
3760 error ("type mismatch in binary expression");
3761 debug_generic_stmt (lhs_type);
3762 debug_generic_stmt (rhs1_type);
3763 debug_generic_stmt (rhs2_type);
3764 return true;
3765 }
3766
3767 return false;
3768 }
3769
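/* Illustrative example (SSA names made up): with char *p_1, the only
   valid GIMPLE form of pointer arithmetic is

     q_2 = p_1 + 4;   (a POINTER_PLUS_EXPR)

   where the offset operand has ptrofftype (typically sizetype).
   Writing it as a plain PLUS_EXPR on a pointer type is rejected above
   as "invalid (pointer) operands to plus/minus".  */
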
3770 /* Verify a gimple assignment statement STMT with a ternary rhs.
3771 Returns true if anything is wrong. */
3772
3773 static bool
3774 verify_gimple_assign_ternary (gimple stmt)
3775 {
3776 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3777 tree lhs = gimple_assign_lhs (stmt);
3778 tree lhs_type = TREE_TYPE (lhs);
3779 tree rhs1 = gimple_assign_rhs1 (stmt);
3780 tree rhs1_type = TREE_TYPE (rhs1);
3781 tree rhs2 = gimple_assign_rhs2 (stmt);
3782 tree rhs2_type = TREE_TYPE (rhs2);
3783 tree rhs3 = gimple_assign_rhs3 (stmt);
3784 tree rhs3_type = TREE_TYPE (rhs3);
3785
3786 if (!is_gimple_reg (lhs))
3787 {
3788 error ("non-register as LHS of ternary operation");
3789 return true;
3790 }
3791
3792 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3793 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3794 || !is_gimple_val (rhs2)
3795 || !is_gimple_val (rhs3))
3796 {
3797 error ("invalid operands in ternary operation");
3798 return true;
3799 }
3800
3801 /* First handle operations that involve different types. */
3802 switch (rhs_code)
3803 {
3804 case WIDEN_MULT_PLUS_EXPR:
3805 case WIDEN_MULT_MINUS_EXPR:
3806 if ((!INTEGRAL_TYPE_P (rhs1_type)
3807 && !FIXED_POINT_TYPE_P (rhs1_type))
3808 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3809 || !useless_type_conversion_p (lhs_type, rhs3_type)
3810 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3811 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3812 {
3813 error ("type mismatch in widening multiply-accumulate expression");
3814 debug_generic_expr (lhs_type);
3815 debug_generic_expr (rhs1_type);
3816 debug_generic_expr (rhs2_type);
3817 debug_generic_expr (rhs3_type);
3818 return true;
3819 }
3820 break;
3821
3822 case FMA_EXPR:
3823 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3824 || !useless_type_conversion_p (lhs_type, rhs2_type)
3825 || !useless_type_conversion_p (lhs_type, rhs3_type))
3826 {
3827 error ("type mismatch in fused multiply-add expression");
3828 debug_generic_expr (lhs_type);
3829 debug_generic_expr (rhs1_type);
3830 debug_generic_expr (rhs2_type);
3831 debug_generic_expr (rhs3_type);
3832 return true;
3833 }
3834 break;
3835
3836 case COND_EXPR:
3837 case VEC_COND_EXPR:
3838 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3839 || !useless_type_conversion_p (lhs_type, rhs3_type))
3840 {
3841 error ("type mismatch in conditional expression");
3842 debug_generic_expr (lhs_type);
3843 debug_generic_expr (rhs2_type);
3844 debug_generic_expr (rhs3_type);
3845 return true;
3846 }
3847 break;
3848
3849 case VEC_PERM_EXPR:
3850 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3851 || !useless_type_conversion_p (lhs_type, rhs2_type))
3852 {
3853 error ("type mismatch in vector permute expression");
3854 debug_generic_expr (lhs_type);
3855 debug_generic_expr (rhs1_type);
3856 debug_generic_expr (rhs2_type);
3857 debug_generic_expr (rhs3_type);
3858 return true;
3859 }
3860
3861 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3862 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3863 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3864 {
3865 error ("vector types expected in vector permute expression");
3866 debug_generic_expr (lhs_type);
3867 debug_generic_expr (rhs1_type);
3868 debug_generic_expr (rhs2_type);
3869 debug_generic_expr (rhs3_type);
3870 return true;
3871 }
3872
3873 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3874 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3875 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3876 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3877 != TYPE_VECTOR_SUBPARTS (lhs_type))
3878 {
3879 error ("vectors with different element number found "
3880 "in vector permute expression");
3881 debug_generic_expr (lhs_type);
3882 debug_generic_expr (rhs1_type);
3883 debug_generic_expr (rhs2_type);
3884 debug_generic_expr (rhs3_type);
3885 return true;
3886 }
3887
3888 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3889 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3890 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3891 {
3892 error ("invalid mask type in vector permute expression");
3893 debug_generic_expr (lhs_type);
3894 debug_generic_expr (rhs1_type);
3895 debug_generic_expr (rhs2_type);
3896 debug_generic_expr (rhs3_type);
3897 return true;
3898 }
3899
3900 return false;
3901
3902 case DOT_PROD_EXPR:
3903 case REALIGN_LOAD_EXPR:
3904 /* FIXME. */
3905 return false;
3906
3907 default:
3908 gcc_unreachable ();
3909 }
3910 return false;
3911 }
3912
3913 /* Verify a gimple assignment statement STMT with a single rhs.
3914 Returns true if anything is wrong. */
3915
3916 static bool
3917 verify_gimple_assign_single (gimple stmt)
3918 {
3919 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3920 tree lhs = gimple_assign_lhs (stmt);
3921 tree lhs_type = TREE_TYPE (lhs);
3922 tree rhs1 = gimple_assign_rhs1 (stmt);
3923 tree rhs1_type = TREE_TYPE (rhs1);
3924 bool res = false;
3925
3926 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3927 {
3928 error ("non-trivial conversion at assignment");
3929 debug_generic_expr (lhs_type);
3930 debug_generic_expr (rhs1_type);
3931 return true;
3932 }
3933
3934 if (handled_component_p (lhs))
3935 res |= verify_types_in_gimple_reference (lhs, true);
3936
3937 /* Special codes we cannot handle via their class. */
3938 switch (rhs_code)
3939 {
3940 case ADDR_EXPR:
3941 {
3942 tree op = TREE_OPERAND (rhs1, 0);
3943 if (!is_gimple_addressable (op))
3944 {
3945 error ("invalid operand in unary expression");
3946 return true;
3947 }
3948
3949 /* Technically there is no longer a need for matching types, but
3950 gimple hygiene asks for this check.  In LTO we can end up
3951 combining incompatible units and thus get addresses of globals
3952 whose type has changed to a common one. */
3953 if (!in_lto_p
3954 && !types_compatible_p (TREE_TYPE (op),
3955 TREE_TYPE (TREE_TYPE (rhs1)))
3956 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3957 TREE_TYPE (op)))
3958 {
3959 error ("type mismatch in address expression");
3960 debug_generic_stmt (TREE_TYPE (rhs1));
3961 debug_generic_stmt (TREE_TYPE (op));
3962 return true;
3963 }
3964
3965 return verify_types_in_gimple_reference (op, true);
3966 }
3967
3968 /* tcc_reference */
3969 case INDIRECT_REF:
3970 error ("INDIRECT_REF in gimple IL");
3971 return true;
3972
3973 case COMPONENT_REF:
3974 case BIT_FIELD_REF:
3975 case ARRAY_REF:
3976 case ARRAY_RANGE_REF:
3977 case VIEW_CONVERT_EXPR:
3978 case REALPART_EXPR:
3979 case IMAGPART_EXPR:
3980 case TARGET_MEM_REF:
3981 case MEM_REF:
3982 if (!is_gimple_reg (lhs)
3983 && is_gimple_reg_type (TREE_TYPE (lhs)))
3984 {
3985 error ("invalid rhs for gimple memory store");
3986 debug_generic_stmt (lhs);
3987 debug_generic_stmt (rhs1);
3988 return true;
3989 }
3990 return res || verify_types_in_gimple_reference (rhs1, false);
3991
3992 /* tcc_constant */
3993 case SSA_NAME:
3994 case INTEGER_CST:
3995 case REAL_CST:
3996 case FIXED_CST:
3997 case COMPLEX_CST:
3998 case VECTOR_CST:
3999 case STRING_CST:
4000 return res;
4001
4002 /* tcc_declaration */
4003 case CONST_DECL:
4004 return res;
4005 case VAR_DECL:
4006 case PARM_DECL:
4007 if (!is_gimple_reg (lhs)
4008 && !is_gimple_reg (rhs1)
4009 && is_gimple_reg_type (TREE_TYPE (lhs)))
4010 {
4011 error ("invalid rhs for gimple memory store");
4012 debug_generic_stmt (lhs);
4013 debug_generic_stmt (rhs1);
4014 return true;
4015 }
4016 return res;
4017
4018 case CONSTRUCTOR:
4019 case OBJ_TYPE_REF:
4020 case ASSERT_EXPR:
4021 case WITH_SIZE_EXPR:
4022 /* FIXME. */
4023 return res;
4024
4025 default:;
4026 }
4027
4028 return res;
4029 }
4030
4031 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4032 is a problem, otherwise false. */
4033
4034 static bool
4035 verify_gimple_assign (gimple stmt)
4036 {
4037 switch (gimple_assign_rhs_class (stmt))
4038 {
4039 case GIMPLE_SINGLE_RHS:
4040 return verify_gimple_assign_single (stmt);
4041
4042 case GIMPLE_UNARY_RHS:
4043 return verify_gimple_assign_unary (stmt);
4044
4045 case GIMPLE_BINARY_RHS:
4046 return verify_gimple_assign_binary (stmt);
4047
4048 case GIMPLE_TERNARY_RHS:
4049 return verify_gimple_assign_ternary (stmt);
4050
4051 default:
4052 gcc_unreachable ();
4053 }
4054 }
4055
4056 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4057 is a problem, otherwise false. */
4058
4059 static bool
4060 verify_gimple_return (gimple stmt)
4061 {
4062 tree op = gimple_return_retval (stmt);
4063 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4064
4065 /* We cannot test for present return values as we do not fix up missing
4066 return values from the original source. */
4067 if (op == NULL)
4068 return false;
4069
4070 if (!is_gimple_val (op)
4071 && TREE_CODE (op) != RESULT_DECL)
4072 {
4073 error ("invalid operand in return statement");
4074 debug_generic_stmt (op);
4075 return true;
4076 }
4077
4078 if ((TREE_CODE (op) == RESULT_DECL
4079 && DECL_BY_REFERENCE (op))
4080 || (TREE_CODE (op) == SSA_NAME
4081 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4082 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4083 op = TREE_TYPE (op);
4084
4085 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4086 {
4087 error ("invalid conversion in return statement");
4088 debug_generic_stmt (restype);
4089 debug_generic_stmt (TREE_TYPE (op));
4090 return true;
4091 }
4092
4093 return false;
4094 }
4095
4096
4097 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4098 is a problem, otherwise false. */
4099
4100 static bool
4101 verify_gimple_goto (gimple stmt)
4102 {
4103 tree dest = gimple_goto_dest (stmt);
4104
4105 /* ??? We have two canonical forms of direct goto destinations, a
4106 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
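/* E.g. a direct "goto lab;" carries the bare LABEL_DECL for lab,
while a computed "goto *p_1;" carries a pointer-typed gimple value;
a "&&lab" operand appears as an ADDR_EXPR of a LABEL_DECL. */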
4107 if (TREE_CODE (dest) != LABEL_DECL
4108 && (!is_gimple_val (dest)
4109 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4110 {
4111 error ("goto destination is neither a label nor a pointer");
4112 return true;
4113 }
4114
4115 return false;
4116 }
4117
4118 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4119 is a problem, otherwise false. */
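/* A switch that passes these checks looks, roughly in GIMPLE dump
syntax, like
switch (i_1) <default: <L3>, case 1: <L0>, case 5 ... 7: <L1>>
with the default label first and lacking CASE_LOW/CASE_HIGH, and the
remaining labels sorted by CASE_LOW, non-overlapping, and all of one
type whose precision does not exceed that of the index type. */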
4120
4121 static bool
4122 verify_gimple_switch (gimple stmt)
4123 {
4124 unsigned int i, n;
4125 tree elt, prev_upper_bound = NULL_TREE;
4126 tree index_type, elt_type = NULL_TREE;
4127
4128 if (!is_gimple_val (gimple_switch_index (stmt)))
4129 {
4130 error ("invalid operand to switch statement");
4131 debug_generic_stmt (gimple_switch_index (stmt));
4132 return true;
4133 }
4134
4135 index_type = TREE_TYPE (gimple_switch_index (stmt));
4136 if (! INTEGRAL_TYPE_P (index_type))
4137 {
4138 error ("non-integral type switch statement");
4139 debug_generic_expr (index_type);
4140 return true;
4141 }
4142
4143 elt = gimple_switch_default_label (stmt);
4144 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4145 {
4146 error ("invalid default case label in switch statement");
4147 debug_generic_expr (elt);
4148 return true;
4149 }
4150
4151 n = gimple_switch_num_labels (stmt);
4152 for (i = 1; i < n; i++)
4153 {
4154 elt = gimple_switch_label (stmt, i);
4155
4156 if (! CASE_LOW (elt))
4157 {
4158 error ("invalid case label in switch statement");
4159 debug_generic_expr (elt);
4160 return true;
4161 }
4162 if (CASE_HIGH (elt)
4163 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4164 {
4165 error ("invalid case range in switch statement");
4166 debug_generic_expr (elt);
4167 return true;
4168 }
4169
4170 if (elt_type)
4171 {
4172 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4173 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4174 {
4175 error ("type mismatch for case label in switch statement");
4176 debug_generic_expr (elt);
4177 return true;
4178 }
4179 }
4180 else
4181 {
4182 elt_type = TREE_TYPE (CASE_LOW (elt));
4183 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4184 {
4185 error ("type precision mismatch in switch statement");
4186 return true;
4187 }
4188 }
4189
4190 if (prev_upper_bound)
4191 {
4192 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4193 {
4194 error ("case labels not sorted in switch statement");
4195 return true;
4196 }
4197 }
4198
4199 prev_upper_bound = CASE_HIGH (elt);
4200 if (! prev_upper_bound)
4201 prev_upper_bound = CASE_LOW (elt);
4202 }
4203
4204 return false;
4205 }
4206
4207 /* Verify a gimple debug statement STMT.
4208 Returns true if anything is wrong. */
4209
4210 static bool
4211 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4212 {
4213 /* There isn't much that could be wrong in a gimple debug stmt.  A
4214 gimple debug bind stmt, for example, maps a tree that is usually
4215 a VAR_DECL or a PARM_DECL, but could also be some scalarized
4216 component or member of an aggregate type, to another tree that
4217 can be an arbitrary expression.  These stmts expand into debug
4218 insns, and are converted to debug notes by var-tracking.c. */
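/* E.g., roughly in dump form, "# DEBUG x => y_1 + 1" binds the user
variable x to the value of y_1 + 1 at this point. */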
4219 return false;
4220 }
4221
4222 /* Verify a gimple label statement STMT.
4223 Returns true if anything is wrong. */
4224
4225 static bool
4226 verify_gimple_label (gimple stmt)
4227 {
4228 tree decl = gimple_label_label (stmt);
4229 int uid;
4230 bool err = false;
4231
4232 if (TREE_CODE (decl) != LABEL_DECL)
4233 return true;
4234
4235 uid = LABEL_DECL_UID (decl);
4236 if (cfun->cfg
4237 && (uid == -1
4238 || VEC_index (basic_block,
4239 label_to_block_map, uid) != gimple_bb (stmt)))
4240 {
4241 error ("incorrect entry in label_to_block_map");
4242 err |= true;
4243 }
4244
4245 uid = EH_LANDING_PAD_NR (decl);
4246 if (uid)
4247 {
4248 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4249 if (decl != lp->post_landing_pad)
4250 {
4251 error ("incorrect setting of landing pad number");
4252 err |= true;
4253 }
4254 }
4255
4256 return err;
4257 }
4258
4259 /* Verify the GIMPLE statement STMT. Returns true if there is an
4260 error, otherwise false. */
4261
4262 static bool
4263 verify_gimple_stmt (gimple stmt)
4264 {
4265 switch (gimple_code (stmt))
4266 {
4267 case GIMPLE_ASSIGN:
4268 return verify_gimple_assign (stmt);
4269
4270 case GIMPLE_LABEL:
4271 return verify_gimple_label (stmt);
4272
4273 case GIMPLE_CALL:
4274 return verify_gimple_call (stmt);
4275
4276 case GIMPLE_COND:
4277 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4278 {
4279 error ("invalid comparison code in gimple cond");
4280 return true;
4281 }
4282 if (!(!gimple_cond_true_label (stmt)
4283 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4284 || !(!gimple_cond_false_label (stmt)
4285 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4286 {
4287 error ("invalid labels in gimple cond");
4288 return true;
4289 }
4290
4291 return verify_gimple_comparison (boolean_type_node,
4292 gimple_cond_lhs (stmt),
4293 gimple_cond_rhs (stmt));
4294
4295 case GIMPLE_GOTO:
4296 return verify_gimple_goto (stmt);
4297
4298 case GIMPLE_SWITCH:
4299 return verify_gimple_switch (stmt);
4300
4301 case GIMPLE_RETURN:
4302 return verify_gimple_return (stmt);
4303
4304 case GIMPLE_ASM:
4305 return false;
4306
4307 case GIMPLE_TRANSACTION:
4308 return verify_gimple_transaction (stmt);
4309
4310 /* Tuples that do not have tree operands. */
4311 case GIMPLE_NOP:
4312 case GIMPLE_PREDICT:
4313 case GIMPLE_RESX:
4314 case GIMPLE_EH_DISPATCH:
4315 case GIMPLE_EH_MUST_NOT_THROW:
4316 return false;
4317
4318 CASE_GIMPLE_OMP:
4319 /* OpenMP directives are validated by the FE and never operated
4320 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4321 non-gimple expressions when the main index variable has had
4322 its address taken. This does not affect the loop itself
4323 because the header of a GIMPLE_OMP_FOR is merely used to determine
4324 how to set up the parallel iteration. */
4325 return false;
4326
4327 case GIMPLE_DEBUG:
4328 return verify_gimple_debug (stmt);
4329
4330 default:
4331 gcc_unreachable ();
4332 }
4333 }
4334
4335 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4336 and false otherwise. */
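/* E.g., roughly in GIMPLE dump syntax, a scalar PHI such as
x_3 = PHI <x_1(2), x_2(3)>
must have an SSA name result and gimple-value arguments, while a
virtual PHI such as
.MEM_4 = PHI <.MEM_1(2), .MEM_2(3)>
must have the virtual operand (gimple_vop) as the variable of its
result and of every argument. */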
4337
4338 static bool
4339 verify_gimple_phi (gimple phi)
4340 {
4341 bool err = false;
4342 unsigned i;
4343 tree phi_result = gimple_phi_result (phi);
4344 bool virtual_p;
4345
4346 if (!phi_result)
4347 {
4348 error ("invalid PHI result");
4349 return true;
4350 }
4351
4352 virtual_p = !is_gimple_reg (phi_result);
4353 if (TREE_CODE (phi_result) != SSA_NAME
4354 || (virtual_p
4355 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4356 {
4357 error ("invalid PHI result");
4358 err = true;
4359 }
4360
4361 for (i = 0; i < gimple_phi_num_args (phi); i++)
4362 {
4363 tree t = gimple_phi_arg_def (phi, i);
4364
4365 if (!t)
4366 {
4367 error ("missing PHI def");
4368 err |= true;
4369 continue;
4370 }
4371 /* Addressable variables do have SSA_NAMEs but they
4372 are not considered gimple values. */
4373 else if ((TREE_CODE (t) == SSA_NAME
4374 && virtual_p != !is_gimple_reg (t))
4375 || (virtual_p
4376 && (TREE_CODE (t) != SSA_NAME
4377 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4378 || (!virtual_p
4379 && !is_gimple_val (t)))
4380 {
4381 error ("invalid PHI argument");
4382 debug_generic_expr (t);
4383 err |= true;
4384 }
4385 #ifdef ENABLE_TYPES_CHECKING
4386 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4387 {
4388 error ("incompatible types in PHI argument %u", i);
4389 debug_generic_stmt (TREE_TYPE (phi_result));
4390 debug_generic_stmt (TREE_TYPE (t));
4391 err |= true;
4392 }
4393 #endif
4394 }
4395
4396 return err;
4397 }
4398
4399 /* Verify the GIMPLE statements inside the sequence STMTS. */
4400
4401 static bool
4402 verify_gimple_in_seq_2 (gimple_seq stmts)
4403 {
4404 gimple_stmt_iterator ittr;
4405 bool err = false;
4406
4407 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4408 {
4409 gimple stmt = gsi_stmt (ittr);
4410
4411 switch (gimple_code (stmt))
4412 {
4413 case GIMPLE_BIND:
4414 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4415 break;
4416
4417 case GIMPLE_TRY:
4418 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4419 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4420 break;
4421
4422 case GIMPLE_EH_FILTER:
4423 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4424 break;
4425
4426 case GIMPLE_EH_ELSE:
4427 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4428 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4429 break;
4430
4431 case GIMPLE_CATCH:
4432 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4433 break;
4434
4435 case GIMPLE_TRANSACTION:
4436 err |= verify_gimple_transaction (stmt);
4437 break;
4438
4439 default:
4440 {
4441 bool err2 = verify_gimple_stmt (stmt);
4442 if (err2)
4443 debug_gimple_stmt (stmt);
4444 err |= err2;
4445 }
4446 }
4447 }
4448
4449 return err;
4450 }
4451
4452 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4453 is a problem, otherwise false. */
4454
4455 static bool
4456 verify_gimple_transaction (gimple stmt)
4457 {
4458 tree lab = gimple_transaction_label (stmt);
4459 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4460 return true;
4461 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4462 }
4463
4464
4465 /* Verify the GIMPLE statements inside the statement list STMTS. */
4466
4467 DEBUG_FUNCTION void
4468 verify_gimple_in_seq (gimple_seq stmts)
4469 {
4470 timevar_push (TV_TREE_STMT_VERIFY);
4471 if (verify_gimple_in_seq_2 (stmts))
4472 internal_error ("verify_gimple failed");
4473 timevar_pop (TV_TREE_STMT_VERIFY);
4474 }
4475
4476 /* Return true when T can be shared. */
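/* E.g. types, decls, SSA names and invariants such as integer_zero_node
may appear in any number of statements, whereas something like an
ARRAY_REF with a variable index must be a distinct node per use. */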
4477
4478 bool
4479 tree_node_can_be_shared (tree t)
4480 {
4481 if (IS_TYPE_OR_DECL_P (t)
4482 || is_gimple_min_invariant (t)
4483 || TREE_CODE (t) == SSA_NAME
4484 || t == error_mark_node
4485 || TREE_CODE (t) == IDENTIFIER_NODE)
4486 return true;
4487
4488 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4489 return true;
4490
4491 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4492 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4493 || TREE_CODE (t) == COMPONENT_REF
4494 || TREE_CODE (t) == REALPART_EXPR
4495 || TREE_CODE (t) == IMAGPART_EXPR)
4496 t = TREE_OPERAND (t, 0);
4497
4498 if (DECL_P (t))
4499 return true;
4500
4501 return false;
4502 }
4503
4504 /* Called via walk_gimple_stmt. Verify tree sharing. */
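/* Returning the offending tree stops the walk; walk_gimple_op hands
it back to the caller, which then reports "incorrect sharing of tree
nodes".  Sharing is wrong when, e.g., a single non-shareable node such
as a MEM_REF is reachable from two different statements. */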
4505
4506 static tree
4507 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4508 {
4509 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4510 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4511
4512 if (tree_node_can_be_shared (*tp))
4513 {
4514 *walk_subtrees = false;
4515 return NULL;
4516 }
4517
4518 if (pointer_set_insert (visited, *tp))
4519 return *tp;
4520
4521 return NULL;
4522 }
4523
4524 static bool eh_error_found;
4525 static int
4526 verify_eh_throw_stmt_node (void **slot, void *data)
4527 {
4528 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4529 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4530
4531 if (!pointer_set_contains (visited, node->stmt))
4532 {
4533 error ("dead STMT in EH table");
4534 debug_gimple_stmt (node->stmt);
4535 eh_error_found = true;
4536 }
4537 return 1;
4538 }
4539
4540 /* Verify the GIMPLE statements in the CFG of FN. */
4541
4542 DEBUG_FUNCTION void
4543 verify_gimple_in_cfg (struct function *fn)
4544 {
4545 basic_block bb;
4546 bool err = false;
4547 struct pointer_set_t *visited, *visited_stmts;
4548
4549 timevar_push (TV_TREE_STMT_VERIFY);
4550 visited = pointer_set_create ();
4551 visited_stmts = pointer_set_create ();
4552
4553 FOR_EACH_BB_FN (bb, fn)
4554 {
4555 gimple_stmt_iterator gsi;
4556
4557 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4558 {
4559 gimple phi = gsi_stmt (gsi);
4560 bool err2 = false;
4561 unsigned i;
4562
4563 pointer_set_insert (visited_stmts, phi);
4564
4565 if (gimple_bb (phi) != bb)
4566 {
4567 error ("gimple_bb (phi) is set to a wrong basic block");
4568 err2 = true;
4569 }
4570
4571 err2 |= verify_gimple_phi (phi);
4572
4573 for (i = 0; i < gimple_phi_num_args (phi); i++)
4574 {
4575 tree arg = gimple_phi_arg_def (phi, i);
4576 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4577 if (addr)
4578 {
4579 error ("incorrect sharing of tree nodes");
4580 debug_generic_expr (addr);
4581 err2 |= true;
4582 }
4583 }
4584
4585 if (err2)
4586 debug_gimple_stmt (phi);
4587 err |= err2;
4588 }
4589
4590 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4591 {
4592 gimple stmt = gsi_stmt (gsi);
4593 bool err2 = false;
4594 struct walk_stmt_info wi;
4595 tree addr;
4596 int lp_nr;
4597
4598 pointer_set_insert (visited_stmts, stmt);
4599
4600 if (gimple_bb (stmt) != bb)
4601 {
4602 error ("gimple_bb (stmt) is set to a wrong basic block");
4603 err2 = true;
4604 }
4605
4606 err2 |= verify_gimple_stmt (stmt);
4607
4608 memset (&wi, 0, sizeof (wi));
4609 wi.info = (void *) visited;
4610 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4611 if (addr)
4612 {
4613 error ("incorrect sharing of tree nodes");
4614 debug_generic_expr (addr);
4615 err2 |= true;
4616 }
4617
4618 /* ??? Instead of not checking these stmts at all, the walker
4619 should know its context via wi. */
4620 if (!is_gimple_debug (stmt)
4621 && !is_gimple_omp (stmt))
4622 {
4623 memset (&wi, 0, sizeof (wi));
4624 addr = walk_gimple_op (stmt, verify_expr, &wi);
4625 if (addr)
4626 {
4627 debug_generic_expr (addr);
4628 inform (gimple_location (stmt), "in statement");
4629 err2 |= true;
4630 }
4631 }
4632
4633 /* If the statement is marked as part of an EH region, then it
4634 is expected that the statement could throw.  Verify that when
4635 optimizations simplify a statement so that we can prove it
4636 cannot throw, the other data structures are updated to match. */
4638 lp_nr = lookup_stmt_eh_lp (stmt);
4639 if (lp_nr != 0)
4640 {
4641 if (!stmt_could_throw_p (stmt))
4642 {
4643 error ("statement marked for throw, but doesn%'t");
4644 err2 |= true;
4645 }
4646 else if (lp_nr > 0
4647 && !gsi_one_before_end_p (gsi)
4648 && stmt_can_throw_internal (stmt))
4649 {
4650 error ("statement marked for throw in middle of block");
4651 err2 |= true;
4652 }
4653 }
4654
4655 if (err2)
4656 debug_gimple_stmt (stmt);
4657 err |= err2;
4658 }
4659 }
4660
4661 eh_error_found = false;
4662 if (get_eh_throw_stmt_table (cfun))
4663 htab_traverse (get_eh_throw_stmt_table (cfun),
4664 verify_eh_throw_stmt_node,
4665 visited_stmts);
4666
4667 if (err || eh_error_found)
4668 internal_error ("verify_gimple failed");
4669
4670 pointer_set_destroy (visited);
4671 pointer_set_destroy (visited_stmts);
4672 verify_histograms ();
4673 timevar_pop (TV_TREE_STMT_VERIFY);
4674 }
4675
4676
4677 /* Verifies that the flow information is OK. */
4678
4679 static int
4680 gimple_verify_flow_info (void)
4681 {
4682 int err = 0;
4683 basic_block bb;
4684 gimple_stmt_iterator gsi;
4685 gimple stmt;
4686 edge e;
4687 edge_iterator ei;
4688
4689 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4690 {
4691 error ("ENTRY_BLOCK has IL associated with it");
4692 err = 1;
4693 }
4694
4695 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4696 {
4697 error ("EXIT_BLOCK has IL associated with it");
4698 err = 1;
4699 }
4700
4701 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4702 if (e->flags & EDGE_FALLTHRU)
4703 {
4704 error ("fallthru to exit from bb %d", e->src->index);
4705 err = 1;
4706 }
4707
4708 FOR_EACH_BB (bb)
4709 {
4710 bool found_ctrl_stmt = false;
4711
4712 stmt = NULL;
4713
4714 /* Skip labels at the start of the basic block. */
4715 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4716 {
4717 tree label;
4718 gimple prev_stmt = stmt;
4719
4720 stmt = gsi_stmt (gsi);
4721
4722 if (gimple_code (stmt) != GIMPLE_LABEL)
4723 break;
4724
4725 label = gimple_label_label (stmt);
4726 if (prev_stmt && DECL_NONLOCAL (label))
4727 {
4728 error ("nonlocal label ");
4729 print_generic_expr (stderr, label, 0);
4730 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4731 bb->index);
4732 err = 1;
4733 }
4734
4735 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4736 {
4737 error ("EH landing pad label ");
4738 print_generic_expr (stderr, label, 0);
4739 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4740 bb->index);
4741 err = 1;
4742 }
4743
4744 if (label_to_block (label) != bb)
4745 {
4746 error ("label ");
4747 print_generic_expr (stderr, label, 0);
4748 fprintf (stderr, " to block does not match in bb %d",
4749 bb->index);
4750 err = 1;
4751 }
4752
4753 if (decl_function_context (label) != current_function_decl)
4754 {
4755 error ("label ");
4756 print_generic_expr (stderr, label, 0);
4757 fprintf (stderr, " has incorrect context in bb %d",
4758 bb->index);
4759 err = 1;
4760 }
4761 }
4762
4763 /* Verify that the body of basic block BB is free of control flow. */
4764 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4765 {
4766 gimple stmt = gsi_stmt (gsi);
4767
4768 if (found_ctrl_stmt)
4769 {
4770 error ("control flow in the middle of basic block %d",
4771 bb->index);
4772 err = 1;
4773 }
4774
4775 if (stmt_ends_bb_p (stmt))
4776 found_ctrl_stmt = true;
4777
4778 if (gimple_code (stmt) == GIMPLE_LABEL)
4779 {
4780 error ("label ");
4781 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4782 fprintf (stderr, " in the middle of basic block %d", bb->index);
4783 err = 1;
4784 }
4785 }
4786
4787 gsi = gsi_last_bb (bb);
4788 if (gsi_end_p (gsi))
4789 continue;
4790
4791 stmt = gsi_stmt (gsi);
4792
4793 if (gimple_code (stmt) == GIMPLE_LABEL)
4794 continue;
4795
4796 err |= verify_eh_edges (stmt);
4797
4798 if (is_ctrl_stmt (stmt))
4799 {
4800 FOR_EACH_EDGE (e, ei, bb->succs)
4801 if (e->flags & EDGE_FALLTHRU)
4802 {
4803 error ("fallthru edge after a control statement in bb %d",
4804 bb->index);
4805 err = 1;
4806 }
4807 }
4808
4809 if (gimple_code (stmt) != GIMPLE_COND)
4810 {
4811 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4812 after anything other than a GIMPLE_COND statement. */
4813 FOR_EACH_EDGE (e, ei, bb->succs)
4814 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4815 {
4816 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4817 bb->index);
4818 err = 1;
4819 }
4820 }
4821
4822 switch (gimple_code (stmt))
4823 {
4824 case GIMPLE_COND:
4825 {
4826 edge true_edge;
4827 edge false_edge;
4828
4829 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4830
4831 if (!true_edge
4832 || !false_edge
4833 || !(true_edge->flags & EDGE_TRUE_VALUE)
4834 || !(false_edge->flags & EDGE_FALSE_VALUE)
4835 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4836 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4837 || EDGE_COUNT (bb->succs) >= 3)
4838 {
4839 error ("wrong outgoing edge flags at end of bb %d",
4840 bb->index);
4841 err = 1;
4842 }
4843 }
4844 break;
4845
4846 case GIMPLE_GOTO:
4847 if (simple_goto_p (stmt))
4848 {
4849 error ("explicit goto at end of bb %d", bb->index);
4850 err = 1;
4851 }
4852 else
4853 {
4854 /* FIXME. We should double check that the labels in the
4855 destination blocks have their address taken. */
4856 FOR_EACH_EDGE (e, ei, bb->succs)
4857 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4858 | EDGE_FALSE_VALUE))
4859 || !(e->flags & EDGE_ABNORMAL))
4860 {
4861 error ("wrong outgoing edge flags at end of bb %d",
4862 bb->index);
4863 err = 1;
4864 }
4865 }
4866 break;
4867
4868 case GIMPLE_CALL:
4869 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4870 break;
4871 /* ... fallthru ... */
4872 case GIMPLE_RETURN:
4873 if (!single_succ_p (bb)
4874 || (single_succ_edge (bb)->flags
4875 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4876 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4877 {
4878 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4879 err = 1;
4880 }
4881 if (single_succ (bb) != EXIT_BLOCK_PTR)
4882 {
4883 error ("return edge does not point to exit in bb %d",
4884 bb->index);
4885 err = 1;
4886 }
4887 break;
4888
4889 case GIMPLE_SWITCH:
4890 {
4891 tree prev;
4892 edge e;
4893 size_t i, n;
4894
4895 n = gimple_switch_num_labels (stmt);
4896
4897 /* Mark all the destination basic blocks. */
4898 for (i = 0; i < n; ++i)
4899 {
4900 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4901 basic_block label_bb = label_to_block (lab);
4902 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4903 label_bb->aux = (void *)1;
4904 }
4905
4906 /* Verify that the case labels are sorted. */
4907 prev = gimple_switch_label (stmt, 0);
4908 for (i = 1; i < n; ++i)
4909 {
4910 tree c = gimple_switch_label (stmt, i);
4911 if (!CASE_LOW (c))
4912 {
4913 error ("found default case not at the start of "
4914 "case vector");
4915 err = 1;
4916 continue;
4917 }
4918 if (CASE_LOW (prev)
4919 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4920 {
4921 error ("case labels not sorted: ");
4922 print_generic_expr (stderr, prev, 0);
4923 fprintf (stderr," is greater than ");
4924 print_generic_expr (stderr, c, 0);
4925 fprintf (stderr," but comes before it.\n");
4926 err = 1;
4927 }
4928 prev = c;
4929 }
4930 /* VRP will remove the default case if it can prove it will
4931 never be executed. So do not verify there always exists
4932 a default case here. */
4933
4934 FOR_EACH_EDGE (e, ei, bb->succs)
4935 {
4936 if (!e->dest->aux)
4937 {
4938 error ("extra outgoing edge %d->%d",
4939 bb->index, e->dest->index);
4940 err = 1;
4941 }
4942
4943 e->dest->aux = (void *)2;
4944 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4945 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4946 {
4947 error ("wrong outgoing edge flags at end of bb %d",
4948 bb->index);
4949 err = 1;
4950 }
4951 }
4952
4953 /* Check that we have all of them. */
4954 for (i = 0; i < n; ++i)
4955 {
4956 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4957 basic_block label_bb = label_to_block (lab);
4958
4959 if (label_bb->aux != (void *)2)
4960 {
4961 error ("missing edge %i->%i", bb->index, label_bb->index);
4962 err = 1;
4963 }
4964 }
4965
4966 FOR_EACH_EDGE (e, ei, bb->succs)
4967 e->dest->aux = (void *)0;
4968 }
4969 break;
4970
4971 case GIMPLE_EH_DISPATCH:
4972 err |= verify_eh_dispatch_edge (stmt);
4973 break;
4974
4975 default:
4976 break;
4977 }
4978 }
4979
4980 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4981 verify_dominators (CDI_DOMINATORS);
4982
4983 return err;
4984 }
4985
4986
4987 /* Updates phi nodes after creating a forwarder block joined
4988 by edge FALLTHRU. */
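/* Schematically: the PHIs that used to live in DUMMY (= FALLTHRU->src)
are re-created in BB.  An old
x_1 = PHI <a_2(e1), a_3(e2)>
in DUMMY gets a fresh result x_4, and BB receives a new
x_1 = PHI <x_4(FALLTHRU), ...>
whose remaining arguments are filled in from the pending statements of
the redirected edges below. */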
4989
4990 static void
4991 gimple_make_forwarder_block (edge fallthru)
4992 {
4993 edge e;
4994 edge_iterator ei;
4995 basic_block dummy, bb;
4996 tree var;
4997 gimple_stmt_iterator gsi;
4998
4999 dummy = fallthru->src;
5000 bb = fallthru->dest;
5001
5002 if (single_pred_p (bb))
5003 return;
5004
5005 /* If we redirected a branch we must create new PHI nodes at the
5006 start of BB. */
5007 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5008 {
5009 gimple phi, new_phi;
5010
5011 phi = gsi_stmt (gsi);
5012 var = gimple_phi_result (phi);
5013 new_phi = create_phi_node (var, bb);
5014 SSA_NAME_DEF_STMT (var) = new_phi;
5015 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
5016 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5017 UNKNOWN_LOCATION);
5018 }
5019
5020 /* Add the arguments we have stored on edges. */
5021 FOR_EACH_EDGE (e, ei, bb->preds)
5022 {
5023 if (e == fallthru)
5024 continue;
5025
5026 flush_pending_stmts (e);
5027 }
5028 }
5029
5030
5031 /* Return a non-special label in the head of basic block BB.
5032 Create one if it doesn't exist. */
5033
5034 tree
5035 gimple_block_label (basic_block bb)
5036 {
5037 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5038 bool first = true;
5039 tree label;
5040 gimple stmt;
5041
5042 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5043 {
5044 stmt = gsi_stmt (i);
5045 if (gimple_code (stmt) != GIMPLE_LABEL)
5046 break;
5047 label = gimple_label_label (stmt);
5048 if (!DECL_NONLOCAL (label))
5049 {
5050 if (!first)
5051 gsi_move_before (&i, &s);
5052 return label;
5053 }
5054 }
5055
5056 label = create_artificial_label (UNKNOWN_LOCATION);
5057 stmt = gimple_build_label (label);
5058 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5059 return label;
5060 }
5061
5062
5063 /* Attempt to perform edge redirection by replacing a possibly complex
5064 jump instruction by a goto or by removing the jump completely.
5065 This can apply only if all edges now point to the same block. The
5066 parameters and return values are equivalent to
5067 redirect_edge_and_branch. */
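/* E.g. when both successors of a GIMPLE_COND would end up in TARGET,
the condition is deleted and the single surviving edge is turned into
a plain fallthru. */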
5068
5069 static edge
5070 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5071 {
5072 basic_block src = e->src;
5073 gimple_stmt_iterator i;
5074 gimple stmt;
5075
5076 /* We can replace or remove a complex jump only when we have exactly
5077 two edges. */
5078 if (EDGE_COUNT (src->succs) != 2
5079 /* Verify that all targets will be TARGET. Specifically, the
5080 edge that is not E must also go to TARGET. */
5081 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5082 return NULL;
5083
5084 i = gsi_last_bb (src);
5085 if (gsi_end_p (i))
5086 return NULL;
5087
5088 stmt = gsi_stmt (i);
5089
5090 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5091 {
5092 gsi_remove (&i, true);
5093 e = ssa_redirect_edge (e, target);
5094 e->flags = EDGE_FALLTHRU;
5095 return e;
5096 }
5097
5098 return NULL;
5099 }
5100
5101
5102 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5103 edge representing the redirected branch. */
5104
5105 static edge
5106 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5107 {
5108 basic_block bb = e->src;
5109 gimple_stmt_iterator gsi;
5110 edge ret;
5111 gimple stmt;
5112
5113 if (e->flags & EDGE_ABNORMAL)
5114 return NULL;
5115
5116 if (e->dest == dest)
5117 return NULL;
5118
5119 if (e->flags & EDGE_EH)
5120 return redirect_eh_edge (e, dest);
5121
5122 if (e->src != ENTRY_BLOCK_PTR)
5123 {
5124 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5125 if (ret)
5126 return ret;
5127 }
5128
5129 gsi = gsi_last_bb (bb);
5130 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5131
5132 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5133 {
5134 case GIMPLE_COND:
5135 /* For COND_EXPR, we only need to redirect the edge. */
5136 break;
5137
5138 case GIMPLE_GOTO:
5139 /* No non-abnormal edges should lead from a non-simple goto, and
5140 simple ones should be represented implicitly. */
5141 gcc_unreachable ();
5142
5143 case GIMPLE_SWITCH:
5144 {
5145 tree label = gimple_block_label (dest);
5146 tree cases = get_cases_for_edge (e, stmt);
5147
5148 /* If we have a list of cases associated with E, then use it
5149 as it's a lot faster than walking the entire case vector. */
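/* E.g. if E carries "case 1:" and "case 3:" of a large switch, only
those two CASE_LABELs are rewritten to the new destination instead of
scanning the whole case vector. */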
5150 if (cases)
5151 {
5152 edge e2 = find_edge (e->src, dest);
5153 tree last, first;
5154
5155 first = cases;
5156 while (cases)
5157 {
5158 last = cases;
5159 CASE_LABEL (cases) = label;
5160 cases = CASE_CHAIN (cases);
5161 }
5162
5163 /* If there was already an edge in the CFG, then we need
5164 to move all the cases associated with E to E2. */
5165 if (e2)
5166 {
5167 tree cases2 = get_cases_for_edge (e2, stmt);
5168
5169 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5170 CASE_CHAIN (cases2) = first;
5171 }
5172 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5173 }
5174 else
5175 {
5176 size_t i, n = gimple_switch_num_labels (stmt);
5177
5178 for (i = 0; i < n; i++)
5179 {
5180 tree elt = gimple_switch_label (stmt, i);
5181 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5182 CASE_LABEL (elt) = label;
5183 }
5184 }
5185 }
5186 break;
5187
5188 case GIMPLE_ASM:
5189 {
5190 int i, n = gimple_asm_nlabels (stmt);
5191 tree label = NULL;
5192
5193 for (i = 0; i < n; ++i)
5194 {
5195 tree cons = gimple_asm_label_op (stmt, i);
5196 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5197 {
5198 if (!label)
5199 label = gimple_block_label (dest);
5200 TREE_VALUE (cons) = label;
5201 }
5202 }
5203
5204 /* If we didn't find any label matching the former edge in the
5205 asm labels, we must be redirecting the fallthrough
5206 edge. */
5207 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5208 }
5209 break;
5210
5211 case GIMPLE_RETURN:
5212 gsi_remove (&gsi, true);
5213 e->flags |= EDGE_FALLTHRU;
5214 break;
5215
5216 case GIMPLE_OMP_RETURN:
5217 case GIMPLE_OMP_CONTINUE:
5218 case GIMPLE_OMP_SECTIONS_SWITCH:
5219 case GIMPLE_OMP_FOR:
5220 /* The edges from OMP constructs can be simply redirected. */
5221 break;
5222
5223 case GIMPLE_EH_DISPATCH:
5224 if (!(e->flags & EDGE_FALLTHRU))
5225 redirect_eh_dispatch_edge (stmt, e, dest);
5226 break;
5227
5228 case GIMPLE_TRANSACTION:
5229 /* The ABORT edge has a stored label associated with it, otherwise
5230 the edges are simply redirectable. */
5231 if (e->flags == 0)
5232 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5233 break;
5234
5235 default:
5236 /* Otherwise it must be a fallthru edge, and we don't need to
5237 do anything besides redirecting it. */
5238 gcc_assert (e->flags & EDGE_FALLTHRU);
5239 break;
5240 }
5241
5242 /* Update/insert PHI nodes as necessary. */
5243
5244 /* Now update the edges in the CFG. */
5245 e = ssa_redirect_edge (e, dest);
5246
5247 return e;
5248 }
5249
5250 /* Returns true if it is possible to remove edge E by redirecting
5251 it to the destination of the other edge from E->src. */
5252
5253 static bool
5254 gimple_can_remove_branch_p (const_edge e)
5255 {
5256 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5257 return false;
5258
5259 return true;
5260 }
5261
5262 /* Simple wrapper, as we can always redirect fallthru edges. */
5263
5264 static basic_block
5265 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5266 {
5267 e = gimple_redirect_edge_and_branch (e, dest);
5268 gcc_assert (e);
5269
5270 return NULL;
5271 }
5272
5273
5274 /* Splits basic block BB after statement STMT (but at least after the
5275 labels). If STMT is NULL, BB is split just after the labels. */
5276
5277 static basic_block
5278 gimple_split_block (basic_block bb, void *stmt)
5279 {
5280 gimple_stmt_iterator gsi;
5281 gimple_stmt_iterator gsi_tgt;
5282 gimple act;
5283 gimple_seq list;
5284 basic_block new_bb;
5285 edge e;
5286 edge_iterator ei;
5287
5288 new_bb = create_empty_bb (bb);
5289
5290 /* Redirect the outgoing edges. */
5291 new_bb->succs = bb->succs;
5292 bb->succs = NULL;
5293 FOR_EACH_EDGE (e, ei, new_bb->succs)
5294 e->src = new_bb;
5295
5296 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5297 stmt = NULL;
5298
5299 /* Move everything from GSI to the new basic block. */
5300 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5301 {
5302 act = gsi_stmt (gsi);
5303 if (gimple_code (act) == GIMPLE_LABEL)
5304 continue;
5305
5306 if (!stmt)
5307 break;
5308
5309 if (stmt == act)
5310 {
5311 gsi_next (&gsi);
5312 break;
5313 }
5314 }
5315
5316 if (gsi_end_p (gsi))
5317 return new_bb;
5318
5319 /* Split the statement list; avoid re-creating new containers as this
5320 brings ugly quadratic memory consumption in the inliner.
5321 (We are still quadratic since we need to update stmt BB pointers,
5322 sadly.) */
5323 gsi_split_seq_before (&gsi, &list);
5324 set_bb_seq (new_bb, list);
5325 for (gsi_tgt = gsi_start (list);
5326 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5327 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5328
5329 return new_bb;
5330 }
5331
5332
5333 /* Moves basic block BB after block AFTER. */
5334
5335 static bool
5336 gimple_move_block_after (basic_block bb, basic_block after)
5337 {
5338 if (bb->prev_bb == after)
5339 return true;
5340
5341 unlink_block (bb);
5342 link_block (bb, after);
5343
5344 return true;
5345 }
5346
5347
5348 /* Return true if a basic block can be duplicated. */
5349
5350 static bool
5351 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5352 {
5353 return true;
5354 }
5355
5356 /* Create a duplicate of the basic block BB. NOTE: This does not
5357 preserve SSA form. */
5358
5359 static basic_block
5360 gimple_duplicate_bb (basic_block bb)
5361 {
5362 basic_block new_bb;
5363 gimple_stmt_iterator gsi, gsi_tgt;
5364 gimple_seq phis = phi_nodes (bb);
5365 gimple phi, stmt, copy;
5366
5367 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5368
5369 /* Copy the PHI nodes. We ignore PHI node arguments here because
5370 the incoming edges have not been set up yet. */
5371 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5372 {
5373 phi = gsi_stmt (gsi);
5374 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5375 create_new_def_for (gimple_phi_result (copy), copy,
5376 gimple_phi_result_ptr (copy));
5377 }
5378
5379 gsi_tgt = gsi_start_bb (new_bb);
5380 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5381 {
5382 def_operand_p def_p;
5383 ssa_op_iter op_iter;
5384 tree lhs;
5385
5386 stmt = gsi_stmt (gsi);
5387 if (gimple_code (stmt) == GIMPLE_LABEL)
5388 continue;
5389
5390 /* Don't duplicate label debug stmts. */
5391 if (gimple_debug_bind_p (stmt)
5392 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5393 == LABEL_DECL)
5394 continue;
5395
5396 /* Create a new copy of STMT and duplicate STMT's virtual
5397 operands. */
5398 copy = gimple_copy (stmt);
5399 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5400
5401 maybe_duplicate_eh_stmt (copy, stmt);
5402 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5403
5404 /* When copying around a stmt writing into a local non-user
5405 aggregate, make sure it won't share stack slot with other
5406 vars. */
5407 lhs = gimple_get_lhs (stmt);
5408 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5409 {
5410 tree base = get_base_address (lhs);
5411 if (base
5412 && (TREE_CODE (base) == VAR_DECL
5413 || TREE_CODE (base) == RESULT_DECL)
5414 && DECL_IGNORED_P (base)
5415 && !TREE_STATIC (base)
5416 && !DECL_EXTERNAL (base)
5417 && (TREE_CODE (base) != VAR_DECL
5418 || !DECL_HAS_VALUE_EXPR_P (base)))
5419 DECL_NONSHAREABLE (base) = 1;
5420 }
5421
5422 /* Create new names for all the definitions created by COPY and
5423 add replacement mappings for each new name. */
5424 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5425 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5426 }
5427
5428 return new_bb;
5429 }
5430
5431 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5432
5433 static void
5434 add_phi_args_after_copy_edge (edge e_copy)
5435 {
5436 basic_block bb, bb_copy = e_copy->src, dest;
5437 edge e;
5438 edge_iterator ei;
5439 gimple phi, phi_copy;
5440 tree def;
5441 gimple_stmt_iterator psi, psi_copy;
5442
5443 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5444 return;
5445
5446 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5447
5448 if (e_copy->dest->flags & BB_DUPLICATED)
5449 dest = get_bb_original (e_copy->dest);
5450 else
5451 dest = e_copy->dest;
5452
5453 e = find_edge (bb, dest);
5454 if (!e)
5455 {
5456 /* During loop unrolling the target of the latch edge is copied.
5457 In this case we are not looking for the edge to DEST, but for
5458 the edge to the duplicated block whose original was DEST. */
5459 FOR_EACH_EDGE (e, ei, bb->succs)
5460 {
5461 if ((e->dest->flags & BB_DUPLICATED)
5462 && get_bb_original (e->dest) == dest)
5463 break;
5464 }
5465
5466 gcc_assert (e != NULL);
5467 }
5468
5469 for (psi = gsi_start_phis (e->dest),
5470 psi_copy = gsi_start_phis (e_copy->dest);
5471 !gsi_end_p (psi);
5472 gsi_next (&psi), gsi_next (&psi_copy))
5473 {
5474 phi = gsi_stmt (psi);
5475 phi_copy = gsi_stmt (psi_copy);
5476 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5477 add_phi_arg (phi_copy, def, e_copy,
5478 gimple_phi_arg_location_from_edge (phi, e));
5479 }
5480 }
5481
5482
5483 /* Basic block BB_COPY was created by code duplication. Add phi node
5484 arguments for edges going out of BB_COPY. The blocks that were
5485 duplicated have BB_DUPLICATED set. */
5486
5487 void
5488 add_phi_args_after_copy_bb (basic_block bb_copy)
5489 {
5490 edge e_copy;
5491 edge_iterator ei;
5492
5493 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5494 {
5495 add_phi_args_after_copy_edge (e_copy);
5496 }
5497 }
5498
5499 /* Blocks in REGION_COPY array of length N_REGION were created by
5500 duplication of basic blocks. Add phi node arguments for edges
5501 going from these blocks. If E_COPY is not NULL, also add
5502 phi node arguments for its destination. */
5503
5504 void
5505 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5506 edge e_copy)
5507 {
5508 unsigned i;
5509
5510 for (i = 0; i < n_region; i++)
5511 region_copy[i]->flags |= BB_DUPLICATED;
5512
5513 for (i = 0; i < n_region; i++)
5514 add_phi_args_after_copy_bb (region_copy[i]);
5515 if (e_copy)
5516 add_phi_args_after_copy_edge (e_copy);
5517
5518 for (i = 0; i < n_region; i++)
5519 region_copy[i]->flags &= ~BB_DUPLICATED;
5520 }
5521
5522 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5523 important exit edge EXIT. By important we mean that no SSA name defined
5524 inside region is live over the other exit edges of the region. All entry
5525 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5526 to the duplicate of the region. SSA form, dominance and loop information
5527 is updated. The new basic blocks are stored to REGION_COPY in the same
5528 order as they had in REGION, provided that REGION_COPY is not NULL.
5529 The function returns false if it is unable to copy the region,
5530 true otherwise. */
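/* For the primary use, loop header copying, the effect is roughly that

while (cond) body;

becomes

if (cond) { body; while (cond) body; } */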
5531
5532 bool
5533 gimple_duplicate_sese_region (edge entry, edge exit,
5534 basic_block *region, unsigned n_region,
5535 basic_block *region_copy)
5536 {
5537 unsigned i;
5538 bool free_region_copy = false, copying_header = false;
5539 struct loop *loop = entry->dest->loop_father;
5540 edge exit_copy;
5541 VEC (basic_block, heap) *doms;
5542 edge redirected;
5543 int total_freq = 0, entry_freq = 0;
5544 gcov_type total_count = 0, entry_count = 0;
5545
5546 if (!can_copy_bbs_p (region, n_region))
5547 return false;
5548
5549 /* Some sanity checking. Note that we do not check for all possible
5550 misuses of the functions.  That is, if you ask to copy something weird,
5551 it will work, but the state of structures probably will not be
5552 correct. */
5553 for (i = 0; i < n_region; i++)
5554 {
5555 /* We do not handle subloops, i.e. all the blocks must belong to the
5556 same loop. */
5557 if (region[i]->loop_father != loop)
5558 return false;
5559
5560 if (region[i] != entry->dest
5561 && region[i] == loop->header)
5562 return false;
5563 }
5564
5565 set_loop_copy (loop, loop);
5566
5567 /* In case the function is used for loop header copying (which is the primary
5568 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5569 if (loop->header == entry->dest)
5570 {
5571 copying_header = true;
5572 set_loop_copy (loop, loop_outer (loop));
5573
5574 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5575 return false;
5576
5577 for (i = 0; i < n_region; i++)
5578 if (region[i] != exit->src
5579 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5580 return false;
5581 }
5582
5583 if (!region_copy)
5584 {
5585 region_copy = XNEWVEC (basic_block, n_region);
5586 free_region_copy = true;
5587 }
5588
5589 gcc_assert (!need_ssa_update_p (cfun));
5590
5591 /* Record blocks outside the region that are dominated by something
5592 inside. */
5593 doms = NULL;
5594 initialize_original_copy_tables ();
5595
5596 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5597
5598 if (entry->dest->count)
5599 {
5600 total_count = entry->dest->count;
5601 entry_count = entry->count;
5602 /* Fix up corner cases, to avoid division by zero or creation of negative
5603 frequencies. */
5604 if (entry_count > total_count)
5605 entry_count = total_count;
5606 }
5607 else
5608 {
5609 total_freq = entry->dest->frequency;
5610 entry_freq = EDGE_FREQUENCY (entry);
5611 /* Fix up corner cases, to avoid division by zero or creation of negative
5612 frequencies. */
5613 if (total_freq == 0)
5614 total_freq = 1;
5615 else if (entry_freq > total_freq)
5616 entry_freq = total_freq;
5617 }
5618
5619 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5620 split_edge_bb_loc (entry));
5621 if (total_count)
5622 {
5623 scale_bbs_frequencies_gcov_type (region, n_region,
5624 total_count - entry_count,
5625 total_count);
5626 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5627 total_count);
5628 }
5629 else
5630 {
5631 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5632 total_freq);
5633 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5634 }
5635
5636 if (copying_header)
5637 {
5638 loop->header = exit->dest;
5639 loop->latch = exit->src;
5640 }
5641
5642 /* Redirect the entry and add the phi node arguments. */
5643 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5644 gcc_assert (redirected != NULL);
5645 flush_pending_stmts (entry);
5646
5647 /* Concerning updating of dominators: We must recount dominators
5648 for entry block and its copy. Anything that is outside of the
5649 region, but was dominated by something inside needs recounting as
5650 well. */
5651 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5652 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5653 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5654 VEC_free (basic_block, heap, doms);
5655
5656 /* Add the other PHI node arguments. */
5657 add_phi_args_after_copy (region_copy, n_region, NULL);
5658
5659 /* Update the SSA web. */
5660 update_ssa (TODO_update_ssa);
5661
5662 if (free_region_copy)
5663 free (region_copy);
5664
5665 free_original_copy_tables ();
5666 return true;
5667 }
5668
5669 /* Checks if BB is part of the region defined by N_REGION BBS. */
5670 static bool
5671 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5672 {
5673 unsigned int n;
5674
5675 for (n = 0; n < n_region; n++)
5676 {
5677 if (bb == bbs[n])
5678 return true;
5679 }
5680 return false;
5681 }
5682
5683 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5684 are stored to REGION_COPY in the same order in which they appear
5685 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5686 the region, EXIT an exit from it. The condition guarding EXIT
5687 is moved to ENTRY. Returns true if duplication succeeds, false
5688 otherwise.
5689
5690 For example,
5691
5692 some_code;
5693 if (cond)
5694 A;
5695 else
5696 B;
5697
5698 is transformed to
5699
5700 if (cond)
5701 {
5702 some_code;
5703 A;
5704 }
5705 else
5706 {
5707 some_code;
5708 B;
5709 }
5710 */
5711
5712 bool
5713 gimple_duplicate_sese_tail (edge entry, edge exit,
5714 basic_block *region, unsigned n_region,
5715 basic_block *region_copy)
5716 {
5717 unsigned i;
5718 bool free_region_copy = false;
5719 struct loop *loop = exit->dest->loop_father;
5720 struct loop *orig_loop = entry->dest->loop_father;
5721 basic_block switch_bb, entry_bb, nentry_bb;
5722 VEC (basic_block, heap) *doms;
5723 int total_freq = 0, exit_freq = 0;
5724 gcov_type total_count = 0, exit_count = 0;
5725 edge exits[2], nexits[2], e;
5726 gimple_stmt_iterator gsi;
5727 gimple cond_stmt;
5728 edge sorig, snew;
5729 basic_block exit_bb;
5730 gimple_stmt_iterator psi;
5731 gimple phi;
5732 tree def;
5733 struct loop *target, *aloop, *cloop;
5734
5735 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5736 exits[0] = exit;
5737 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5738
5739 if (!can_copy_bbs_p (region, n_region))
5740 return false;
5741
5742 initialize_original_copy_tables ();
5743 set_loop_copy (orig_loop, loop);
5744
5745 target = loop;
5746 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5747 {
5748 if (bb_part_of_region_p (aloop->header, region, n_region))
5749 {
5750 cloop = duplicate_loop (aloop, target);
5751 duplicate_subloops (aloop, cloop);
5752 }
5753 }
5754
5755 if (!region_copy)
5756 {
5757 region_copy = XNEWVEC (basic_block, n_region);
5758 free_region_copy = true;
5759 }
5760
5761 gcc_assert (!need_ssa_update_p (cfun));
5762
5763 /* Record blocks outside the region that are dominated by something
5764 inside. */
5765 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5766
5767 if (exit->src->count)
5768 {
5769 total_count = exit->src->count;
5770 exit_count = exit->count;
5771 /* Fix up corner cases, to avoid division by zero or creation of negative
5772 frequencies. */
5773 if (exit_count > total_count)
5774 exit_count = total_count;
5775 }
5776 else
5777 {
5778 total_freq = exit->src->frequency;
5779 exit_freq = EDGE_FREQUENCY (exit);
5780 /* Fix up corner cases, to avoid division by zero or creation of negative
5781 frequencies. */
5782 if (total_freq == 0)
5783 total_freq = 1;
5784 if (exit_freq > total_freq)
5785 exit_freq = total_freq;
5786 }
5787
5788 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5789 split_edge_bb_loc (exit));
5790 if (total_count)
5791 {
5792 scale_bbs_frequencies_gcov_type (region, n_region,
5793 total_count - exit_count,
5794 total_count);
5795 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5796 total_count);
5797 }
5798 else
5799 {
5800 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5801 total_freq);
5802 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5803 }
5804
5805 /* Create the switch block, and put the exit condition to it. */
5806 entry_bb = entry->dest;
5807 nentry_bb = get_bb_copy (entry_bb);
5808 if (!last_stmt (entry->src)
5809 || !stmt_ends_bb_p (last_stmt (entry->src)))
5810 switch_bb = entry->src;
5811 else
5812 switch_bb = split_edge (entry);
5813 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5814
5815 gsi = gsi_last_bb (switch_bb);
5816 cond_stmt = last_stmt (exit->src);
5817 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5818 cond_stmt = gimple_copy (cond_stmt);
5819
5820 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5821
5822 sorig = single_succ_edge (switch_bb);
5823 sorig->flags = exits[1]->flags;
5824 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5825
5826 /* Register the new edge from SWITCH_BB in loop exit lists. */
5827 rescan_loop_exit (snew, true, false);
5828
5829 /* Add the PHI node arguments. */
5830 add_phi_args_after_copy (region_copy, n_region, snew);
5831
5832 /* Get rid of now superfluous conditions and associated edges (and phi node
5833 arguments). */
5834 exit_bb = exit->dest;
5835
5836 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5837 PENDING_STMT (e) = NULL;
5838
5839 /* The latch of ORIG_LOOP was copied, and so was the backedge
5840 to the original header. We redirect this backedge to EXIT_BB. */
5841 for (i = 0; i < n_region; i++)
5842 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5843 {
5844 gcc_assert (single_succ_edge (region_copy[i]));
5845 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5846 PENDING_STMT (e) = NULL;
5847 for (psi = gsi_start_phis (exit_bb);
5848 !gsi_end_p (psi);
5849 gsi_next (&psi))
5850 {
5851 phi = gsi_stmt (psi);
5852 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5853 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5854 }
5855 }
5856 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5857 PENDING_STMT (e) = NULL;
5858
5859 /* Anything that is outside of the region, but was dominated by something
5860 inside needs to update dominance info. */
5861 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5862 VEC_free (basic_block, heap, doms);
5863 /* Update the SSA web. */
5864 update_ssa (TODO_update_ssa);
5865
5866 if (free_region_copy)
5867 free (region_copy);
5868
5869 free_original_copy_tables ();
5870 return true;
5871 }
5872
5873 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5874 adding blocks when the dominator traversal reaches EXIT. This
5875 function silently assumes that ENTRY strictly dominates EXIT. */
5876
5877 void
5878 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5879 VEC(basic_block,heap) **bbs_p)
5880 {
5881 basic_block son;
5882
5883 for (son = first_dom_son (CDI_DOMINATORS, entry);
5884 son;
5885 son = next_dom_son (CDI_DOMINATORS, son))
5886 {
5887 VEC_safe_push (basic_block, heap, *bbs_p, son);
5888 if (son != exit)
5889 gather_blocks_in_sese_region (son, exit, bbs_p);
5890 }
5891 }
5892
5893 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5894 The duplicates are recorded in VARS_MAP. */
5895
5896 static void
5897 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5898 tree to_context)
5899 {
5900 tree t = *tp, new_t;
5901 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5902 void **loc;
5903
5904 if (DECL_CONTEXT (t) == to_context)
5905 return;
5906
5907 loc = pointer_map_contains (vars_map, t);
5908
5909 if (!loc)
5910 {
5911 loc = pointer_map_insert (vars_map, t);
5912
5913 if (SSA_VAR_P (t))
5914 {
5915 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5916 add_local_decl (f, new_t);
5917 }
5918 else
5919 {
5920 gcc_assert (TREE_CODE (t) == CONST_DECL);
5921 new_t = copy_node (t);
5922 }
5923 DECL_CONTEXT (new_t) = to_context;
5924
5925 *loc = new_t;
5926 }
5927 else
5928 new_t = (tree) *loc;
5929
5930 *tp = new_t;
5931 }
5932
5933
5934 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5935 VARS_MAP maps old ssa names and var_decls to the new ones. */
5936
5937 static tree
5938 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5939 tree to_context)
5940 {
5941 void **loc;
5942 tree new_name, decl = SSA_NAME_VAR (name);
5943
5944 gcc_assert (is_gimple_reg (name));
5945
5946 loc = pointer_map_contains (vars_map, name);
5947
5948 if (!loc)
5949 {
5950 replace_by_duplicate_decl (&decl, vars_map, to_context);
5951
5952 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5953 if (gimple_in_ssa_p (cfun))
5954 add_referenced_var (decl);
5955
5956 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5957 if (SSA_NAME_IS_DEFAULT_DEF (name))
5958 set_default_def (decl, new_name);
5959 pop_cfun ();
5960
5961 loc = pointer_map_insert (vars_map, name);
5962 *loc = new_name;
5963 }
5964 else
5965 new_name = (tree) *loc;
5966
5967 return new_name;
5968 }
5969
5970 struct move_stmt_d
5971 {
5972 tree orig_block;
5973 tree new_block;
5974 tree from_context;
5975 tree to_context;
5976 struct pointer_map_t *vars_map;
5977 htab_t new_label_map;
5978 struct pointer_map_t *eh_map;
5979 bool remap_decls_p;
5980 };
5981
5982 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5983 contained in *TP if it was previously set to ORIG_BLOCK, and change
5984 the DECL_CONTEXT of every local variable referenced in *TP. */
5985
5986 static tree
5987 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5988 {
5989 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5990 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5991 tree t = *tp;
5992
5993 if (EXPR_P (t))
5994 /* We should never have TREE_BLOCK set on non-statements. */
5995 gcc_assert (!TREE_BLOCK (t));
5996
5997 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5998 {
5999 if (TREE_CODE (t) == SSA_NAME)
6000 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6001 else if (TREE_CODE (t) == LABEL_DECL)
6002 {
6003 if (p->new_label_map)
6004 {
6005 struct tree_map in, *out;
6006 in.base.from = t;
6007 out = (struct tree_map *)
6008 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6009 if (out)
6010 *tp = t = out->to;
6011 }
6012
6013 DECL_CONTEXT (t) = p->to_context;
6014 }
6015 else if (p->remap_decls_p)
6016 {
6017 /* Replace T with its duplicate. T should no longer appear in the
6018 parent function, so this looks wasteful; however, it may appear
6019 in referenced_vars, and more importantly, as virtual operands of
6020 statements, and in alias lists of other variables. It would be
6021 quite difficult to expunge it from all those places. ??? It might
6022 suffice to do this for addressable variables. */
6023 if ((TREE_CODE (t) == VAR_DECL
6024 && !is_global_var (t))
6025 || TREE_CODE (t) == CONST_DECL)
6026 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6027
6028 if (SSA_VAR_P (t)
6029 && gimple_in_ssa_p (cfun))
6030 {
6031 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
6032 add_referenced_var (*tp);
6033 pop_cfun ();
6034 }
6035 }
6036 *walk_subtrees = 0;
6037 }
6038 else if (TYPE_P (t))
6039 *walk_subtrees = 0;
6040
6041 return NULL_TREE;
6042 }
6043
6044 /* Helper for move_stmt_r. Given an EH region number for the source
6045 function, map that to the duplicate EH region number in the dest. */
6046
6047 static int
6048 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6049 {
6050 eh_region old_r, new_r;
6051 void **slot;
6052
6053 old_r = get_eh_region_from_number (old_nr);
6054 slot = pointer_map_contains (p->eh_map, old_r);
6055 new_r = (eh_region) *slot;
6056
6057 return new_r->index;
6058 }
6059
6060 /* Similar, but operate on INTEGER_CSTs. */
6061
6062 static tree
6063 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6064 {
6065 int old_nr, new_nr;
6066
6067 old_nr = tree_low_cst (old_t_nr, 0);
6068 new_nr = move_stmt_eh_region_nr (old_nr, p);
6069
6070 return build_int_cst (integer_type_node, new_nr);
6071 }
6072
6073 /* Like move_stmt_op, but for gimple statements.
6074
6075 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6076 contained in the current statement in *GSI_P and change the
6077 DECL_CONTEXT of every local variable referenced in the current
6078 statement. */
6079
6080 static tree
6081 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6082 struct walk_stmt_info *wi)
6083 {
6084 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6085 gimple stmt = gsi_stmt (*gsi_p);
6086 tree block = gimple_block (stmt);
6087
6088 if (p->orig_block == NULL_TREE
6089 || block == p->orig_block
6090 || block == NULL_TREE)
6091 gimple_set_block (stmt, p->new_block);
6092 #ifdef ENABLE_CHECKING
6093 else if (block != p->new_block)
6094 {
6095 while (block && block != p->orig_block)
6096 block = BLOCK_SUPERCONTEXT (block);
6097 gcc_assert (block);
6098 }
6099 #endif
6100
6101 switch (gimple_code (stmt))
6102 {
6103 case GIMPLE_CALL:
6104 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6105 {
6106 tree r, fndecl = gimple_call_fndecl (stmt);
6107 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6108 switch (DECL_FUNCTION_CODE (fndecl))
6109 {
6110 case BUILT_IN_EH_COPY_VALUES:
6111 r = gimple_call_arg (stmt, 1);
6112 r = move_stmt_eh_region_tree_nr (r, p);
6113 gimple_call_set_arg (stmt, 1, r);
6114 /* FALLTHRU */
6115
6116 case BUILT_IN_EH_POINTER:
6117 case BUILT_IN_EH_FILTER:
6118 r = gimple_call_arg (stmt, 0);
6119 r = move_stmt_eh_region_tree_nr (r, p);
6120 gimple_call_set_arg (stmt, 0, r);
6121 break;
6122
6123 default:
6124 break;
6125 }
6126 }
6127 break;
6128
6129 case GIMPLE_RESX:
6130 {
6131 int r = gimple_resx_region (stmt);
6132 r = move_stmt_eh_region_nr (r, p);
6133 gimple_resx_set_region (stmt, r);
6134 }
6135 break;
6136
6137 case GIMPLE_EH_DISPATCH:
6138 {
6139 int r = gimple_eh_dispatch_region (stmt);
6140 r = move_stmt_eh_region_nr (r, p);
6141 gimple_eh_dispatch_set_region (stmt, r);
6142 }
6143 break;
6144
6145 case GIMPLE_OMP_RETURN:
6146 case GIMPLE_OMP_CONTINUE:
6147 break;
6148 default:
6149 if (is_gimple_omp (stmt))
6150 {
6151 /* Do not remap variables inside OMP directives. Variables
6152 referenced in clauses and directive header belong to the
6153 parent function and should not be moved into the child
6154 function. */
6155 bool save_remap_decls_p = p->remap_decls_p;
6156 p->remap_decls_p = false;
6157 *handled_ops_p = true;
6158
6159 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6160 move_stmt_op, wi);
6161
6162 p->remap_decls_p = save_remap_decls_p;
6163 }
6164 break;
6165 }
6166
6167 return NULL_TREE;
6168 }
6169
6170 /* Move basic block BB from function CFUN to function DEST_FN. The
6171 block is moved out of the original linked list and placed after
6172 block AFTER in the new list. Also, the block is removed from the
6173 original array of blocks and placed in DEST_FN's array of blocks.
6174 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6175 updated to reflect the moved edges.
6176
6177 The local variables are remapped to new instances; VARS_MAP is used
6178 to record the mapping. */
6179
6180 static void
6181 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6182 basic_block after, bool update_edge_count_p,
6183 struct move_stmt_d *d)
6184 {
6185 struct control_flow_graph *cfg;
6186 edge_iterator ei;
6187 edge e;
6188 gimple_stmt_iterator si;
6189 unsigned old_len, new_len;
6190
6191 /* Remove BB from dominance structures. */
6192 delete_from_dominance_info (CDI_DOMINATORS, bb);
6193 if (current_loops)
6194 remove_bb_from_loops (bb);
6195
6196 /* Link BB to the new linked list. */
6197 move_block_after (bb, after);
6198
6199 /* Update the edge count in the corresponding flowgraphs. */
6200 if (update_edge_count_p)
6201 FOR_EACH_EDGE (e, ei, bb->succs)
6202 {
6203 cfun->cfg->x_n_edges--;
6204 dest_cfun->cfg->x_n_edges++;
6205 }
6206
6207 /* Remove BB from the original basic block array. */
6208 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
6209 cfun->cfg->x_n_basic_blocks--;
6210
6211 /* Grow DEST_CFUN's basic block array if needed. */
6212 cfg = dest_cfun->cfg;
6213 cfg->x_n_basic_blocks++;
6214 if (bb->index >= cfg->x_last_basic_block)
6215 cfg->x_last_basic_block = bb->index + 1;
6216
6217 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
6218 if ((unsigned) cfg->x_last_basic_block >= old_len)
6219 {
6220 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6221 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
6222 new_len);
6223 }
6224
6225 VEC_replace (basic_block, cfg->x_basic_block_info,
6226 bb->index, bb);
6227
6228 /* Remap the variables in phi nodes. */
6229 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6230 {
6231 gimple phi = gsi_stmt (si);
6232 use_operand_p use;
6233 tree op = PHI_RESULT (phi);
6234 ssa_op_iter oi;
6235
6236 if (!is_gimple_reg (op))
6237 {
6238 /* Remove the phi nodes for virtual operands (alias analysis will be
6239 run for the new function, anyway). */
6240 remove_phi_node (&si, true);
6241 continue;
6242 }
6243
6244 SET_PHI_RESULT (phi,
6245 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6246 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6247 {
6248 op = USE_FROM_PTR (use);
6249 if (TREE_CODE (op) == SSA_NAME)
6250 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6251 }
6252
6253 gsi_next (&si);
6254 }
6255
6256 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6257 {
6258 gimple stmt = gsi_stmt (si);
6259 struct walk_stmt_info wi;
6260
6261 memset (&wi, 0, sizeof (wi));
6262 wi.info = d;
6263 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6264
6265 if (gimple_code (stmt) == GIMPLE_LABEL)
6266 {
6267 tree label = gimple_label_label (stmt);
6268 int uid = LABEL_DECL_UID (label);
6269
6270 gcc_assert (uid > -1);
6271
6272 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
6273 if (old_len <= (unsigned) uid)
6274 {
6275 new_len = 3 * uid / 2 + 1;
6276 VEC_safe_grow_cleared (basic_block, gc,
6277 cfg->x_label_to_block_map, new_len);
6278 }
6279
6280 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
6281 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
6282
6283 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6284
6285 if (uid >= dest_cfun->cfg->last_label_uid)
6286 dest_cfun->cfg->last_label_uid = uid + 1;
6287 }
6288
6289 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6290 remove_stmt_from_eh_lp_fn (cfun, stmt);
6291
6292 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6293 gimple_remove_stmt_histograms (cfun, stmt);
6294
6295 /* We cannot leave any operands allocated from the operand caches of
6296 the current function. */
6297 free_stmt_operands (stmt);
6298 push_cfun (dest_cfun);
6299 update_stmt (stmt);
6300 pop_cfun ();
6301 }
6302
6303 FOR_EACH_EDGE (e, ei, bb->succs)
6304 if (e->goto_locus)
6305 {
6306 tree block = e->goto_block;
6307 if (d->orig_block == NULL_TREE
6308 || block == d->orig_block)
6309 e->goto_block = d->new_block;
6310 #ifdef ENABLE_CHECKING
6311 else if (block != d->new_block)
6312 {
6313 while (block && block != d->orig_block)
6314 block = BLOCK_SUPERCONTEXT (block);
6315 gcc_assert (block);
6316 }
6317 #endif
6318 }
6319 }
6320
6321 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6322 the outermost EH region. Use REGION as the incoming base EH region. */
6323
6324 static eh_region
6325 find_outermost_region_in_block (struct function *src_cfun,
6326 basic_block bb, eh_region region)
6327 {
6328 gimple_stmt_iterator si;
6329
6330 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6331 {
6332 gimple stmt = gsi_stmt (si);
6333 eh_region stmt_region;
6334 int lp_nr;
6335
6336 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6337 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6338 if (stmt_region)
6339 {
6340 if (region == NULL)
6341 region = stmt_region;
6342 else if (stmt_region != region)
6343 {
6344 region = eh_region_outermost (src_cfun, stmt_region, region);
6345 gcc_assert (region != NULL);
6346 }
6347 }
6348 }
6349
6350 return region;
6351 }
6352
6353 static tree
6354 new_label_mapper (tree decl, void *data)
6355 {
6356 htab_t hash = (htab_t) data;
6357 struct tree_map *m;
6358 void **slot;
6359
6360 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6361
6362 m = XNEW (struct tree_map);
6363 m->hash = DECL_UID (decl);
6364 m->base.from = decl;
6365 m->to = create_artificial_label (UNKNOWN_LOCATION);
6366 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6367 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6368 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6369
6370 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6371 gcc_assert (*slot == NULL);
6372
6373 *slot = m;
6374
6375 return m->to;
6376 }
6377
6378 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6379 subblocks. */
6380
6381 static void
6382 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6383 tree to_context)
6384 {
6385 tree *tp, t;
6386
6387 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6388 {
6389 t = *tp;
6390 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6391 continue;
6392 replace_by_duplicate_decl (&t, vars_map, to_context);
6393 if (t != *tp)
6394 {
6395 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6396 {
6397 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6398 DECL_HAS_VALUE_EXPR_P (t) = 1;
6399 }
6400 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6401 *tp = t;
6402 }
6403 }
6404
6405 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6406 replace_block_vars_by_duplicates (block, vars_map, to_context);
6407 }
6408
6409 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6410 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6411 single basic block in the original CFG and the new basic block is
6412 returned. DEST_CFUN must not have a CFG yet.
6413
6414 Note that the region need not be a pure SESE region. Blocks inside
6415 the region may contain calls to abort/exit. The only restriction
6416 is that ENTRY_BB should be the only entry point and it must
6417 dominate EXIT_BB.
6418
6419 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6420 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6421 to the new function.
6422
6423 All local variables referenced in the region are assumed to be in
6424 the corresponding BLOCK_VARS and unexpanded variable lists
6425 associated with DEST_CFUN. */
6426
6427 basic_block
6428 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6429 basic_block exit_bb, tree orig_block)
6430 {
6431 VEC(basic_block,heap) *bbs, *dom_bbs;
6432 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6433 basic_block after, bb, *entry_pred, *exit_succ, abb;
6434 struct function *saved_cfun = cfun;
6435 int *entry_flag, *exit_flag;
6436 unsigned *entry_prob, *exit_prob;
6437 unsigned i, num_entry_edges, num_exit_edges;
6438 edge e;
6439 edge_iterator ei;
6440 htab_t new_label_map;
6441 struct pointer_map_t *vars_map, *eh_map;
6442 struct loop *loop = entry_bb->loop_father;
6443 struct move_stmt_d d;
6444
6445 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6446 region. */
6447 gcc_assert (entry_bb != exit_bb
6448 && (!exit_bb
6449 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6450
6451 /* Collect all the blocks in the region. Manually add ENTRY_BB
6452 because it won't be added by dfs_enumerate_from. */
6453 bbs = NULL;
6454 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6455 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6456
6457 /* The blocks that used to be dominated by something in BBS will now be
6458 dominated by the new block. */
6459 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6460 VEC_address (basic_block, bbs),
6461 VEC_length (basic_block, bbs));
6462
6463 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6464 the predecessor edges to ENTRY_BB and the successor edges to
6465 EXIT_BB so that we can re-attach them to the new basic block that
6466 will replace the region. */
6467 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6468 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6469 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6470 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6471 i = 0;
6472 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6473 {
6474 entry_prob[i] = e->probability;
6475 entry_flag[i] = e->flags;
6476 entry_pred[i++] = e->src;
6477 remove_edge (e);
6478 }
6479
6480 if (exit_bb)
6481 {
6482 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6483 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6484 sizeof (basic_block));
6485 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6486 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6487 i = 0;
6488 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6489 {
6490 exit_prob[i] = e->probability;
6491 exit_flag[i] = e->flags;
6492 exit_succ[i++] = e->dest;
6493 remove_edge (e);
6494 }
6495 }
6496 else
6497 {
6498 num_exit_edges = 0;
6499 exit_succ = NULL;
6500 exit_flag = NULL;
6501 exit_prob = NULL;
6502 }
6503
6504 /* Switch context to the child function to initialize DEST_FN's CFG. */
6505 gcc_assert (dest_cfun->cfg == NULL);
6506 push_cfun (dest_cfun);
6507
6508 init_empty_tree_cfg ();
6509
6510 /* Initialize EH information for the new function. */
6511 eh_map = NULL;
6512 new_label_map = NULL;
6513 if (saved_cfun->eh)
6514 {
6515 eh_region region = NULL;
6516
6517 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6518 region = find_outermost_region_in_block (saved_cfun, bb, region);
6519
6520 init_eh_for_function ();
6521 if (region != NULL)
6522 {
6523 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6524 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6525 new_label_mapper, new_label_map);
6526 }
6527 }
6528
6529 pop_cfun ();
6530
6531 /* Move blocks from BBS into DEST_CFUN. */
6532 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6533 after = dest_cfun->cfg->x_entry_block_ptr;
6534 vars_map = pointer_map_create ();
6535
6536 memset (&d, 0, sizeof (d));
6537 d.orig_block = orig_block;
6538 d.new_block = DECL_INITIAL (dest_cfun->decl);
6539 d.from_context = cfun->decl;
6540 d.to_context = dest_cfun->decl;
6541 d.vars_map = vars_map;
6542 d.new_label_map = new_label_map;
6543 d.eh_map = eh_map;
6544 d.remap_decls_p = true;
6545
6546 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6547 {
6548 /* No need to update edge counts on the last block. They have
6549 already been updated earlier when we detached the region from
6550 the original CFG. */
6551 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6552 after = bb;
6553 }
6554
6555 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6556 if (orig_block)
6557 {
6558 tree block;
6559 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6560 == NULL_TREE);
6561 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6562 = BLOCK_SUBBLOCKS (orig_block);
6563 for (block = BLOCK_SUBBLOCKS (orig_block);
6564 block; block = BLOCK_CHAIN (block))
6565 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6566 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6567 }
6568
6569 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6570 vars_map, dest_cfun->decl);
6571
6572 if (new_label_map)
6573 htab_delete (new_label_map);
6574 if (eh_map)
6575 pointer_map_destroy (eh_map);
6576 pointer_map_destroy (vars_map);
6577
6578 /* Rewire the entry and exit blocks. The successor to the entry
6579 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6580 the child function. Similarly, the predecessor of DEST_FN's
6581 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6582 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6583 various CFG manipulation functions get to the right CFG.
6584
6585 FIXME, this is silly. The CFG ought to become a parameter to
6586 these helpers. */
6587 push_cfun (dest_cfun);
6588 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6589 if (exit_bb)
6590 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6591 pop_cfun ();
6592
6593 /* Back in the original function, the SESE region has disappeared,
6594 create a new basic block in its place. */
6595 bb = create_empty_bb (entry_pred[0]);
6596 if (current_loops)
6597 add_bb_to_loop (bb, loop);
6598 for (i = 0; i < num_entry_edges; i++)
6599 {
6600 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6601 e->probability = entry_prob[i];
6602 }
6603
6604 for (i = 0; i < num_exit_edges; i++)
6605 {
6606 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6607 e->probability = exit_prob[i];
6608 }
6609
6610 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6611 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6612 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6613 VEC_free (basic_block, heap, dom_bbs);
6614
6615 if (exit_bb)
6616 {
6617 free (exit_prob);
6618 free (exit_flag);
6619 free (exit_succ);
6620 }
6621 free (entry_prob);
6622 free (entry_flag);
6623 free (entry_pred);
6624 VEC_free (basic_block, heap, bbs);
6625
6626 return bb;
6627 }
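/* For illustration (a sketch, not from the original source): given a
   region delimited by ENTRY_BB and EXIT_BB in the caller's CFG,

       pred --> ENTRY_BB --> ... --> EXIT_BB --> succ

   the call

       bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   detaches every block of the region into CHILD_CFUN and leaves a
   single new empty block BB in its place,

       pred --> BB --> succ

   with the original edge flags and probabilities restored on the new
   edges.  */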
6628
6629
6630 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6631 tree-pass.h). */
6632
6633 void
6634 dump_function_to_file (tree fn, FILE *file, int flags)
6635 {
6636 tree arg, var;
6637 struct function *dsf;
6638 bool ignore_topmost_bind = false, any_var = false;
6639 basic_block bb;
6640 tree chain;
6641 bool tmclone = TREE_CODE (fn) == FUNCTION_DECL && decl_is_tm_clone (fn);
6642
6643 fprintf (file, "%s %s(", lang_hooks.decl_printable_name (fn, 2),
6644 tmclone ? "[tm-clone] " : "");
6645
6646 arg = DECL_ARGUMENTS (fn);
6647 while (arg)
6648 {
6649 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6650 fprintf (file, " ");
6651 print_generic_expr (file, arg, dump_flags);
6652 if (flags & TDF_VERBOSE)
6653 print_node (file, "", arg, 4);
6654 if (DECL_CHAIN (arg))
6655 fprintf (file, ", ");
6656 arg = DECL_CHAIN (arg);
6657 }
6658 fprintf (file, ")\n");
6659
6660 if (flags & TDF_VERBOSE)
6661 print_node (file, "", fn, 2);
6662
6663 dsf = DECL_STRUCT_FUNCTION (fn);
6664 if (dsf && (flags & TDF_EH))
6665 dump_eh_tree (file, dsf);
6666
6667 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6668 {
6669 dump_node (fn, TDF_SLIM | flags, file);
6670 return;
6671 }
6672
6673 /* Switch CFUN to point to FN. */
6674 push_cfun (DECL_STRUCT_FUNCTION (fn));
6675
6676 /* When GIMPLE is lowered, the variables are no longer available in
6677 BIND_EXPRs, so display them separately. */
6678 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6679 {
6680 unsigned ix;
6681 ignore_topmost_bind = true;
6682
6683 fprintf (file, "{\n");
6684 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6685 {
6686 print_generic_decl (file, var, flags);
6687 if (flags & TDF_VERBOSE)
6688 print_node (file, "", var, 4);
6689 fprintf (file, "\n");
6690
6691 any_var = true;
6692 }
6693 }
6694
6695 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6696 {
6697 /* If the CFG has been built, emit a CFG-based dump. */
6698 check_bb_profile (ENTRY_BLOCK_PTR, file);
6699 if (!ignore_topmost_bind)
6700 fprintf (file, "{\n");
6701
6702 if (any_var && n_basic_blocks)
6703 fprintf (file, "\n");
6704
6705 FOR_EACH_BB (bb)
6706 gimple_dump_bb (bb, file, 2, flags);
6707
6708 fprintf (file, "}\n");
6709 check_bb_profile (EXIT_BLOCK_PTR, file);
6710 }
6711 else if (DECL_SAVED_TREE (fn) == NULL)
6712 {
6713 /* The function is now in GIMPLE form but the CFG has not been
6714 built yet. Emit the single sequence of GIMPLE statements
6715 that make up its body. */
6716 gimple_seq body = gimple_body (fn);
6717
6718 if (gimple_seq_first_stmt (body)
6719 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6720 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6721 print_gimple_seq (file, body, 0, flags);
6722 else
6723 {
6724 if (!ignore_topmost_bind)
6725 fprintf (file, "{\n");
6726
6727 if (any_var)
6728 fprintf (file, "\n");
6729
6730 print_gimple_seq (file, body, 2, flags);
6731 fprintf (file, "}\n");
6732 }
6733 }
6734 else
6735 {
6736 int indent;
6737
6738 /* Make a tree based dump. */
6739 chain = DECL_SAVED_TREE (fn);
6740
6741 if (chain && TREE_CODE (chain) == BIND_EXPR)
6742 {
6743 if (ignore_topmost_bind)
6744 {
6745 chain = BIND_EXPR_BODY (chain);
6746 indent = 2;
6747 }
6748 else
6749 indent = 0;
6750 }
6751 else
6752 {
6753 if (!ignore_topmost_bind)
6754 fprintf (file, "{\n");
6755 indent = 2;
6756 }
6757
6758 if (any_var)
6759 fprintf (file, "\n");
6760
6761 print_generic_stmt_indented (file, chain, flags, indent);
6762 if (ignore_topmost_bind)
6763 fprintf (file, "}\n");
6764 }
6765
6766 if (flags & TDF_ENUMERATE_LOCALS)
6767 dump_enumerated_decls (file, flags);
6768 fprintf (file, "\n\n");
6769
6770 /* Restore CFUN. */
6771 pop_cfun ();
6772 }
6773
6774
6775 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6776
6777 DEBUG_FUNCTION void
6778 debug_function (tree fn, int flags)
6779 {
6780 dump_function_to_file (fn, stderr, flags);
6781 }
6782
6783
6784 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6785
6786 static void
6787 print_pred_bbs (FILE *file, basic_block bb)
6788 {
6789 edge e;
6790 edge_iterator ei;
6791
6792 FOR_EACH_EDGE (e, ei, bb->preds)
6793 fprintf (file, "bb_%d ", e->src->index);
6794 }
6795
6796
6797 /* Print on FILE the indexes for the successors of basic_block BB. */
6798
6799 static void
6800 print_succ_bbs (FILE *file, basic_block bb)
6801 {
6802 edge e;
6803 edge_iterator ei;
6804
6805 FOR_EACH_EDGE (e, ei, bb->succs)
6806 fprintf (file, "bb_%d ", e->dest->index);
6807 }
6808
6809 /* Print to FILE the basic block BB according to the VERBOSITY level. */
6810
6811 void
6812 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6813 {
6814 char *s_indent = (char *) alloca ((size_t) indent + 1);
6815 memset ((void *) s_indent, ' ', (size_t) indent);
6816 s_indent[indent] = '\0';
6817
6818 /* Print basic_block's header. */
6819 if (verbosity >= 2)
6820 {
6821 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6822 print_pred_bbs (file, bb);
6823 fprintf (file, "}, succs = {");
6824 print_succ_bbs (file, bb);
6825 fprintf (file, "})\n");
6826 }
6827
6828 /* Print basic_block's body. */
6829 if (verbosity >= 3)
6830 {
6831 fprintf (file, "%s {\n", s_indent);
6832 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6833 fprintf (file, "%s }\n", s_indent);
6834 }
6835 }
6836
6837 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6838
6839 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
6840 VERBOSITY level, this outputs the contents of the loop, or just its
6841 structure. */
6842
6843 static void
6844 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6845 {
6846 char *s_indent;
6847 basic_block bb;
6848
6849 if (loop == NULL)
6850 return;
6851
6852 s_indent = (char *) alloca ((size_t) indent + 1);
6853 memset ((void *) s_indent, ' ', (size_t) indent);
6854 s_indent[indent] = '\0';
6855
6856 /* Print loop's header. */
6857 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6858 loop->num, loop->header->index, loop->latch->index);
6859 fprintf (file, ", niter = ");
6860 print_generic_expr (file, loop->nb_iterations, 0);
6861
6862 if (loop->any_upper_bound)
6863 {
6864 fprintf (file, ", upper_bound = ");
6865 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6866 }
6867
6868 if (loop->any_estimate)
6869 {
6870 fprintf (file, ", estimate = ");
6871 dump_double_int (file, loop->nb_iterations_estimate, true);
6872 }
6873 fprintf (file, ")\n");
6874
6875 /* Print loop's body. */
6876 if (verbosity >= 1)
6877 {
6878 fprintf (file, "%s{\n", s_indent);
6879 FOR_EACH_BB (bb)
6880 if (bb->loop_father == loop)
6881 print_loops_bb (file, bb, indent, verbosity);
6882
6883 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6884 fprintf (file, "%s}\n", s_indent);
6885 }
6886 }
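/* Sample output (illustrative) for a loop numbered 2 with header bb_4,
   latch bb_5, a symbolic iteration count and a known upper bound:

     loop_2 (header = 4, latch = 5, niter = n_7, upper_bound = 99)

   At VERBOSITY >= 1 the loop body follows, wrapped in braces.  */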
6887
6888 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6889 spaces. Depending on the VERBOSITY level, this outputs the contents of the
6890 loop, or just its structure. */
6891
6892 static void
6893 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6894 {
6895 if (loop == NULL)
6896 return;
6897
6898 print_loop (file, loop, indent, verbosity);
6899 print_loop_and_siblings (file, loop->next, indent, verbosity);
6900 }
6901
6902 /* Follow a CFG edge from the entry point of the program, and on entry
6903 of a loop, pretty print the loop structure on FILE. */
6904
6905 void
6906 print_loops (FILE *file, int verbosity)
6907 {
6908 basic_block bb;
6909
6910 bb = ENTRY_BLOCK_PTR;
6911 if (bb && bb->loop_father)
6912 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6913 }
6914
6915
6916 /* Debug the loop structure at tree level, at some VERBOSITY level. */
6917
6918 DEBUG_FUNCTION void
6919 debug_loops (int verbosity)
6920 {
6921 print_loops (stderr, verbosity);
6922 }
6923
6924 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6925
6926 DEBUG_FUNCTION void
6927 debug_loop (struct loop *loop, int verbosity)
6928 {
6929 print_loop (stderr, loop, 0, verbosity);
6930 }
6931
6932 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6933 level. */
6934
6935 DEBUG_FUNCTION void
6936 debug_loop_num (unsigned num, int verbosity)
6937 {
6938 debug_loop (get_loop (num), verbosity);
6939 }
6940
6941 /* Return true if BB ends with a call, possibly followed by some
6942 instructions that must stay with the call. Return false
6943 otherwise. */
6944
6945 static bool
6946 gimple_block_ends_with_call_p (basic_block bb)
6947 {
6948 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6949 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6950 }
6951
6952
6953 /* Return true if BB ends with a conditional branch. Return false
6954 otherwise. */
6955
6956 static bool
6957 gimple_block_ends_with_condjump_p (const_basic_block bb)
6958 {
6959 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6960 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6961 }
6962
6963
6964 /* Return true if we need to add fake edge to exit at statement T.
6965 Helper function for gimple_flow_call_edges_add. */
6966
6967 static bool
6968 need_fake_edge_p (gimple t)
6969 {
6970 tree fndecl = NULL_TREE;
6971 int call_flags = 0;
6972
6973 /* NORETURN and LONGJMP calls already have an edge to exit.
6974 CONST and PURE calls do not need one.
6975 We don't currently check for CONST and PURE here, although
6976 it would be a good idea, because those attributes are
6977 figured out from the RTL in mark_constant_function, and
6978 the counter incrementation code from -fprofile-arcs
6979 leads to different results from -fbranch-probabilities. */
6980 if (is_gimple_call (t))
6981 {
6982 fndecl = gimple_call_fndecl (t);
6983 call_flags = gimple_call_flags (t);
6984 }
6985
6986 if (is_gimple_call (t)
6987 && fndecl
6988 && DECL_BUILT_IN (fndecl)
6989 && (call_flags & ECF_NOTHROW)
6990 && !(call_flags & ECF_RETURNS_TWICE)
6991 /* fork() doesn't really return twice, but the effect of
6992 wrapping it in __gcov_fork() which calls __gcov_flush()
6993 and clears the counters before forking has the same
6994 effect as returning twice. Force a fake edge. */
6995 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6996 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6997 return false;
6998
6999 if (is_gimple_call (t))
7000 {
7001 edge_iterator ei;
7002 edge e;
7003 basic_block bb;
7004
7005 if (!(call_flags & ECF_NORETURN))
7006 return true;
7007
7008 bb = gimple_bb (t);
7009 FOR_EACH_EDGE (e, ei, bb->succs)
7010 if ((e->flags & EDGE_FAKE) == 0)
7011 return true;
7012 }
7013
7014 if (gimple_code (t) == GIMPLE_ASM
7015 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7016 return true;
7017
7018 return false;
7019 }
7020
7021
7022 /* Add fake edges to the function exit for any non-constant and
7023 non-noreturn calls (or noreturn calls with EH/abnormal edges) and
7024 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7025 or in the whole CFG if BLOCKS is zero. Return the number of blocks
7026 that were split.
7027
7028 The goal is to expose cases in which entering a basic block does
7029 not imply that all subsequent instructions must be executed. */
7030
7031 static int
7032 gimple_flow_call_edges_add (sbitmap blocks)
7033 {
7034 int i;
7035 int blocks_split = 0;
7036 int last_bb = last_basic_block;
7037 bool check_last_block = false;
7038
7039 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7040 return 0;
7041
7042 if (! blocks)
7043 check_last_block = true;
7044 else
7045 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7046
7047 /* In the last basic block, before epilogue generation, there will be
7048 a fallthru edge to EXIT. Special care is required if the last insn
7049 of the last basic block is a call because make_edge folds duplicate
7050 edges, which would result in the fallthru edge also being marked
7051 fake, which would result in the fallthru edge being removed by
7052 remove_fake_edges, which would result in an invalid CFG.
7053
7054 Moreover, we can't elide the outgoing fake edge, since the block
7055 profiler needs to take this into account in order to solve the minimal
7056 spanning tree in the case that the call doesn't return.
7057
7058 Handle this by adding a dummy instruction in a new last basic block. */
7059 if (check_last_block)
7060 {
7061 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7062 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7063 gimple t = NULL;
7064
7065 if (!gsi_end_p (gsi))
7066 t = gsi_stmt (gsi);
7067
7068 if (t && need_fake_edge_p (t))
7069 {
7070 edge e;
7071
7072 e = find_edge (bb, EXIT_BLOCK_PTR);
7073 if (e)
7074 {
7075 gsi_insert_on_edge (e, gimple_build_nop ());
7076 gsi_commit_edge_inserts ();
7077 }
7078 }
7079 }
7080
7081 /* Now add fake edges to the function exit for any non-constant
7082 calls since there is no way that we can determine if they will
7083 return or not... */
7084 for (i = 0; i < last_bb; i++)
7085 {
7086 basic_block bb = BASIC_BLOCK (i);
7087 gimple_stmt_iterator gsi;
7088 gimple stmt, last_stmt;
7089
7090 if (!bb)
7091 continue;
7092
7093 if (blocks && !TEST_BIT (blocks, i))
7094 continue;
7095
7096 gsi = gsi_last_nondebug_bb (bb);
7097 if (!gsi_end_p (gsi))
7098 {
7099 last_stmt = gsi_stmt (gsi);
7100 do
7101 {
7102 stmt = gsi_stmt (gsi);
7103 if (need_fake_edge_p (stmt))
7104 {
7105 edge e;
7106
7107 /* The handling above of the final block before the
7108 epilogue should be enough to verify that there is
7109 no edge to the exit block in the CFG already.
7110 Calling make_edge in such case would cause us to
7111 mark that edge as fake and remove it later. */
7112 #ifdef ENABLE_CHECKING
7113 if (stmt == last_stmt)
7114 {
7115 e = find_edge (bb, EXIT_BLOCK_PTR);
7116 gcc_assert (e == NULL);
7117 }
7118 #endif
7119
7120 /* Note that the following may create a new basic block
7121 and renumber the existing basic blocks. */
7122 if (stmt != last_stmt)
7123 {
7124 e = split_block (bb, stmt);
7125 if (e)
7126 blocks_split++;
7127 }
7128 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7129 }
7130 gsi_prev (&gsi);
7131 }
7132 while (!gsi_end_p (gsi));
7133 }
7134 }
7135
7136 if (blocks_split)
7137 verify_flow_info ();
7138
7139 return blocks_split;
7140 }
7141
7142 /* Removes edge E and all the blocks dominated by it, and updates dominance
7143 information. The IL in E->src needs to be updated separately.
7144 If dominance info is not available, only the edge E is removed. */
7145
7146 void
7147 remove_edge_and_dominated_blocks (edge e)
7148 {
7149 VEC (basic_block, heap) *bbs_to_remove = NULL;
7150 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
7151 bitmap df, df_idom;
7152 edge f;
7153 edge_iterator ei;
7154 bool none_removed = false;
7155 unsigned i;
7156 basic_block bb, dbb;
7157 bitmap_iterator bi;
7158
7159 if (!dom_info_available_p (CDI_DOMINATORS))
7160 {
7161 remove_edge (e);
7162 return;
7163 }
7164
7165 /* No updating is needed for edges to exit. */
7166 if (e->dest == EXIT_BLOCK_PTR)
7167 {
7168 if (cfgcleanup_altered_bbs)
7169 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7170 remove_edge (e);
7171 return;
7172 }
7173
7174 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7175 that is not dominated by E->dest, then this set is empty. Otherwise,
7176 all the basic blocks dominated by E->dest are removed.
7177
7178 Also, to DF_IDOM we store the immediate dominators of the blocks in
7179 the dominance frontier of E (i.e., of the successors of the
7180 removed blocks, if there are any, and of E->dest otherwise). */
7181 FOR_EACH_EDGE (f, ei, e->dest->preds)
7182 {
7183 if (f == e)
7184 continue;
7185
7186 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7187 {
7188 none_removed = true;
7189 break;
7190 }
7191 }
7192
7193 df = BITMAP_ALLOC (NULL);
7194 df_idom = BITMAP_ALLOC (NULL);
7195
7196 if (none_removed)
7197 bitmap_set_bit (df_idom,
7198 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7199 else
7200 {
7201 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7202 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7203 {
7204 FOR_EACH_EDGE (f, ei, bb->succs)
7205 {
7206 if (f->dest != EXIT_BLOCK_PTR)
7207 bitmap_set_bit (df, f->dest->index);
7208 }
7209 }
7210 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7211 bitmap_clear_bit (df, bb->index);
7212
7213 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7214 {
7215 bb = BASIC_BLOCK (i);
7216 bitmap_set_bit (df_idom,
7217 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7218 }
7219 }
7220
7221 if (cfgcleanup_altered_bbs)
7222 {
7223 /* Record the set of the altered basic blocks. */
7224 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7225 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7226 }
7227
7228 /* Remove E and the cancelled blocks. */
7229 if (none_removed)
7230 remove_edge (e);
7231 else
7232 {
7233 /* Walk backwards so as to get a chance to substitute all
7234 released DEFs into debug stmts. See
7235 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7236 details. */
7237 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
7238 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
7239 }
7240
7241 /* Update the dominance information. The immediate dominator may change only
7242 for blocks whose immediate dominator belongs to DF_IDOM:
7243
7244 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7245 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7246 Z dominates X after the removal. Before removal, there exists a path P
7247 from Y to X that avoids Z. Let F be the last edge on P that is
7248 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7249 dominates W, and because of P, Z does not dominate W), and W belongs to
7250 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7251 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7252 {
7253 bb = BASIC_BLOCK (i);
7254 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7255 dbb;
7256 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7257 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
7258 }
7259
7260 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7261
7262 BITMAP_FREE (df);
7263 BITMAP_FREE (df_idom);
7264 VEC_free (basic_block, heap, bbs_to_remove);
7265 VEC_free (basic_block, heap, bbs_to_fix_dom);
7266 }
7267
7268 /* Purge dead EH edges from basic block BB. */
7269
7270 bool
7271 gimple_purge_dead_eh_edges (basic_block bb)
7272 {
7273 bool changed = false;
7274 edge e;
7275 edge_iterator ei;
7276 gimple stmt = last_stmt (bb);
7277
7278 if (stmt && stmt_can_throw_internal (stmt))
7279 return false;
7280
7281 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7282 {
7283 if (e->flags & EDGE_EH)
7284 {
7285 remove_edge_and_dominated_blocks (e);
7286 changed = true;
7287 }
7288 else
7289 ei_next (&ei);
7290 }
7291
7292 return changed;
7293 }
7294
7295 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7296
7297 bool
7298 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7299 {
7300 bool changed = false;
7301 unsigned i;
7302 bitmap_iterator bi;
7303
7304 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7305 {
7306 basic_block bb = BASIC_BLOCK (i);
7307
7308 /* Earlier gimple_purge_dead_eh_edges could have removed
7309 this basic block already. */
7310 gcc_assert (bb || changed);
7311 if (bb != NULL)
7312 changed |= gimple_purge_dead_eh_edges (bb);
7313 }
7314
7315 return changed;
7316 }
7317
7318 /* Purge dead abnormal call edges from basic block BB. */
7319
7320 bool
7321 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7322 {
7323 bool changed = false;
7324 edge e;
7325 edge_iterator ei;
7326 gimple stmt = last_stmt (bb);
7327
7328 if (!cfun->has_nonlocal_label)
7329 return false;
7330
7331 if (stmt && stmt_can_make_abnormal_goto (stmt))
7332 return false;
7333
7334 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7335 {
7336 if (e->flags & EDGE_ABNORMAL)
7337 {
7338 remove_edge_and_dominated_blocks (e);
7339 changed = true;
7340 }
7341 else
7342 ei_next (&ei);
7343 }
7344
7345 return changed;
7346 }
7347
7348 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7349
7350 bool
7351 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7352 {
7353 bool changed = false;
7354 unsigned i;
7355 bitmap_iterator bi;
7356
7357 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7358 {
7359 basic_block bb = BASIC_BLOCK (i);
7360
7361 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7362 this basic block already. */
7363 gcc_assert (bb || changed);
7364 if (bb != NULL)
7365 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7366 }
7367
7368 return changed;
7369 }
7370
7371 /* This function is called whenever a new edge is created or
7372 redirected. */
7373
7374 static void
7375 gimple_execute_on_growing_pred (edge e)
7376 {
7377 basic_block bb = e->dest;
7378
7379 if (!gimple_seq_empty_p (phi_nodes (bb)))
7380 reserve_phi_args_for_new_edge (bb);
7381 }
7382
7383 /* This function is called immediately before edge E is removed from
7384 the edge vector E->dest->preds. */
7385
7386 static void
7387 gimple_execute_on_shrinking_pred (edge e)
7388 {
7389 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7390 remove_phi_args (e);
7391 }
7392
7393 /*---------------------------------------------------------------------------
7394 Helper functions for Loop versioning
7395 ---------------------------------------------------------------------------*/
7396
7397 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7398 of 'first'. Both of them are dominated by 'new_head' basic block. When
7399 'new_head' was created by splitting 'second's incoming edge, it received
7400 phi arguments on that edge from split_edge(). Later, an additional edge 'e'
7401 was created to connect 'new_head' and 'first'. This routine now adds to
7402 edge 'e' the phi args that the 'new_head' to 'second' edge received as
7403 part of the edge splitting. */
7404
7405 static void
7406 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7407 basic_block new_head, edge e)
7408 {
7409 gimple phi1, phi2;
7410 gimple_stmt_iterator psi1, psi2;
7411 tree def;
7412 edge e2 = find_edge (new_head, second);
7413
7414 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7415 edge, we should always have an edge from NEW_HEAD to SECOND. */
7416 gcc_assert (e2 != NULL);
7417
7418 /* Browse all 'second' basic block phi nodes and add phi args to
7419 edge 'e' for 'first' head. PHI args are always in correct order. */
7420
7421 for (psi2 = gsi_start_phis (second),
7422 psi1 = gsi_start_phis (first);
7423 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7424 gsi_next (&psi2), gsi_next (&psi1))
7425 {
7426 phi1 = gsi_stmt (psi1);
7427 phi2 = gsi_stmt (psi2);
7428 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7429 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7430 }
7431 }
7432
7433
7434 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7435 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7436 the destination of the ELSE part. */
7437
7438 static void
7439 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7440 basic_block second_head ATTRIBUTE_UNUSED,
7441 basic_block cond_bb, void *cond_e)
7442 {
7443 gimple_stmt_iterator gsi;
7444 gimple new_cond_expr;
7445 tree cond_expr = (tree) cond_e;
7446 edge e0;
7447
7448 /* Build the new conditional expression. */
7449 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7450 NULL_TREE, NULL_TREE);
7451
7452 /* Add new cond in cond_bb. */
7453 gsi = gsi_last_bb (cond_bb);
7454 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7455
7456 /* Adjust edges appropriately to connect new head with first head
7457 as well as second head. */
7458 e0 = single_succ_edge (cond_bb);
7459 e0->flags &= ~EDGE_FALLTHRU;
7460 e0->flags |= EDGE_FALSE_VALUE;
7461 }
7462
7463 struct cfg_hooks gimple_cfg_hooks = {
7464 "gimple",
7465 gimple_verify_flow_info,
7466 gimple_dump_bb, /* dump_bb */
7467 create_bb, /* create_basic_block */
7468 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7469 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7470 gimple_can_remove_branch_p, /* can_remove_branch_p */
7471 remove_bb, /* delete_basic_block */
7472 gimple_split_block, /* split_block */
7473 gimple_move_block_after, /* move_block_after */
7474 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7475 gimple_merge_blocks, /* merge_blocks */
7476 gimple_predict_edge, /* predict_edge */
7477 gimple_predicted_by_p, /* predicted_by_p */
7478 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7479 gimple_duplicate_bb, /* duplicate_block */
7480 gimple_split_edge, /* split_edge */
7481 gimple_make_forwarder_block, /* make_forwarder_block */
7482 NULL, /* tidy_fallthru_edge */
7483 NULL, /* force_nonfallthru */
7484 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7485 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7486 gimple_flow_call_edges_add, /* flow_call_edges_add */
7487 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7488 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7489 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7490 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7491 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7492 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7493 flush_pending_stmts /* flush_pending_stmts */
7494 };
7495
7496
7497 /* Split all critical edges. */
7498
7499 static unsigned int
7500 split_critical_edges (void)
7501 {
7502 basic_block bb;
7503 edge e;
7504 edge_iterator ei;
7505
7506 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7507 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7508 mappings around the calls to split_edge. */
7509 start_recording_case_labels ();
7510 FOR_ALL_BB (bb)
7511 {
7512 FOR_EACH_EDGE (e, ei, bb->succs)
7513 {
7514 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7515 split_edge (e);
7516 /* PRE inserts statements on edges and expects that,
7517 since split_critical_edges was run beforehand, committing edge
7518 insertions will not split more edges. In addition to critical
7519 edges we must split edges that have multiple successors and
7520 end by control flow statements, such as RESX.
7521 Go ahead and split them too. This matches the logic in
7522 gimple_find_edge_insert_loc. */
7523 else if ((!single_pred_p (e->dest)
7524 || !gimple_seq_empty_p (phi_nodes (e->dest))
7525 || e->dest == EXIT_BLOCK_PTR)
7526 && e->src != ENTRY_BLOCK_PTR
7527 && !(e->flags & EDGE_ABNORMAL))
7528 {
7529 gimple_stmt_iterator gsi;
7530
7531 gsi = gsi_last_bb (e->src);
7532 if (!gsi_end_p (gsi)
7533 && stmt_ends_bb_p (gsi_stmt (gsi))
7534 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7535 && !gimple_call_builtin_p (gsi_stmt (gsi),
7536 BUILT_IN_RETURN)))
7537 split_edge (e);
7538 }
7539 }
7540 }
7541 end_recording_case_labels ();
7542 return 0;
7543 }
7544
7545 struct gimple_opt_pass pass_split_crit_edges =
7546 {
7547 {
7548 GIMPLE_PASS,
7549 "crited", /* name */
7550 NULL, /* gate */
7551 split_critical_edges, /* execute */
7552 NULL, /* sub */
7553 NULL, /* next */
7554 0, /* static_pass_number */
7555 TV_TREE_SPLIT_EDGES, /* tv_id */
7556 PROP_cfg, /* properties_required */
7557 PROP_no_crit_edges, /* properties_provided */
7558 0, /* properties_destroyed */
7559 0, /* todo_flags_start */
7560 TODO_verify_flow /* todo_flags_finish */
7561 }
7562 };
7563
7564
7565 /* Build a ternary operation and gimplify it. Emit code before GSI.
7566 Return the gimple_val holding the result. */
7567
7568 tree
7569 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7570 tree type, tree a, tree b, tree c)
7571 {
7572 tree ret;
7573 location_t loc = gimple_location (gsi_stmt (*gsi));
7574
7575 ret = fold_build3_loc (loc, code, type, a, b, c);
7576 STRIP_NOPS (ret);
7577
7578 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7579 GSI_SAME_STMT);
7580 }
7581
7582 /* Build a binary operation and gimplify it. Emit code before GSI.
7583 Return the gimple_val holding the result. */
7584
7585 tree
7586 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7587 tree type, tree a, tree b)
7588 {
7589 tree ret;
7590
7591 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7592 STRIP_NOPS (ret);
7593
7594 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7595 GSI_SAME_STMT);
7596 }
7597
7598 /* Build a unary operation and gimplify it. Emit code before GSI.
7599 Return the gimple_val holding the result. */
7600
7601 tree
7602 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7603 tree a)
7604 {
7605 tree ret;
7606
7607 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7608 STRIP_NOPS (ret);
7609
7610 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7611 GSI_SAME_STMT);
7612 }
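/* A usage sketch for the gimplify_build* helpers (illustrative; GSI is
   assumed to point at a statement inside a GIMPLE body, and X, Y, Z
   and TYPE are assumed to be trees provided by the caller):

     tree t = gimplify_build2 (&gsi, MULT_EXPR, type, x, y);
     t = gimplify_build2 (&gsi, PLUS_EXPR, type, t, z);

   This emits gimplified statements computing x*y + z before GSI and
   returns a gimple_val (an SSA name or constant) holding the result.  */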
7613
7614
7615 \f
7616 /* Emit return warnings. */
7617
7618 static unsigned int
7619 execute_warn_function_return (void)
7620 {
7621 source_location location;
7622 gimple last;
7623 edge e;
7624 edge_iterator ei;
7625
7626 /* If we have a path to EXIT, then we do return. */
7627 if (TREE_THIS_VOLATILE (cfun->decl)
7628 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7629 {
7630 location = UNKNOWN_LOCATION;
7631 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7632 {
7633 last = last_stmt (e->src);
7634 if ((gimple_code (last) == GIMPLE_RETURN
7635 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7636 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7637 break;
7638 }
7639 if (location == UNKNOWN_LOCATION)
7640 location = cfun->function_end_locus;
7641 warning_at (location, 0, "%<noreturn%> function does return");
7642 }
7643
7644 /* If we see "return;" in some basic block, then we do reach the end
7645 without returning a value. */
7646 else if (warn_return_type
7647 && !TREE_NO_WARNING (cfun->decl)
7648 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7649 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7650 {
7651 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7652 {
7653 gimple last = last_stmt (e->src);
7654 if (gimple_code (last) == GIMPLE_RETURN
7655 && gimple_return_retval (last) == NULL
7656 && !gimple_no_warning_p (last))
7657 {
7658 location = gimple_location (last);
7659 if (location == UNKNOWN_LOCATION)
7660 location = cfun->function_end_locus;
7661 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7662 TREE_NO_WARNING (cfun->decl) = 1;
7663 break;
7664 }
7665 }
7666 }
7667 return 0;
7668 }
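/* For example (an illustrative sketch), compiling

     __attribute__ ((noreturn)) void f (int x)
     {
       if (x)
         return;
       for (;;);
     }

   leaves a reachable path to EXIT, so the first branch above reports
   "'noreturn' function does return" at the location of the return
   statement.  */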
7669
7670
7671 /* Given a basic block B which ends with a conditional and has
7672 precisely two successors, determine which of the edges is taken if
7673 the conditional is true and which is taken if the conditional is
7674 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7675
7676 void
7677 extract_true_false_edges_from_block (basic_block b,
7678 edge *true_edge,
7679 edge *false_edge)
7680 {
7681 edge e = EDGE_SUCC (b, 0);
7682
7683 if (e->flags & EDGE_TRUE_VALUE)
7684 {
7685 *true_edge = e;
7686 *false_edge = EDGE_SUCC (b, 1);
7687 }
7688 else
7689 {
7690 *false_edge = e;
7691 *true_edge = EDGE_SUCC (b, 1);
7692 }
7693 }
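/* Typical use (illustrative sketch):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   where COND_BB ends in a GIMPLE_COND, so its two successor edges carry
   the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE flags.  */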
7694
7695 struct gimple_opt_pass pass_warn_function_return =
7696 {
7697 {
7698 GIMPLE_PASS,
7699 "*warn_function_return", /* name */
7700 NULL, /* gate */
7701 execute_warn_function_return, /* execute */
7702 NULL, /* sub */
7703 NULL, /* next */
7704 0, /* static_pass_number */
7705 TV_NONE, /* tv_id */
7706 PROP_cfg, /* properties_required */
7707 0, /* properties_provided */
7708 0, /* properties_destroyed */
7709 0, /* todo_flags_start */
7710 0 /* todo_flags_finish */
7711 }
7712 };
7713
7714 /* Emit noreturn warnings. */
7715
7716 static unsigned int
7717 execute_warn_function_noreturn (void)
7718 {
7719 if (!TREE_THIS_VOLATILE (current_function_decl)
7720 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7721 warn_function_noreturn (current_function_decl);
7722 return 0;
7723 }
7724
7725 static bool
7726 gate_warn_function_noreturn (void)
7727 {
7728 return warn_suggest_attribute_noreturn;
7729 }
7730
7731 struct gimple_opt_pass pass_warn_function_noreturn =
7732 {
7733 {
7734 GIMPLE_PASS,
7735 "*warn_function_noreturn", /* name */
7736 gate_warn_function_noreturn, /* gate */
7737 execute_warn_function_noreturn, /* execute */
7738 NULL, /* sub */
7739 NULL, /* next */
7740 0, /* static_pass_number */
7741 TV_NONE, /* tv_id */
7742 PROP_cfg, /* properties_required */
7743 0, /* properties_provided */
7744 0, /* properties_destroyed */
7745 0, /* todo_flags_start */
7746 0 /* todo_flags_finish */
7747 }
7748 };
7749
7750
7751 /* Walk a gimplified function and warn about calls whose return value is
7752 ignored when the called function has attribute((warn_unused_result))
7753 set. This is done before inlining, so we don't have to worry about that. */
7754
7755 static void
7756 do_warn_unused_result (gimple_seq seq)
7757 {
7758 tree fdecl, ftype;
7759 gimple_stmt_iterator i;
7760
7761 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7762 {
7763 gimple g = gsi_stmt (i);
7764
7765 switch (gimple_code (g))
7766 {
7767 case GIMPLE_BIND:
7768 do_warn_unused_result (gimple_bind_body (g));
7769 break;
7770 case GIMPLE_TRY:
7771 do_warn_unused_result (gimple_try_eval (g));
7772 do_warn_unused_result (gimple_try_cleanup (g));
7773 break;
7774 case GIMPLE_CATCH:
7775 do_warn_unused_result (gimple_catch_handler (g));
7776 break;
7777 case GIMPLE_EH_FILTER:
7778 do_warn_unused_result (gimple_eh_filter_failure (g));
7779 break;
7780
7781 case GIMPLE_CALL:
7782 if (gimple_call_lhs (g))
7783 break;
7784 if (gimple_call_internal_p (g))
7785 break;
7786
7787 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7788 LHS. All calls whose value is ignored should be
7789 represented like this. Look for the attribute. */
7790 fdecl = gimple_call_fndecl (g);
7791 ftype = gimple_call_fntype (g);
7792
7793 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7794 {
7795 location_t loc = gimple_location (g);
7796
7797 if (fdecl)
7798 warning_at (loc, OPT_Wunused_result,
7799 "ignoring return value of %qD, "
7800 "declared with attribute warn_unused_result",
7801 fdecl);
7802 else
7803 warning_at (loc, OPT_Wunused_result,
7804 "ignoring return value of function "
7805 "declared with attribute warn_unused_result");
7806 }
7807 break;
7808
7809 default:
7810 /* Not a container, not a call, or a call whose value is used. */
7811 break;
7812 }
7813 }
7814 }
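/* For example (illustrative), compiling

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   walks G's body, finds the naked GIMPLE_CALL to F, and emits
   "ignoring return value of 'f', declared with attribute
   warn_unused_result" at the call's location.  */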
7815
7816 static unsigned int
7817 run_warn_unused_result (void)
7818 {
7819 do_warn_unused_result (gimple_body (current_function_decl));
7820 return 0;
7821 }
7822
7823 static bool
7824 gate_warn_unused_result (void)
7825 {
7826 return flag_warn_unused_result;
7827 }
7828
7829 struct gimple_opt_pass pass_warn_unused_result =
7830 {
7831 {
7832 GIMPLE_PASS,
7833 "*warn_unused_result", /* name */
7834 gate_warn_unused_result, /* gate */
7835 run_warn_unused_result, /* execute */
7836 NULL, /* sub */
7837 NULL, /* next */
7838 0, /* static_pass_number */
7839 TV_NONE, /* tv_id */
7840 PROP_gimple_any, /* properties_required */
7841 0, /* properties_provided */
7842 0, /* properties_destroyed */
7843 0, /* todo_flags_start */
7844 0, /* todo_flags_finish */
7845 }
7846 };