/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg  /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
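
/* For illustration only (this comment is a sketch, not part of the
   algorithm): after factoring, a function with several computed gotos
   such as

     goto *p1;   ...   goto *p2;

   has been rewritten into

     gotovar = p1; goto <factored>;   ...   gotovar = p2; goto <factored>;
     <factored>: goto *gotovar;

   where <factored> stands for the artificial label created above, so
   only the single factored block needs abnormal edges to every
   potential destination label.  The names P1 and P2 are hypothetical.  */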


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
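
/* An illustrative sketch of the abnormal-goto rewrite above: assuming a
   returns-twice call such as

     x = setjmp (env);

   where X must remain valid on the abnormal edge, the statement becomes

     tmp = setjmp (env);    <- ends the block; the abnormal edge leaves here
     x = tmp;               <- first statement of the fallthru block

   so the old value of X is still available on the abnormal edge.  X,
   TMP and ENV are hypothetical names.  */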


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
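
/* A note on the growth policy above: the array grows by roughly a
   quarter each time it fills up.  E.g. with last_basic_block == 20 the
   new size is 20 + (20 + 3) / 4 == 25, which keeps insertion amortized
   O(1) without doubling the memory footprint at every step.  */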


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}
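
/* For example, a condition that reaches this point as

     if (7 > 3) goto <L1>; else goto <L2>;

   folds to a nonzero constant, so integer_onep is true and the
   statement is canonicalized via gimple_cond_make_true; CFG cleanup can
   then remove the unreachable false edge.  The literal operands are
   illustrative only.  */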

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     successor edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), 0);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
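
/* As an illustration, if three distinct basic blocks all start at the
   same line 42 (say, the subexpressions of "a && b && c" on one line),
   successive calls for that locus return 1, 2 and 3, so a sample-based
   profiler can attribute counts to each block instead of lumping them
   into one line.  The expression is a hypothetical example.  */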

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
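
/* As a sketch, for GIMPLE of the form

     if (a_1 < b_2) goto <L_then>; else goto <L_else>;

   this creates BB -> block(L_then) flagged EDGE_TRUE_VALUE and
   BB -> block(L_else) flagged EDGE_FALSE_VALUE, then clears both labels
   because the edges now carry that information.  */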


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
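
/* E.g., for a source-level "switch (x) { case 1: case 2: goto L1;
   default: goto L2; }" the first query populates the map so that the
   edge to block(L1) yields the chain "case 2 -> case 1" via CASE_CHAIN
   and the edge to block(L2) yields the default case; later edge
   redirections can then update every affected CASE_LABEL_EXPR with a
   single lookup.  (Illustrative source form; at this point the cases
   live in a GIMPLE_SWITCH label vector.)  */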

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}
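
/* A small illustrative example of the whole process: for a block that
   starts with

     L1: L2: <artificial L3>: x = 1;

   the leading user label L1 is chosen as the main label, references to
   L2 and L3 are redirected to L1, and the now-unreferenced artificial
   L3 is deleted; L2 survives only because user-defined labels are
   preserved.  The label names are hypothetical.  */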

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          double_int bhp1 = double_int_add (tree_to_double_int (base_high),
                                            double_int_one);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
                                     bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
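
/* For instance, assuming a sorted label vector

     default: L0;  case 1: L1;  case 2: L1;  case 3: L1;  case 4: L0;

   the loop above first merges the consecutive cases 1, 2 and 3 into a
   single "case 1 ... 3: L1" entry, then drops "case 4" because it
   shares the default destination, shrinking the vector from five
   entries to two.  */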

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH | EDGE_PRESERVE))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
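
/* The degenerate PHIs are the interesting part above.  For example,
   when merging B into its single predecessor A, a node

     x_3 = PHI <x_7 (A)>

   simply forwards x_7: either all uses of x_3 are replaced by x_7 when
   copy propagation is allowed, or the explicit copy "x_3 = x_7" is
   appended to A before the PHI node is removed.  */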
1779
1780
1781 /* Return the one of two successors of BB that is not reachable by a
1782 complex edge, if there is one. Else, return BB. We use
1783 this in optimizations that use post-dominators for their heuristics,
1784 to catch the cases in C++ where function calls are involved. */
1785
1786 basic_block
1787 single_noncomplex_succ (basic_block bb)
1788 {
1789 edge e0, e1;
1790 if (EDGE_COUNT (bb->succs) != 2)
1791 return bb;
1792
1793 e0 = EDGE_SUCC (bb, 0);
1794 e1 = EDGE_SUCC (bb, 1);
1795 if (e0->flags & EDGE_COMPLEX)
1796 return e1->dest;
1797 if (e1->flags & EDGE_COMPLEX)
1798 return e0->dest;
1799
1800 return bb;
1801 }
1802
1803 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1804
1805 void
1806 notice_special_calls (gimple call)
1807 {
1808 int flags = gimple_call_flags (call);
1809
1810 if (flags & ECF_MAY_BE_ALLOCA)
1811 cfun->calls_alloca = true;
1812 if (flags & ECF_RETURNS_TWICE)
1813 cfun->calls_setjmp = true;
1814 }
1815
1816
1817 /* Clear flags set by notice_special_calls. Used by dead code removal
1818 to update the flags. */
1819
1820 void
1821 clear_special_calls (void)
1822 {
1823 cfun->calls_alloca = false;
1824 cfun->calls_setjmp = false;
1825 }
1826
1827 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1828
1829 static void
1830 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1831 {
1832 /* Since this block is no longer reachable, we can just delete all
1833 of its PHI nodes. */
1834 remove_phi_nodes (bb);
1835
1836 /* Remove edges to BB's successors. */
1837 while (EDGE_COUNT (bb->succs) > 0)
1838 remove_edge (EDGE_SUCC (bb, 0));
1839 }
1840
1841
1842 /* Remove statements of basic block BB. */
1843
1844 static void
1845 remove_bb (basic_block bb)
1846 {
1847 gimple_stmt_iterator i;
1848
1849 if (dump_file)
1850 {
1851 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1852 if (dump_flags & TDF_DETAILS)
1853 {
1854 dump_bb (bb, dump_file, 0);
1855 fprintf (dump_file, "\n");
1856 }
1857 }
1858
1859 if (current_loops)
1860 {
1861 struct loop *loop = bb->loop_father;
1862
1863 /* If a loop gets removed, clean up the information associated
1864 with it. */
1865 if (loop->latch == bb
1866 || loop->header == bb)
1867 free_numbers_of_iterations_estimates_loop (loop);
1868 }
1869
1870 /* Remove all the instructions in the block. */
1871 if (bb_seq (bb) != NULL)
1872 {
1873 /* Walk backwards so as to get a chance to substitute all
1874 released DEFs into debug stmts. See
1875 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1876 details. */
1877 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1878 {
1879 gimple stmt = gsi_stmt (i);
1880 if (gimple_code (stmt) == GIMPLE_LABEL
1881 && (FORCED_LABEL (gimple_label_label (stmt))
1882 || DECL_NONLOCAL (gimple_label_label (stmt))))
1883 {
1884 basic_block new_bb;
1885 gimple_stmt_iterator new_gsi;
1886
1887 /* A non-reachable non-local label may still be referenced.
1888 But it no longer needs to carry the extra semantics of
1889 non-locality. */
1890 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1891 {
1892 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1893 FORCED_LABEL (gimple_label_label (stmt)) = 1;
1894 }
1895
1896 new_bb = bb->prev_bb;
1897 new_gsi = gsi_start_bb (new_bb);
1898 gsi_remove (&i, false);
1899 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1900 }
1901 else
1902 {
1903 /* Release SSA definitions if we are in SSA. Note that we
1904 may be called when not in SSA. For example,
1905 final_cleanup calls this function via
1906 cleanup_tree_cfg. */
1907 if (gimple_in_ssa_p (cfun))
1908 release_defs (stmt);
1909
1910 gsi_remove (&i, true);
1911 }
1912
1913 if (gsi_end_p (i))
1914 i = gsi_last_bb (bb);
1915 else
1916 gsi_prev (&i);
1917 }
1918 }
1919
1920 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1921 bb->il.gimple = NULL;
1922 }
1923
1924
1925 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1926 predicate VAL, return the edge that will be taken out of the block.
1927 If VAL does not match a unique edge, NULL is returned. */
1928
1929 edge
1930 find_taken_edge (basic_block bb, tree val)
1931 {
1932 gimple stmt;
1933
1934 stmt = last_stmt (bb);
1935
1936 gcc_assert (stmt);
1937 gcc_assert (is_ctrl_stmt (stmt));
1938
1939 if (val == NULL)
1940 return NULL;
1941
1942 if (!is_gimple_min_invariant (val))
1943 return NULL;
1944
1945 if (gimple_code (stmt) == GIMPLE_COND)
1946 return find_taken_edge_cond_expr (bb, val);
1947
1948 if (gimple_code (stmt) == GIMPLE_SWITCH)
1949 return find_taken_edge_switch_expr (bb, val);
1950
1951 if (computed_goto_p (stmt))
1952 {
1953 /* Only optimize if the argument is a label; if the argument is
1954 not a label then we cannot construct a proper CFG.
1955
1956 It may be the case that we only need to allow the LABEL_REF to
1957 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1958 appear inside a LABEL_EXPR just to be safe. */
1959 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1960 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1961 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1962 return NULL;
1963 }
1964
1965 gcc_unreachable ();
1966 }
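/* Illustrative use (not part of GCC): once constant propagation has
   folded the predicate of a GIMPLE_COND to, say, integer_zero_node,

     e = find_taken_edge (bb, integer_zero_node);

   returns the false edge of BB, and CFG cleanup can then remove the
   edge to the unreachable true arm.  */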
1967
1968 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1969 statement, determine which of the outgoing edges will be taken out of the
1970 block. Return NULL if any edge may be taken. */
1971
1972 static edge
1973 find_taken_edge_computed_goto (basic_block bb, tree val)
1974 {
1975 basic_block dest;
1976 edge e = NULL;
1977
1978 dest = label_to_block (val);
1979 if (dest)
1980 {
1981 e = find_edge (bb, dest);
1982 gcc_assert (e != NULL);
1983 }
1984
1985 return e;
1986 }
1987
1988 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1989 statement, determine which of the two edges will be taken out of the
1990 block. Return NULL if either edge may be taken. */
1991
1992 static edge
1993 find_taken_edge_cond_expr (basic_block bb, tree val)
1994 {
1995 edge true_edge, false_edge;
1996
1997 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1998
1999 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2000 return (integer_zerop (val) ? false_edge : true_edge);
2001 }
2002
2003 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2004 statement, determine which edge will be taken out of the block. Return
2005 NULL if any edge may be taken. */
2006
2007 static edge
2008 find_taken_edge_switch_expr (basic_block bb, tree val)
2009 {
2010 basic_block dest_bb;
2011 edge e;
2012 gimple switch_stmt;
2013 tree taken_case;
2014
2015 switch_stmt = last_stmt (bb);
2016 taken_case = find_case_label_for_value (switch_stmt, val);
2017 dest_bb = label_to_block (CASE_LABEL (taken_case));
2018
2019 e = find_edge (bb, dest_bb);
2020 gcc_assert (e);
2021 return e;
2022 }
2023
2024
2025 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2026 We make use here of the fact that the case labels are
2027 sorted: we can do a binary search for a case matching VAL. */
2028
2029 static tree
2030 find_case_label_for_value (gimple switch_stmt, tree val)
2031 {
2032 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2033 tree default_case = gimple_switch_default_label (switch_stmt);
2034
2035 for (low = 0, high = n; high - low > 1; )
2036 {
2037 size_t i = (high + low) / 2;
2038 tree t = gimple_switch_label (switch_stmt, i);
2039 int cmp;
2040
2041 /* Cache the result of comparing CASE_LOW and VAL. */
2042 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2043
2044 if (cmp > 0)
2045 high = i;
2046 else
2047 low = i;
2048
2049 if (CASE_HIGH (t) == NULL)
2050 {
2051 /* A single-valued case label. */
2052 if (cmp == 0)
2053 return t;
2054 }
2055 else
2056 {
2057 /* A case range. We can only handle integer ranges. */
2058 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2059 return t;
2060 }
2061 }
2062
2063 return default_case;
2064 }
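/* Illustrative, self-contained sketch of the same search (hypothetical
   types, not part of GCC): entry 0 plays the role of the default label
   and is never matched; entries 1 .. N-1 are ranges sorted by LOW.

     struct case_range { long low, high; };

     static size_t
     find_case (const struct case_range *c, size_t n, long val)
     {
       size_t lo = 0, hi = n;
       while (hi - lo > 1)
         {
           size_t i = (hi + lo) / 2;
           if (c[i].low > val)
             hi = i;               // VAL lies before this range.
           else
             lo = i;               // This range may still cover VAL.
           if (c[i].low <= val && val <= c[i].high)
             return i;             // Found the covering range.
         }
       return 0;                   // No match; take the default.
     }
*/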
2065
2066
2067 /* Dump a basic block on stderr. */
2068
2069 void
2070 gimple_debug_bb (basic_block bb)
2071 {
2072 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2073 }
2074
2075
2076 /* Dump basic block with index N on stderr. */
2077
2078 basic_block
2079 gimple_debug_bb_n (int n)
2080 {
2081 gimple_debug_bb (BASIC_BLOCK (n));
2082 return BASIC_BLOCK (n);
2083 }
2084
2085
2086 /* Dump the CFG on stderr.
2087
2088 FLAGS are the same as those used by the tree dumping functions
2089 (see TDF_* in tree-pass.h). */
2090
2091 void
2092 gimple_debug_cfg (int flags)
2093 {
2094 gimple_dump_cfg (stderr, flags);
2095 }
2096
2097
2098 /* Dump the program showing basic block boundaries on the given FILE.
2099
2100 FLAGS are the same as those used by the tree dumping functions (see
2101 TDF_* in tree.h). */
2102
2103 void
2104 gimple_dump_cfg (FILE *file, int flags)
2105 {
2106 if (flags & TDF_DETAILS)
2107 {
2108 dump_function_header (file, current_function_decl, flags);
2109 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2110 n_basic_blocks, n_edges, last_basic_block);
2111
2112 brief_dump_cfg (file);
2113 fprintf (file, "\n");
2114 }
2115
2116 if (flags & TDF_STATS)
2117 dump_cfg_stats (file);
2118
2119 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2120 }
2121
2122
2123 /* Dump CFG statistics on FILE. */
2124
2125 void
2126 dump_cfg_stats (FILE *file)
2127 {
2128 static long max_num_merged_labels = 0;
2129 unsigned long size, total = 0;
2130 long num_edges;
2131 basic_block bb;
2132 const char * const fmt_str = "%-30s%-13s%12s\n";
2133 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2134 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2135 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2136 const char *funcname
2137 = lang_hooks.decl_printable_name (current_function_decl, 2);
2138
2139
2140 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2141
2142 fprintf (file, "---------------------------------------------------------\n");
2143 fprintf (file, fmt_str, "", " Number of ", "Memory");
2144 fprintf (file, fmt_str, "", " instances ", "used ");
2145 fprintf (file, "---------------------------------------------------------\n");
2146
2147 size = n_basic_blocks * sizeof (struct basic_block_def);
2148 total += size;
2149 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2150 SCALE (size), LABEL (size));
2151
2152 num_edges = 0;
2153 FOR_EACH_BB (bb)
2154 num_edges += EDGE_COUNT (bb->succs);
2155 size = num_edges * sizeof (struct edge_def);
2156 total += size;
2157 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2158
2159 fprintf (file, "---------------------------------------------------------\n");
2160 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2161 LABEL (total));
2162 fprintf (file, "---------------------------------------------------------\n");
2163 fprintf (file, "\n");
2164
2165 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2166 max_num_merged_labels = cfg_stats.num_merged_labels;
2167
2168 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2169 cfg_stats.num_merged_labels, max_num_merged_labels);
2170
2171 fprintf (file, "\n");
2172 }
2173
2174
2175 /* Dump CFG statistics on stderr. Keep extern so that it's always
2176 linked in the final executable. */
2177
2178 DEBUG_FUNCTION void
2179 debug_cfg_stats (void)
2180 {
2181 dump_cfg_stats (stderr);
2182 }
2183
2184
2185 /* Dump the flowgraph to a .vcg FILE. */
2186
2187 static void
2188 gimple_cfg2vcg (FILE *file)
2189 {
2190 edge e;
2191 edge_iterator ei;
2192 basic_block bb;
2193 const char *funcname
2194 = lang_hooks.decl_printable_name (current_function_decl, 2);
2195
2196 /* Write the file header. */
2197 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2198 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2199 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2200
2201 /* Write blocks and edges. */
2202 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2203 {
2204 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2205 e->dest->index);
2206
2207 if (e->flags & EDGE_FAKE)
2208 fprintf (file, " linestyle: dotted priority: 10");
2209 else
2210 fprintf (file, " linestyle: solid priority: 100");
2211
2212 fprintf (file, " }\n");
2213 }
2214 fputc ('\n', file);
2215
2216 FOR_EACH_BB (bb)
2217 {
2218 enum gimple_code head_code, end_code;
2219 const char *head_name, *end_name;
2220 int head_line = 0;
2221 int end_line = 0;
2222 gimple first = first_stmt (bb);
2223 gimple last = last_stmt (bb);
2224
2225 if (first)
2226 {
2227 head_code = gimple_code (first);
2228 head_name = gimple_code_name[head_code];
2229 head_line = get_lineno (first);
2230 }
2231 else
2232 head_name = "no-statement";
2233
2234 if (last)
2235 {
2236 end_code = gimple_code (last);
2237 end_name = gimple_code_name[end_code];
2238 end_line = get_lineno (last);
2239 }
2240 else
2241 end_name = "no-statement";
2242
2243 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2244 bb->index, bb->index, head_name, head_line, end_name,
2245 end_line);
2246
2247 FOR_EACH_EDGE (e, ei, bb->succs)
2248 {
2249 if (e->dest == EXIT_BLOCK_PTR)
2250 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2251 else
2252 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2253
2254 if (e->flags & EDGE_FAKE)
2255 fprintf (file, " priority: 10 linestyle: dotted");
2256 else
2257 fprintf (file, " priority: 100 linestyle: solid");
2258
2259 fprintf (file, " }\n");
2260 }
2261
2262 if (bb->next_bb != EXIT_BLOCK_PTR)
2263 fputc ('\n', file);
2264 }
2265
2266 fputs ("}\n\n", file);
2267 }
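/* For reference, the emitted VCG text for a function "foo" with a
   single body block might look like this (illustrative output only):

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

     node: { title: "2" label: "#2\ngimple_assign (3)\ngimple_return (4)"}
     edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
     }
*/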
2268
2269
2270
2271 /*---------------------------------------------------------------------------
2272 Miscellaneous helpers
2273 ---------------------------------------------------------------------------*/
2274
2275 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2276 flow. Transfers of control flow associated with EH are excluded. */
2277
2278 static bool
2279 call_can_make_abnormal_goto (gimple t)
2280 {
2281 /* If the function has no non-local labels, then a call cannot make an
2282 abnormal transfer of control. */
2283 if (!cfun->has_nonlocal_label)
2284 return false;
2285
2286 /* Likewise if the call has no side effects. */
2287 if (!gimple_has_side_effects (t))
2288 return false;
2289
2290 /* Likewise if the called function is a leaf function. */
2291 if (gimple_call_flags (t) & ECF_LEAF)
2292 return false;
2293
2294 return true;
2295 }
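/* Illustrative example (GNU C, not part of GCC): with the nested
   function and non-local label below, cfun->has_nonlocal_label is set
   for "outer", so a call with side effects that is not ECF_LEAF, such
   as the call to "inner", may transfer control to "out" abnormally.

     void outer (void)
     {
       __label__ out;
       void inner (int i) { if (i) goto out; }
       inner (1);
     out:
       return;
     }
*/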
2296
2297
2298 /* Return true if T can make an abnormal transfer of control flow.
2299 Transfers of control flow associated with EH are excluded. */
2300
2301 bool
2302 stmt_can_make_abnormal_goto (gimple t)
2303 {
2304 if (computed_goto_p (t))
2305 return true;
2306 if (is_gimple_call (t))
2307 return call_can_make_abnormal_goto (t);
2308 return false;
2309 }
2310
2311
2312 /* Return true if T represents a stmt that always transfers control. */
2313
2314 bool
2315 is_ctrl_stmt (gimple t)
2316 {
2317 switch (gimple_code (t))
2318 {
2319 case GIMPLE_COND:
2320 case GIMPLE_SWITCH:
2321 case GIMPLE_GOTO:
2322 case GIMPLE_RETURN:
2323 case GIMPLE_RESX:
2324 return true;
2325 default:
2326 return false;
2327 }
2328 }
2329
2330
2331 /* Return true if T is a statement that may alter the flow of control
2332 (e.g., a call to a non-returning function). */
2333
2334 bool
2335 is_ctrl_altering_stmt (gimple t)
2336 {
2337 gcc_assert (t);
2338
2339 switch (gimple_code (t))
2340 {
2341 case GIMPLE_CALL:
2342 {
2343 int flags = gimple_call_flags (t);
2344
2345 /* A call alters control flow if it can make an abnormal goto. */
2346 if (call_can_make_abnormal_goto (t))
2347 return true;
2348
2349 /* A call also alters control flow if it does not return. */
2350 if (flags & ECF_NORETURN)
2351 return true;
2352
2353 /* TM ending statements have backedges out of the transaction.
2354 Return true so we split the basic block containing them.
2355 Note that the TM_BUILTIN test is merely an optimization. */
2356 if ((flags & ECF_TM_BUILTIN)
2357 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2358 return true;
2359
2360 /* A BUILT_IN_RETURN call is the same as a return statement. */
2361 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2362 return true;
2363 }
2364 break;
2365
2366 case GIMPLE_EH_DISPATCH:
2367 /* EH_DISPATCH branches to the individual catch handlers at
2368 this level of a try or allowed-exceptions region. It can
2369 fallthru to the next statement as well. */
2370 return true;
2371
2372 case GIMPLE_ASM:
2373 if (gimple_asm_nlabels (t) > 0)
2374 return true;
2375 break;
2376
2377 CASE_GIMPLE_OMP:
2378 /* OpenMP directives alter control flow. */
2379 return true;
2380
2381 case GIMPLE_TRANSACTION:
2382 /* A transaction start alters control flow. */
2383 return true;
2384
2385 default:
2386 break;
2387 }
2388
2389 /* If a statement can throw, it alters control flow. */
2390 return stmt_can_throw_internal (t);
2391 }
2392
2393
2394 /* Return true if T is a simple local goto. */
2395
2396 bool
2397 simple_goto_p (gimple t)
2398 {
2399 return (gimple_code (t) == GIMPLE_GOTO
2400 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2401 }
2402
2403
2404 /* Return true if STMT should start a new basic block. PREV_STMT is
2405 the statement preceding STMT. It is used when STMT is a label or a
2406 case label. Labels should only start a new basic block if their
2407 previous statement wasn't a label. Otherwise, a sequence of labels
2408 would generate unnecessary basic blocks that only contain a single
2409 label. */
2410
2411 static inline bool
2412 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2413 {
2414 if (stmt == NULL)
2415 return false;
2416
2417 /* Labels start a new basic block only if the preceding statement
2418 wasn't a label of the same type. This prevents the creation of
2419 consecutive blocks that have nothing but a single label. */
2420 if (gimple_code (stmt) == GIMPLE_LABEL)
2421 {
2422 /* Nonlocal and computed GOTO targets always start a new block. */
2423 if (DECL_NONLOCAL (gimple_label_label (stmt))
2424 || FORCED_LABEL (gimple_label_label (stmt)))
2425 return true;
2426
2427 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2428 {
2429 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2430 return true;
2431
2432 cfg_stats.num_merged_labels++;
2433 return false;
2434 }
2435 else
2436 return true;
2437 }
2438
2439 return false;
2440 }
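/* For illustration (not from any real dump): in the GIMPLE sequence

     L1:
     L2:
       x_1 = a_2 + b_3;

   only the first label starts a new basic block; L2 is merged into the
   same block and counted in cfg_stats.num_merged_labels, provided
   neither label is non-local or forced (a computed-goto target).  */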
2441
2442
2443 /* Return true if T should end a basic block. */
2444
2445 bool
2446 stmt_ends_bb_p (gimple t)
2447 {
2448 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2449 }
2450
2451 /* Remove block annotations and other data structures. */
2452
2453 void
2454 delete_tree_cfg_annotations (void)
2455 {
2456 label_to_block_map = NULL;
2457 }
2458
2459
2460 /* Return the first statement in basic block BB. */
2461
2462 gimple
2463 first_stmt (basic_block bb)
2464 {
2465 gimple_stmt_iterator i = gsi_start_bb (bb);
2466 gimple stmt = NULL;
2467
2468 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2469 {
2470 gsi_next (&i);
2471 stmt = NULL;
2472 }
2473 return stmt;
2474 }
2475
2476 /* Return the first non-label statement in basic block BB. */
2477
2478 static gimple
2479 first_non_label_stmt (basic_block bb)
2480 {
2481 gimple_stmt_iterator i = gsi_start_bb (bb);
2482 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2483 gsi_next (&i);
2484 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2485 }
2486
2487 /* Return the last statement in basic block BB. */
2488
2489 gimple
2490 last_stmt (basic_block bb)
2491 {
2492 gimple_stmt_iterator i = gsi_last_bb (bb);
2493 gimple stmt = NULL;
2494
2495 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2496 {
2497 gsi_prev (&i);
2498 stmt = NULL;
2499 }
2500 return stmt;
2501 }
2502
2503 /* Return the last statement of an otherwise empty block. Return NULL
2504 if the block is totally empty, or if it contains more than one
2505 statement. */
2506
2507 gimple
2508 last_and_only_stmt (basic_block bb)
2509 {
2510 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2511 gimple last, prev;
2512
2513 if (gsi_end_p (i))
2514 return NULL;
2515
2516 last = gsi_stmt (i);
2517 gsi_prev_nondebug (&i);
2518 if (gsi_end_p (i))
2519 return last;
2520
2521 /* Empty statements should no longer appear in the instruction stream.
2522 Everything that might have appeared before should be deleted by
2523 remove_useless_stmts, and the optimizers should just gsi_remove
2524 instead of smashing with build_empty_stmt.
2525
2526 Thus the only thing that should appear here in a block containing
2527 one executable statement is a label. */
2528 prev = gsi_stmt (i);
2529 if (gimple_code (prev) == GIMPLE_LABEL)
2530 return last;
2531 else
2532 return NULL;
2533 }
2534
2535 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2536
2537 static void
2538 reinstall_phi_args (edge new_edge, edge old_edge)
2539 {
2540 edge_var_map_vector v;
2541 edge_var_map *vm;
2542 int i;
2543 gimple_stmt_iterator phis;
2544
2545 v = redirect_edge_var_map_vector (old_edge);
2546 if (!v)
2547 return;
2548
2549 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2550 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2551 i++, gsi_next (&phis))
2552 {
2553 gimple phi = gsi_stmt (phis);
2554 tree result = redirect_edge_var_map_result (vm);
2555 tree arg = redirect_edge_var_map_def (vm);
2556
2557 gcc_assert (result == gimple_phi_result (phi));
2558
2559 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2560 }
2561
2562 redirect_edge_var_map_clear (old_edge);
2563 }
2564
2565 /* Returns the basic block after which the new basic block created
2566 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2567 near its "logical" location. This is of most help to humans looking
2568 at debugging dumps. */
2569
2570 static basic_block
2571 split_edge_bb_loc (edge edge_in)
2572 {
2573 basic_block dest = edge_in->dest;
2574 basic_block dest_prev = dest->prev_bb;
2575
2576 if (dest_prev)
2577 {
2578 edge e = find_edge (dest_prev, dest);
2579 if (e && !(e->flags & EDGE_COMPLEX))
2580 return edge_in->src;
2581 }
2582 return dest_prev;
2583 }
2584
2585 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2586 Abort on abnormal edges. */
2587
2588 static basic_block
2589 gimple_split_edge (edge edge_in)
2590 {
2591 basic_block new_bb, after_bb, dest;
2592 edge new_edge, e;
2593
2594 /* Abnormal edges cannot be split. */
2595 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2596
2597 dest = edge_in->dest;
2598
2599 after_bb = split_edge_bb_loc (edge_in);
2600
2601 new_bb = create_empty_bb (after_bb);
2602 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2603 new_bb->count = edge_in->count;
2604 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2605 new_edge->probability = REG_BR_PROB_BASE;
2606 new_edge->count = edge_in->count;
2607
2608 e = redirect_edge_and_branch (edge_in, new_bb);
2609 gcc_assert (e == edge_in);
2610 reinstall_phi_args (new_edge, e);
2611
2612 return new_bb;
2613 }
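/* Schematically (illustration only): splitting the edge E from block A
   to block B inserts an empty block N,

        A                 A
        |                 |
        E       ==>       N
        |                 |
        B                 B

   where A->N is the redirected original edge E, and N->B is a fallthru
   edge with probability REG_BR_PROB_BASE and the count of E.  */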
2614
2615
2616 /* Verify properties of the address expression T with base object BASE. */
2617
2618 static tree
2619 verify_address (tree t, tree base)
2620 {
2621 bool old_constant;
2622 bool old_side_effects;
2623 bool new_constant;
2624 bool new_side_effects;
2625
2626 old_constant = TREE_CONSTANT (t);
2627 old_side_effects = TREE_SIDE_EFFECTS (t);
2628
2629 recompute_tree_invariant_for_addr_expr (t);
2630 new_side_effects = TREE_SIDE_EFFECTS (t);
2631 new_constant = TREE_CONSTANT (t);
2632
2633 if (old_constant != new_constant)
2634 {
2635 error ("constant not recomputed when ADDR_EXPR changed");
2636 return t;
2637 }
2638 if (old_side_effects != new_side_effects)
2639 {
2640 error ("side effects not recomputed when ADDR_EXPR changed");
2641 return t;
2642 }
2643
2644 if (!(TREE_CODE (base) == VAR_DECL
2645 || TREE_CODE (base) == PARM_DECL
2646 || TREE_CODE (base) == RESULT_DECL))
2647 return NULL_TREE;
2648
2649 if (DECL_GIMPLE_REG_P (base))
2650 {
2651 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2652 return base;
2653 }
2654
2655 return NULL_TREE;
2656 }
2657
2658 /* Callback for walk_tree, check that all elements with address taken are
2659 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2660 inside a PHI node. */
2661
2662 static tree
2663 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2664 {
2665 tree t = *tp, x;
2666
2667 if (TYPE_P (t))
2668 *walk_subtrees = 0;
2669
2670 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2671 #define CHECK_OP(N, MSG) \
2672 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2673 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2674
2675 switch (TREE_CODE (t))
2676 {
2677 case SSA_NAME:
2678 if (SSA_NAME_IN_FREE_LIST (t))
2679 {
2680 error ("SSA name in freelist but still referenced");
2681 return *tp;
2682 }
2683 break;
2684
2685 case INDIRECT_REF:
2686 error ("INDIRECT_REF in gimple IL");
2687 return t;
2688
2689 case MEM_REF:
2690 x = TREE_OPERAND (t, 0);
2691 if (!POINTER_TYPE_P (TREE_TYPE (x))
2692 || !is_gimple_mem_ref_addr (x))
2693 {
2694 error ("invalid first operand of MEM_REF");
2695 return x;
2696 }
2697 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2698 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2699 {
2700 error ("invalid offset operand of MEM_REF");
2701 return TREE_OPERAND (t, 1);
2702 }
2703 if (TREE_CODE (x) == ADDR_EXPR
2704 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2705 return x;
2706 *walk_subtrees = 0;
2707 break;
2708
2709 case ASSERT_EXPR:
2710 x = fold (ASSERT_EXPR_COND (t));
2711 if (x == boolean_false_node)
2712 {
2713 error ("ASSERT_EXPR with an always-false condition");
2714 return *tp;
2715 }
2716 break;
2717
2718 case MODIFY_EXPR:
2719 error ("MODIFY_EXPR not expected while having tuples");
2720 return *tp;
2721
2722 case ADDR_EXPR:
2723 {
2724 tree tem;
2725
2726 gcc_assert (is_gimple_address (t));
2727
2728 /* Skip any references (they will be checked when we recurse down the
2729 tree) and ensure that any variable used as a prefix is marked
2730 addressable. */
2731 for (x = TREE_OPERAND (t, 0);
2732 handled_component_p (x);
2733 x = TREE_OPERAND (x, 0))
2734 ;
2735
2736 if ((tem = verify_address (t, x)))
2737 return tem;
2738
2739 if (!(TREE_CODE (x) == VAR_DECL
2740 || TREE_CODE (x) == PARM_DECL
2741 || TREE_CODE (x) == RESULT_DECL))
2742 return NULL;
2743
2744 if (!TREE_ADDRESSABLE (x))
2745 {
2746 error ("address taken, but ADDRESSABLE bit not set");
2747 return x;
2748 }
2749
2750 break;
2751 }
2752
2753 case COND_EXPR:
2754 x = COND_EXPR_COND (t);
2755 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2756 {
2757 error ("non-integral used in condition");
2758 return x;
2759 }
2760 if (!is_gimple_condexpr (x))
2761 {
2762 error ("invalid conditional operand");
2763 return x;
2764 }
2765 break;
2766
2767 case NON_LVALUE_EXPR:
2768 case TRUTH_NOT_EXPR:
2769 gcc_unreachable ();
2770
2771 CASE_CONVERT:
2772 case FIX_TRUNC_EXPR:
2773 case FLOAT_EXPR:
2774 case NEGATE_EXPR:
2775 case ABS_EXPR:
2776 case BIT_NOT_EXPR:
2777 CHECK_OP (0, "invalid operand to unary operator");
2778 break;
2779
2780 case REALPART_EXPR:
2781 case IMAGPART_EXPR:
2782 case COMPONENT_REF:
2783 case ARRAY_REF:
2784 case ARRAY_RANGE_REF:
2785 case BIT_FIELD_REF:
2786 case VIEW_CONVERT_EXPR:
2787 /* We have a nest of references. Verify that each of the operands
2788 that determine where to reference is either a constant or a variable,
2789 verify that the base is valid, and then show we've already checked
2790 the subtrees. */
2791 while (handled_component_p (t))
2792 {
2793 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2794 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2795 else if (TREE_CODE (t) == ARRAY_REF
2796 || TREE_CODE (t) == ARRAY_RANGE_REF)
2797 {
2798 CHECK_OP (1, "invalid array index");
2799 if (TREE_OPERAND (t, 2))
2800 CHECK_OP (2, "invalid array lower bound");
2801 if (TREE_OPERAND (t, 3))
2802 CHECK_OP (3, "invalid array stride");
2803 }
2804 else if (TREE_CODE (t) == BIT_FIELD_REF)
2805 {
2806 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2807 || !host_integerp (TREE_OPERAND (t, 2), 1))
2808 {
2809 error ("invalid position or size operand to BIT_FIELD_REF");
2810 return t;
2811 }
2812 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2813 && (TYPE_PRECISION (TREE_TYPE (t))
2814 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2815 {
2816 error ("integral result type precision does not match "
2817 "field size of BIT_FIELD_REF");
2818 return t;
2819 }
2820 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2821 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2822 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2823 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2824 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2825 {
2826 error ("mode precision of non-integral result does not "
2827 "match field size of BIT_FIELD_REF");
2828 return t;
2829 }
2830 }
2831
2832 t = TREE_OPERAND (t, 0);
2833 }
2834
2835 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2836 {
2837 error ("invalid reference prefix");
2838 return t;
2839 }
2840 *walk_subtrees = 0;
2841 break;
2842 case PLUS_EXPR:
2843 case MINUS_EXPR:
2844 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2845 should be done using POINTER_PLUS_EXPR. */
2846 if (POINTER_TYPE_P (TREE_TYPE (t)))
2847 {
2848 error ("invalid operand to plus/minus, type is a pointer");
2849 return t;
2850 }
2851 CHECK_OP (0, "invalid operand to binary operator");
2852 CHECK_OP (1, "invalid operand to binary operator");
2853 break;
2854
2855 case POINTER_PLUS_EXPR:
2856 /* Check to make sure the first operand is a pointer or reference type. */
2857 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2858 {
2859 error ("invalid operand to pointer plus, first operand is not a pointer");
2860 return t;
2861 }
2862 /* Check to make sure the second operand is a ptrofftype. */
2863 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2864 {
2865 error ("invalid operand to pointer plus, second operand is not an "
2866 "integer type of appropriate width");
2867 return t;
2868 }
2869 /* FALLTHROUGH */
2870 case LT_EXPR:
2871 case LE_EXPR:
2872 case GT_EXPR:
2873 case GE_EXPR:
2874 case EQ_EXPR:
2875 case NE_EXPR:
2876 case UNORDERED_EXPR:
2877 case ORDERED_EXPR:
2878 case UNLT_EXPR:
2879 case UNLE_EXPR:
2880 case UNGT_EXPR:
2881 case UNGE_EXPR:
2882 case UNEQ_EXPR:
2883 case LTGT_EXPR:
2884 case MULT_EXPR:
2885 case TRUNC_DIV_EXPR:
2886 case CEIL_DIV_EXPR:
2887 case FLOOR_DIV_EXPR:
2888 case ROUND_DIV_EXPR:
2889 case TRUNC_MOD_EXPR:
2890 case CEIL_MOD_EXPR:
2891 case FLOOR_MOD_EXPR:
2892 case ROUND_MOD_EXPR:
2893 case RDIV_EXPR:
2894 case EXACT_DIV_EXPR:
2895 case MIN_EXPR:
2896 case MAX_EXPR:
2897 case LSHIFT_EXPR:
2898 case RSHIFT_EXPR:
2899 case LROTATE_EXPR:
2900 case RROTATE_EXPR:
2901 case BIT_IOR_EXPR:
2902 case BIT_XOR_EXPR:
2903 case BIT_AND_EXPR:
2904 CHECK_OP (0, "invalid operand to binary operator");
2905 CHECK_OP (1, "invalid operand to binary operator");
2906 break;
2907
2908 case CONSTRUCTOR:
2909 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2910 *walk_subtrees = 0;
2911 break;
2912
2913 case CASE_LABEL_EXPR:
2914 if (CASE_CHAIN (t))
2915 {
2916 error ("invalid CASE_CHAIN");
2917 return t;
2918 }
2919 break;
2920
2921 default:
2922 break;
2923 }
2924 return NULL;
2925
2926 #undef CHECK_OP
2927 }
2928
2929
2930 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2931 Returns true if there is an error, otherwise false. */
2932
2933 static bool
2934 verify_types_in_gimple_min_lval (tree expr)
2935 {
2936 tree op;
2937
2938 if (is_gimple_id (expr))
2939 return false;
2940
2941 if (TREE_CODE (expr) != TARGET_MEM_REF
2942 && TREE_CODE (expr) != MEM_REF)
2943 {
2944 error ("invalid expression for min lvalue");
2945 return true;
2946 }
2947
2948 /* TARGET_MEM_REFs are strange beasts. */
2949 if (TREE_CODE (expr) == TARGET_MEM_REF)
2950 return false;
2951
2952 op = TREE_OPERAND (expr, 0);
2953 if (!is_gimple_val (op))
2954 {
2955 error ("invalid operand in indirect reference");
2956 debug_generic_stmt (op);
2957 return true;
2958 }
2959 /* Memory references now generally can involve a value conversion. */
2960
2961 return false;
2962 }
2963
2964 /* Verify if EXPR is a valid GIMPLE reference expression. If
2965 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2966 if there is an error, otherwise false. */
2967
2968 static bool
2969 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2970 {
2971 while (handled_component_p (expr))
2972 {
2973 tree op = TREE_OPERAND (expr, 0);
2974
2975 if (TREE_CODE (expr) == ARRAY_REF
2976 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2977 {
2978 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2979 || (TREE_OPERAND (expr, 2)
2980 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2981 || (TREE_OPERAND (expr, 3)
2982 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2983 {
2984 error ("invalid operands to array reference");
2985 debug_generic_stmt (expr);
2986 return true;
2987 }
2988 }
2989
2990 /* Verify if the reference array element types are compatible. */
2991 if (TREE_CODE (expr) == ARRAY_REF
2992 && !useless_type_conversion_p (TREE_TYPE (expr),
2993 TREE_TYPE (TREE_TYPE (op))))
2994 {
2995 error ("type mismatch in array reference");
2996 debug_generic_stmt (TREE_TYPE (expr));
2997 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2998 return true;
2999 }
3000 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3001 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3002 TREE_TYPE (TREE_TYPE (op))))
3003 {
3004 error ("type mismatch in array range reference");
3005 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3006 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3007 return true;
3008 }
3009
3010 if ((TREE_CODE (expr) == REALPART_EXPR
3011 || TREE_CODE (expr) == IMAGPART_EXPR)
3012 && !useless_type_conversion_p (TREE_TYPE (expr),
3013 TREE_TYPE (TREE_TYPE (op))))
3014 {
3015 error ("type mismatch in real/imagpart reference");
3016 debug_generic_stmt (TREE_TYPE (expr));
3017 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3018 return true;
3019 }
3020
3021 if (TREE_CODE (expr) == COMPONENT_REF
3022 && !useless_type_conversion_p (TREE_TYPE (expr),
3023 TREE_TYPE (TREE_OPERAND (expr, 1))))
3024 {
3025 error ("type mismatch in component reference");
3026 debug_generic_stmt (TREE_TYPE (expr));
3027 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3028 return true;
3029 }
3030
3031 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3032 {
3033 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
3034 that their operand is not an SSA name or an invariant when
3035 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3036 bug). Otherwise there is nothing to verify; gross mismatches at
3037 most invoke undefined behavior. */
3038 if (require_lvalue
3039 && (TREE_CODE (op) == SSA_NAME
3040 || is_gimple_min_invariant (op)))
3041 {
3042 error ("conversion of an SSA_NAME on the left hand side");
3043 debug_generic_stmt (expr);
3044 return true;
3045 }
3046 else if (TREE_CODE (op) == SSA_NAME
3047 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3048 {
3049 error ("conversion of register to a different size");
3050 debug_generic_stmt (expr);
3051 return true;
3052 }
3053 else if (!handled_component_p (op))
3054 return false;
3055 }
3056
3057 expr = op;
3058 }
3059
3060 if (TREE_CODE (expr) == MEM_REF)
3061 {
3062 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3063 {
3064 error ("invalid address operand in MEM_REF");
3065 debug_generic_stmt (expr);
3066 return true;
3067 }
3068 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3069 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3070 {
3071 error ("invalid offset operand in MEM_REF");
3072 debug_generic_stmt (expr);
3073 return true;
3074 }
3075 }
3076 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3077 {
3078 if (!TMR_BASE (expr)
3079 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3080 {
3081 error ("invalid address operand in TARGET_MEM_REF");
3082 return true;
3083 }
3084 if (!TMR_OFFSET (expr)
3085 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3086 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3087 {
3088 error ("invalid offset operand in TARGET_MEM_REF");
3089 debug_generic_stmt (expr);
3090 return true;
3091 }
3092 }
3093
3094 return ((require_lvalue || !is_gimple_min_invariant (expr))
3095 && verify_types_in_gimple_min_lval (expr));
3096 }
3097
3098 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3099 list of pointer-to types that is trivially convertible to DEST. */
3100
3101 static bool
3102 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3103 {
3104 tree src;
3105
3106 if (!TYPE_POINTER_TO (src_obj))
3107 return true;
3108
3109 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3110 if (useless_type_conversion_p (dest, src))
3111 return true;
3112
3113 return false;
3114 }
3115
3116 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3117 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3118
3119 static bool
3120 valid_fixed_convert_types_p (tree type1, tree type2)
3121 {
3122 return (FIXED_POINT_TYPE_P (type1)
3123 && (INTEGRAL_TYPE_P (type2)
3124 || SCALAR_FLOAT_TYPE_P (type2)
3125 || FIXED_POINT_TYPE_P (type2)));
3126 }
3127
3128 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3129 is a problem, otherwise false. */
3130
3131 static bool
3132 verify_gimple_call (gimple stmt)
3133 {
3134 tree fn = gimple_call_fn (stmt);
3135 tree fntype, fndecl;
3136 unsigned i;
3137
3138 if (gimple_call_internal_p (stmt))
3139 {
3140 if (fn)
3141 {
3142 error ("gimple call has two targets");
3143 debug_generic_stmt (fn);
3144 return true;
3145 }
3146 }
3147 else
3148 {
3149 if (!fn)
3150 {
3151 error ("gimple call has no target");
3152 return true;
3153 }
3154 }
3155
3156 if (fn && !is_gimple_call_addr (fn))
3157 {
3158 error ("invalid function in gimple call");
3159 debug_generic_stmt (fn);
3160 return true;
3161 }
3162
3163 if (fn
3164 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3165 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3166 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3167 {
3168 error ("non-function in gimple call");
3169 return true;
3170 }
3171
3172 fndecl = gimple_call_fndecl (stmt);
3173 if (fndecl
3174 && TREE_CODE (fndecl) == FUNCTION_DECL
3175 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3176 && !DECL_PURE_P (fndecl)
3177 && !TREE_READONLY (fndecl))
3178 {
3179 error ("invalid pure const state for function");
3180 return true;
3181 }
3182
3183 if (gimple_call_lhs (stmt)
3184 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3185 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3186 {
3187 error ("invalid LHS in gimple call");
3188 return true;
3189 }
3190
3191 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3192 {
3193 error ("LHS in noreturn call");
3194 return true;
3195 }
3196
3197 fntype = gimple_call_fntype (stmt);
3198 if (fntype
3199 && gimple_call_lhs (stmt)
3200 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3201 TREE_TYPE (fntype))
3202 /* ??? At least C++ misses conversions at assignments from
3203 void * call results.
3204 ??? Java is completely off. Especially with functions
3205 returning java.lang.Object.
3206 For now simply allow arbitrary pointer type conversions. */
3207 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3208 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3209 {
3210 error ("invalid conversion in gimple call");
3211 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3212 debug_generic_stmt (TREE_TYPE (fntype));
3213 return true;
3214 }
3215
3216 if (gimple_call_chain (stmt)
3217 && !is_gimple_val (gimple_call_chain (stmt)))
3218 {
3219 error ("invalid static chain in gimple call");
3220 debug_generic_stmt (gimple_call_chain (stmt));
3221 return true;
3222 }
3223
3224 /* If there is a static chain argument, this should not be an indirect
3225 call, and the decl should have DECL_STATIC_CHAIN set. */
3226 if (gimple_call_chain (stmt))
3227 {
3228 if (!gimple_call_fndecl (stmt))
3229 {
3230 error ("static chain in indirect gimple call");
3231 return true;
3232 }
3233 fn = TREE_OPERAND (fn, 0);
3234
3235 if (!DECL_STATIC_CHAIN (fn))
3236 {
3237 error ("static chain with function that doesn%'t use one");
3238 return true;
3239 }
3240 }
3241
3242 /* ??? The C frontend passes unpromoted arguments in case it
3243 didn't see a function declaration before the call. So for now
3244 leave the call arguments mostly unverified. Once we gimplify
3245 unit-at-a-time we have a chance to fix this. */
3246
3247 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3248 {
3249 tree arg = gimple_call_arg (stmt, i);
3250 if ((is_gimple_reg_type (TREE_TYPE (arg))
3251 && !is_gimple_val (arg))
3252 || (!is_gimple_reg_type (TREE_TYPE (arg))
3253 && !is_gimple_lvalue (arg)))
3254 {
3255 error ("invalid argument to gimple call");
3256 debug_generic_expr (arg);
3257 return true;
3258 }
3259 }
3260
3261 return false;
3262 }
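/* For example (illustrative only), GIMPLE such as

     x_1 = abort ();

   is rejected with "LHS in noreturn call": a noreturn callee never
   yields a value, so an assignment of its result indicates corrupted
   IL rather than a user error.  */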
3263
3264 /* Verifies the gimple comparison with the result type TYPE and
3265 the operands OP0 and OP1. */
3266
3267 static bool
3268 verify_gimple_comparison (tree type, tree op0, tree op1)
3269 {
3270 tree op0_type = TREE_TYPE (op0);
3271 tree op1_type = TREE_TYPE (op1);
3272
3273 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3274 {
3275 error ("invalid operands in gimple comparison");
3276 return true;
3277 }
3278
3279 /* For comparisons we do not have the operations type as the
3280 effective type the comparison is carried out in. Instead
3281 we require that either the first operand is trivially
3282 convertible into the second, or the other way around.
3283 Because we special-case pointers to void we allow
3284 comparisons of pointers with the same mode as well. */
3285 if (!useless_type_conversion_p (op0_type, op1_type)
3286 && !useless_type_conversion_p (op1_type, op0_type)
3287 && (!POINTER_TYPE_P (op0_type)
3288 || !POINTER_TYPE_P (op1_type)
3289 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3290 {
3291 error ("mismatching comparison operand types");
3292 debug_generic_expr (op0_type);
3293 debug_generic_expr (op1_type);
3294 return true;
3295 }
3296
3297 /* The resulting type of a comparison may be an effective boolean type. */
3298 if (INTEGRAL_TYPE_P (type)
3299 && (TREE_CODE (type) == BOOLEAN_TYPE
3300 || TYPE_PRECISION (type) == 1))
3301 ;
3302 /* Or an integer vector type with the same size and element count
3303 as the comparison operand types. */
3304 else if (TREE_CODE (type) == VECTOR_TYPE
3305 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3306 {
3307 if (TREE_CODE (op0_type) != VECTOR_TYPE
3308 || TREE_CODE (op1_type) != VECTOR_TYPE)
3309 {
3310 error ("non-vector operands in vector comparison");
3311 debug_generic_expr (op0_type);
3312 debug_generic_expr (op1_type);
3313 return true;
3314 }
3315
3316 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3317 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3318 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3319 {
3320 error ("invalid vector comparison resulting type");
3321 debug_generic_expr (type);
3322 return true;
3323 }
3324 }
3325 else
3326 {
3327 error ("bogus comparison result type");
3328 debug_generic_expr (type);
3329 return true;
3330 }
3331
3332 return false;
3333 }
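/* Illustrative source-level stand-ins for the GIMPLE comparisons the
   checks above accept and reject (not part of GCC):

     _Bool b;  int i;  long l;  int *p;  void *q;

     b = i < i;     accepted: boolean result, matching operand types
     b = p == q;    accepted: pointer operands of the same mode
     b = i < l;     rejected: "mismatching comparison operand types"

   In real GIMPLE the third form never survives gimplification, which
   inserts the required conversion first.  */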
3334
3335 /* Verify a gimple assignment statement STMT with an unary rhs.
3336 Returns true if anything is wrong. */
3337
3338 static bool
3339 verify_gimple_assign_unary (gimple stmt)
3340 {
3341 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3342 tree lhs = gimple_assign_lhs (stmt);
3343 tree lhs_type = TREE_TYPE (lhs);
3344 tree rhs1 = gimple_assign_rhs1 (stmt);
3345 tree rhs1_type = TREE_TYPE (rhs1);
3346
3347 if (!is_gimple_reg (lhs))
3348 {
3349 error ("non-register as LHS of unary operation");
3350 return true;
3351 }
3352
3353 if (!is_gimple_val (rhs1))
3354 {
3355 error ("invalid operand in unary operation");
3356 return true;
3357 }
3358
3359 /* First handle conversions. */
3360 switch (rhs_code)
3361 {
3362 CASE_CONVERT:
3363 {
3364 /* Allow conversions from pointer type to integral type only if
3365 there is no sign or zero extension involved.
3366 For targets where the precision of ptrofftype doesn't match that
3367 of pointers we need to allow arbitrary conversions to ptrofftype. */
3368 if ((POINTER_TYPE_P (lhs_type)
3369 && INTEGRAL_TYPE_P (rhs1_type))
3370 || (POINTER_TYPE_P (rhs1_type)
3371 && INTEGRAL_TYPE_P (lhs_type)
3372 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3373 || ptrofftype_p (sizetype))))
3374 return false;
3375
3376 /* Allow conversion from integer to offset type and vice versa. */
3377 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3378 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3379 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3380 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3381 return false;
3382
3383 /* Otherwise assert we are converting between types of the
3384 same kind. */
3385 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3386 {
3387 error ("invalid types in nop conversion");
3388 debug_generic_expr (lhs_type);
3389 debug_generic_expr (rhs1_type);
3390 return true;
3391 }
3392
3393 return false;
3394 }
3395
3396 case ADDR_SPACE_CONVERT_EXPR:
3397 {
3398 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3399 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3400 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3401 {
3402 error ("invalid types in address space conversion");
3403 debug_generic_expr (lhs_type);
3404 debug_generic_expr (rhs1_type);
3405 return true;
3406 }
3407
3408 return false;
3409 }
3410
3411 case FIXED_CONVERT_EXPR:
3412 {
3413 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3414 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3415 {
3416 error ("invalid types in fixed-point conversion");
3417 debug_generic_expr (lhs_type);
3418 debug_generic_expr (rhs1_type);
3419 return true;
3420 }
3421
3422 return false;
3423 }
3424
3425 case FLOAT_EXPR:
3426 {
3427 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3428 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3429 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3430 {
3431 error ("invalid types in conversion to floating point");
3432 debug_generic_expr (lhs_type);
3433 debug_generic_expr (rhs1_type);
3434 return true;
3435 }
3436
3437 return false;
3438 }
3439
3440 case FIX_TRUNC_EXPR:
3441 {
3442 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3443 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3444 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3445 {
3446 error ("invalid types in conversion to integer");
3447 debug_generic_expr (lhs_type);
3448 debug_generic_expr (rhs1_type);
3449 return true;
3450 }
3451
3452 return false;
3453 }
3454
3455 case VEC_UNPACK_HI_EXPR:
3456 case VEC_UNPACK_LO_EXPR:
3457 case REDUC_MAX_EXPR:
3458 case REDUC_MIN_EXPR:
3459 case REDUC_PLUS_EXPR:
3460 case VEC_UNPACK_FLOAT_HI_EXPR:
3461 case VEC_UNPACK_FLOAT_LO_EXPR:
3462 /* FIXME. */
3463 return false;
3464
3465 case NEGATE_EXPR:
3466 case ABS_EXPR:
3467 case BIT_NOT_EXPR:
3468 case PAREN_EXPR:
3469 case NON_LVALUE_EXPR:
3470 case CONJ_EXPR:
3471 break;
3472
3473 default:
3474 gcc_unreachable ();
3475 }
3476
3477 /* For the remaining codes assert there is no conversion involved. */
3478 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3479 {
3480 error ("non-trivial conversion in unary operation");
3481 debug_generic_expr (lhs_type);
3482 debug_generic_expr (rhs1_type);
3483 return true;
3484 }
3485
3486 return false;
3487 }
3488
3489 /* Verify a gimple assignment statement STMT with a binary rhs.
3490 Returns true if anything is wrong. */
3491
3492 static bool
3493 verify_gimple_assign_binary (gimple stmt)
3494 {
3495 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3496 tree lhs = gimple_assign_lhs (stmt);
3497 tree lhs_type = TREE_TYPE (lhs);
3498 tree rhs1 = gimple_assign_rhs1 (stmt);
3499 tree rhs1_type = TREE_TYPE (rhs1);
3500 tree rhs2 = gimple_assign_rhs2 (stmt);
3501 tree rhs2_type = TREE_TYPE (rhs2);
3502
3503 if (!is_gimple_reg (lhs))
3504 {
3505 error ("non-register as LHS of binary operation");
3506 return true;
3507 }
3508
3509 if (!is_gimple_val (rhs1)
3510 || !is_gimple_val (rhs2))
3511 {
3512 error ("invalid operands in binary operation");
3513 return true;
3514 }
3515
3516 /* First handle operations that involve different types. */
3517 switch (rhs_code)
3518 {
3519 case COMPLEX_EXPR:
3520 {
3521 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3522 || !(INTEGRAL_TYPE_P (rhs1_type)
3523 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3524 || !(INTEGRAL_TYPE_P (rhs2_type)
3525 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3526 {
3527 error ("type mismatch in complex expression");
3528 debug_generic_expr (lhs_type);
3529 debug_generic_expr (rhs1_type);
3530 debug_generic_expr (rhs2_type);
3531 return true;
3532 }
3533
3534 return false;
3535 }
3536
3537 case LSHIFT_EXPR:
3538 case RSHIFT_EXPR:
3539 case LROTATE_EXPR:
3540 case RROTATE_EXPR:
3541 {
3542 /* Shifts and rotates are ok on integral types, fixed point
3543 types and integer vector types. */
3544 if ((!INTEGRAL_TYPE_P (rhs1_type)
3545 && !FIXED_POINT_TYPE_P (rhs1_type)
3546 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3547 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3548 || (!INTEGRAL_TYPE_P (rhs2_type)
3549 /* Vector shifts of vectors are also ok. */
3550 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3551 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3552 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3553 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3554 || !useless_type_conversion_p (lhs_type, rhs1_type))
3555 {
3556 error ("type mismatch in shift expression");
3557 debug_generic_expr (lhs_type);
3558 debug_generic_expr (rhs1_type);
3559 debug_generic_expr (rhs2_type);
3560 return true;
3561 }
3562
3563 return false;
3564 }
3565
3566 case VEC_LSHIFT_EXPR:
3567 case VEC_RSHIFT_EXPR:
3568 {
3569 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3570 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3571 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3572 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3573 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3574 || (!INTEGRAL_TYPE_P (rhs2_type)
3575 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3576 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3577 || !useless_type_conversion_p (lhs_type, rhs1_type))
3578 {
3579 error ("type mismatch in vector shift expression");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 debug_generic_expr (rhs2_type);
3583 return true;
3584 }
3585 /* For shifting a vector of non-integral components we
3586 only allow shifting by a constant multiple of the element size. */
3587 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3588 && (TREE_CODE (rhs2) != INTEGER_CST
3589 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3590 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3591 {
3592 error ("non-element sized vector shift of floating point vector");
3593 return true;
3594 }
3595
3596 return false;
3597 }
3598
3599 case WIDEN_LSHIFT_EXPR:
3600 {
3601 if (!INTEGRAL_TYPE_P (lhs_type)
3602 || !INTEGRAL_TYPE_P (rhs1_type)
3603 || TREE_CODE (rhs2) != INTEGER_CST
3604 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3605 {
3606 error ("type mismatch in widening vector shift expression");
3607 debug_generic_expr (lhs_type);
3608 debug_generic_expr (rhs1_type);
3609 debug_generic_expr (rhs2_type);
3610 return true;
3611 }
3612
3613 return false;
3614 }
3615
3616 case VEC_WIDEN_LSHIFT_HI_EXPR:
3617 case VEC_WIDEN_LSHIFT_LO_EXPR:
3618 {
3619 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3620 || TREE_CODE (lhs_type) != VECTOR_TYPE
3621 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3622 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3623 || TREE_CODE (rhs2) != INTEGER_CST
3624 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3625 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3626 {
3627 error ("type mismatch in widening vector shift expression");
3628 debug_generic_expr (lhs_type);
3629 debug_generic_expr (rhs1_type);
3630 debug_generic_expr (rhs2_type);
3631 return true;
3632 }
3633
3634 return false;
3635 }
3636
3637 case PLUS_EXPR:
3638 case MINUS_EXPR:
3639 {
3640 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3641 ??? This just makes the checker happy and may not be what is
3642 intended. */
3643 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3644 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3645 {
3646 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3647 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3648 {
3649 error ("invalid non-vector operands to vector valued plus");
3650 return true;
3651 }
3652 lhs_type = TREE_TYPE (lhs_type);
3653 rhs1_type = TREE_TYPE (rhs1_type);
3654 rhs2_type = TREE_TYPE (rhs2_type);
3655 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3656 the pointer to 2nd place. */
3657 if (POINTER_TYPE_P (rhs2_type))
3658 {
3659 tree tem = rhs1_type;
3660 rhs1_type = rhs2_type;
3661 rhs2_type = tem;
3662 }
3663 goto do_pointer_plus_expr_check;
3664 }
3665 if (POINTER_TYPE_P (lhs_type)
3666 || POINTER_TYPE_P (rhs1_type)
3667 || POINTER_TYPE_P (rhs2_type))
3668 {
3669 error ("invalid (pointer) operands to plus/minus");
3670 return true;
3671 }
3672
3673 /* Continue with generic binary expression handling. */
3674 break;
3675 }
3676
3677 case POINTER_PLUS_EXPR:
3678 {
3679 do_pointer_plus_expr_check:
3680 if (!POINTER_TYPE_P (rhs1_type)
3681 || !useless_type_conversion_p (lhs_type, rhs1_type)
3682 || !ptrofftype_p (rhs2_type))
3683 {
3684 error ("type mismatch in pointer plus expression");
3685 debug_generic_stmt (lhs_type);
3686 debug_generic_stmt (rhs1_type);
3687 debug_generic_stmt (rhs2_type);
3688 return true;
3689 }
3690
3691 return false;
3692 }
3693
3694 case TRUTH_ANDIF_EXPR:
3695 case TRUTH_ORIF_EXPR:
3696 case TRUTH_AND_EXPR:
3697 case TRUTH_OR_EXPR:
3698 case TRUTH_XOR_EXPR:
3699
3700 gcc_unreachable ();
3701
3702 case LT_EXPR:
3703 case LE_EXPR:
3704 case GT_EXPR:
3705 case GE_EXPR:
3706 case EQ_EXPR:
3707 case NE_EXPR:
3708 case UNORDERED_EXPR:
3709 case ORDERED_EXPR:
3710 case UNLT_EXPR:
3711 case UNLE_EXPR:
3712 case UNGT_EXPR:
3713 case UNGE_EXPR:
3714 case UNEQ_EXPR:
3715 case LTGT_EXPR:
3716 /* Comparisons are also binary, but the result type is not
3717 connected to the operand types. */
3718 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3719
3720 case WIDEN_MULT_EXPR:
3721 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3722 return true;
3723 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3724 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3725
3726 case WIDEN_SUM_EXPR:
3727 case VEC_WIDEN_MULT_HI_EXPR:
3728 case VEC_WIDEN_MULT_LO_EXPR:
3729 case VEC_PACK_TRUNC_EXPR:
3730 case VEC_PACK_SAT_EXPR:
3731 case VEC_PACK_FIX_TRUNC_EXPR:
3732 /* FIXME. */
3733 return false;
3734
3735 case MULT_EXPR:
3736 case TRUNC_DIV_EXPR:
3737 case CEIL_DIV_EXPR:
3738 case FLOOR_DIV_EXPR:
3739 case ROUND_DIV_EXPR:
3740 case TRUNC_MOD_EXPR:
3741 case CEIL_MOD_EXPR:
3742 case FLOOR_MOD_EXPR:
3743 case ROUND_MOD_EXPR:
3744 case RDIV_EXPR:
3745 case EXACT_DIV_EXPR:
3746 case MIN_EXPR:
3747 case MAX_EXPR:
3748 case BIT_IOR_EXPR:
3749 case BIT_XOR_EXPR:
3750 case BIT_AND_EXPR:
3751 /* Continue with generic binary expression handling. */
3752 break;
3753
3754 default:
3755 gcc_unreachable ();
3756 }
3757
3758 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3759 || !useless_type_conversion_p (lhs_type, rhs2_type))
3760 {
3761 error ("type mismatch in binary expression");
3762 debug_generic_stmt (lhs_type);
3763 debug_generic_stmt (rhs1_type);
3764 debug_generic_stmt (rhs2_type);
3765 return true;
3766 }
3767
3768 return false;
3769 }
3770
3771 /* Verify a gimple assignment statement STMT with a ternary rhs.
3772 Returns true if anything is wrong. */
3773
3774 static bool
3775 verify_gimple_assign_ternary (gimple stmt)
3776 {
3777 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3778 tree lhs = gimple_assign_lhs (stmt);
3779 tree lhs_type = TREE_TYPE (lhs);
3780 tree rhs1 = gimple_assign_rhs1 (stmt);
3781 tree rhs1_type = TREE_TYPE (rhs1);
3782 tree rhs2 = gimple_assign_rhs2 (stmt);
3783 tree rhs2_type = TREE_TYPE (rhs2);
3784 tree rhs3 = gimple_assign_rhs3 (stmt);
3785 tree rhs3_type = TREE_TYPE (rhs3);
3786
3787 if (!is_gimple_reg (lhs))
3788 {
3789 error ("non-register as LHS of ternary operation");
3790 return true;
3791 }
3792
3793 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3794 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3795 || !is_gimple_val (rhs2)
3796 || !is_gimple_val (rhs3))
3797 {
3798 error ("invalid operands in ternary operation");
3799 return true;
3800 }
3801
3802 /* First handle operations that involve different types. */
3803 switch (rhs_code)
3804 {
3805 case WIDEN_MULT_PLUS_EXPR:
3806 case WIDEN_MULT_MINUS_EXPR:
3807 if ((!INTEGRAL_TYPE_P (rhs1_type)
3808 && !FIXED_POINT_TYPE_P (rhs1_type))
3809 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3810 || !useless_type_conversion_p (lhs_type, rhs3_type)
3811 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3812 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3813 {
3814 error ("type mismatch in widening multiply-accumulate expression");
3815 debug_generic_expr (lhs_type);
3816 debug_generic_expr (rhs1_type);
3817 debug_generic_expr (rhs2_type);
3818 debug_generic_expr (rhs3_type);
3819 return true;
3820 }
3821 break;
3822
3823 case FMA_EXPR:
3824 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3825 || !useless_type_conversion_p (lhs_type, rhs2_type)
3826 || !useless_type_conversion_p (lhs_type, rhs3_type))
3827 {
3828 error ("type mismatch in fused multiply-add expression");
3829 debug_generic_expr (lhs_type);
3830 debug_generic_expr (rhs1_type);
3831 debug_generic_expr (rhs2_type);
3832 debug_generic_expr (rhs3_type);
3833 return true;
3834 }
3835 break;
3836
3837 case COND_EXPR:
3838 case VEC_COND_EXPR:
3839 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3840 || !useless_type_conversion_p (lhs_type, rhs3_type))
3841 {
3842 error ("type mismatch in conditional expression");
3843 debug_generic_expr (lhs_type);
3844 debug_generic_expr (rhs2_type);
3845 debug_generic_expr (rhs3_type);
3846 return true;
3847 }
3848 break;
3849
3850 case VEC_PERM_EXPR:
3851 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3852 || !useless_type_conversion_p (lhs_type, rhs2_type))
3853 {
3854 error ("type mismatch in vector permute expression");
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3857 debug_generic_expr (rhs2_type);
3858 debug_generic_expr (rhs3_type);
3859 return true;
3860 }
3861
3862 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3863 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3864 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3865 {
3866 error ("vector types expected in vector permute expression");
3867 debug_generic_expr (lhs_type);
3868 debug_generic_expr (rhs1_type);
3869 debug_generic_expr (rhs2_type);
3870 debug_generic_expr (rhs3_type);
3871 return true;
3872 }
3873
3874 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3875 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3876 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3877 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3878 != TYPE_VECTOR_SUBPARTS (lhs_type))
3879 {
3880 error ("vectors with different element number found "
3881 "in vector permute expression");
3882 debug_generic_expr (lhs_type);
3883 debug_generic_expr (rhs1_type);
3884 debug_generic_expr (rhs2_type);
3885 debug_generic_expr (rhs3_type);
3886 return true;
3887 }
3888
3889 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3890 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3891 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3892 {
3893 error ("invalid mask type in vector permute expression");
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3896 debug_generic_expr (rhs2_type);
3897 debug_generic_expr (rhs3_type);
3898 return true;
3899 }
3900
3901 return false;
3902
3903 case DOT_PROD_EXPR:
3904 case REALIGN_LOAD_EXPR:
3905 /* FIXME. */
3906 return false;
3907
3908 default:
3909 gcc_unreachable ();
3910 }
3911 return false;
3912 }
3913
3914 /* Verify a gimple assignment statement STMT with a single rhs.
3915 Returns true if anything is wrong. */
3916
3917 static bool
3918 verify_gimple_assign_single (gimple stmt)
3919 {
3920 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3921 tree lhs = gimple_assign_lhs (stmt);
3922 tree lhs_type = TREE_TYPE (lhs);
3923 tree rhs1 = gimple_assign_rhs1 (stmt);
3924 tree rhs1_type = TREE_TYPE (rhs1);
3925 bool res = false;
3926
3927 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3928 {
3929 error ("non-trivial conversion at assignment");
3930 debug_generic_expr (lhs_type);
3931 debug_generic_expr (rhs1_type);
3932 return true;
3933 }
3934
3935 if (handled_component_p (lhs))
3936 res |= verify_types_in_gimple_reference (lhs, true);
3937
3938 /* Special codes we cannot handle via their class. */
3939 switch (rhs_code)
3940 {
3941 case ADDR_EXPR:
3942 {
3943 tree op = TREE_OPERAND (rhs1, 0);
3944 if (!is_gimple_addressable (op))
3945 {
3946 error ("invalid operand in unary expression");
3947 return true;
3948 }
3949
3950 /* Technically there is no longer a need for matching types, but
3951 gimple hygiene asks for this check. In LTO we can end up
3952 combining incompatible units and thus with addresses of
3953 globals whose types have been changed to a common one. */
3954 if (!in_lto_p
3955 && !types_compatible_p (TREE_TYPE (op),
3956 TREE_TYPE (TREE_TYPE (rhs1)))
3957 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3958 TREE_TYPE (op)))
3959 {
3960 error ("type mismatch in address expression");
3961 debug_generic_stmt (TREE_TYPE (rhs1));
3962 debug_generic_stmt (TREE_TYPE (op));
3963 return true;
3964 }
3965
3966 return verify_types_in_gimple_reference (op, true);
3967 }
3968
3969 /* tcc_reference */
3970 case INDIRECT_REF:
3971 error ("INDIRECT_REF in gimple IL");
3972 return true;
3973
3974 case COMPONENT_REF:
3975 case BIT_FIELD_REF:
3976 case ARRAY_REF:
3977 case ARRAY_RANGE_REF:
3978 case VIEW_CONVERT_EXPR:
3979 case REALPART_EXPR:
3980 case IMAGPART_EXPR:
3981 case TARGET_MEM_REF:
3982 case MEM_REF:
3983 if (!is_gimple_reg (lhs)
3984 && is_gimple_reg_type (TREE_TYPE (lhs)))
3985 {
3986 error ("invalid rhs for gimple memory store");
3987 debug_generic_stmt (lhs);
3988 debug_generic_stmt (rhs1);
3989 return true;
3990 }
3991 return res || verify_types_in_gimple_reference (rhs1, false);
3992
3993 /* tcc_constant */
3994 case SSA_NAME:
3995 case INTEGER_CST:
3996 case REAL_CST:
3997 case FIXED_CST:
3998 case COMPLEX_CST:
3999 case VECTOR_CST:
4000 case STRING_CST:
4001 return res;
4002
4003 /* tcc_declaration */
4004 case CONST_DECL:
4005 return res;
4006 case VAR_DECL:
4007 case PARM_DECL:
4008 if (!is_gimple_reg (lhs)
4009 && !is_gimple_reg (rhs1)
4010 && is_gimple_reg_type (TREE_TYPE (lhs)))
4011 {
4012 error ("invalid rhs for gimple memory store");
4013 debug_generic_stmt (lhs);
4014 debug_generic_stmt (rhs1);
4015 return true;
4016 }
4017 return res;
4018
4019 case CONSTRUCTOR:
4020 case OBJ_TYPE_REF:
4021 case ASSERT_EXPR:
4022 case WITH_SIZE_EXPR:
4023 /* FIXME. */
4024 return res;
4025
4026 default:;
4027 }
4028
4029 return res;
4030 }
4031
4032 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4033 is a problem, otherwise false. */
4034
4035 static bool
4036 verify_gimple_assign (gimple stmt)
4037 {
4038 switch (gimple_assign_rhs_class (stmt))
4039 {
4040 case GIMPLE_SINGLE_RHS:
4041 return verify_gimple_assign_single (stmt);
4042
4043 case GIMPLE_UNARY_RHS:
4044 return verify_gimple_assign_unary (stmt);
4045
4046 case GIMPLE_BINARY_RHS:
4047 return verify_gimple_assign_binary (stmt);
4048
4049 case GIMPLE_TERNARY_RHS:
4050 return verify_gimple_assign_ternary (stmt);
4051
4052 default:
4053 gcc_unreachable ();
4054 }
4055 }
4056
4057 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4058 is a problem, otherwise false. */
4059
4060 static bool
4061 verify_gimple_return (gimple stmt)
4062 {
4063 tree op = gimple_return_retval (stmt);
4064 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4065
4066 /* We cannot test for present return values as we do not fix up missing
4067 return values from the original source. */
4068 if (op == NULL)
4069 return false;
4070
4071 if (!is_gimple_val (op)
4072 && TREE_CODE (op) != RESULT_DECL)
4073 {
4074 error ("invalid operand in return statement");
4075 debug_generic_stmt (op);
4076 return true;
4077 }
4078
4079 if ((TREE_CODE (op) == RESULT_DECL
4080 && DECL_BY_REFERENCE (op))
4081 || (TREE_CODE (op) == SSA_NAME
4082 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4083 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4084 op = TREE_TYPE (op);
4085
4086 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4087 {
4088 error ("invalid conversion in return statement");
4089 debug_generic_stmt (restype);
4090 debug_generic_stmt (TREE_TYPE (op));
4091 return true;
4092 }
4093
4094 return false;
4095 }
4096
4097
4098 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4099 is a problem, otherwise false. */
4100
4101 static bool
4102 verify_gimple_goto (gimple stmt)
4103 {
4104 tree dest = gimple_goto_dest (stmt);
4105
4106 /* ??? We have two canonical forms of direct goto destinations, a
4107 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4108 if (TREE_CODE (dest) != LABEL_DECL
4109 && (!is_gimple_val (dest)
4110 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4111 {
4112 error ("goto destination is neither a label nor a pointer");
4113 return true;
4114 }
4115
4116 return false;
4117 }
4118
4119 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4120 is a problem, otherwise false. */
4121
4122 static bool
4123 verify_gimple_switch (gimple stmt)
4124 {
4125 if (!is_gimple_val (gimple_switch_index (stmt)))
4126 {
4127 error ("invalid operand to switch statement");
4128 debug_generic_stmt (gimple_switch_index (stmt));
4129 return true;
4130 }
4131
4132 return false;
4133 }
4134
4135 /* Verify a gimple debug statement STMT.
4136 Returns true if anything is wrong. */
4137
4138 static bool
4139 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4140 {
4141 /* There isn't much that could be wrong in a gimple debug stmt. A
4142 gimple debug bind stmt, for example, maps a tree (usually a
4143 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4144 member of an aggregate type) to another tree, which can be an
4145 arbitrary expression. These stmts expand into debug insns, and
4146 are converted to debug notes by var-tracking.c. */
4147 return false;
4148 }
4149
4150 /* Verify a gimple label statement STMT.
4151 Returns true if anything is wrong. */
4152
4153 static bool
4154 verify_gimple_label (gimple stmt)
4155 {
4156 tree decl = gimple_label_label (stmt);
4157 int uid;
4158 bool err = false;
4159
4160 if (TREE_CODE (decl) != LABEL_DECL)
4161 return true;
4162
4163 uid = LABEL_DECL_UID (decl);
4164 if (cfun->cfg
4165 && (uid == -1
4166 || VEC_index (basic_block,
4167 label_to_block_map, uid) != gimple_bb (stmt)))
4168 {
4169 error ("incorrect entry in label_to_block_map");
4170 err |= true;
4171 }
4172
4173 uid = EH_LANDING_PAD_NR (decl);
4174 if (uid)
4175 {
4176 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4177 if (decl != lp->post_landing_pad)
4178 {
4179 error ("incorrect setting of landing pad number");
4180 err |= true;
4181 }
4182 }
4183
4184 return err;
4185 }
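/* Illustrative sketch (hypothetical helper, not part of GCC): the
   invariant verified above ties a LABEL_DECL's uid to its defining
   block through label_to_block_map.  A caller holding a label
   statement could check the same mapping directly like so.  */
#if 0
static bool
example_label_maps_to_its_block_p (gimple label_stmt)
{
  tree decl = gimple_label_label (label_stmt);
  int uid = LABEL_DECL_UID (decl);

  /* A uid of -1 means the label was never entered into the map.  */
  return (uid != -1
	  && VEC_index (basic_block, label_to_block_map, uid)
	     == gimple_bb (label_stmt));
}
#endif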
4186
4187 /* Verify the GIMPLE statement STMT. Returns true if there is an
4188 error, otherwise false. */
4189
4190 static bool
4191 verify_gimple_stmt (gimple stmt)
4192 {
4193 switch (gimple_code (stmt))
4194 {
4195 case GIMPLE_ASSIGN:
4196 return verify_gimple_assign (stmt);
4197
4198 case GIMPLE_LABEL:
4199 return verify_gimple_label (stmt);
4200
4201 case GIMPLE_CALL:
4202 return verify_gimple_call (stmt);
4203
4204 case GIMPLE_COND:
4205 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4206 {
4207 error ("invalid comparison code in gimple cond");
4208 return true;
4209 }
4210 if (!(!gimple_cond_true_label (stmt)
4211 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4212 || !(!gimple_cond_false_label (stmt)
4213 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4214 {
4215 error ("invalid labels in gimple cond");
4216 return true;
4217 }
4218
4219 return verify_gimple_comparison (boolean_type_node,
4220 gimple_cond_lhs (stmt),
4221 gimple_cond_rhs (stmt));
4222
4223 case GIMPLE_GOTO:
4224 return verify_gimple_goto (stmt);
4225
4226 case GIMPLE_SWITCH:
4227 return verify_gimple_switch (stmt);
4228
4229 case GIMPLE_RETURN:
4230 return verify_gimple_return (stmt);
4231
4232 case GIMPLE_ASM:
4233 return false;
4234
4235 case GIMPLE_TRANSACTION:
4236 return verify_gimple_transaction (stmt);
4237
4238 /* Tuples that do not have tree operands. */
4239 case GIMPLE_NOP:
4240 case GIMPLE_PREDICT:
4241 case GIMPLE_RESX:
4242 case GIMPLE_EH_DISPATCH:
4243 case GIMPLE_EH_MUST_NOT_THROW:
4244 return false;
4245
4246 CASE_GIMPLE_OMP:
4247 /* OpenMP directives are validated by the FE and never operated
4248 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4249 non-gimple expressions when the main index variable has had
4250 its address taken. This does not affect the loop itself
4251 because the header of a GIMPLE_OMP_FOR is merely used to determine
4252 how to set up the parallel iteration. */
4253 return false;
4254
4255 case GIMPLE_DEBUG:
4256 return verify_gimple_debug (stmt);
4257
4258 default:
4259 gcc_unreachable ();
4260 }
4261 }
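/* Illustrative sketch (hypothetical wrapper, not part of GCC):
   verify_gimple_stmt only reports a boolean, so callers that want a
   dump of the offending statement pair it with debug_gimple_stmt,
   exactly as verify_gimple_in_seq_2 below does.  */
#if 0
static bool
example_check_one_stmt (gimple stmt)
{
  bool err = verify_gimple_stmt (stmt);
  if (err)
    debug_gimple_stmt (stmt);
  return err;
}
#endif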
4262
4263 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4264 and false otherwise. */
4265
4266 static bool
4267 verify_gimple_phi (gimple phi)
4268 {
4269 bool err = false;
4270 unsigned i;
4271 tree phi_result = gimple_phi_result (phi);
4272 bool virtual_p;
4273
4274 if (!phi_result)
4275 {
4276 error ("invalid PHI result");
4277 return true;
4278 }
4279
4280 virtual_p = !is_gimple_reg (phi_result);
4281 if (TREE_CODE (phi_result) != SSA_NAME
4282 || (virtual_p
4283 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4284 {
4285 error ("invalid PHI result");
4286 err = true;
4287 }
4288
4289 for (i = 0; i < gimple_phi_num_args (phi); i++)
4290 {
4291 tree t = gimple_phi_arg_def (phi, i);
4292
4293 if (!t)
4294 {
4295 error ("missing PHI def");
4296 err |= true;
4297 continue;
4298 }
4299 /* Addressable variables do have SSA_NAMEs but they
4300 are not considered gimple values. */
4301 else if ((TREE_CODE (t) == SSA_NAME
4302 && virtual_p != !is_gimple_reg (t))
4303 || (virtual_p
4304 && (TREE_CODE (t) != SSA_NAME
4305 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4306 || (!virtual_p
4307 && !is_gimple_val (t)))
4308 {
4309 error ("invalid PHI argument");
4310 debug_generic_expr (t);
4311 err |= true;
4312 }
4313 #ifdef ENABLE_TYPES_CHECKING
4314 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4315 {
4316 error ("incompatible types in PHI argument %u", i);
4317 debug_generic_stmt (TREE_TYPE (phi_result));
4318 debug_generic_stmt (TREE_TYPE (t));
4319 err |= true;
4320 }
4321 #endif
4322 }
4323
4324 return err;
4325 }
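/* Illustrative sketch (hypothetical helper, not part of GCC): checking
   every PHI node of a block mirrors the loop in verify_gimple_in_cfg
   further below, minus the tree-sharing and basic-block checks.  */
#if 0
static bool
example_check_phis_in_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool err = false;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    err |= verify_gimple_phi (gsi_stmt (gsi));

  return err;
}
#endif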
4326
4327 /* Verify the GIMPLE statements inside the sequence STMTS. */
4328
4329 static bool
4330 verify_gimple_in_seq_2 (gimple_seq stmts)
4331 {
4332 gimple_stmt_iterator ittr;
4333 bool err = false;
4334
4335 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4336 {
4337 gimple stmt = gsi_stmt (ittr);
4338
4339 switch (gimple_code (stmt))
4340 {
4341 case GIMPLE_BIND:
4342 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4343 break;
4344
4345 case GIMPLE_TRY:
4346 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4347 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4348 break;
4349
4350 case GIMPLE_EH_FILTER:
4351 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4352 break;
4353
4354 case GIMPLE_EH_ELSE:
4355 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4356 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4357 break;
4358
4359 case GIMPLE_CATCH:
4360 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4361 break;
4362
4363 case GIMPLE_TRANSACTION:
4364 err |= verify_gimple_transaction (stmt);
4365 break;
4366
4367 default:
4368 {
4369 bool err2 = verify_gimple_stmt (stmt);
4370 if (err2)
4371 debug_gimple_stmt (stmt);
4372 err |= err2;
4373 }
4374 }
4375 }
4376
4377 return err;
4378 }
4379
4380 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4381 is a problem, otherwise false. */
4382
4383 static bool
4384 verify_gimple_transaction (gimple stmt)
4385 {
4386 tree lab = gimple_transaction_label (stmt);
4387 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4388 return true;
4389 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4390 }
4391
4392
4393 /* Verify the GIMPLE statements inside the statement list STMTS. */
4394
4395 DEBUG_FUNCTION void
4396 verify_gimple_in_seq (gimple_seq stmts)
4397 {
4398 timevar_push (TV_TREE_STMT_VERIFY);
4399 if (verify_gimple_in_seq_2 (stmts))
4400 internal_error ("verify_gimple failed");
4401 timevar_pop (TV_TREE_STMT_VERIFY);
4402 }
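/* Illustrative sketch (hypothetical call site, not part of GCC): a
   gimplification or lowering pass that has just produced a statement
   sequence might verify it in checking builds before handing it on.
   The function aborts via internal_error on failure.  */
#if 0
static void
example_verify_new_body (void)
{
#ifdef ENABLE_CHECKING
  verify_gimple_in_seq (gimple_body (current_function_decl));
#endif
}
#endif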
4403
4404 /* Return true when T can be shared. */
4405
4406 bool
4407 tree_node_can_be_shared (tree t)
4408 {
4409 if (IS_TYPE_OR_DECL_P (t)
4410 || is_gimple_min_invariant (t)
4411 || TREE_CODE (t) == SSA_NAME
4412 || t == error_mark_node
4413 || TREE_CODE (t) == IDENTIFIER_NODE)
4414 return true;
4415
4416 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4417 return true;
4418
4419 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4420 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4421 || TREE_CODE (t) == COMPONENT_REF
4422 || TREE_CODE (t) == REALPART_EXPR
4423 || TREE_CODE (t) == IMAGPART_EXPR)
4424 t = TREE_OPERAND (t, 0);
4425
4426 if (DECL_P (t))
4427 return true;
4428
4429 return false;
4430 }
4431
4432 /* Called via walk_gimple_stmt. Verify tree sharing. */
4433
4434 static tree
4435 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4436 {
4437 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4438 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4439
4440 if (tree_node_can_be_shared (*tp))
4441 {
4442 *walk_subtrees = false;
4443 return NULL;
4444 }
4445
4446 if (pointer_set_insert (visited, *tp))
4447 return *tp;
4448
4449 return NULL;
4450 }
4451
4452 static bool eh_error_found;
4453 static int
4454 verify_eh_throw_stmt_node (void **slot, void *data)
4455 {
4456 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4457 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4458
4459 if (!pointer_set_contains (visited, node->stmt))
4460 {
4461 error ("dead STMT in EH table");
4462 debug_gimple_stmt (node->stmt);
4463 eh_error_found = true;
4464 }
4465 return 1;
4466 }
4467
4468 /* Verify the GIMPLE statements in the CFG of FN. */
4469
4470 DEBUG_FUNCTION void
4471 verify_gimple_in_cfg (struct function *fn)
4472 {
4473 basic_block bb;
4474 bool err = false;
4475 struct pointer_set_t *visited, *visited_stmts;
4476
4477 timevar_push (TV_TREE_STMT_VERIFY);
4478 visited = pointer_set_create ();
4479 visited_stmts = pointer_set_create ();
4480
4481 FOR_EACH_BB_FN (bb, fn)
4482 {
4483 gimple_stmt_iterator gsi;
4484
4485 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4486 {
4487 gimple phi = gsi_stmt (gsi);
4488 bool err2 = false;
4489 unsigned i;
4490
4491 pointer_set_insert (visited_stmts, phi);
4492
4493 if (gimple_bb (phi) != bb)
4494 {
4495 error ("gimple_bb (phi) is set to a wrong basic block");
4496 err2 = true;
4497 }
4498
4499 err2 |= verify_gimple_phi (phi);
4500
4501 for (i = 0; i < gimple_phi_num_args (phi); i++)
4502 {
4503 tree arg = gimple_phi_arg_def (phi, i);
4504 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4505 if (addr)
4506 {
4507 error ("incorrect sharing of tree nodes");
4508 debug_generic_expr (addr);
4509 err2 |= true;
4510 }
4511 }
4512
4513 if (err2)
4514 debug_gimple_stmt (phi);
4515 err |= err2;
4516 }
4517
4518 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4519 {
4520 gimple stmt = gsi_stmt (gsi);
4521 bool err2 = false;
4522 struct walk_stmt_info wi;
4523 tree addr;
4524 int lp_nr;
4525
4526 pointer_set_insert (visited_stmts, stmt);
4527
4528 if (gimple_bb (stmt) != bb)
4529 {
4530 error ("gimple_bb (stmt) is set to a wrong basic block");
4531 err2 = true;
4532 }
4533
4534 err2 |= verify_gimple_stmt (stmt);
4535
4536 memset (&wi, 0, sizeof (wi));
4537 wi.info = (void *) visited;
4538 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4539 if (addr)
4540 {
4541 error ("incorrect sharing of tree nodes");
4542 debug_generic_expr (addr);
4543 err2 |= true;
4544 }
4545
4546 /* ??? Instead of not checking these stmts at all, the walker
4547 should know its context via wi. */
4548 if (!is_gimple_debug (stmt)
4549 && !is_gimple_omp (stmt))
4550 {
4551 memset (&wi, 0, sizeof (wi));
4552 addr = walk_gimple_op (stmt, verify_expr, &wi);
4553 if (addr)
4554 {
4555 debug_generic_expr (addr);
4556 inform (gimple_location (stmt), "in statement");
4557 err2 |= true;
4558 }
4559 }
4560
4561 /* If the statement is marked as part of an EH region, then it is
4562 expected that the statement could throw. Verify that when we
4563 have optimizations that simplify statements such that we prove
4564 that they cannot throw, that we update other data structures
4565 to match. */
4566 lp_nr = lookup_stmt_eh_lp (stmt);
4567 if (lp_nr != 0)
4568 {
4569 if (!stmt_could_throw_p (stmt))
4570 {
4571 error ("statement marked for throw, but doesn%'t");
4572 err2 |= true;
4573 }
4574 else if (lp_nr > 0
4575 && !gsi_one_before_end_p (gsi)
4576 && stmt_can_throw_internal (stmt))
4577 {
4578 error ("statement marked for throw in middle of block");
4579 err2 |= true;
4580 }
4581 }
4582
4583 if (err2)
4584 debug_gimple_stmt (stmt);
4585 err |= err2;
4586 }
4587 }
4588
4589 eh_error_found = false;
4590 if (get_eh_throw_stmt_table (cfun))
4591 htab_traverse (get_eh_throw_stmt_table (cfun),
4592 verify_eh_throw_stmt_node,
4593 visited_stmts);
4594
4595 if (err || eh_error_found)
4596 internal_error ("verify_gimple failed");
4597
4598 pointer_set_destroy (visited);
4599 pointer_set_destroy (visited_stmts);
4600 verify_histograms ();
4601 timevar_pop (TV_TREE_STMT_VERIFY);
4602 }
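/* Illustrative sketch (hypothetical call site, not part of GCC): once a
   function has a CFG, this is the entry point to use instead of
   verify_gimple_in_seq; it additionally checks PHI nodes, tree sharing
   and the EH throw-statement table.  */
#if 0
static void
example_checking_hook (void)
{
#ifdef ENABLE_CHECKING
  verify_gimple_in_cfg (cfun);
#endif
}
#endif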
4603
4604
4605 /* Verifies that the flow information is OK. */
4606
4607 static int
4608 gimple_verify_flow_info (void)
4609 {
4610 int err = 0;
4611 basic_block bb;
4612 gimple_stmt_iterator gsi;
4613 gimple stmt;
4614 edge e;
4615 edge_iterator ei;
4616
4617 if (ENTRY_BLOCK_PTR->il.gimple)
4618 {
4619 error ("ENTRY_BLOCK has IL associated with it");
4620 err = 1;
4621 }
4622
4623 if (EXIT_BLOCK_PTR->il.gimple)
4624 {
4625 error ("EXIT_BLOCK has IL associated with it");
4626 err = 1;
4627 }
4628
4629 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4630 if (e->flags & EDGE_FALLTHRU)
4631 {
4632 error ("fallthru to exit from bb %d", e->src->index);
4633 err = 1;
4634 }
4635
4636 FOR_EACH_BB (bb)
4637 {
4638 bool found_ctrl_stmt = false;
4639
4640 stmt = NULL;
4641
4642 /* Skip labels at the start of the basic block. */
4643 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4644 {
4645 tree label;
4646 gimple prev_stmt = stmt;
4647
4648 stmt = gsi_stmt (gsi);
4649
4650 if (gimple_code (stmt) != GIMPLE_LABEL)
4651 break;
4652
4653 label = gimple_label_label (stmt);
4654 if (prev_stmt && DECL_NONLOCAL (label))
4655 {
4656 error ("nonlocal label ");
4657 print_generic_expr (stderr, label, 0);
4658 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4659 bb->index);
4660 err = 1;
4661 }
4662
4663 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4664 {
4665 error ("EH landing pad label ");
4666 print_generic_expr (stderr, label, 0);
4667 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4668 bb->index);
4669 err = 1;
4670 }
4671
4672 if (label_to_block (label) != bb)
4673 {
4674 error ("label ");
4675 print_generic_expr (stderr, label, 0);
4676 fprintf (stderr, " to block does not match in bb %d",
4677 bb->index);
4678 err = 1;
4679 }
4680
4681 if (decl_function_context (label) != current_function_decl)
4682 {
4683 error ("label ");
4684 print_generic_expr (stderr, label, 0);
4685 fprintf (stderr, " has incorrect context in bb %d",
4686 bb->index);
4687 err = 1;
4688 }
4689 }
4690
4691 /* Verify that the body of basic block BB is free of control flow. */
4692 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4693 {
4694 gimple stmt = gsi_stmt (gsi);
4695
4696 if (found_ctrl_stmt)
4697 {
4698 error ("control flow in the middle of basic block %d",
4699 bb->index);
4700 err = 1;
4701 }
4702
4703 if (stmt_ends_bb_p (stmt))
4704 found_ctrl_stmt = true;
4705
4706 if (gimple_code (stmt) == GIMPLE_LABEL)
4707 {
4708 error ("label ");
4709 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4710 fprintf (stderr, " in the middle of basic block %d", bb->index);
4711 err = 1;
4712 }
4713 }
4714
4715 gsi = gsi_last_bb (bb);
4716 if (gsi_end_p (gsi))
4717 continue;
4718
4719 stmt = gsi_stmt (gsi);
4720
4721 if (gimple_code (stmt) == GIMPLE_LABEL)
4722 continue;
4723
4724 err |= verify_eh_edges (stmt);
4725
4726 if (is_ctrl_stmt (stmt))
4727 {
4728 FOR_EACH_EDGE (e, ei, bb->succs)
4729 if (e->flags & EDGE_FALLTHRU)
4730 {
4731 error ("fallthru edge after a control statement in bb %d",
4732 bb->index);
4733 err = 1;
4734 }
4735 }
4736
4737 if (gimple_code (stmt) != GIMPLE_COND)
4738 {
4739 /* Verify that no edges have EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
4740 after anything other than a GIMPLE_COND statement. */
4741 FOR_EACH_EDGE (e, ei, bb->succs)
4742 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4743 {
4744 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4745 bb->index);
4746 err = 1;
4747 }
4748 }
4749
4750 switch (gimple_code (stmt))
4751 {
4752 case GIMPLE_COND:
4753 {
4754 edge true_edge;
4755 edge false_edge;
4756
4757 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4758
4759 if (!true_edge
4760 || !false_edge
4761 || !(true_edge->flags & EDGE_TRUE_VALUE)
4762 || !(false_edge->flags & EDGE_FALSE_VALUE)
4763 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4764 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4765 || EDGE_COUNT (bb->succs) >= 3)
4766 {
4767 error ("wrong outgoing edge flags at end of bb %d",
4768 bb->index);
4769 err = 1;
4770 }
4771 }
4772 break;
4773
4774 case GIMPLE_GOTO:
4775 if (simple_goto_p (stmt))
4776 {
4777 error ("explicit goto at end of bb %d", bb->index);
4778 err = 1;
4779 }
4780 else
4781 {
4782 /* FIXME. We should double check that the labels in the
4783 destination blocks have their address taken. */
4784 FOR_EACH_EDGE (e, ei, bb->succs)
4785 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4786 | EDGE_FALSE_VALUE))
4787 || !(e->flags & EDGE_ABNORMAL))
4788 {
4789 error ("wrong outgoing edge flags at end of bb %d",
4790 bb->index);
4791 err = 1;
4792 }
4793 }
4794 break;
4795
4796 case GIMPLE_CALL:
4797 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4798 break;
4799 /* ... fallthru ... */
4800 case GIMPLE_RETURN:
4801 if (!single_succ_p (bb)
4802 || (single_succ_edge (bb)->flags
4803 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4804 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4805 {
4806 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4807 err = 1;
4808 }
4809 if (single_succ (bb) != EXIT_BLOCK_PTR)
4810 {
4811 error ("return edge does not point to exit in bb %d",
4812 bb->index);
4813 err = 1;
4814 }
4815 break;
4816
4817 case GIMPLE_SWITCH:
4818 {
4819 tree prev;
4820 edge e;
4821 size_t i, n;
4822
4823 n = gimple_switch_num_labels (stmt);
4824
4825 /* Mark all the destination basic blocks. */
4826 for (i = 0; i < n; ++i)
4827 {
4828 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4829 basic_block label_bb = label_to_block (lab);
4830 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4831 label_bb->aux = (void *)1;
4832 }
4833
4834 /* Verify that the case labels are sorted. */
4835 prev = gimple_switch_label (stmt, 0);
4836 for (i = 1; i < n; ++i)
4837 {
4838 tree c = gimple_switch_label (stmt, i);
4839 if (!CASE_LOW (c))
4840 {
4841 error ("found default case not at the start of "
4842 "case vector");
4843 err = 1;
4844 continue;
4845 }
4846 if (CASE_LOW (prev)
4847 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4848 {
4849 error ("case labels not sorted: ");
4850 print_generic_expr (stderr, prev, 0);
4851 fprintf (stderr," is greater than ");
4852 print_generic_expr (stderr, c, 0);
4853 fprintf (stderr," but comes before it.\n");
4854 err = 1;
4855 }
4856 prev = c;
4857 }
4858 /* VRP will remove the default case if it can prove it will
4859 never be executed. So do not verify there always exists
4860 a default case here. */
4861
4862 FOR_EACH_EDGE (e, ei, bb->succs)
4863 {
4864 if (!e->dest->aux)
4865 {
4866 error ("extra outgoing edge %d->%d",
4867 bb->index, e->dest->index);
4868 err = 1;
4869 }
4870
4871 e->dest->aux = (void *)2;
4872 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4873 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4874 {
4875 error ("wrong outgoing edge flags at end of bb %d",
4876 bb->index);
4877 err = 1;
4878 }
4879 }
4880
4881 /* Check that we have all of them. */
4882 for (i = 0; i < n; ++i)
4883 {
4884 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4885 basic_block label_bb = label_to_block (lab);
4886
4887 if (label_bb->aux != (void *)2)
4888 {
4889 error ("missing edge %i->%i", bb->index, label_bb->index);
4890 err = 1;
4891 }
4892 }
4893
4894 FOR_EACH_EDGE (e, ei, bb->succs)
4895 e->dest->aux = (void *)0;
4896 }
4897 break;
4898
4899 case GIMPLE_EH_DISPATCH:
4900 err |= verify_eh_dispatch_edge (stmt);
4901 break;
4902
4903 default:
4904 break;
4905 }
4906 }
4907
4908 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4909 verify_dominators (CDI_DOMINATORS);
4910
4911 return err;
4912 }
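/* Illustrative sketch (hypothetical call site, not part of GCC):
   gimple_verify_flow_info is installed as the verify_flow_info hook for
   the gimple IL, so generic code does not call it directly; it reaches
   it through verify_flow_info, which also performs the IL-independent
   CFG checks.  */
#if 0
static void
example_verify_cfg (void)
{
  /* Dispatches to gimple_verify_flow_info via the current cfg hooks.  */
  verify_flow_info ();
}
#endif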
4913
4914
4915 /* Updates phi nodes after creating a forwarder block joined
4916 by edge FALLTHRU. */
4917
4918 static void
4919 gimple_make_forwarder_block (edge fallthru)
4920 {
4921 edge e;
4922 edge_iterator ei;
4923 basic_block dummy, bb;
4924 tree var;
4925 gimple_stmt_iterator gsi;
4926
4927 dummy = fallthru->src;
4928 bb = fallthru->dest;
4929
4930 if (single_pred_p (bb))
4931 return;
4932
4933 /* If we redirected a branch we must create new PHI nodes at the
4934 start of BB. */
4935 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4936 {
4937 gimple phi, new_phi;
4938
4939 phi = gsi_stmt (gsi);
4940 var = gimple_phi_result (phi);
4941 new_phi = create_phi_node (var, bb);
4942 SSA_NAME_DEF_STMT (var) = new_phi;
4943 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4944 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4945 UNKNOWN_LOCATION);
4946 }
4947
4948 /* Add the arguments we have stored on edges. */
4949 FOR_EACH_EDGE (e, ei, bb->preds)
4950 {
4951 if (e == fallthru)
4952 continue;
4953
4954 flush_pending_stmts (e);
4955 }
4956 }
4957
4958
4959 /* Return a non-special label at the head of basic block BB.
4960 Create one if it doesn't exist. */
4961
4962 tree
4963 gimple_block_label (basic_block bb)
4964 {
4965 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4966 bool first = true;
4967 tree label;
4968 gimple stmt;
4969
4970 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4971 {
4972 stmt = gsi_stmt (i);
4973 if (gimple_code (stmt) != GIMPLE_LABEL)
4974 break;
4975 label = gimple_label_label (stmt);
4976 if (!DECL_NONLOCAL (label))
4977 {
4978 if (!first)
4979 gsi_move_before (&i, &s);
4980 return label;
4981 }
4982 }
4983
4984 label = create_artificial_label (UNKNOWN_LOCATION);
4985 stmt = gimple_build_label (label);
4986 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4987 return label;
4988 }
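/* Illustrative sketch (hypothetical helper, not part of GCC): the
   typical use of gimple_block_label is to obtain a branch target for a
   block when redirecting jumps, as the GIMPLE_SWITCH and GIMPLE_ASM
   cases of gimple_redirect_edge_and_branch below do.  */
#if 0
static tree
example_branch_target (basic_block dest)
{
  tree label = gimple_block_label (dest);

  /* The returned label always resolves back to DEST.  */
  gcc_assert (label_to_block (label) == dest);
  return label;
}
#endif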
4989
4990
4991 /* Attempt to perform edge redirection by replacing a possibly complex
4992 jump instruction by a goto or by removing the jump completely.
4993 This can apply only if all edges now point to the same block. The
4994 parameters and return values are equivalent to
4995 redirect_edge_and_branch. */
4996
4997 static edge
4998 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4999 {
5000 basic_block src = e->src;
5001 gimple_stmt_iterator i;
5002 gimple stmt;
5003
5004 /* We can replace or remove a complex jump only when we have exactly
5005 two edges. */
5006 if (EDGE_COUNT (src->succs) != 2
5007 /* Verify that all targets will be TARGET. Specifically, the
5008 edge that is not E must also go to TARGET. */
5009 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5010 return NULL;
5011
5012 i = gsi_last_bb (src);
5013 if (gsi_end_p (i))
5014 return NULL;
5015
5016 stmt = gsi_stmt (i);
5017
5018 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5019 {
5020 gsi_remove (&i, true);
5021 e = ssa_redirect_edge (e, target);
5022 e->flags = EDGE_FALLTHRU;
5023 return e;
5024 }
5025
5026 return NULL;
5027 }
5028
5029
5030 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5031 edge representing the redirected branch. */
5032
5033 static edge
5034 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5035 {
5036 basic_block bb = e->src;
5037 gimple_stmt_iterator gsi;
5038 edge ret;
5039 gimple stmt;
5040
5041 if (e->flags & EDGE_ABNORMAL)
5042 return NULL;
5043
5044 if (e->dest == dest)
5045 return NULL;
5046
5047 if (e->flags & EDGE_EH)
5048 return redirect_eh_edge (e, dest);
5049
5050 if (e->src != ENTRY_BLOCK_PTR)
5051 {
5052 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5053 if (ret)
5054 return ret;
5055 }
5056
5057 gsi = gsi_last_bb (bb);
5058 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5059
5060 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5061 {
5062 case GIMPLE_COND:
5063 /* For COND_EXPR, we only need to redirect the edge. */
5064 break;
5065
5066 case GIMPLE_GOTO:
5067 /* No non-abnormal edges should lead from a non-simple goto, and
5068 simple ones should be represented implicitly. */
5069 gcc_unreachable ();
5070
5071 case GIMPLE_SWITCH:
5072 {
5073 tree label = gimple_block_label (dest);
5074 tree cases = get_cases_for_edge (e, stmt);
5075
5076 /* If we have a list of cases associated with E, then use it
5077 as it's a lot faster than walking the entire case vector. */
5078 if (cases)
5079 {
5080 edge e2 = find_edge (e->src, dest);
5081 tree last, first;
5082
5083 first = cases;
5084 while (cases)
5085 {
5086 last = cases;
5087 CASE_LABEL (cases) = label;
5088 cases = CASE_CHAIN (cases);
5089 }
5090
5091 /* If there was already an edge in the CFG, then we need
5092 to move all the cases associated with E to E2. */
5093 if (e2)
5094 {
5095 tree cases2 = get_cases_for_edge (e2, stmt);
5096
5097 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5098 CASE_CHAIN (cases2) = first;
5099 }
5100 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5101 }
5102 else
5103 {
5104 size_t i, n = gimple_switch_num_labels (stmt);
5105
5106 for (i = 0; i < n; i++)
5107 {
5108 tree elt = gimple_switch_label (stmt, i);
5109 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5110 CASE_LABEL (elt) = label;
5111 }
5112 }
5113 }
5114 break;
5115
5116 case GIMPLE_ASM:
5117 {
5118 int i, n = gimple_asm_nlabels (stmt);
5119 tree label = NULL;
5120
5121 for (i = 0; i < n; ++i)
5122 {
5123 tree cons = gimple_asm_label_op (stmt, i);
5124 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5125 {
5126 if (!label)
5127 label = gimple_block_label (dest);
5128 TREE_VALUE (cons) = label;
5129 }
5130 }
5131
5132 /* If we didn't find any label matching the former edge in the
5133 asm labels, we must be redirecting the fallthrough
5134 edge. */
5135 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5136 }
5137 break;
5138
5139 case GIMPLE_RETURN:
5140 gsi_remove (&gsi, true);
5141 e->flags |= EDGE_FALLTHRU;
5142 break;
5143
5144 case GIMPLE_OMP_RETURN:
5145 case GIMPLE_OMP_CONTINUE:
5146 case GIMPLE_OMP_SECTIONS_SWITCH:
5147 case GIMPLE_OMP_FOR:
5148 /* The edges from OMP constructs can be simply redirected. */
5149 break;
5150
5151 case GIMPLE_EH_DISPATCH:
5152 if (!(e->flags & EDGE_FALLTHRU))
5153 redirect_eh_dispatch_edge (stmt, e, dest);
5154 break;
5155
5156 case GIMPLE_TRANSACTION:
5157 /* The ABORT edge has a stored label associated with it, otherwise
5158 the edges are simply redirectable. */
5159 if (e->flags == 0)
5160 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5161 break;
5162
5163 default:
5164 /* Otherwise it must be a fallthru edge, and we don't need to
5165 do anything besides redirecting it. */
5166 gcc_assert (e->flags & EDGE_FALLTHRU);
5167 break;
5168 }
5169
5170 /* Now update the edges in the CFG, adjusting PHI nodes as
5171 necessary. */
5172
5173 e = ssa_redirect_edge (e, dest);
5174
5175 return e;
5176 }
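/* Illustrative sketch (hypothetical call site, not part of GCC):
   callers normally go through the generic redirect_edge_and_branch
   wrapper and then commit the PHI arguments that ssa_redirect_edge
   queued on the edge, as gimple_duplicate_sese_region below does.  */
#if 0
static void
example_redirect (edge e, basic_block dest)
{
  edge redirected = redirect_edge_and_branch (e, dest);
  gcc_assert (redirected != NULL);
  flush_pending_stmts (redirected);
}
#endif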
5177
5178 /* Returns true if it is possible to remove edge E by redirecting
5179 it to the destination of the other edge from E->src. */
5180
5181 static bool
5182 gimple_can_remove_branch_p (const_edge e)
5183 {
5184 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5185 return false;
5186
5187 return true;
5188 }
5189
5190 /* Simple wrapper, as we can always redirect fallthru edges. */
5191
5192 static basic_block
5193 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5194 {
5195 e = gimple_redirect_edge_and_branch (e, dest);
5196 gcc_assert (e);
5197
5198 return NULL;
5199 }
5200
5201
5202 /* Splits basic block BB after statement STMT (but at least after the
5203 labels). If STMT is NULL, BB is split just after the labels. */
5204
5205 static basic_block
5206 gimple_split_block (basic_block bb, void *stmt)
5207 {
5208 gimple_stmt_iterator gsi;
5209 gimple_stmt_iterator gsi_tgt;
5210 gimple act;
5211 gimple_seq list;
5212 basic_block new_bb;
5213 edge e;
5214 edge_iterator ei;
5215
5216 new_bb = create_empty_bb (bb);
5217
5218 /* Redirect the outgoing edges. */
5219 new_bb->succs = bb->succs;
5220 bb->succs = NULL;
5221 FOR_EACH_EDGE (e, ei, new_bb->succs)
5222 e->src = new_bb;
5223
5224 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5225 stmt = NULL;
5226
5227 /* Move everything from GSI to the new basic block. */
5228 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5229 {
5230 act = gsi_stmt (gsi);
5231 if (gimple_code (act) == GIMPLE_LABEL)
5232 continue;
5233
5234 if (!stmt)
5235 break;
5236
5237 if (stmt == act)
5238 {
5239 gsi_next (&gsi);
5240 break;
5241 }
5242 }
5243
5244 if (gsi_end_p (gsi))
5245 return new_bb;
5246
5247 /* Split the statement list; avoid re-creating new containers, as
5248 this brings ugly quadratic memory consumption in the inliner.
5249 (We are still quadratic since we need to update stmt BB pointers,
5250 sadly.) */
5251 gsi_split_seq_before (&gsi, &list);
5252 set_bb_seq (new_bb, list);
5253 for (gsi_tgt = gsi_start (list);
5254 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5255 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5256
5257 return new_bb;
5258 }
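/* Illustrative sketch (hypothetical helper, not part of GCC): passes
   use the generic split_block wrapper, which calls the hook above and
   creates the fallthru edge between the two halves.  */
#if 0
static basic_block
example_split_after_stmt (gimple stmt)
{
  /* Everything after STMT moves to the new block; the original
     outgoing edges move with it.  */
  edge e = split_block (gimple_bb (stmt), stmt);
  return e->dest;
}
#endif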
5259
5260
5261 /* Moves basic block BB after block AFTER. */
5262
5263 static bool
5264 gimple_move_block_after (basic_block bb, basic_block after)
5265 {
5266 if (bb->prev_bb == after)
5267 return true;
5268
5269 unlink_block (bb);
5270 link_block (bb, after);
5271
5272 return true;
5273 }
5274
5275
5276 /* Return true if basic_block can be duplicated. */
5277
5278 static bool
5279 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5280 {
5281 return true;
5282 }
5283
5284 /* Create a duplicate of the basic block BB. NOTE: This does not
5285 preserve SSA form. */
5286
5287 static basic_block
5288 gimple_duplicate_bb (basic_block bb)
5289 {
5290 basic_block new_bb;
5291 gimple_stmt_iterator gsi, gsi_tgt;
5292 gimple_seq phis = phi_nodes (bb);
5293 gimple phi, stmt, copy;
5294
5295 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5296
5297 /* Copy the PHI nodes. We ignore PHI node arguments here because
5298 the incoming edges have not been set up yet. */
5299 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5300 {
5301 phi = gsi_stmt (gsi);
5302 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5303 create_new_def_for (gimple_phi_result (copy), copy,
5304 gimple_phi_result_ptr (copy));
5305 }
5306
5307 gsi_tgt = gsi_start_bb (new_bb);
5308 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5309 {
5310 def_operand_p def_p;
5311 ssa_op_iter op_iter;
5312 tree lhs;
5313
5314 stmt = gsi_stmt (gsi);
5315 if (gimple_code (stmt) == GIMPLE_LABEL)
5316 continue;
5317
5318 /* Don't duplicate label debug stmts. */
5319 if (gimple_debug_bind_p (stmt)
5320 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5321 == LABEL_DECL)
5322 continue;
5323
5324 /* Create a new copy of STMT and duplicate STMT's virtual
5325 operands. */
5326 copy = gimple_copy (stmt);
5327 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5328
5329 maybe_duplicate_eh_stmt (copy, stmt);
5330 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5331
5332 /* When copying around a stmt writing into a local non-user
5333 aggregate, make sure it won't share a stack slot with other
5334 vars. */
5335 lhs = gimple_get_lhs (stmt);
5336 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5337 {
5338 tree base = get_base_address (lhs);
5339 if (base
5340 && (TREE_CODE (base) == VAR_DECL
5341 || TREE_CODE (base) == RESULT_DECL)
5342 && DECL_IGNORED_P (base)
5343 && !TREE_STATIC (base)
5344 && !DECL_EXTERNAL (base)
5345 && (TREE_CODE (base) != VAR_DECL
5346 || !DECL_HAS_VALUE_EXPR_P (base)))
5347 DECL_NONSHAREABLE (base) = 1;
5348 }
5349
5350 /* Create new names for all the definitions created by COPY and
5351 add replacement mappings for each new name. */
5352 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5353 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5354 }
5355
5356 return new_bb;
5357 }
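/* Illustrative sketch (hypothetical helper, not part of GCC): the hook
   above is reached through the generic duplicate_block wrapper, which
   also records the original/copy mapping used by the PHI fixup code
   that follows.  Note that neither fills in PHI arguments; incoming
   edges do not exist yet at this point.  */
#if 0
static basic_block
example_duplicate (basic_block bb)
{
  /* Place the copy right after BB; no edge is redirected to it yet.  */
  return duplicate_block (bb, NULL, bb);
}
#endif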
5358
5359 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5360
5361 static void
5362 add_phi_args_after_copy_edge (edge e_copy)
5363 {
5364 basic_block bb, bb_copy = e_copy->src, dest;
5365 edge e;
5366 edge_iterator ei;
5367 gimple phi, phi_copy;
5368 tree def;
5369 gimple_stmt_iterator psi, psi_copy;
5370
5371 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5372 return;
5373
5374 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5375
5376 if (e_copy->dest->flags & BB_DUPLICATED)
5377 dest = get_bb_original (e_copy->dest);
5378 else
5379 dest = e_copy->dest;
5380
5381 e = find_edge (bb, dest);
5382 if (!e)
5383 {
5384 /* During loop unrolling the target of the latch edge is copied.
5385 In this case we are not looking for the edge to DEST, but for
5386 the edge to the duplicated block whose original was DEST. */
5387 FOR_EACH_EDGE (e, ei, bb->succs)
5388 {
5389 if ((e->dest->flags & BB_DUPLICATED)
5390 && get_bb_original (e->dest) == dest)
5391 break;
5392 }
5393
5394 gcc_assert (e != NULL);
5395 }
5396
5397 for (psi = gsi_start_phis (e->dest),
5398 psi_copy = gsi_start_phis (e_copy->dest);
5399 !gsi_end_p (psi);
5400 gsi_next (&psi), gsi_next (&psi_copy))
5401 {
5402 phi = gsi_stmt (psi);
5403 phi_copy = gsi_stmt (psi_copy);
5404 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5405 add_phi_arg (phi_copy, def, e_copy,
5406 gimple_phi_arg_location_from_edge (phi, e));
5407 }
5408 }
5409
5410
5411 /* Basic block BB_COPY was created by code duplication. Add phi node
5412 arguments for edges going out of BB_COPY. The blocks that were
5413 duplicated have BB_DUPLICATED set. */
5414
5415 void
5416 add_phi_args_after_copy_bb (basic_block bb_copy)
5417 {
5418 edge e_copy;
5419 edge_iterator ei;
5420
5421 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5422 {
5423 add_phi_args_after_copy_edge (e_copy);
5424 }
5425 }
5426
5427 /* Blocks in the REGION_COPY array of length N_REGION were created by
5428 duplication of basic blocks. Add phi node arguments for edges
5429 going from these blocks. If E_COPY is not NULL, also add
5430 phi node arguments for its destination. */
5431
5432 void
5433 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5434 edge e_copy)
5435 {
5436 unsigned i;
5437
5438 for (i = 0; i < n_region; i++)
5439 region_copy[i]->flags |= BB_DUPLICATED;
5440
5441 for (i = 0; i < n_region; i++)
5442 add_phi_args_after_copy_bb (region_copy[i]);
5443 if (e_copy)
5444 add_phi_args_after_copy_edge (e_copy);
5445
5446 for (i = 0; i < n_region; i++)
5447 region_copy[i]->flags &= ~BB_DUPLICATED;
5448 }
5449
5450 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5451 important exit edge EXIT. By important we mean that no SSA name defined
5452 inside region is live over the other exit edges of the region. All entry
5453 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5454 to the duplicate of the region. SSA form, dominance and loop information
5455 are updated. The new basic blocks are stored to REGION_COPY in the same
5456 order as in REGION, provided that REGION_COPY is not NULL.
5457 The function returns false if it is unable to copy the region,
5458 true otherwise. */
5459
5460 bool
5461 gimple_duplicate_sese_region (edge entry, edge exit,
5462 basic_block *region, unsigned n_region,
5463 basic_block *region_copy)
5464 {
5465 unsigned i;
5466 bool free_region_copy = false, copying_header = false;
5467 struct loop *loop = entry->dest->loop_father;
5468 edge exit_copy;
5469 VEC (basic_block, heap) *doms;
5470 edge redirected;
5471 int total_freq = 0, entry_freq = 0;
5472 gcov_type total_count = 0, entry_count = 0;
5473
5474 if (!can_copy_bbs_p (region, n_region))
5475 return false;
5476
5477 /* Some sanity checking. Note that we do not check for all possible
5478 misuses of the function; if you ask to copy something weird,
5479 it will work, but the state of the structures probably will not
5480 be correct. */
5481 for (i = 0; i < n_region; i++)
5482 {
5483 /* We do not handle subloops, i.e. all the blocks must belong to the
5484 same loop. */
5485 if (region[i]->loop_father != loop)
5486 return false;
5487
5488 if (region[i] != entry->dest
5489 && region[i] == loop->header)
5490 return false;
5491 }
5492
5493 set_loop_copy (loop, loop);
5494
5495 /* In case the function is used for loop header copying (which is the primary
5496 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5497 if (loop->header == entry->dest)
5498 {
5499 copying_header = true;
5500 set_loop_copy (loop, loop_outer (loop));
5501
5502 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5503 return false;
5504
5505 for (i = 0; i < n_region; i++)
5506 if (region[i] != exit->src
5507 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5508 return false;
5509 }
5510
5511 if (!region_copy)
5512 {
5513 region_copy = XNEWVEC (basic_block, n_region);
5514 free_region_copy = true;
5515 }
5516
5517 gcc_assert (!need_ssa_update_p (cfun));
5518
5519 /* Record blocks outside the region that are dominated by something
5520 inside. */
5521 doms = NULL;
5522 initialize_original_copy_tables ();
5523
5524 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5525
5526 if (entry->dest->count)
5527 {
5528 total_count = entry->dest->count;
5529 entry_count = entry->count;
5530 /* Fix up corner cases, to avoid division by zero or creation of negative
5531 frequencies. */
5532 if (entry_count > total_count)
5533 entry_count = total_count;
5534 }
5535 else
5536 {
5537 total_freq = entry->dest->frequency;
5538 entry_freq = EDGE_FREQUENCY (entry);
5539 /* Fix up corner cases, to avoid division by zero or creation of negative
5540 frequencies. */
5541 if (total_freq == 0)
5542 total_freq = 1;
5543 else if (entry_freq > total_freq)
5544 entry_freq = total_freq;
5545 }
5546
5547 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5548 split_edge_bb_loc (entry));
5549 if (total_count)
5550 {
5551 scale_bbs_frequencies_gcov_type (region, n_region,
5552 total_count - entry_count,
5553 total_count);
5554 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5555 total_count);
5556 }
5557 else
5558 {
5559 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5560 total_freq);
5561 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5562 }
5563
5564 if (copying_header)
5565 {
5566 loop->header = exit->dest;
5567 loop->latch = exit->src;
5568 }
5569
5570 /* Redirect the entry and add the phi node arguments. */
5571 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5572 gcc_assert (redirected != NULL);
5573 flush_pending_stmts (entry);
5574
5575 /* Concerning updating of dominators: we must recount dominators
5576 for the entry block and its copy. Anything outside of the
5577 region that was dominated by something inside needs recounting
5578 as well. */
5579 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5580 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5581 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5582 VEC_free (basic_block, heap, doms);
5583
5584 /* Add the other PHI node arguments. */
5585 add_phi_args_after_copy (region_copy, n_region, NULL);
5586
5587 /* Update the SSA web. */
5588 update_ssa (TODO_update_ssa);
5589
5590 if (free_region_copy)
5591 free (region_copy);
5592
5593 free_original_copy_tables ();
5594 return true;
5595 }
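/* Illustrative sketch (hypothetical call site, not part of GCC): the
   primary user of gimple_duplicate_sese_region is loop header copying,
   where ENTRY is the preheader edge and EXIT the edge from the copied
   header region back into the loop.  BBS and N_BBS are assumed to hold
   the blocks of the region, ENTRY->dest first.  */
#if 0
static bool
example_copy_loop_header (edge entry, edge exit,
			  basic_block *bbs, unsigned n_bbs)
{
  /* Passing NULL for REGION_COPY lets the function allocate and free
     the scratch array itself.  */
  return gimple_duplicate_sese_region (entry, exit, bbs, n_bbs, NULL);
}
#endif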
5596
5597 /* Checks if BB is part of the region defined by N_REGION BBS. */
5598 static bool
5599 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5600 {
5601 unsigned int n;
5602
5603 for (n = 0; n < n_region; n++)
5604 {
5605 if (bb == bbs[n])
5606 return true;
5607 }
5608 return false;
5609 }
5610
5611 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5612 are stored to REGION_COPY in the same order in which they appear
5613 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5614 the region, EXIT an exit from it. The condition guarding EXIT
5615 is moved to ENTRY. Returns true if duplication succeeds, false
5616 otherwise.
5617
5618 For example,
5619
5620 some_code;
5621 if (cond)
5622 A;
5623 else
5624 B;
5625
5626 is transformed to
5627
5628 if (cond)
5629 {
5630 some_code;
5631 A;
5632 }
5633 else
5634 {
5635 some_code;
5636 B;
5637 }
5638 */
5639
5640 bool
5641 gimple_duplicate_sese_tail (edge entry, edge exit,
5642 basic_block *region, unsigned n_region,
5643 basic_block *region_copy)
5644 {
5645 unsigned i;
5646 bool free_region_copy = false;
5647 struct loop *loop = exit->dest->loop_father;
5648 struct loop *orig_loop = entry->dest->loop_father;
5649 basic_block switch_bb, entry_bb, nentry_bb;
5650 VEC (basic_block, heap) *doms;
5651 int total_freq = 0, exit_freq = 0;
5652 gcov_type total_count = 0, exit_count = 0;
5653 edge exits[2], nexits[2], e;
5654 gimple_stmt_iterator gsi;
5655 gimple cond_stmt;
5656 edge sorig, snew;
5657 basic_block exit_bb;
5658 gimple_stmt_iterator psi;
5659 gimple phi;
5660 tree def;
5661 struct loop *target, *aloop, *cloop;
5662
5663 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5664 exits[0] = exit;
5665 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5666
5667 if (!can_copy_bbs_p (region, n_region))
5668 return false;
5669
5670 initialize_original_copy_tables ();
5671 set_loop_copy (orig_loop, loop);
5672
5673 target = loop;
5674 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5675 {
5676 if (bb_part_of_region_p (aloop->header, region, n_region))
5677 {
5678 cloop = duplicate_loop (aloop, target);
5679 duplicate_subloops (aloop, cloop);
5680 }
5681 }
5682
5683 if (!region_copy)
5684 {
5685 region_copy = XNEWVEC (basic_block, n_region);
5686 free_region_copy = true;
5687 }
5688
5689 gcc_assert (!need_ssa_update_p (cfun));
5690
5691 /* Record blocks outside the region that are dominated by something
5692 inside. */
5693 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5694
5695 if (exit->src->count)
5696 {
5697 total_count = exit->src->count;
5698 exit_count = exit->count;
5699 /* Fix up corner cases, to avoid division by zero or creation of negative
5700 frequencies. */
5701 if (exit_count > total_count)
5702 exit_count = total_count;
5703 }
5704 else
5705 {
5706 total_freq = exit->src->frequency;
5707 exit_freq = EDGE_FREQUENCY (exit);
5708 /* Fix up corner cases, to avoid division by zero or creation of negative
5709 frequencies. */
5710 if (total_freq == 0)
5711 total_freq = 1;
5712 if (exit_freq > total_freq)
5713 exit_freq = total_freq;
5714 }
5715
5716 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5717 split_edge_bb_loc (exit));
5718 if (total_count)
5719 {
5720 scale_bbs_frequencies_gcov_type (region, n_region,
5721 total_count - exit_count,
5722 total_count);
5723 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5724 total_count);
5725 }
5726 else
5727 {
5728 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5729 total_freq);
5730 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5731 }
5732
5733 /* Create the switch block, and put the exit condition into it. */
5734 entry_bb = entry->dest;
5735 nentry_bb = get_bb_copy (entry_bb);
5736 if (!last_stmt (entry->src)
5737 || !stmt_ends_bb_p (last_stmt (entry->src)))
5738 switch_bb = entry->src;
5739 else
5740 switch_bb = split_edge (entry);
5741 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5742
5743 gsi = gsi_last_bb (switch_bb);
5744 cond_stmt = last_stmt (exit->src);
5745 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5746 cond_stmt = gimple_copy (cond_stmt);
5747
5748 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5749
5750 sorig = single_succ_edge (switch_bb);
5751 sorig->flags = exits[1]->flags;
5752 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5753
5754 /* Register the new edge from SWITCH_BB in loop exit lists. */
5755 rescan_loop_exit (snew, true, false);
5756
5757 /* Add the PHI node arguments. */
5758 add_phi_args_after_copy (region_copy, n_region, snew);
5759
5760 /* Get rid of now superfluous conditions and associated edges (and phi node
5761 arguments). */
5762 exit_bb = exit->dest;
5763
5764 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5765 PENDING_STMT (e) = NULL;
5766
5767 /* The latch of ORIG_LOOP was copied, and so was the backedge
5768 to the original header. We redirect this backedge to EXIT_BB. */
5769 for (i = 0; i < n_region; i++)
5770 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5771 {
5772 gcc_assert (single_succ_edge (region_copy[i]));
5773 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5774 PENDING_STMT (e) = NULL;
5775 for (psi = gsi_start_phis (exit_bb);
5776 !gsi_end_p (psi);
5777 gsi_next (&psi))
5778 {
5779 phi = gsi_stmt (psi);
5780 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5781 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5782 }
5783 }
5784 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5785 PENDING_STMT (e) = NULL;
5786
5787 /* Anything outside of the region that was dominated by something
5788 inside needs its dominance information updated. */
5789 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5790 VEC_free (basic_block, heap, doms);
5791 /* Update the SSA web. */
5792 update_ssa (TODO_update_ssa);
5793
5794 if (free_region_copy)
5795 free (region_copy);
5796
5797 free_original_copy_tables ();
5798 return true;
5799 }
5800
5801 /* Add all the blocks dominated by ENTRY to the vector *BBS_P. Stop
5802 adding blocks when the dominator traversal reaches EXIT. This
5803 function silently assumes that ENTRY strictly dominates EXIT. */
5804
5805 void
5806 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5807 VEC(basic_block,heap) **bbs_p)
5808 {
5809 basic_block son;
5810
5811 for (son = first_dom_son (CDI_DOMINATORS, entry);
5812 son;
5813 son = next_dom_son (CDI_DOMINATORS, son))
5814 {
5815 VEC_safe_push (basic_block, heap, *bbs_p, son);
5816 if (son != exit)
5817 gather_blocks_in_sese_region (son, exit, bbs_p);
5818 }
5819 }
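/* Illustrative sketch (hypothetical helper, not part of GCC): callers
   such as move_sese_region_to_fn push ENTRY themselves and then let
   the recursion above collect the dominated blocks up to EXIT.  */
#if 0
static VEC (basic_block, heap) *
example_collect_sese (basic_block entry, basic_block exit)
{
  VEC (basic_block, heap) *bbs = NULL;

  VEC_safe_push (basic_block, heap, bbs, entry);
  gather_blocks_in_sese_region (entry, exit, &bbs);
  return bbs;
}
#endif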
5820
5821 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5822 The duplicates are recorded in VARS_MAP. */
5823
5824 static void
5825 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5826 tree to_context)
5827 {
5828 tree t = *tp, new_t;
5829 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5830 void **loc;
5831
5832 if (DECL_CONTEXT (t) == to_context)
5833 return;
5834
5835 loc = pointer_map_contains (vars_map, t);
5836
5837 if (!loc)
5838 {
5839 loc = pointer_map_insert (vars_map, t);
5840
5841 if (SSA_VAR_P (t))
5842 {
5843 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5844 add_local_decl (f, new_t);
5845 }
5846 else
5847 {
5848 gcc_assert (TREE_CODE (t) == CONST_DECL);
5849 new_t = copy_node (t);
5850 }
5851 DECL_CONTEXT (new_t) = to_context;
5852
5853 *loc = new_t;
5854 }
5855 else
5856 new_t = (tree) *loc;
5857
5858 *tp = new_t;
5859 }
5860
5861
5862 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5863 VARS_MAP maps old ssa names and var_decls to the new ones. */
5864
5865 static tree
5866 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5867 tree to_context)
5868 {
5869 void **loc;
5870 tree new_name, decl = SSA_NAME_VAR (name);
5871
5872 gcc_assert (is_gimple_reg (name));
5873
5874 loc = pointer_map_contains (vars_map, name);
5875
5876 if (!loc)
5877 {
5878 replace_by_duplicate_decl (&decl, vars_map, to_context);
5879
5880 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5881 if (gimple_in_ssa_p (cfun))
5882 add_referenced_var (decl);
5883
5884 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5885 if (SSA_NAME_IS_DEFAULT_DEF (name))
5886 set_default_def (decl, new_name);
5887 pop_cfun ();
5888
5889 loc = pointer_map_insert (vars_map, name);
5890 *loc = new_name;
5891 }
5892 else
5893 new_name = (tree) *loc;
5894
5895 return new_name;
5896 }
5897
5898 struct move_stmt_d
5899 {
5900 tree orig_block;
5901 tree new_block;
5902 tree from_context;
5903 tree to_context;
5904 struct pointer_map_t *vars_map;
5905 htab_t new_label_map;
5906 struct pointer_map_t *eh_map;
5907 bool remap_decls_p;
5908 };
5909
5910 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5911 contained in *TP if it was previously set to ORIG_BLOCK, and change
5912 the DECL_CONTEXT of every local variable referenced in *TP. */
5913
5914 static tree
5915 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5916 {
5917 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5918 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5919 tree t = *tp;
5920
5921 if (EXPR_P (t))
5922 /* We should never have TREE_BLOCK set on non-statements. */
5923 gcc_assert (!TREE_BLOCK (t));
5924
5925 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5926 {
5927 if (TREE_CODE (t) == SSA_NAME)
5928 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5929 else if (TREE_CODE (t) == LABEL_DECL)
5930 {
5931 if (p->new_label_map)
5932 {
5933 struct tree_map in, *out;
5934 in.base.from = t;
5935 out = (struct tree_map *)
5936 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5937 if (out)
5938 *tp = t = out->to;
5939 }
5940
5941 DECL_CONTEXT (t) = p->to_context;
5942 }
5943 else if (p->remap_decls_p)
5944 {
5945 /* Replace T with its duplicate. T should no longer appear in the
5946 parent function, so this looks wasteful; however, it may appear
5947 in referenced_vars, and more importantly, as virtual operands of
5948 statements, and in alias lists of other variables. It would be
5949 quite difficult to expunge it from all those places. ??? It might
5950 suffice to do this for addressable variables. */
5951 if ((TREE_CODE (t) == VAR_DECL
5952 && !is_global_var (t))
5953 || TREE_CODE (t) == CONST_DECL)
5954 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5955
5956 if (SSA_VAR_P (t)
5957 && gimple_in_ssa_p (cfun))
5958 {
5959 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5960 add_referenced_var (*tp);
5961 pop_cfun ();
5962 }
5963 }
5964 *walk_subtrees = 0;
5965 }
5966 else if (TYPE_P (t))
5967 *walk_subtrees = 0;
5968
5969 return NULL_TREE;
5970 }
5971
5972 /* Helper for move_stmt_r. Given an EH region number for the source
5973 function, map it to the duplicate EH region number in the dest. */
5974
5975 static int
5976 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5977 {
5978 eh_region old_r, new_r;
5979 void **slot;
5980
5981 old_r = get_eh_region_from_number (old_nr);
5982 slot = pointer_map_contains (p->eh_map, old_r);
5983 new_r = (eh_region) *slot;
5984
5985 return new_r->index;
5986 }
5987
5988 /* Similar, but operate on INTEGER_CSTs. */
5989
5990 static tree
5991 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5992 {
5993 int old_nr, new_nr;
5994
5995 old_nr = tree_low_cst (old_t_nr, 0);
5996 new_nr = move_stmt_eh_region_nr (old_nr, p);
5997
5998 return build_int_cst (integer_type_node, new_nr);
5999 }
6000
6001 /* Like move_stmt_op, but for gimple statements.
6002
6003 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6004 contained in the current statement in *GSI_P and change the
6005 DECL_CONTEXT of every local variable referenced in the current
6006 statement. */
6007
6008 static tree
6009 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6010 struct walk_stmt_info *wi)
6011 {
6012 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6013 gimple stmt = gsi_stmt (*gsi_p);
6014 tree block = gimple_block (stmt);
6015
6016 if (p->orig_block == NULL_TREE
6017 || block == p->orig_block
6018 || block == NULL_TREE)
6019 gimple_set_block (stmt, p->new_block);
6020 #ifdef ENABLE_CHECKING
6021 else if (block != p->new_block)
6022 {
6023 while (block && block != p->orig_block)
6024 block = BLOCK_SUPERCONTEXT (block);
6025 gcc_assert (block);
6026 }
6027 #endif
6028
6029 switch (gimple_code (stmt))
6030 {
6031 case GIMPLE_CALL:
6032 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6033 {
6034 tree r, fndecl = gimple_call_fndecl (stmt);
6035 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6036 switch (DECL_FUNCTION_CODE (fndecl))
6037 {
6038 case BUILT_IN_EH_COPY_VALUES:
6039 r = gimple_call_arg (stmt, 1);
6040 r = move_stmt_eh_region_tree_nr (r, p);
6041 gimple_call_set_arg (stmt, 1, r);
6042 /* FALLTHRU */
6043
6044 case BUILT_IN_EH_POINTER:
6045 case BUILT_IN_EH_FILTER:
6046 r = gimple_call_arg (stmt, 0);
6047 r = move_stmt_eh_region_tree_nr (r, p);
6048 gimple_call_set_arg (stmt, 0, r);
6049 break;
6050
6051 default:
6052 break;
6053 }
6054 }
6055 break;
6056
6057 case GIMPLE_RESX:
6058 {
6059 int r = gimple_resx_region (stmt);
6060 r = move_stmt_eh_region_nr (r, p);
6061 gimple_resx_set_region (stmt, r);
6062 }
6063 break;
6064
6065 case GIMPLE_EH_DISPATCH:
6066 {
6067 int r = gimple_eh_dispatch_region (stmt);
6068 r = move_stmt_eh_region_nr (r, p);
6069 gimple_eh_dispatch_set_region (stmt, r);
6070 }
6071 break;
6072
6073 case GIMPLE_OMP_RETURN:
6074 case GIMPLE_OMP_CONTINUE:
6075 break;
6076 default:
6077 if (is_gimple_omp (stmt))
6078 {
6079 /* Do not remap variables inside OMP directives. Variables
6080 referenced in clauses and directive header belong to the
6081 parent function and should not be moved into the child
6082 function. */
6083 bool save_remap_decls_p = p->remap_decls_p;
6084 p->remap_decls_p = false;
6085 *handled_ops_p = true;
6086
6087 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6088 move_stmt_op, wi);
6089
6090 p->remap_decls_p = save_remap_decls_p;
6091 }
6092 break;
6093 }
6094
6095 return NULL_TREE;
6096 }
6097
6098 /* Move basic block BB from function CFUN to function DEST_FN. The
6099 block is moved out of the original linked list and placed after
6100 block AFTER in the new list. Also, the block is removed from the
6101 original array of blocks and placed in DEST_FN's array of blocks.
6102 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6103 updated to reflect the moved edges.
6104
6105 The local variables are remapped to new instances; VARS_MAP is used
6106 to record the mapping.  */
6107
6108 static void
6109 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6110 basic_block after, bool update_edge_count_p,
6111 struct move_stmt_d *d)
6112 {
6113 struct control_flow_graph *cfg;
6114 edge_iterator ei;
6115 edge e;
6116 gimple_stmt_iterator si;
6117 unsigned old_len, new_len;
6118
6119 /* Remove BB from dominance structures. */
6120 delete_from_dominance_info (CDI_DOMINATORS, bb);
6121 if (current_loops)
6122 remove_bb_from_loops (bb);
6123
6124 /* Link BB to the new linked list. */
6125 move_block_after (bb, after);
6126
6127 /* Update the edge count in the corresponding flowgraphs. */
6128 if (update_edge_count_p)
6129 FOR_EACH_EDGE (e, ei, bb->succs)
6130 {
6131 cfun->cfg->x_n_edges--;
6132 dest_cfun->cfg->x_n_edges++;
6133 }
6134
6135 /* Remove BB from the original basic block array. */
6136 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
6137 cfun->cfg->x_n_basic_blocks--;
6138
6139 /* Grow DEST_CFUN's basic block array if needed. */
6140 cfg = dest_cfun->cfg;
6141 cfg->x_n_basic_blocks++;
6142 if (bb->index >= cfg->x_last_basic_block)
6143 cfg->x_last_basic_block = bb->index + 1;
6144
6145 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
6146 if ((unsigned) cfg->x_last_basic_block >= old_len)
6147 {
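/* Grow by about one quarter beyond the new requirement, e.g. a
   needed size of 20 becomes 20 + (20 + 3) / 4 == 25 entries.  */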
6148 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6149 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
6150 new_len);
6151 }
6152
6153 VEC_replace (basic_block, cfg->x_basic_block_info,
6154 bb->index, bb);
6155
6156 /* Remap the variables in phi nodes. */
6157 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6158 {
6159 gimple phi = gsi_stmt (si);
6160 use_operand_p use;
6161 tree op = PHI_RESULT (phi);
6162 ssa_op_iter oi;
6163
6164 if (!is_gimple_reg (op))
6165 {
6166 /* Remove the phi nodes for virtual operands (alias analysis will be
6167 run for the new function, anyway). */
6168 remove_phi_node (&si, true);
6169 continue;
6170 }
6171
6172 SET_PHI_RESULT (phi,
6173 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6174 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6175 {
6176 op = USE_FROM_PTR (use);
6177 if (TREE_CODE (op) == SSA_NAME)
6178 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6179 }
6180
6181 gsi_next (&si);
6182 }
6183
6184 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6185 {
6186 gimple stmt = gsi_stmt (si);
6187 struct walk_stmt_info wi;
6188
6189 memset (&wi, 0, sizeof (wi));
6190 wi.info = d;
6191 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6192
6193 if (gimple_code (stmt) == GIMPLE_LABEL)
6194 {
6195 tree label = gimple_label_label (stmt);
6196 int uid = LABEL_DECL_UID (label);
6197
6198 gcc_assert (uid > -1);
6199
6200 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
6201 if (old_len <= (unsigned) uid)
6202 {
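/* Grow to 3 * UID / 2 + 1 entries, e.g. UID 10 yields 16.  */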
6203 new_len = 3 * uid / 2 + 1;
6204 VEC_safe_grow_cleared (basic_block, gc,
6205 cfg->x_label_to_block_map, new_len);
6206 }
6207
6208 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
6209 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
6210
6211 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6212
6213 if (uid >= dest_cfun->cfg->last_label_uid)
6214 dest_cfun->cfg->last_label_uid = uid + 1;
6215 }
6216
6217 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6218 remove_stmt_from_eh_lp_fn (cfun, stmt);
6219
6220 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6221 gimple_remove_stmt_histograms (cfun, stmt);
6222
6223 /* We cannot leave any operands allocated from the operand caches of
6224 the current function. */
6225 free_stmt_operands (stmt);
6226 push_cfun (dest_cfun);
6227 update_stmt (stmt);
6228 pop_cfun ();
6229 }
6230
6231 FOR_EACH_EDGE (e, ei, bb->succs)
6232 if (e->goto_locus)
6233 {
6234 tree block = e->goto_block;
6235 if (d->orig_block == NULL_TREE
6236 || block == d->orig_block)
6237 e->goto_block = d->new_block;
6238 #ifdef ENABLE_CHECKING
6239 else if (block != d->new_block)
6240 {
6241 while (block && block != d->orig_block)
6242 block = BLOCK_SUPERCONTEXT (block);
6243 gcc_assert (block);
6244 }
6245 #endif
6246 }
6247 }
6248
6249 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6250 the outermost EH region. Use REGION as the incoming base EH region. */
6251
6252 static eh_region
6253 find_outermost_region_in_block (struct function *src_cfun,
6254 basic_block bb, eh_region region)
6255 {
6256 gimple_stmt_iterator si;
6257
6258 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6259 {
6260 gimple stmt = gsi_stmt (si);
6261 eh_region stmt_region;
6262 int lp_nr;
6263
6264 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6265 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6266 if (stmt_region)
6267 {
6268 if (region == NULL)
6269 region = stmt_region;
6270 else if (stmt_region != region)
6271 {
6272 region = eh_region_outermost (src_cfun, stmt_region, region);
6273 gcc_assert (region != NULL);
6274 }
6275 }
6276 }
6277
6278 return region;
6279 }
6280
6281 static tree
6282 new_label_mapper (tree decl, void *data)
6283 {
6284 htab_t hash = (htab_t) data;
6285 struct tree_map *m;
6286 void **slot;
6287
6288 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6289
6290 m = XNEW (struct tree_map);
6291 m->hash = DECL_UID (decl);
6292 m->base.from = decl;
6293 m->to = create_artificial_label (UNKNOWN_LOCATION);
6294 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6295 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6296 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6297
6298 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6299 gcc_assert (*slot == NULL);
6300
6301 *slot = m;
6302
6303 return m->to;
6304 }
6305
6306 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including its
6307 subblocks.  */
6308
6309 static void
6310 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6311 tree to_context)
6312 {
6313 tree *tp, t;
6314
6315 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6316 {
6317 t = *tp;
6318 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6319 continue;
6320 replace_by_duplicate_decl (&t, vars_map, to_context);
6321 if (t != *tp)
6322 {
6323 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6324 {
6325 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6326 DECL_HAS_VALUE_EXPR_P (t) = 1;
6327 }
6328 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6329 *tp = t;
6330 }
6331 }
6332
6333 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6334 replace_block_vars_by_duplicates (block, vars_map, to_context);
6335 }
6336
6337 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6338 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6339 single basic block in the original CFG and the new basic block is
6340 returned. DEST_CFUN must not have a CFG yet.
6341
6342 Note that the region need not be a pure SESE region. Blocks inside
6343 the region may contain calls to abort/exit. The only restriction
6344 is that ENTRY_BB should be the only entry point and it must
6345 dominate EXIT_BB.
6346
6347 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6348 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6349 to the new function.
6350
6351 All local variables referenced in the region are assumed to be in
6352 the corresponding BLOCK_VARS and unexpanded variable lists
6353 associated with DEST_CFUN. */
6354
6355 basic_block
6356 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6357 basic_block exit_bb, tree orig_block)
6358 {
6359 VEC(basic_block,heap) *bbs, *dom_bbs;
6360 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6361 basic_block after, bb, *entry_pred, *exit_succ, abb;
6362 struct function *saved_cfun = cfun;
6363 int *entry_flag, *exit_flag;
6364 unsigned *entry_prob, *exit_prob;
6365 unsigned i, num_entry_edges, num_exit_edges;
6366 edge e;
6367 edge_iterator ei;
6368 htab_t new_label_map;
6369 struct pointer_map_t *vars_map, *eh_map;
6370 struct loop *loop = entry_bb->loop_father;
6371 struct move_stmt_d d;
6372
6373 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6374 region. */
6375 gcc_assert (entry_bb != exit_bb
6376 && (!exit_bb
6377 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6378
6379 /* Collect all the blocks in the region. Manually add ENTRY_BB
6380 because it won't be added by dfs_enumerate_from. */
6381 bbs = NULL;
6382 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6383 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6384
6385 /* The blocks that used to be dominated by something in BBS will now be
6386 dominated by the new block. */
6387 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6388 VEC_address (basic_block, bbs),
6389 VEC_length (basic_block, bbs));
6390
6391 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6392 the predecessor edges to ENTRY_BB and the successor edges to
6393 EXIT_BB so that we can re-attach them to the new basic block that
6394 will replace the region. */
6395 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6396 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6397 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6398 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6399 i = 0;
6400 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6401 {
6402 entry_prob[i] = e->probability;
6403 entry_flag[i] = e->flags;
6404 entry_pred[i++] = e->src;
6405 remove_edge (e);
6406 }
6407
6408 if (exit_bb)
6409 {
6410 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6411 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6412 sizeof (basic_block));
6413 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6414 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6415 i = 0;
6416 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6417 {
6418 exit_prob[i] = e->probability;
6419 exit_flag[i] = e->flags;
6420 exit_succ[i++] = e->dest;
6421 remove_edge (e);
6422 }
6423 }
6424 else
6425 {
6426 num_exit_edges = 0;
6427 exit_succ = NULL;
6428 exit_flag = NULL;
6429 exit_prob = NULL;
6430 }
6431
6432 /* Switch context to the child function to initialize DEST_FN's CFG. */
6433 gcc_assert (dest_cfun->cfg == NULL);
6434 push_cfun (dest_cfun);
6435
6436 init_empty_tree_cfg ();
6437
6438 /* Initialize EH information for the new function. */
6439 eh_map = NULL;
6440 new_label_map = NULL;
6441 if (saved_cfun->eh)
6442 {
6443 eh_region region = NULL;
6444
6445 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6446 region = find_outermost_region_in_block (saved_cfun, bb, region);
6447
6448 init_eh_for_function ();
6449 if (region != NULL)
6450 {
6451 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6452 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6453 new_label_mapper, new_label_map);
6454 }
6455 }
6456
6457 pop_cfun ();
6458
6459 /* Move blocks from BBS into DEST_CFUN. */
6460 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6461 after = dest_cfun->cfg->x_entry_block_ptr;
6462 vars_map = pointer_map_create ();
6463
6464 memset (&d, 0, sizeof (d));
6465 d.orig_block = orig_block;
6466 d.new_block = DECL_INITIAL (dest_cfun->decl);
6467 d.from_context = cfun->decl;
6468 d.to_context = dest_cfun->decl;
6469 d.vars_map = vars_map;
6470 d.new_label_map = new_label_map;
6471 d.eh_map = eh_map;
6472 d.remap_decls_p = true;
6473
6474 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6475 {
6476 /* No need to update edge counts on the last block. It has
6477 already been updated earlier when we detached the region from
6478 the original CFG. */
6479 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6480 after = bb;
6481 }
6482
6483 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6484 if (orig_block)
6485 {
6486 tree block;
6487 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6488 == NULL_TREE);
6489 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6490 = BLOCK_SUBBLOCKS (orig_block);
6491 for (block = BLOCK_SUBBLOCKS (orig_block);
6492 block; block = BLOCK_CHAIN (block))
6493 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6494 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6495 }
6496
6497 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6498 vars_map, dest_cfun->decl);
6499
6500 if (new_label_map)
6501 htab_delete (new_label_map);
6502 if (eh_map)
6503 pointer_map_destroy (eh_map);
6504 pointer_map_destroy (vars_map);
6505
6506 /* Rewire the entry and exit blocks. The successor to the entry
6507 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6508 the child function. Similarly, the predecessor of DEST_FN's
6509 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6510 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6511 various CFG manipulation function get to the right CFG.
6512
6513 FIXME, this is silly. The CFG ought to become a parameter to
6514 these helpers. */
6515 push_cfun (dest_cfun);
6516 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6517 if (exit_bb)
6518 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6519 pop_cfun ();
6520
6521 /* Back in the original function, the SESE region has disappeared,
6522 create a new basic block in its place. */
6523 bb = create_empty_bb (entry_pred[0]);
6524 if (current_loops)
6525 add_bb_to_loop (bb, loop);
6526 for (i = 0; i < num_entry_edges; i++)
6527 {
6528 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6529 e->probability = entry_prob[i];
6530 }
6531
6532 for (i = 0; i < num_exit_edges; i++)
6533 {
6534 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6535 e->probability = exit_prob[i];
6536 }
6537
6538 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6539 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6540 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6541 VEC_free (basic_block, heap, dom_bbs);
6542
6543 if (exit_bb)
6544 {
6545 free (exit_prob);
6546 free (exit_flag);
6547 free (exit_succ);
6548 }
6549 free (entry_prob);
6550 free (entry_flag);
6551 free (entry_pred);
6552 VEC_free (basic_block, heap, bbs);
6553
6554 return bb;
6555 }
6556
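/* Illustrative sketch (editor's addition, not part of GCC): a caller
   such as the OMP expanders outlines a region roughly as below.
   CHILD_FN, REGION_ENTRY, REGION_EXIT and ORIG_BLOCK are hypothetical
   names; the child's struct function must not have a CFG yet.  */
#if 0
  basic_block merged_bb
    = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                              region_entry, region_exit, orig_block);
  /* MERGED_BB now stands in for the whole region in the parent CFG; a
     call to CHILD_FN would typically be emitted there.  */
#endif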
6557
6558 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6559 tree-pass.h).  */
6560
6561 void
6562 dump_function_to_file (tree fn, FILE *file, int flags)
6563 {
6564 tree arg, var;
6565 struct function *dsf;
6566 bool ignore_topmost_bind = false, any_var = false;
6567 basic_block bb;
6568 tree chain;
6569 bool tmclone = TREE_CODE (fn) == FUNCTION_DECL && decl_is_tm_clone (fn);
6570
6571 fprintf (file, "%s %s(", lang_hooks.decl_printable_name (fn, 2),
6572 tmclone ? "[tm-clone] " : "");
6573
6574 arg = DECL_ARGUMENTS (fn);
6575 while (arg)
6576 {
6577 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6578 fprintf (file, " ");
6579 print_generic_expr (file, arg, dump_flags);
6580 if (flags & TDF_VERBOSE)
6581 print_node (file, "", arg, 4);
6582 if (DECL_CHAIN (arg))
6583 fprintf (file, ", ");
6584 arg = DECL_CHAIN (arg);
6585 }
6586 fprintf (file, ")\n");
6587
6588 if (flags & TDF_VERBOSE)
6589 print_node (file, "", fn, 2);
6590
6591 dsf = DECL_STRUCT_FUNCTION (fn);
6592 if (dsf && (flags & TDF_EH))
6593 dump_eh_tree (file, dsf);
6594
6595 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6596 {
6597 dump_node (fn, TDF_SLIM | flags, file);
6598 return;
6599 }
6600
6601 /* Switch CFUN to point to FN. */
6602 push_cfun (DECL_STRUCT_FUNCTION (fn));
6603
6604 /* When GIMPLE is lowered, the variables are no longer available in
6605 BIND_EXPRs, so display them separately. */
6606 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6607 {
6608 unsigned ix;
6609 ignore_topmost_bind = true;
6610
6611 fprintf (file, "{\n");
6612 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6613 {
6614 print_generic_decl (file, var, flags);
6615 if (flags & TDF_VERBOSE)
6616 print_node (file, "", var, 4);
6617 fprintf (file, "\n");
6618
6619 any_var = true;
6620 }
6621 }
6622
6623 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6624 {
6625 /* If the CFG has been built, emit a CFG-based dump. */
6626 check_bb_profile (ENTRY_BLOCK_PTR, file);
6627 if (!ignore_topmost_bind)
6628 fprintf (file, "{\n");
6629
6630 if (any_var && n_basic_blocks)
6631 fprintf (file, "\n");
6632
6633 FOR_EACH_BB (bb)
6634 gimple_dump_bb (bb, file, 2, flags);
6635
6636 fprintf (file, "}\n");
6637 check_bb_profile (EXIT_BLOCK_PTR, file);
6638 }
6639 else if (DECL_SAVED_TREE (fn) == NULL)
6640 {
6641 /* The function is now in GIMPLE form but the CFG has not been
6642 built yet. Emit the single sequence of GIMPLE statements
6643 that make up its body. */
6644 gimple_seq body = gimple_body (fn);
6645
6646 if (gimple_seq_first_stmt (body)
6647 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6648 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6649 print_gimple_seq (file, body, 0, flags);
6650 else
6651 {
6652 if (!ignore_topmost_bind)
6653 fprintf (file, "{\n");
6654
6655 if (any_var)
6656 fprintf (file, "\n");
6657
6658 print_gimple_seq (file, body, 2, flags);
6659 fprintf (file, "}\n");
6660 }
6661 }
6662 else
6663 {
6664 int indent;
6665
6666 /* Make a tree based dump. */
6667 chain = DECL_SAVED_TREE (fn);
6668
6669 if (chain && TREE_CODE (chain) == BIND_EXPR)
6670 {
6671 if (ignore_topmost_bind)
6672 {
6673 chain = BIND_EXPR_BODY (chain);
6674 indent = 2;
6675 }
6676 else
6677 indent = 0;
6678 }
6679 else
6680 {
6681 if (!ignore_topmost_bind)
6682 fprintf (file, "{\n");
6683 indent = 2;
6684 }
6685
6686 if (any_var)
6687 fprintf (file, "\n");
6688
6689 print_generic_stmt_indented (file, chain, flags, indent);
6690 if (ignore_topmost_bind)
6691 fprintf (file, "}\n");
6692 }
6693
6694 if (flags & TDF_ENUMERATE_LOCALS)
6695 dump_enumerated_decls (file, flags);
6696 fprintf (file, "\n\n");
6697
6698 /* Restore CFUN. */
6699 pop_cfun ();
6700 }
6701
6702
6703 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6704
6705 DEBUG_FUNCTION void
6706 debug_function (tree fn, int flags)
6707 {
6708 dump_function_to_file (fn, stderr, flags);
6709 }
6710
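/* A sketch (editor's addition) of typical interactive use from a
   debugger; the TDF_* flags are defined in tree-pass.h.  */
#if 0
  /* E.g. from gdb: dump the current function with virtual operands
     and source line information.  */
  debug_function (current_function_decl, TDF_VOPS | TDF_LINENO);
#endif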
6711
6712 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6713
6714 static void
6715 print_pred_bbs (FILE *file, basic_block bb)
6716 {
6717 edge e;
6718 edge_iterator ei;
6719
6720 FOR_EACH_EDGE (e, ei, bb->preds)
6721 fprintf (file, "bb_%d ", e->src->index);
6722 }
6723
6724
6725 /* Print on FILE the indexes for the successors of basic_block BB. */
6726
6727 static void
6728 print_succ_bbs (FILE *file, basic_block bb)
6729 {
6730 edge e;
6731 edge_iterator ei;
6732
6733 FOR_EACH_EDGE (e, ei, bb->succs)
6734 fprintf (file, "bb_%d ", e->dest->index);
6735 }
6736
6737 /* Print to FILE the basic block BB, according to the VERBOSITY level.  */
6738
6739 void
6740 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6741 {
6742 char *s_indent = (char *) alloca ((size_t) indent + 1);
6743 memset ((void *) s_indent, ' ', (size_t) indent);
6744 s_indent[indent] = '\0';
6745
6746 /* Print basic_block's header. */
6747 if (verbosity >= 2)
6748 {
6749 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6750 print_pred_bbs (file, bb);
6751 fprintf (file, "}, succs = {");
6752 print_succ_bbs (file, bb);
6753 fprintf (file, "})\n");
6754 }
6755
6756 /* Print basic_block's body. */
6757 if (verbosity >= 3)
6758 {
6759 fprintf (file, "%s {\n", s_indent);
6760 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6761 fprintf (file, "%s }\n", s_indent);
6762 }
6763 }
6764
6765 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6766
6767 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
6768 the VERBOSITY level, this outputs the contents of the loop, or just
6769 its structure.  */
6770
6771 static void
6772 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6773 {
6774 char *s_indent;
6775 basic_block bb;
6776
6777 if (loop == NULL)
6778 return;
6779
6780 s_indent = (char *) alloca ((size_t) indent + 1);
6781 memset ((void *) s_indent, ' ', (size_t) indent);
6782 s_indent[indent] = '\0';
6783
6784 /* Print loop's header. */
6785 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6786 loop->num, loop->header->index, loop->latch->index);
6787 fprintf (file, ", niter = ");
6788 print_generic_expr (file, loop->nb_iterations, 0);
6789
6790 if (loop->any_upper_bound)
6791 {
6792 fprintf (file, ", upper_bound = ");
6793 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6794 }
6795
6796 if (loop->any_estimate)
6797 {
6798 fprintf (file, ", estimate = ");
6799 dump_double_int (file, loop->nb_iterations_estimate, true);
6800 }
6801 fprintf (file, ")\n");
6802
6803 /* Print loop's body. */
6804 if (verbosity >= 1)
6805 {
6806 fprintf (file, "%s{\n", s_indent);
6807 FOR_EACH_BB (bb)
6808 if (bb->loop_father == loop)
6809 print_loops_bb (file, bb, indent, verbosity);
6810
6811 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6812 fprintf (file, "%s}\n", s_indent);
6813 }
6814 }
6815
6816 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6817 spaces.  Depending on the VERBOSITY level, this outputs the contents
6818 of each loop, or just its structure.  */
6819
6820 static void
6821 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6822 {
6823 if (loop == NULL)
6824 return;
6825
6826 print_loop (file, loop, indent, verbosity);
6827 print_loop_and_siblings (file, loop->next, indent, verbosity);
6828 }
6829
6830 /* Starting from the entry point of the program, pretty print the
6831 structure of every loop on FILE, at the given VERBOSITY level.  */
6832
6833 void
6834 print_loops (FILE *file, int verbosity)
6835 {
6836 basic_block bb;
6837
6838 bb = ENTRY_BLOCK_PTR;
6839 if (bb && bb->loop_father)
6840 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6841 }
6842
6843
6844 /* Debug the loop structure at tree level, at some VERBOSITY level.  */
6845
6846 DEBUG_FUNCTION void
6847 debug_loops (int verbosity)
6848 {
6849 print_loops (stderr, verbosity);
6850 }
6851
6852 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6853
6854 DEBUG_FUNCTION void
6855 debug_loop (struct loop *loop, int verbosity)
6856 {
6857 print_loop (stderr, loop, 0, verbosity);
6858 }
6859
6860 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6861 level. */
6862
6863 DEBUG_FUNCTION void
6864 debug_loop_num (unsigned num, int verbosity)
6865 {
6866 debug_loop (get_loop (num), verbosity);
6867 }
6868
6869 /* Return true if BB ends with a call, possibly followed by some
6870 instructions that must stay with the call.  Return false
6871 otherwise.  */
6872
6873 static bool
6874 gimple_block_ends_with_call_p (basic_block bb)
6875 {
6876 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6877 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6878 }
6879
6880
6881 /* Return true if BB ends with a conditional branch.  Return false
6882 otherwise.  */
6883
6884 static bool
6885 gimple_block_ends_with_condjump_p (const_basic_block bb)
6886 {
6887 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6888 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6889 }
6890
6891
6892 /* Return true if we need to add a fake edge to exit at statement T.
6893 Helper function for gimple_flow_call_edges_add.  */
6894
6895 static bool
6896 need_fake_edge_p (gimple t)
6897 {
6898 tree fndecl = NULL_TREE;
6899 int call_flags = 0;
6900
6901 /* NORETURN and LONGJMP calls already have an edge to exit.
6902 CONST and PURE calls do not need one.
6903 We don't currently check for CONST and PURE here, although
6904 it would be a good idea, because those attributes are
6905 figured out from the RTL in mark_constant_function, and
6906 the counter incrementation code from -fprofile-arcs
6907 leads to different results from -fbranch-probabilities. */
6908 if (is_gimple_call (t))
6909 {
6910 fndecl = gimple_call_fndecl (t);
6911 call_flags = gimple_call_flags (t);
6912 }
6913
6914 if (is_gimple_call (t)
6915 && fndecl
6916 && DECL_BUILT_IN (fndecl)
6917 && (call_flags & ECF_NOTHROW)
6918 && !(call_flags & ECF_RETURNS_TWICE)
6919 /* fork() doesn't really return twice, but the effect of
6920 wrapping it in __gcov_fork() which calls __gcov_flush()
6921 and clears the counters before forking has the same
6922 effect as returning twice. Force a fake edge. */
6923 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6924 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6925 return false;
6926
6927 if (is_gimple_call (t))
6928 {
6929 edge_iterator ei;
6930 edge e;
6931 basic_block bb;
6932
6933 if (!(call_flags & ECF_NORETURN))
6934 return true;
6935
6936 bb = gimple_bb (t);
6937 FOR_EACH_EDGE (e, ei, bb->succs)
6938 if ((e->flags & EDGE_FAKE) == 0)
6939 return true;
6940 }
6941
6942 if (gimple_code (t) == GIMPLE_ASM
6943 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6944 return true;
6945
6946 return false;
6947 }
6948
6949
6950 /* Add fake edges to the function exit for any non-constant and
6951 non-noreturn calls (or noreturn calls with EH/abnormal edges) and for
6952 volatile inline assembly, in the blocks specified by the bitmap BLOCKS,
6953 or in the whole CFG if BLOCKS is zero.  Return the number of blocks
6954 that were split.
6955
6956 The goal is to expose cases in which entering a basic block does
6957 not imply that all subsequent instructions must be executed. */
6958
6959 static int
6960 gimple_flow_call_edges_add (sbitmap blocks)
6961 {
6962 int i;
6963 int blocks_split = 0;
6964 int last_bb = last_basic_block;
6965 bool check_last_block = false;
6966
6967 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6968 return 0;
6969
6970 if (! blocks)
6971 check_last_block = true;
6972 else
6973 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6974
6975 /* In the last basic block, before epilogue generation, there will be
6976 a fallthru edge to EXIT. Special care is required if the last insn
6977 of the last basic block is a call because make_edge folds duplicate
6978 edges, which would result in the fallthru edge also being marked
6979 fake, which would result in the fallthru edge being removed by
6980 remove_fake_edges, which would result in an invalid CFG.
6981
6982 Moreover, we can't elide the outgoing fake edge, since the block
6983 profiler needs to take this into account in order to solve the minimal
6984 spanning tree in the case that the call doesn't return.
6985
6986 Handle this by adding a dummy instruction in a new last basic block. */
6987 if (check_last_block)
6988 {
6989 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6990 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6991 gimple t = NULL;
6992
6993 if (!gsi_end_p (gsi))
6994 t = gsi_stmt (gsi);
6995
6996 if (t && need_fake_edge_p (t))
6997 {
6998 edge e;
6999
7000 e = find_edge (bb, EXIT_BLOCK_PTR);
7001 if (e)
7002 {
7003 gsi_insert_on_edge (e, gimple_build_nop ());
7004 gsi_commit_edge_inserts ();
7005 }
7006 }
7007 }
7008
7009 /* Now add fake edges to the function exit for any non-constant
7010 calls, since there is no way to determine whether they will
7011 return or not...  */
7012 for (i = 0; i < last_bb; i++)
7013 {
7014 basic_block bb = BASIC_BLOCK (i);
7015 gimple_stmt_iterator gsi;
7016 gimple stmt, last_stmt;
7017
7018 if (!bb)
7019 continue;
7020
7021 if (blocks && !TEST_BIT (blocks, i))
7022 continue;
7023
7024 gsi = gsi_last_nondebug_bb (bb);
7025 if (!gsi_end_p (gsi))
7026 {
7027 last_stmt = gsi_stmt (gsi);
7028 do
7029 {
7030 stmt = gsi_stmt (gsi);
7031 if (need_fake_edge_p (stmt))
7032 {
7033 edge e;
7034
7035 /* The handling above of the final block before the
7036 epilogue should be enough to verify that there is
7037 no edge to the exit block in CFG already.
7038 Calling make_edge in such case would cause us to
7039 mark that edge as fake and remove it later. */
7040 #ifdef ENABLE_CHECKING
7041 if (stmt == last_stmt)
7042 {
7043 e = find_edge (bb, EXIT_BLOCK_PTR);
7044 gcc_assert (e == NULL);
7045 }
7046 #endif
7047
7048 /* Note that the following may create a new basic block
7049 and renumber the existing basic blocks. */
7050 if (stmt != last_stmt)
7051 {
7052 e = split_block (bb, stmt);
7053 if (e)
7054 blocks_split++;
7055 }
7056 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7057 }
7058 gsi_prev (&gsi);
7059 }
7060 while (!gsi_end_p (gsi));
7061 }
7062 }
7063
7064 if (blocks_split)
7065 verify_flow_info ();
7066
7067 return blocks_split;
7068 }
7069
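/* A sketch (editor's addition) of how this routine is reached: callers
   normally go through the generic wrapper in cfghooks.c, which
   dispatches to the hook table installed further below; a NULL bitmap
   covers the whole CFG.  */
#if 0
  int n_split = flow_call_edges_add (NULL);
#endif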
7070 /* Remove edge E and all the blocks dominated by it, and update the
7071 dominance information.  The IL in E->src needs to be updated separately.
7072 If dominance info is not available, only the edge E is removed.  */
7073
7074 void
7075 remove_edge_and_dominated_blocks (edge e)
7076 {
7077 VEC (basic_block, heap) *bbs_to_remove = NULL;
7078 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
7079 bitmap df, df_idom;
7080 edge f;
7081 edge_iterator ei;
7082 bool none_removed = false;
7083 unsigned i;
7084 basic_block bb, dbb;
7085 bitmap_iterator bi;
7086
7087 if (!dom_info_available_p (CDI_DOMINATORS))
7088 {
7089 remove_edge (e);
7090 return;
7091 }
7092
7093 /* No updating is needed for edges to exit. */
7094 if (e->dest == EXIT_BLOCK_PTR)
7095 {
7096 if (cfgcleanup_altered_bbs)
7097 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7098 remove_edge (e);
7099 return;
7100 }
7101
7102 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7103 that is not dominated by E->dest, then this set is empty. Otherwise,
7104 all the basic blocks dominated by E->dest are removed.
7105
7106 Also, to DF_IDOM we store the immediate dominators of the blocks in
7107 the dominance frontier of E (i.e., of the successors of the
7108 removed blocks, if there are any, and of E->dest otherwise). */
7109 FOR_EACH_EDGE (f, ei, e->dest->preds)
7110 {
7111 if (f == e)
7112 continue;
7113
7114 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7115 {
7116 none_removed = true;
7117 break;
7118 }
7119 }
7120
7121 df = BITMAP_ALLOC (NULL);
7122 df_idom = BITMAP_ALLOC (NULL);
7123
7124 if (none_removed)
7125 bitmap_set_bit (df_idom,
7126 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7127 else
7128 {
7129 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7130 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7131 {
7132 FOR_EACH_EDGE (f, ei, bb->succs)
7133 {
7134 if (f->dest != EXIT_BLOCK_PTR)
7135 bitmap_set_bit (df, f->dest->index);
7136 }
7137 }
7138 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7139 bitmap_clear_bit (df, bb->index);
7140
7141 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7142 {
7143 bb = BASIC_BLOCK (i);
7144 bitmap_set_bit (df_idom,
7145 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7146 }
7147 }
7148
7149 if (cfgcleanup_altered_bbs)
7150 {
7151 /* Record the set of the altered basic blocks. */
7152 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7153 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7154 }
7155
7156 /* Remove E and the cancelled blocks. */
7157 if (none_removed)
7158 remove_edge (e);
7159 else
7160 {
7161 /* Walk backwards so as to get a chance to substitute all
7162 released DEFs into debug stmts. See
7163 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7164 details. */
7165 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
7166 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
7167 }
7168
7169 /* Update the dominance information. The immediate dominator may change only
7170 for blocks whose immediate dominator belongs to DF_IDOM:
7171
7172 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7173 removal.  Let Z be an arbitrary block such that idom(Z) = Y and
7174 Z dominates X after the removal. Before removal, there exists a path P
7175 from Y to X that avoids Z. Let F be the last edge on P that is
7176 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7177 dominates W, and because of P, Z does not dominate W), and W belongs to
7178 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7179 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7180 {
7181 bb = BASIC_BLOCK (i);
7182 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7183 dbb;
7184 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7185 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
7186 }
7187
7188 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7189
7190 BITMAP_FREE (df);
7191 BITMAP_FREE (df_idom);
7192 VEC_free (basic_block, heap, bbs_to_remove);
7193 VEC_free (basic_block, heap, bbs_to_fix_dom);
7194 }
7195
7196 /* Purge dead EH edges from basic block BB. */
7197
7198 bool
7199 gimple_purge_dead_eh_edges (basic_block bb)
7200 {
7201 bool changed = false;
7202 edge e;
7203 edge_iterator ei;
7204 gimple stmt = last_stmt (bb);
7205
7206 if (stmt && stmt_can_throw_internal (stmt))
7207 return false;
7208
7209 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7210 {
7211 if (e->flags & EDGE_EH)
7212 {
7213 remove_edge_and_dominated_blocks (e);
7214 changed = true;
7215 }
7216 else
7217 ei_next (&ei);
7218 }
7219
7220 return changed;
7221 }
7222
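/* A sketch (editor's addition) of the usual pattern in an optimizer
   after simplifying a potentially-throwing statement;
   maybe_clean_eh_stmt is from tree-eh.c.  */
#if 0
  /* If STMT can no longer throw, drop its EH landing pad and any EH
     edges that became dead as a result.  */
  if (maybe_clean_eh_stmt (stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;
#endif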
7223 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
7224
7225 bool
7226 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7227 {
7228 bool changed = false;
7229 unsigned i;
7230 bitmap_iterator bi;
7231
7232 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7233 {
7234 basic_block bb = BASIC_BLOCK (i);
7235
7236 /* Earlier gimple_purge_dead_eh_edges could have removed
7237 this basic block already. */
7238 gcc_assert (bb || changed);
7239 if (bb != NULL)
7240 changed |= gimple_purge_dead_eh_edges (bb);
7241 }
7242
7243 return changed;
7244 }
7245
7246 /* Purge dead abnormal call edges from basic block BB. */
7247
7248 bool
7249 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7250 {
7251 bool changed = false;
7252 edge e;
7253 edge_iterator ei;
7254 gimple stmt = last_stmt (bb);
7255
7256 if (!cfun->has_nonlocal_label)
7257 return false;
7258
7259 if (stmt && stmt_can_make_abnormal_goto (stmt))
7260 return false;
7261
7262 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7263 {
7264 if (e->flags & EDGE_ABNORMAL)
7265 {
7266 remove_edge_and_dominated_blocks (e);
7267 changed = true;
7268 }
7269 else
7270 ei_next (&ei);
7271 }
7272
7273 return changed;
7274 }
7275
7276 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
7277
7278 bool
7279 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7280 {
7281 bool changed = false;
7282 unsigned i;
7283 bitmap_iterator bi;
7284
7285 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7286 {
7287 basic_block bb = BASIC_BLOCK (i);
7288
7289 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7290 this basic block already. */
7291 gcc_assert (bb || changed);
7292 if (bb != NULL)
7293 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7294 }
7295
7296 return changed;
7297 }
7298
7299 /* This function is called whenever a new edge is created or
7300 redirected. */
7301
7302 static void
7303 gimple_execute_on_growing_pred (edge e)
7304 {
7305 basic_block bb = e->dest;
7306
7307 if (!gimple_seq_empty_p (phi_nodes (bb)))
7308 reserve_phi_args_for_new_edge (bb);
7309 }
7310
7311 /* This function is called immediately before edge E is removed from
7312 the edge vector E->dest->preds. */
7313
7314 static void
7315 gimple_execute_on_shrinking_pred (edge e)
7316 {
7317 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7318 remove_phi_args (e);
7319 }
7320
7321 /*---------------------------------------------------------------------------
7322 Helper functions for Loop versioning
7323 ---------------------------------------------------------------------------*/
7324
7325 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
7326 of 'first'.  Both of them are dominated by 'new_head' basic block.  When
7327 'new_head' was created by splitting 'second's incoming edge, the edge from
7328 'new_head' to 'second' received phi arguments by split_edge().  Later, an
7329 additional edge 'e' was created to connect 'new_head' and 'first'.  This
7330 routine now adds, on edge 'e', the phi args that the 'new_head' to 'second'
7331 edge received as part of the edge splitting.  */
7332
7333 static void
7334 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7335 basic_block new_head, edge e)
7336 {
7337 gimple phi1, phi2;
7338 gimple_stmt_iterator psi1, psi2;
7339 tree def;
7340 edge e2 = find_edge (new_head, second);
7341
7342 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7343 edge, we should always have an edge from NEW_HEAD to SECOND. */
7344 gcc_assert (e2 != NULL);
7345
7346 /* Browse all 'second' basic block phi nodes and add phi args to
7347 edge 'e' for 'first' head. PHI args are always in correct order. */
7348
7349 for (psi2 = gsi_start_phis (second),
7350 psi1 = gsi_start_phis (first);
7351 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7352 gsi_next (&psi2), gsi_next (&psi1))
7353 {
7354 phi1 = gsi_stmt (psi1);
7355 phi2 = gsi_stmt (psi2);
7356 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7357 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7358 }
7359 }
7360
7361
7362 /* Add an if-else statement to COND_BB with condition COND_EXPR.
7363 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7364 the destination of the ELSE part.  */
7365
7366 static void
7367 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7368 basic_block second_head ATTRIBUTE_UNUSED,
7369 basic_block cond_bb, void *cond_e)
7370 {
7371 gimple_stmt_iterator gsi;
7372 gimple new_cond_expr;
7373 tree cond_expr = (tree) cond_e;
7374 edge e0;
7375
7376 /* Build the new conditional expression.  */
7377 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7378 NULL_TREE, NULL_TREE);
7379
7380 /* Add new cond in cond_bb. */
7381 gsi = gsi_last_bb (cond_bb);
7382 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7383
7384 /* Adjust edges appropriately to connect new head with first head
7385 as well as second head. */
7386 e0 = single_succ_edge (cond_bb);
7387 e0->flags &= ~EDGE_FALLTHRU;
7388 e0->flags |= EDGE_FALSE_VALUE;
7389 }
7390
7391 struct cfg_hooks gimple_cfg_hooks = {
7392 "gimple",
7393 gimple_verify_flow_info,
7394 gimple_dump_bb, /* dump_bb */
7395 create_bb, /* create_basic_block */
7396 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7397 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7398 gimple_can_remove_branch_p, /* can_remove_branch_p */
7399 remove_bb, /* delete_basic_block */
7400 gimple_split_block, /* split_block */
7401 gimple_move_block_after, /* move_block_after */
7402 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7403 gimple_merge_blocks, /* merge_blocks */
7404 gimple_predict_edge, /* predict_edge */
7405 gimple_predicted_by_p, /* predicted_by_p */
7406 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7407 gimple_duplicate_bb, /* duplicate_block */
7408 gimple_split_edge, /* split_edge */
7409 gimple_make_forwarder_block, /* make_forward_block */
7410 NULL, /* tidy_fallthru_edge */
7411 NULL, /* force_nonfallthru */
7412 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7413 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7414 gimple_flow_call_edges_add, /* flow_call_edges_add */
7415 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7416 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7417 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7418 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7419 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7420 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7421 flush_pending_stmts /* flush_pending_stmts */
7422 };
7423
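/* Editor's note (sketch): generic CFG code dispatches through a global
   hook table, which GIMPLE passes select via gimple_register_cfg_hooks
   from cfghooks.c.  After that, e.g. split_edge dispatches to
   gimple_split_edge above.  */
#if 0
  gimple_register_cfg_hooks ();
  basic_block new_bb = split_edge (e);	/* Uses gimple_split_edge.  */
#endif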
7424
7425 /* Split all critical edges. */
7426
7427 static unsigned int
7428 split_critical_edges (void)
7429 {
7430 basic_block bb;
7431 edge e;
7432 edge_iterator ei;
7433
7434 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7435 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7436 mappings around the calls to split_edge. */
7437 start_recording_case_labels ();
7438 FOR_ALL_BB (bb)
7439 {
7440 FOR_EACH_EDGE (e, ei, bb->succs)
7441 {
7442 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7443 split_edge (e);
7444 /* PRE inserts statements on edges and expects that
7445 since split_critical_edges was done beforehand, committing edge
7446 insertions will not split more edges.  In addition to critical
7447 edges we must split edges from blocks that have multiple successors
7448 and end with control flow statements, such as RESX.
7449 Go ahead and split them too.  This matches the logic in
7450 gimple_find_edge_insert_loc.  */
7451 else if ((!single_pred_p (e->dest)
7452 || !gimple_seq_empty_p (phi_nodes (e->dest))
7453 || e->dest == EXIT_BLOCK_PTR)
7454 && e->src != ENTRY_BLOCK_PTR
7455 && !(e->flags & EDGE_ABNORMAL))
7456 {
7457 gimple_stmt_iterator gsi;
7458
7459 gsi = gsi_last_bb (e->src);
7460 if (!gsi_end_p (gsi)
7461 && stmt_ends_bb_p (gsi_stmt (gsi))
7462 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7463 && !gimple_call_builtin_p (gsi_stmt (gsi),
7464 BUILT_IN_RETURN)))
7465 split_edge (e);
7466 }
7467 }
7468 }
7469 end_recording_case_labels ();
7470 return 0;
7471 }
7472
7473 struct gimple_opt_pass pass_split_crit_edges =
7474 {
7475 {
7476 GIMPLE_PASS,
7477 "crited", /* name */
7478 NULL, /* gate */
7479 split_critical_edges, /* execute */
7480 NULL, /* sub */
7481 NULL, /* next */
7482 0, /* static_pass_number */
7483 TV_TREE_SPLIT_EDGES, /* tv_id */
7484 PROP_cfg, /* properties required */
7485 PROP_no_crit_edges, /* properties_provided */
7486 0, /* properties_destroyed */
7487 0, /* todo_flags_start */
7488 TODO_verify_flow /* todo_flags_finish */
7489 }
7490 };
7491
7492
7493 /* Build a ternary operation and gimplify it. Emit code before GSI.
7494 Return the gimple_val holding the result. */
7495
7496 tree
7497 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7498 tree type, tree a, tree b, tree c)
7499 {
7500 tree ret;
7501 location_t loc = gimple_location (gsi_stmt (*gsi));
7502
7503 ret = fold_build3_loc (loc, code, type, a, b, c);
7504 STRIP_NOPS (ret);
7505
7506 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7507 GSI_SAME_STMT);
7508 }
7509
7510 /* Build a binary operation and gimplify it. Emit code before GSI.
7511 Return the gimple_val holding the result. */
7512
7513 tree
7514 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7515 tree type, tree a, tree b)
7516 {
7517 tree ret;
7518
7519 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7520 STRIP_NOPS (ret);
7521
7522 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7523 GSI_SAME_STMT);
7524 }
7525
7526 /* Build a unary operation and gimplify it. Emit code before GSI.
7527 Return the gimple_val holding the result. */
7528
7529 tree
7530 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7531 tree a)
7532 {
7533 tree ret;
7534
7535 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7536 STRIP_NOPS (ret);
7537
7538 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7539 GSI_SAME_STMT);
7540 }
7541
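/* A sketch (editor's addition) composing the helpers above to emit
   t = (a + b) * c before GSI; A, B and C are assumed to be valid
   gimple values of TYPE.  */
#if 0
  tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
  /* PROD is a gimple_val; the statements computing it have been
     inserted before the statement at *GSI.  */
#endif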
7542
7543 \f
7544 /* Emit return warnings. */
7545
7546 static unsigned int
7547 execute_warn_function_return (void)
7548 {
7549 source_location location;
7550 gimple last;
7551 edge e;
7552 edge_iterator ei;
7553
7554 /* If we have a path to EXIT, then we do return. */
7555 if (TREE_THIS_VOLATILE (cfun->decl)
7556 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7557 {
7558 location = UNKNOWN_LOCATION;
7559 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7560 {
7561 last = last_stmt (e->src);
7562 if ((gimple_code (last) == GIMPLE_RETURN
7563 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7564 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7565 break;
7566 }
7567 if (location == UNKNOWN_LOCATION)
7568 location = cfun->function_end_locus;
7569 warning_at (location, 0, "%<noreturn%> function does return");
7570 }
7571
7572 /* If we see "return;" in some basic block, then we do reach the end
7573 without returning a value. */
7574 else if (warn_return_type
7575 && !TREE_NO_WARNING (cfun->decl)
7576 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7577 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7578 {
7579 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7580 {
7581 gimple last = last_stmt (e->src);
7582 if (gimple_code (last) == GIMPLE_RETURN
7583 && gimple_return_retval (last) == NULL
7584 && !gimple_no_warning_p (last))
7585 {
7586 location = gimple_location (last);
7587 if (location == UNKNOWN_LOCATION)
7588 location = cfun->function_end_locus;
7589 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7590 TREE_NO_WARNING (cfun->decl) = 1;
7591 break;
7592 }
7593 }
7594 }
7595 return 0;
7596 }
7597
7598
7599 /* Given a basic block B which ends with a conditional and has
7600 precisely two successors, determine which of the edges is taken if
7601 the conditional is true and which is taken if the conditional is
7602 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7603
7604 void
7605 extract_true_false_edges_from_block (basic_block b,
7606 edge *true_edge,
7607 edge *false_edge)
7608 {
7609 edge e = EDGE_SUCC (b, 0);
7610
7611 if (e->flags & EDGE_TRUE_VALUE)
7612 {
7613 *true_edge = e;
7614 *false_edge = EDGE_SUCC (b, 1);
7615 }
7616 else
7617 {
7618 *false_edge = e;
7619 *true_edge = EDGE_SUCC (b, 1);
7620 }
7621 }
7622
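/* A sketch (editor's addition): typical use when analyzing a block
   COND_BB that ends in a GIMPLE_COND.  */
#if 0
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  /* The block executed when the condition holds is then
     true_edge->dest.  */
  basic_block taken_bb = true_edge->dest;
#endif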
7623 struct gimple_opt_pass pass_warn_function_return =
7624 {
7625 {
7626 GIMPLE_PASS,
7627 "*warn_function_return", /* name */
7628 NULL, /* gate */
7629 execute_warn_function_return, /* execute */
7630 NULL, /* sub */
7631 NULL, /* next */
7632 0, /* static_pass_number */
7633 TV_NONE, /* tv_id */
7634 PROP_cfg, /* properties_required */
7635 0, /* properties_provided */
7636 0, /* properties_destroyed */
7637 0, /* todo_flags_start */
7638 0 /* todo_flags_finish */
7639 }
7640 };
7641
7642 /* Emit noreturn warnings. */
7643
7644 static unsigned int
7645 execute_warn_function_noreturn (void)
7646 {
7647 if (!TREE_THIS_VOLATILE (current_function_decl)
7648 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7649 warn_function_noreturn (current_function_decl);
7650 return 0;
7651 }
7652
7653 static bool
7654 gate_warn_function_noreturn (void)
7655 {
7656 return warn_suggest_attribute_noreturn;
7657 }
7658
7659 struct gimple_opt_pass pass_warn_function_noreturn =
7660 {
7661 {
7662 GIMPLE_PASS,
7663 "*warn_function_noreturn", /* name */
7664 gate_warn_function_noreturn, /* gate */
7665 execute_warn_function_noreturn, /* execute */
7666 NULL, /* sub */
7667 NULL, /* next */
7668 0, /* static_pass_number */
7669 TV_NONE, /* tv_id */
7670 PROP_cfg, /* properties_required */
7671 0, /* properties_provided */
7672 0, /* properties_destroyed */
7673 0, /* todo_flags_start */
7674 0 /* todo_flags_finish */
7675 }
7676 };
7677
7678
7679 /* Walk a gimplified function and warn about calls whose return value is
7680 ignored and whose callee has attribute((warn_unused_result)) set.  This
7681 is done before inlining, so we don't have to worry about that.  */
7682
7683 static void
7684 do_warn_unused_result (gimple_seq seq)
7685 {
7686 tree fdecl, ftype;
7687 gimple_stmt_iterator i;
7688
7689 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7690 {
7691 gimple g = gsi_stmt (i);
7692
7693 switch (gimple_code (g))
7694 {
7695 case GIMPLE_BIND:
7696 do_warn_unused_result (gimple_bind_body (g));
7697 break;
7698 case GIMPLE_TRY:
7699 do_warn_unused_result (gimple_try_eval (g));
7700 do_warn_unused_result (gimple_try_cleanup (g));
7701 break;
7702 case GIMPLE_CATCH:
7703 do_warn_unused_result (gimple_catch_handler (g));
7704 break;
7705 case GIMPLE_EH_FILTER:
7706 do_warn_unused_result (gimple_eh_filter_failure (g));
7707 break;
7708
7709 case GIMPLE_CALL:
7710 if (gimple_call_lhs (g))
7711 break;
7712 if (gimple_call_internal_p (g))
7713 break;
7714
7715 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7716 LHS. All calls whose value is ignored should be
7717 represented like this. Look for the attribute. */
7718 fdecl = gimple_call_fndecl (g);
7719 ftype = gimple_call_fntype (g);
7720
7721 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7722 {
7723 location_t loc = gimple_location (g);
7724
7725 if (fdecl)
7726 warning_at (loc, OPT_Wunused_result,
7727 "ignoring return value of %qD, "
7728 "declared with attribute warn_unused_result",
7729 fdecl);
7730 else
7731 warning_at (loc, OPT_Wunused_result,
7732 "ignoring return value of function "
7733 "declared with attribute warn_unused_result");
7734 }
7735 break;
7736
7737 default:
7738 /* Not a container, not a call, or a call whose value is used. */
7739 break;
7740 }
7741 }
7742 }
7743
7744 static unsigned int
7745 run_warn_unused_result (void)
7746 {
7747 do_warn_unused_result (gimple_body (current_function_decl));
7748 return 0;
7749 }
7750
7751 static bool
7752 gate_warn_unused_result (void)
7753 {
7754 return flag_warn_unused_result;
7755 }
7756
7757 struct gimple_opt_pass pass_warn_unused_result =
7758 {
7759 {
7760 GIMPLE_PASS,
7761 "*warn_unused_result", /* name */
7762 gate_warn_unused_result, /* gate */
7763 run_warn_unused_result, /* execute */
7764 NULL, /* sub */
7765 NULL, /* next */
7766 0, /* static_pass_number */
7767 TV_NONE, /* tv_id */
7768 PROP_gimple_any, /* properties_required */
7769 0, /* properties_provided */
7770 0, /* properties_destroyed */
7771 0, /* todo_flags_start */
7772 0, /* todo_flags_finish */
7773 }
7774 };