]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-cfg.c
Merge with trunk.
[thirdparty/gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tm_p.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
47 #include "cgraph.h"
48 #include "tree-cfg.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
56 #include "expr.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
62 #include "except.h"
63 #include "cfgloop.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
67 #include "target.h"
68 #include "tree-ssa-live.h"
69 #include "omp-low.h"
70 #include "tree-cfgcleanup.h"
71 #include "wide-int.h"
72 #include "wide-int-print.h"
73
74 /* This file contains functions for building the Control Flow Graph (CFG)
75 for a function tree. */
76
77 /* Local declarations. */
78
79 /* Initial capacity for the basic block array. */
80 static const int initial_cfg_capacity = 20;
81
82 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
83 which use a particular edge. The CASE_LABEL_EXPRs are chained together
84 via their CASE_CHAIN field, which we clear after we're done with the
85 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
86
87 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
88 update the case vector in response to edge redirections.
89
90 Right now this table is set up and torn down at key points in the
91 compilation process. It would be nice if we could make the table
92 more persistent. The key is getting notification of changes to
93 the CFG (particularly edge removal, creation and redirection). */
94
95 static struct pointer_map_t *edge_to_cases;
96
97 /* If we record edge_to_cases, this bitmap will hold indexes
98 of basic blocks that end in a GIMPLE_SWITCH which we touched
99 due to edge manipulations. */
100
101 static bitmap touched_switch_bbs;
102
103 /* CFG statistics. */
104 struct cfg_stats_d
105 {
106 long num_merged_labels;
107 };
108
109 static struct cfg_stats_d cfg_stats;
110
111 /* Nonzero if we found a computed goto while building basic blocks. */
112 static bool found_computed_goto;
113
114 /* Hash table to store last discriminator assigned for each locus. */
115 struct locus_discrim_map
116 {
117 location_t locus;
118 int discriminator;
119 };
120
121 /* Hashtable helpers. */
122
/* Hasher for the locus -> discriminator map.  Keys entries by source
   LINE only (not column or file), so every location on one line shares
   a single discriminator counter.  Entries are malloc'd (XNEW) and
   released with free () via typed_free_remove on table disposal.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
130
131 /* Trivial hash function for a location_t. ITEM is a pointer to
132 a hash table entry that maps a location_t to a discriminator. */
133
inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  /* Hash on the line number only, consistent with equal () below.  */
  return LOCATION_LINE (item->locus);
}
139
140 /* Equality function for the locus-to-discriminator map. A and B
141 point to the two hash table entries to compare. */
142
inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  /* Two entries are "equal" iff their locations are on the same line;
     column and file are deliberately ignored.  */
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}
148
149 static hash_table <locus_discrim_hasher> discriminator_per_locus;
150
151 /* Basic blocks and flowgraphs. */
152 static void make_blocks (gimple_seq);
153 static void factor_computed_gotos (void);
154
155 /* Edges. */
156 static void make_edges (void);
157 static void assign_discriminators (void);
158 static void make_cond_expr_edges (basic_block);
159 static void make_gimple_switch_edges (basic_block);
160 static void make_goto_expr_edges (basic_block);
161 static void make_gimple_asm_edges (basic_block);
162 static edge gimple_redirect_edge_and_branch (edge, basic_block);
163 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
164 static unsigned int split_critical_edges (void);
165
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple, gimple);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gimple);
172
173 /* Flowgraph optimization and cleanup. */
174 static void gimple_merge_blocks (basic_block, basic_block);
175 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
176 static void remove_bb (basic_block);
177 static edge find_taken_edge_computed_goto (basic_block, tree);
178 static edge find_taken_edge_cond_expr (basic_block, tree);
179 static edge find_taken_edge_switch_expr (basic_block, tree);
180 static tree find_case_label_for_value (gimple, tree);
181
/* Initialize an empty CFG for function FN: allocate flow data, create
   the basic-block and label-to-block arrays with the initial capacity,
   install the fixed ENTRY/EXIT blocks, and chain them together.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks at their reserved indices.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FN (fn));

  /* With no real blocks yet, ENTRY and EXIT are adjacent in the chain.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
209
/* Initialize an empty CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
215
216 /*---------------------------------------------------------------------------
217 Create basic blocks
218 ---------------------------------------------------------------------------*/
219
220 /* Entry point to the CFG builder for trees. SEQ is the sequence of
221 statements to be added to the flowgraph. */
222
223 static void
224 build_gimple_cfg (gimple_seq seq)
225 {
226 /* Register specific gimple functions. */
227 gimple_register_cfg_hooks ();
228
229 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
230
231 init_empty_tree_cfg ();
232
233 found_computed_goto = 0;
234 make_blocks (seq);
235
236 /* Computed gotos are hell to deal with, especially if there are
237 lots of them with a large number of destinations. So we factor
238 them to a common computed goto location before we build the
239 edge list. After we convert back to normal form, we will un-factor
240 the computed gotos since factoring introduces an unwanted jump. */
241 if (found_computed_goto)
242 factor_computed_gotos ();
243
244 /* Make sure there is always at least one block, even if it's empty. */
245 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
246 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
247
248 /* Adjust the size of the array. */
249 if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
250 vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));
251
252 /* To speed up statement iterator walks, we first purge dead labels. */
253 cleanup_dead_labels ();
254
255 /* Group case nodes to reduce the number of edges.
256 We do this after cleaning up dead labels because otherwise we miss
257 a lot of obvious case merging opportunities. */
258 group_case_labels ();
259
260 /* Create the edges of the flowgraph. */
261 discriminator_per_locus.create (13);
262 make_edges ();
263 assign_discriminators ();
264 cleanup_dead_labels ();
265 discriminator_per_locus.dispose ();
266 }
267
268
269 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
270 it and set loop->safelen to INT_MAX. We assume that the annotation
271 comes immediately before the condition. */
272
static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* The annotation is assumed to sit immediately before the
	 GIMPLE_COND ending the loop header (see function comment).  */
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi))
	    continue;
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;
	  /* Only the ivdep annotation kind is handled here.  */
	  if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
	      != annot_expr_ivdep_kind)
	    continue;
	  /* Replace the annotation call by a plain copy of its first
	     argument and record the ivdep promise on the loop.  */
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	  loop->safelen = INT_MAX;
	}
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      /* Mirror the walk above, but only step back past the condition
	 when the block actually ends in a GIMPLE_COND (no braces:
	 the gsi_end_p check below applies either way).  */
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
	continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
	  != annot_expr_ivdep_kind)
	continue;
      /* An annotation that survived the loop walk was not attached to
	 a recognizable loop; warn and drop it.  */
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
		  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
331
332
/* Main worker of the CFG-construction pass: build the CFG for the
   current function's flat gimple body, clean it up, and initialize
   loop structures so ivdep annotations can be processed.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; drop the flat body.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  /* Discover loops now; replace_loop_annotate relies on them.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
350
namespace {

/* Pass metadata for the "cfg" pass; fields are positional, see
   pass_data in tree-pass.h.  */

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

/* Thin opt_pass wrapper dispatching to execute_build_cfg.  */

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace
381
/* Factory for the CFG-construction pass; the pass manager owns the
   returned object.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
387
388
389 /* Return true if T is a computed goto. */
390
391 static bool
392 computed_goto_p (gimple t)
393 {
394 return (gimple_code (t) == GIMPLE_GOTO
395 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
396 }
397
398 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
399 the other edge points to a bb with just __builtin_unreachable ().
400 I.e. return true for C->M edge in:
401 <bb C>:
402 ...
403 if (something)
404 goto <bb N>;
405 else
406 goto <bb M>;
407 <bb N>:
408 __builtin_unreachable ();
409 <bb M>: */
410
bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* OTHER_BB is the destination of the GIMPLE_COND edge we did
	 not come in on.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* A block ending in __builtin_unreachable () has no successors.  */
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  /* Step over a leading debug stmt to reach the real one.  */
	  if (is_gimple_debug (stmt))
	    {
	      gsi_next_nondebug (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
441
442
443 /* Search the CFG for any computed gotos. If found, factor them to a
444 common computed goto site. Also record the location of that site so
445 that we can un-factor the gotos after we have converted back to
446 normal form. */
447
static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
517
518
519 /* Build a flowgraph for the sequence of stmts SEQ. */
520
static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  /* Split the remainder of SEQ off before STMT; the new block
	     takes ownership of that tail.  */
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* Remember for build_gimple_cfg, which factors computed gotos.  */
      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
586
587
588 /* Create and return a new empty basic block after bb AFTER. */
589
static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  /* H is the block's statement sequence (or NULL); E must be NULL at
     the gimple level (edge argument is an RTL-hooks leftover).  */
  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      /* Grow by roughly a quarter: NEW_SIZE = N + (N + 3) / 4.  */
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}
624
625
626 /*---------------------------------------------------------------------------
627 Edge creation
628 ---------------------------------------------------------------------------*/
629
630 /* Fold COND_EXPR_COND of each COND_EXPR. */
631
632 void
633 fold_cond_expr_cond (void)
634 {
635 basic_block bb;
636
637 FOR_EACH_BB (bb)
638 {
639 gimple stmt = last_stmt (bb);
640
641 if (stmt && gimple_code (stmt) == GIMPLE_COND)
642 {
643 location_t loc = gimple_location (stmt);
644 tree cond;
645 bool zerop, onep;
646
647 fold_defer_overflow_warnings ();
648 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
649 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
650 if (cond)
651 {
652 zerop = integer_zerop (cond);
653 onep = integer_onep (cond);
654 }
655 else
656 zerop = onep = false;
657
658 fold_undefer_overflow_warnings (zerop || onep,
659 stmt,
660 WARN_STRICT_OVERFLOW_CONDITIONAL);
661 if (zerop)
662 gimple_cond_make_false (stmt);
663 else if (onep)
664 gimple_cond_make_true (stmt);
665 }
666 }
667 }
668
669 /* Join all the blocks in the flowgraph. */
670
671 static void
672 make_edges (void)
673 {
674 basic_block bb;
675 struct omp_region *cur_region = NULL;
676
677 /* Create an edge from entry to the first block with executable
678 statements in it. */
679 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
680 EDGE_FALLTHRU);
681
682 /* Traverse the basic block array placing edges. */
683 FOR_EACH_BB (bb)
684 {
685 gimple last = last_stmt (bb);
686 bool fallthru;
687
688 if (last)
689 {
690 enum gimple_code code = gimple_code (last);
691 switch (code)
692 {
693 case GIMPLE_GOTO:
694 make_goto_expr_edges (bb);
695 fallthru = false;
696 break;
697 case GIMPLE_RETURN:
698 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
699 fallthru = false;
700 break;
701 case GIMPLE_COND:
702 make_cond_expr_edges (bb);
703 fallthru = false;
704 break;
705 case GIMPLE_SWITCH:
706 make_gimple_switch_edges (bb);
707 fallthru = false;
708 break;
709 case GIMPLE_RESX:
710 make_eh_edges (last);
711 fallthru = false;
712 break;
713 case GIMPLE_EH_DISPATCH:
714 fallthru = make_eh_dispatch_edges (last);
715 break;
716
717 case GIMPLE_CALL:
718 /* If this function receives a nonlocal goto, then we need to
719 make edges from this call site to all the nonlocal goto
720 handlers. */
721 if (stmt_can_make_abnormal_goto (last))
722 make_abnormal_goto_edges (bb, true);
723
724 /* If this statement has reachable exception handlers, then
725 create abnormal edges to them. */
726 make_eh_edges (last);
727
728 /* BUILTIN_RETURN is really a return statement. */
729 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
730 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0), fallthru =
731 false;
732 /* Some calls are known not to return. */
733 else
734 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
735 break;
736
737 case GIMPLE_ASSIGN:
738 /* A GIMPLE_ASSIGN may throw internally and thus be considered
739 control-altering. */
740 if (is_ctrl_altering_stmt (last))
741 make_eh_edges (last);
742 fallthru = true;
743 break;
744
745 case GIMPLE_ASM:
746 make_gimple_asm_edges (bb);
747 fallthru = true;
748 break;
749
750 CASE_GIMPLE_OMP:
751 fallthru = make_gimple_omp_edges (bb, &cur_region);
752 break;
753
754 case GIMPLE_TRANSACTION:
755 {
756 tree abort_label = gimple_transaction_label (last);
757 if (abort_label)
758 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
759 fallthru = true;
760 }
761 break;
762
763 default:
764 gcc_assert (!stmt_ends_bb_p (last));
765 fallthru = true;
766 }
767 }
768 else
769 fallthru = true;
770
771 if (fallthru)
772 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
773 }
774
775 free_omp_regions ();
776
777 /* Fold COND_EXPR_COND of each COND_EXPR. */
778 fold_cond_expr_cond ();
779 }
780
781 /* Find the next available discriminator value for LOCUS. The
782 discriminator distinguishes among several basic blocks that
783 share a common locus, allowing for more accurate sample-based
784 profiling. */
785
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  /* Hashing is by source line only; see locus_discrim_hasher.  */
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First discriminator requested for this line: allocate the
	 per-line counter entry (freed by the hasher on dispose).  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  /* Returned discriminators start at 1; 0 means "none assigned".  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
807
808 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
809
810 static bool
811 same_line_p (location_t locus1, location_t locus2)
812 {
813 expanded_location from, to;
814
815 if (locus1 == locus2)
816 return true;
817
818 from = expand_location (locus1);
819 to = expand_location (locus2);
820
821 if (from.line != to.line)
822 return false;
823 if (from.file == to.file)
824 return true;
825 return (from.file != NULL
826 && to.file != NULL
827 && filename_cmp (from.file, to.file) == 0);
828 }
829
830 /* Assign discriminators to each basic block. */
831
832 static void
833 assign_discriminators (void)
834 {
835 basic_block bb;
836
837 FOR_EACH_BB (bb)
838 {
839 edge e;
840 edge_iterator ei;
841 gimple last = last_stmt (bb);
842 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
843
844 if (locus == UNKNOWN_LOCATION)
845 continue;
846
847 FOR_EACH_EDGE (e, ei, bb->succs)
848 {
849 gimple first = first_non_label_stmt (e->dest);
850 gimple last = last_stmt (e->dest);
851 if ((first && same_line_p (locus, gimple_location (first)))
852 || (last && same_line_p (locus, gimple_location (last))))
853 {
854 if (e->dest->discriminator != 0 && bb->discriminator == 0)
855 bb->discriminator = next_discriminator_for_locus (locus);
856 else
857 e->dest->discriminator = next_discriminator_for_locus (locus);
858 }
859 }
860 }
861 }
862
863 /* Create the edges for a GIMPLE_COND starting at block BB. */
864
static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the second make_edge result is NULL-checked —
     presumably it can fail when both labels map to the same block;
     confirm against make_edge's contract before relying on this.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
895
896
897 /* Called for each element in the hash table (P) as we delete the
898 edge to cases hash table.
899
900 Clear all the TREE_CHAINs to prevent problems with copying of
901 SWITCH_EXPRs and structure sharing rules, then free the hash table
902 element. */
903
904 static bool
905 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
906 void *data ATTRIBUTE_UNUSED)
907 {
908 tree t, next;
909
910 for (t = (tree) *value; t; t = next)
911 {
912 next = CASE_CHAIN (t);
913 CASE_CHAIN (t) = NULL;
914 }
915
916 *value = NULL;
917 return true;
918 }
919
920 /* Start recording information mapping edges to case labels. */
921
void
start_recording_case_labels (void)
{
  /* Recording must not already be in progress.  */
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  /* Track which switch-ending blocks we touch, for re-grouping in
     end_recording_case_labels.  */
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
929
930 /* Return nonzero if we are recording information for case labels. */
931
932 static bool
933 recording_case_labels_p (void)
934 {
935 return (edge_to_cases != NULL);
936 }
937
938 /* Stop recording information mapping edges to case labels and
939 remove any information we have recorded. */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Sever the CASE_CHAIN links we built, then drop the map.  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  /* Re-group the case labels of every switch touched while recording;
     a block may have been deleted meanwhile, hence the NULL check.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
960
961 /* If we are inside a {start,end}_recording_cases block, then return
962 a chain of CASE_LABEL_EXPRs from T which reference E.
963
964 Otherwise return NULL. */
965
static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* E was inserted by the loop above, so this lookup cannot fail.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
1002
1003 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1004
1005 static void
1006 make_gimple_switch_edges (basic_block bb)
1007 {
1008 gimple entry = last_stmt (bb);
1009 size_t i, n;
1010
1011 n = gimple_switch_num_labels (entry);
1012
1013 for (i = 0; i < n; ++i)
1014 {
1015 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1016 basic_block label_bb = label_to_block (lab);
1017 make_edge (bb, label_bb, 0);
1018 }
1019 }
1020
1021
1022 /* Return the basic block holding label DEST. */
1023
basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* NOTE(review): presumably inserting the label assigned DEST a
	 valid UID; confirm against gimple_build_label/gsi_insert.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* A UID beyond the map means the label was never seen by the CFG
     builder; report "no block".  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
1045
1046 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1047 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1048
void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      /* Scan the leading labels of TARGET_BB.  */
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple label_stmt = gsi_stmt (gsi);
	  tree target;

	  if (gimple_code (label_stmt) != GIMPLE_LABEL)
	    break;

	  target = gimple_label_label (label_stmt);

	  /* Make an edge to every label block that has been marked as a
	     potential target for a computed goto or a non-local goto.  */
	  if ((FORCED_LABEL (target) && !for_call)
	      || (DECL_NONLOCAL (target) && for_call))
	    {
	      make_edge (bb, target_bb, EDGE_ABNORMAL);
	      break;
	    }
	}
      /* GSI now rests where the label scan stopped; step over a debug
	 stmt to reach the first real statement.  */
      if (!gsi_end_p (gsi)
	  && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	{
	  /* Make an edge to every setjmp-like call.  */
	  gimple call_stmt = gsi_stmt (gsi);
	  if (is_gimple_call (call_stmt)
	      && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	}
    }
}
1089
1090 /* Create edges for a goto statement at block BB. */
1091
1092 static void
1093 make_goto_expr_edges (basic_block bb)
1094 {
1095 gimple_stmt_iterator last = gsi_last_bb (bb);
1096 gimple goto_t = gsi_stmt (last);
1097
1098 /* A simple GOTO creates normal edges. */
1099 if (simple_goto_p (goto_t))
1100 {
1101 tree dest = gimple_goto_dest (goto_t);
1102 basic_block label_bb = label_to_block (dest);
1103 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1104 e->goto_locus = gimple_location (goto_t);
1105 gsi_remove (&last, true);
1106 return;
1107 }
1108
1109 /* A computed GOTO creates abnormal edges. */
1110 make_abnormal_goto_edges (bb, false);
1111 }
1112
1113 /* Create edges for an asm statement with labels at block BB. */
1114
1115 static void
1116 make_gimple_asm_edges (basic_block bb)
1117 {
1118 gimple stmt = last_stmt (bb);
1119 int i, n = gimple_asm_nlabels (stmt);
1120
1121 for (i = 0; i < n; ++i)
1122 {
1123 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1124 basic_block label_bb = label_to_block (label);
1125 make_edge (bb, label_bb, 0);
1126 }
1127 }
1128
1129 /*---------------------------------------------------------------------------
1130 Flowgraph analysis
1131 ---------------------------------------------------------------------------*/
1132
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.
   Allocated (indexed by bb->index) and freed by cleanup_dead_labels;
   filled lazily via main_block_label.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1150
1151 /* Given LABEL return the first label in the same basic block. */
1152
1153 static tree
1154 main_block_label (tree label)
1155 {
1156 basic_block bb = label_to_block (label);
1157 tree main_label = label_for_bb[bb->index].label;
1158
1159 /* label_to_block possibly inserted undefined label into the chain. */
1160 if (!main_label)
1161 {
1162 label_for_bb[bb->index].label = label;
1163 main_label = label;
1164 }
1165
1166 label_for_bb[bb->index].used = true;
1167 return main_label;
1168 }
1169
/* Clean up redundant labels within the exception tree: redirect every
   label recorded in EH landing pads and regions to the canonical label
   of its block, so the replaced labels become removable.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label; when it changes,
     move the EH_LANDING_PAD_NR marking over to the replacement.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect the labels held directly in the region data.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1220
1221
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* One record per basic block index; zero-initialized.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the canonical label and any label that may be referenced
	     from outside the block; delete the rest.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1397
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:
   Cases that jump to the same block as the default case are dropped
   entirely.  Relies on the case labels being sorted by value.  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Start at index 1: index 0 is the default case.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  Dropped slots are NULLed and compressed below.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      /* J trails I, skipping the NULLed-out slots.  */
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1477
1478 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1479 and scan the sorted vector of cases. Combine the ones jumping to the
1480 same label. */
1481
1482 void
1483 group_case_labels (void)
1484 {
1485 basic_block bb;
1486
1487 FOR_EACH_BB (bb)
1488 {
1489 gimple stmt = last_stmt (bb);
1490 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1491 group_case_labels_stmt (stmt);
1492 }
1493 }
1494
/* Checks whether we can merge block B into block A, i.e. append B's
   statements to A and delete B.  Returns false as soon as any of the
   structural or SSA preconditions fails.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must flow unconditionally and only into B ...  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  /* ... and B must be reachable only from A, and must not be the exit.  */
  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The locus
     survives only if some statement adjacent to the join already carries
     the same location.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1578
/* Replaces all uses of NAME by VAL.  NAME must be an SSA name; after the
   call it has zero uses.  Non-PHI statements whose operands changed are
   re-folded and updated; loop structures that refer to NAME are fixed up
   as well.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Replace every use of NAME within this statement.  */
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME. It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace STMT entirely; track the new statement.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1658
/* Merge block B into block A.  The caller must have established (see
   gimple_can_merge_blocks_p) that A is B's single predecessor and falls
   through to it.  B's PHI nodes are eliminated, its labels are removed or
   relocated, and its statement sequence is appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
        {
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was a landing pad, detach it from the pad: the
	     pad's block is going away.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1786
1787
1788 /* Return the one of two successors of BB that is not reachable by a
1789 complex edge, if there is one. Else, return BB. We use
1790 this in optimizations that use post-dominators for their heuristics,
1791 to catch the cases in C++ where function calls are involved. */
1792
1793 basic_block
1794 single_noncomplex_succ (basic_block bb)
1795 {
1796 edge e0, e1;
1797 if (EDGE_COUNT (bb->succs) != 2)
1798 return bb;
1799
1800 e0 = EDGE_SUCC (bb, 0);
1801 e1 = EDGE_SUCC (bb, 1);
1802 if (e0->flags & EDGE_COMPLEX)
1803 return e1->dest;
1804 if (e1->flags & EDGE_COMPLEX)
1805 return e0->dest;
1806
1807 return bb;
1808 }
1809
1810 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1811
1812 void
1813 notice_special_calls (gimple call)
1814 {
1815 int flags = gimple_call_flags (call);
1816
1817 if (flags & ECF_MAY_BE_ALLOCA)
1818 cfun->calls_alloca = true;
1819 if (flags & ECF_RETURNS_TWICE)
1820 cfun->calls_setjmp = true;
1821 }
1822
1823
1824 /* Clear flags set by notice_special_calls. Used by dead code removal
1825 to update the flags. */
1826
1827 void
1828 clear_special_calls (void)
1829 {
1830 cfun->calls_alloca = false;
1831 cfun->calls_setjmp = false;
1832 }
1833
1834 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1835
1836 static void
1837 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1838 {
1839 /* Since this block is no longer reachable, we can just delete all
1840 of its PHI nodes. */
1841 remove_phi_nodes (bb);
1842
1843 /* Remove edges to BB's successors. */
1844 while (EDGE_COUNT (bb->succs) > 0)
1845 remove_edge (EDGE_SUCC (bb, 0));
1846 }
1847
1848
/* Remove statements of basic block BB.  Forced/non-local labels are
   relocated to the previous block instead of being deleted; everything
   else is removed, releasing SSA defs when in SSA form.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Move the label to the start of the previous block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* Removal invalidated the iterator; restart at the (new) last
	     statement or step to the previous one.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1931
1932
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate selects a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* The is_ctrl_stmt assertion above rules out everything else.  */
  gcc_unreachable ();
}
1975
1976 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1977 statement, determine which of the outgoing edges will be taken out of the
1978 block. Return NULL if either edge may be taken. */
1979
1980 static edge
1981 find_taken_edge_computed_goto (basic_block bb, tree val)
1982 {
1983 basic_block dest;
1984 edge e = NULL;
1985
1986 dest = label_to_block (val);
1987 if (dest)
1988 {
1989 e = find_edge (bb, dest);
1990 gcc_assert (e != NULL);
1991 }
1992
1993 return e;
1994 }
1995
1996 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1997 statement, determine which of the two edges will be taken out of the
1998 block. Return NULL if either edge may be taken. */
1999
2000 static edge
2001 find_taken_edge_cond_expr (basic_block bb, tree val)
2002 {
2003 edge true_edge, false_edge;
2004
2005 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2006
2007 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2008 return (integer_zerop (val) ? false_edge : true_edge);
2009 }
2010
2011 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2012 statement, determine which edge will be taken out of the block. Return
2013 NULL if any edge may be taken. */
2014
2015 static edge
2016 find_taken_edge_switch_expr (basic_block bb, tree val)
2017 {
2018 basic_block dest_bb;
2019 edge e;
2020 gimple switch_stmt;
2021 tree taken_case;
2022
2023 switch_stmt = last_stmt (bb);
2024 taken_case = find_case_label_for_value (switch_stmt, val);
2025 dest_bb = label_to_block (CASE_LABEL (taken_case));
2026
2027 e = find_edge (bb, dest_bb);
2028 gcc_assert (e);
2029 return e;
2030 }
2031
2032
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   Falls back to the default label when no case covers VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over labels 1..n-1 (label 0 is the default case).  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2073
2074
/* Dump a basic block on stderr.  Convenience wrapper for use from a
   debugger; includes virtual operands, memory symbols and block notes.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2082
2083
2084 /* Dump basic block with index N on stderr. */
2085
2086 basic_block
2087 gimple_debug_bb_n (int n)
2088 {
2089 gimple_debug_bb (BASIC_BLOCK (n));
2090 return BASIC_BLOCK (n);
2091 }
2092
2093
/* Dump the CFG on stderr.  Convenience wrapper for use from a debugger.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2104
2105
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS additionally emits a summary header and a brief
   CFG dump; TDF_STATS appends memory statistics.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block);

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2130
2131
/* Dump CFG statistics on FILE: memory used by basic blocks and edges of
   the current function, plus a running maximum of coalesced label blocks
   (kept across calls in a function-local static).  */

void
dump_cfg_stats (FILE *file)
{
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Count edges by summing successor counts over all blocks.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2180
2181
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2190
2191 /*---------------------------------------------------------------------------
2192 Miscellaneous helpers
2193 ---------------------------------------------------------------------------*/
2194
2195 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2196 flow. Transfers of control flow associated with EH are excluded. */
2197
2198 static bool
2199 call_can_make_abnormal_goto (gimple t)
2200 {
2201 /* If the function has no non-local labels, then a call cannot make an
2202 abnormal transfer of control. */
2203 if (!cfun->has_nonlocal_label
2204 && !cfun->calls_setjmp)
2205 return false;
2206
2207 /* Likewise if the call has no side effects. */
2208 if (!gimple_has_side_effects (t))
2209 return false;
2210
2211 /* Likewise if the called function is leaf. */
2212 if (gimple_call_flags (t) & ECF_LEAF)
2213 return false;
2214
2215 return true;
2216 }
2217
2218
2219 /* Return true if T can make an abnormal transfer of control flow.
2220 Transfers of control flow associated with EH are excluded. */
2221
2222 bool
2223 stmt_can_make_abnormal_goto (gimple t)
2224 {
2225 if (computed_goto_p (t))
2226 return true;
2227 if (is_gimple_call (t))
2228 return call_can_make_abnormal_goto (t);
2229 return false;
2230 }
2231
2232
2233 /* Return true if T represents a stmt that always transfers control. */
2234
2235 bool
2236 is_ctrl_stmt (gimple t)
2237 {
2238 switch (gimple_code (t))
2239 {
2240 case GIMPLE_COND:
2241 case GIMPLE_SWITCH:
2242 case GIMPLE_GOTO:
2243 case GIMPLE_RETURN:
2244 case GIMPLE_RESX:
2245 return true;
2246 default:
2247 return false;
2248 }
2249 }
2250
2251
2252 /* Return true if T is a statement that may alter the flow of control
2253 (e.g., a call to a non-returning function). */
2254
bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
        int flags = gimple_call_flags (t);

        /* A call alters control flow if it can make an abnormal goto.  */
        if (call_can_make_abnormal_goto (t))
          return true;

        /* A call also alters control flow if it does not return.  */
        if (flags & ECF_NORETURN)
          return true;

        /* TM ending statements have backedges out of the transaction.
           Return true so we split the basic block containing them.
           Note that the TM_BUILTIN test is merely an optimization.  */
        if ((flags & ECF_TM_BUILTIN)
            && is_tm_ending_fndecl (gimple_call_fndecl (t)))
          return true;

        /* BUILT_IN_RETURN call is same as return statement.  */
        if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
          return true;
      }
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
         this level of a try or allowed-exceptions region.  It can
         fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm statement that carries transfer-of-control labels
         alters control flow.  */
      if (gimple_asm_nlabels (t) > 0)
        return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* A call that reaches here, and any other statement kind, still
     alters control flow if it can throw internally.  */
  return stmt_can_throw_internal (t);
}
2313
2314
2315 /* Return true if T is a simple local goto. */
2316
2317 bool
2318 simple_goto_p (gimple t)
2319 {
2320 return (gimple_code (t) == GIMPLE_GOTO
2321 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2322 }
2323
2324
2325 /* Return true if STMT should start a new basic block. PREV_STMT is
2326 the statement preceding STMT. It is used when STMT is a label or a
2327 case label. Labels should only start a new basic block if their
2328 previous statement wasn't a label. Otherwise, sequence of labels
2329 would generate unnecessary basic blocks that only contain a single
2330 label. */
2331
static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
          || FORCED_LABEL (gimple_label_label (stmt)))
        return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
        {
          /* A nonlocal previous label cannot absorb this one.  */
          if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
            return true;

          /* This label joins the previous label's block; count the
             merge for the CFG statistics dump.  */
          cfg_stats.num_merged_labels++;
          return false;
        }
      else
        return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
           && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
2367
2368
2369 /* Return true if T should end a basic block. */
2370
2371 bool
2372 stmt_ends_bb_p (gimple t)
2373 {
2374 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2375 }
2376
2377 /* Remove block annotations and other data structures. */
2378
void
delete_tree_cfg_annotations (void)
{
  /* Release the memory held by the label-to-block map.  */
  vec_free (label_to_block_map);
}
2384
2385
2386 /* Return the first statement in basic block BB. */
2387
2388 gimple
2389 first_stmt (basic_block bb)
2390 {
2391 gimple_stmt_iterator i = gsi_start_bb (bb);
2392 gimple stmt = NULL;
2393
2394 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2395 {
2396 gsi_next (&i);
2397 stmt = NULL;
2398 }
2399 return stmt;
2400 }
2401
2402 /* Return the first non-label statement in basic block BB. */
2403
2404 static gimple
2405 first_non_label_stmt (basic_block bb)
2406 {
2407 gimple_stmt_iterator i = gsi_start_bb (bb);
2408 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2409 gsi_next (&i);
2410 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2411 }
2412
2413 /* Return the last statement in basic block BB. */
2414
2415 gimple
2416 last_stmt (basic_block bb)
2417 {
2418 gimple_stmt_iterator i = gsi_last_bb (bb);
2419 gimple stmt = NULL;
2420
2421 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2422 {
2423 gsi_prev (&i);
2424 stmt = NULL;
2425 }
2426 return stmt;
2427 }
2428
2429 /* Return the last statement of an otherwise empty block. Return NULL
2430 if the block is totally empty, or if it contains more than one
2431 statement. */
2432
2433 gimple
2434 last_and_only_stmt (basic_block bb)
2435 {
2436 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2437 gimple last, prev;
2438
2439 if (gsi_end_p (i))
2440 return NULL;
2441
2442 last = gsi_stmt (i);
2443 gsi_prev_nondebug (&i);
2444 if (gsi_end_p (i))
2445 return last;
2446
2447 /* Empty statements should no longer appear in the instruction stream.
2448 Everything that might have appeared before should be deleted by
2449 remove_useless_stmts, and the optimizers should just gsi_remove
2450 instead of smashing with build_empty_stmt.
2451
2452 Thus the only thing that should appear here in a block containing
2453 one executable statement is a label. */
2454 prev = gsi_stmt (i);
2455 if (gimple_code (prev) == GIMPLE_LABEL)
2456 return last;
2457 else
2458 return NULL;
2459 }
2460
2461 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2462
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Nothing to do if no PHI arguments were recorded for OLD_EDGE.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the recorded (result, def, location) entries and the PHI
     nodes of NEW_EDGE's destination in lock step, re-adding each
     saved argument on NEW_EDGE.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The recorded entries must correspond to the PHIs one-to-one,
         in order.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* Discard the now-consumed map for OLD_EDGE.  */
  redirect_edge_var_map_clear (old_edge);
}
2490
2491 /* Returns the basic block after which the new basic block created
2492 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2493 near its "logical" location. This is of most help to humans looking
2494 at debugging dumps. */
2495
2496 static basic_block
2497 split_edge_bb_loc (edge edge_in)
2498 {
2499 basic_block dest = edge_in->dest;
2500 basic_block dest_prev = dest->prev_bb;
2501
2502 if (dest_prev)
2503 {
2504 edge e = find_edge (dest_prev, dest);
2505 if (e && !(e->flags & EDGE_COMPLEX))
2506 return edge_in->src;
2507 }
2508 return dest_prev;
2509 }
2510
2511 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2512 Abort on abnormal edges. */
2513
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits its frequency and count from the edge
     being split.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block unconditionally falls through to DEST...  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* ...while the original edge is redirected into the new block.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  /* Re-attach any PHI arguments recorded during the redirection onto
     the new fallthru edge.  */
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2540
2541
2542 /* Verify properties of the address expression T with base object BASE. */
2543
2544 static tree
2545 verify_address (tree t, tree base)
2546 {
2547 bool old_constant;
2548 bool old_side_effects;
2549 bool new_constant;
2550 bool new_side_effects;
2551
2552 old_constant = TREE_CONSTANT (t);
2553 old_side_effects = TREE_SIDE_EFFECTS (t);
2554
2555 recompute_tree_invariant_for_addr_expr (t);
2556 new_side_effects = TREE_SIDE_EFFECTS (t);
2557 new_constant = TREE_CONSTANT (t);
2558
2559 if (old_constant != new_constant)
2560 {
2561 error ("constant not recomputed when ADDR_EXPR changed");
2562 return t;
2563 }
2564 if (old_side_effects != new_side_effects)
2565 {
2566 error ("side effects not recomputed when ADDR_EXPR changed");
2567 return t;
2568 }
2569
2570 if (!(TREE_CODE (base) == VAR_DECL
2571 || TREE_CODE (base) == PARM_DECL
2572 || TREE_CODE (base) == RESULT_DECL))
2573 return NULL_TREE;
2574
2575 if (DECL_GIMPLE_REG_P (base))
2576 {
2577 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2578 return base;
2579 }
2580
2581 return NULL_TREE;
2582 }
2583
2584 /* Callback for walk_tree, check that all elements with address taken are
2585 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2586 inside a PHI node. */
2587
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Do not walk into types.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  /* On error each case returns the offending tree; returning NULL at
     the end lets walk_tree continue.  */
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF nodes are not valid in the GIMPLE IL.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      /* Operand 0 must be a valid GIMPLE memory-reference address of
	 pointer type.  */
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* Operand 1 must be a pointer-typed integer constant.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes must never appear in the GIMPLE IL.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  /* Position and size operands must be constants that fit an
	     unsigned HOST_WIDE_INT.  */
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type.  */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need not be walked into.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2881
2882
2883 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2884 Returns true if there is an error, otherwise false. */
2885
2886 static bool
2887 verify_types_in_gimple_min_lval (tree expr)
2888 {
2889 tree op;
2890
2891 if (is_gimple_id (expr))
2892 return false;
2893
2894 if (TREE_CODE (expr) != TARGET_MEM_REF
2895 && TREE_CODE (expr) != MEM_REF)
2896 {
2897 error ("invalid expression for min lvalue");
2898 return true;
2899 }
2900
2901 /* TARGET_MEM_REFs are strange beasts. */
2902 if (TREE_CODE (expr) == TARGET_MEM_REF)
2903 return false;
2904
2905 op = TREE_OPERAND (expr, 0);
2906 if (!is_gimple_val (op))
2907 {
2908 error ("invalid operand in indirect reference");
2909 debug_generic_stmt (op);
2910 return true;
2911 }
2912 /* Memory references now generally can involve a value conversion. */
2913
2914 return false;
2915 }
2916
2917 /* Verify if EXPR is a valid GIMPLE reference expression. If
2918 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2919 if there is an error, otherwise false. */
2920
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the reference chain from the outside in, checking each
     handled component on the way down to the base object.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* The index and the optional operands 2 and 3 must be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference chain.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Finally check the remaining base against the minimal-lvalue
     rules, unless it is an invariant and no lvalue is required.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3050
3051 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3052 list of pointer-to types that is trivially convertible to DEST. */
3053
3054 static bool
3055 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3056 {
3057 tree src;
3058
3059 if (!TYPE_POINTER_TO (src_obj))
3060 return true;
3061
3062 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3063 if (useless_type_conversion_p (dest, src))
3064 return true;
3065
3066 return false;
3067 }
3068
3069 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3070 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3071
3072 static bool
3073 valid_fixed_convert_types_p (tree type1, tree type2)
3074 {
3075 return (FIXED_POINT_TYPE_P (type1)
3076 && (INTEGRAL_TYPE_P (type2)
3077 || SCALAR_FLOAT_TYPE_P (type2)
3078 || FIXED_POINT_TYPE_P (type2)));
3079 }
3080
3081 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3082 is a problem, otherwise false. */
3083
static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  if (gimple_call_internal_p (stmt))
    {
      /* An internal call must not also carry a callee expression.  */
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      /* A regular call must have one.  */
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee expression must be a pointer to a function or method
     type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P is only meaningful together with
     const or pure.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  /* The LHS type must be trivially convertible from the call's
     return type.  */
  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be GIMPLE values; aggregate
	 arguments must be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3216
3217 /* Verifies the gimple comparison with the result type TYPE and
3218 the operands OP0 and OP1. */
3219
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  /* Both operands must be GIMPLE values.  */
  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* But a boolean result is not valid for vector operands.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3299
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* The destination of a unary operation must be a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  /* The single operand must be a gimple value (constant or SSA name).  */
  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers and the conversion must actually
	   change the address space of the pointed-to type.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* A fixed-point conversion is accepted in either direction.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer to float, either as scalars or element-wise between an
	   integer vector and a float vector.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float to integer; the mirror image of FLOAT_EXPR above.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  These codes are currently accepted without any type
	 verification.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      /* Plain unary arithmetic; handled by the common check below.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3453
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* The destination of a binary operation must be a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  /* Both operands must be gimple values (constants or SSA names).  */
  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Build a complex value from two scalar (integral or float)
	   component operands.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand must be a vector of
	   integral, pointer, fixed-point or float elements; the shift
	   amount is a scalar integer or an integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of non-integral components we
	   only allow shifting by a constant multiple of the element size.  */
	if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* Scalar widening shift: result must be at least twice as wide
	   as the shifted operand, shift count a constant.
	   NOTE(review): the diagnostic below says "vector" although this
	   case handles the scalar code — the wording matches the vector
	   variant further down.  */
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Vector widening shift: integer vectors on both sides, result
	   elements at least twice as wide, shift count a constant.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	/* For vector addition/subtraction check the element types;
	   pointer arithmetic must use POINTER_PLUS_EXPR instead.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* Pointer plus an offset of ptrofftype; the result keeps the
	   pointer type.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* TRUTH_* codes never appear as a gimple assignment rhs here.  */
      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      /* Note: these two failure paths report the mismatch without
	 emitting a diagnostic via error ().  */
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      /* FIXME.  These codes are currently accepted without any type
	 verification.  */
      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic check: both operands must trivially convert to the result
     type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3727
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  /* The destination of a ternary operation must be a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  /* For (VEC_)COND_EXPR the first operand is a condition expression;
     for everything else all three operands are gimple values.  */
  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      /* lhs = rhs1 * rhs2 +/- rhs3, where the multiply widens: the two
	 factors have equal precision at most half of the result's, and
	 the addend already has the result type.  */
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in widening multiply-accumulate expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case FMA_EXPR:
      /* Fused multiply-add: all operands and the result share one type.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in fused multiply-add expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Both selected values must have the result type; the condition
	 operand was already checked above.  */
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in conditional expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      /* Data vectors and the result share one type ...  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... all three operands are vectors ...  */
      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... with the same number of elements ...  */
      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
	     != TYPE_VECTOR_SUBPARTS (lhs_type))
	{
	  error ("vectors with different element number found "
		 "in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... and the selector is an integer vector whose element mode
	 matches the data element mode in size.  */
      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
	{
	  error ("invalid mask type in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  These codes are currently accepted without any type
	 verification.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
3870
3871 /* Verify a gimple assignment statement STMT with a single rhs.
3872 Returns true if anything is wrong. */
3873
3874 static bool
3875 verify_gimple_assign_single (gimple stmt)
3876 {
3877 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3878 tree lhs = gimple_assign_lhs (stmt);
3879 tree lhs_type = TREE_TYPE (lhs);
3880 tree rhs1 = gimple_assign_rhs1 (stmt);
3881 tree rhs1_type = TREE_TYPE (rhs1);
3882 bool res = false;
3883
3884 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3885 {
3886 error ("non-trivial conversion at assignment");
3887 debug_generic_expr (lhs_type);
3888 debug_generic_expr (rhs1_type);
3889 return true;
3890 }
3891
3892 if (gimple_clobber_p (stmt)
3893 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3894 {
3895 error ("non-decl/MEM_REF LHS in clobber statement");
3896 debug_generic_expr (lhs);
3897 return true;
3898 }
3899
3900 if (handled_component_p (lhs))
3901 res |= verify_types_in_gimple_reference (lhs, true);
3902
3903 /* Special codes we cannot handle via their class. */
3904 switch (rhs_code)
3905 {
3906 case ADDR_EXPR:
3907 {
3908 tree op = TREE_OPERAND (rhs1, 0);
3909 if (!is_gimple_addressable (op))
3910 {
3911 error ("invalid operand in unary expression");
3912 return true;
3913 }
3914
3915 /* Technically there is no longer a need for matching types, but
3916 gimple hygiene asks for this check. In LTO we can end up
3917 combining incompatible units and thus end up with addresses
3918 of globals that change their type to a common one. */
3919 if (!in_lto_p
3920 && !types_compatible_p (TREE_TYPE (op),
3921 TREE_TYPE (TREE_TYPE (rhs1)))
3922 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3923 TREE_TYPE (op)))
3924 {
3925 error ("type mismatch in address expression");
3926 debug_generic_stmt (TREE_TYPE (rhs1));
3927 debug_generic_stmt (TREE_TYPE (op));
3928 return true;
3929 }
3930
3931 return verify_types_in_gimple_reference (op, true);
3932 }
3933
3934 /* tcc_reference */
3935 case INDIRECT_REF:
3936 error ("INDIRECT_REF in gimple IL");
3937 return true;
3938
3939 case COMPONENT_REF:
3940 case BIT_FIELD_REF:
3941 case ARRAY_REF:
3942 case ARRAY_RANGE_REF:
3943 case VIEW_CONVERT_EXPR:
3944 case REALPART_EXPR:
3945 case IMAGPART_EXPR:
3946 case TARGET_MEM_REF:
3947 case MEM_REF:
3948 if (!is_gimple_reg (lhs)
3949 && is_gimple_reg_type (TREE_TYPE (lhs)))
3950 {
3951 error ("invalid rhs for gimple memory store");
3952 debug_generic_stmt (lhs);
3953 debug_generic_stmt (rhs1);
3954 return true;
3955 }
3956 return res || verify_types_in_gimple_reference (rhs1, false);
3957
3958 /* tcc_constant */
3959 case SSA_NAME:
3960 case INTEGER_CST:
3961 case REAL_CST:
3962 case FIXED_CST:
3963 case COMPLEX_CST:
3964 case VECTOR_CST:
3965 case STRING_CST:
3966 return res;
3967
3968 /* tcc_declaration */
3969 case CONST_DECL:
3970 return res;
3971 case VAR_DECL:
3972 case PARM_DECL:
3973 if (!is_gimple_reg (lhs)
3974 && !is_gimple_reg (rhs1)
3975 && is_gimple_reg_type (TREE_TYPE (lhs)))
3976 {
3977 error ("invalid rhs for gimple memory store");
3978 debug_generic_stmt (lhs);
3979 debug_generic_stmt (rhs1);
3980 return true;
3981 }
3982 return res;
3983
3984 case CONSTRUCTOR:
3985 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3986 {
3987 unsigned int i;
3988 tree elt_i, elt_v, elt_t = NULL_TREE;
3989
3990 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3991 return res;
3992 /* For vector CONSTRUCTORs we require that either it is empty
3993 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3994 (then the element count must be correct to cover the whole
3995 outer vector and index must be NULL on all elements, or it is
3996 a CONSTRUCTOR of scalar elements, where we as an exception allow
3997 smaller number of elements (assuming zero filling) and
3998 consecutive indexes as compared to NULL indexes (such
3999 CONSTRUCTORs can appear in the IL from FEs). */
4000 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4001 {
4002 if (elt_t == NULL_TREE)
4003 {
4004 elt_t = TREE_TYPE (elt_v);
4005 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4006 {
4007 tree elt_t = TREE_TYPE (elt_v);
4008 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4009 TREE_TYPE (elt_t)))
4010 {
4011 error ("incorrect type of vector CONSTRUCTOR"
4012 " elements");
4013 debug_generic_stmt (rhs1);
4014 return true;
4015 }
4016 else if (CONSTRUCTOR_NELTS (rhs1)
4017 * TYPE_VECTOR_SUBPARTS (elt_t)
4018 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4019 {
4020 error ("incorrect number of vector CONSTRUCTOR"
4021 " elements");
4022 debug_generic_stmt (rhs1);
4023 return true;
4024 }
4025 }
4026 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4027 elt_t))
4028 {
4029 error ("incorrect type of vector CONSTRUCTOR elements");
4030 debug_generic_stmt (rhs1);
4031 return true;
4032 }
4033 else if (CONSTRUCTOR_NELTS (rhs1)
4034 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4035 {
4036 error ("incorrect number of vector CONSTRUCTOR elements");
4037 debug_generic_stmt (rhs1);
4038 return true;
4039 }
4040 }
4041 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4042 {
4043 error ("incorrect type of vector CONSTRUCTOR elements");
4044 debug_generic_stmt (rhs1);
4045 return true;
4046 }
4047 if (elt_i != NULL_TREE
4048 && (TREE_CODE (elt_t) == VECTOR_TYPE
4049 || TREE_CODE (elt_i) != INTEGER_CST
4050 || compare_tree_int (elt_i, i) != 0))
4051 {
4052 error ("vector CONSTRUCTOR with non-NULL element index");
4053 debug_generic_stmt (rhs1);
4054 return true;
4055 }
4056 }
4057 }
4058 return res;
4059 case OBJ_TYPE_REF:
4060 case ASSERT_EXPR:
4061 case WITH_SIZE_EXPR:
4062 /* FIXME. */
4063 return res;
4064
4065 default:;
4066 }
4067
4068 return res;
4069 }
4070
4071 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4072 is a problem, otherwise false. */
4073
4074 static bool
4075 verify_gimple_assign (gimple stmt)
4076 {
4077 switch (gimple_assign_rhs_class (stmt))
4078 {
4079 case GIMPLE_SINGLE_RHS:
4080 return verify_gimple_assign_single (stmt);
4081
4082 case GIMPLE_UNARY_RHS:
4083 return verify_gimple_assign_unary (stmt);
4084
4085 case GIMPLE_BINARY_RHS:
4086 return verify_gimple_assign_binary (stmt);
4087
4088 case GIMPLE_TERNARY_RHS:
4089 return verify_gimple_assign_ternary (stmt);
4090
4091 default:
4092 gcc_unreachable ();
4093 }
4094 }
4095
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (gimple stmt)
{
  tree op = gimple_return_retval (stmt);
  /* The declared return type of the current function.  */
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    /* OP is deliberately replaced by its type here: for a by-reference
       result the value has pointer type, and the TREE_TYPE (op) below
       then yields the pointed-to type to compare against RESTYPE.
       NOTE(review): assumes DECL_BY_REFERENCE results carry a
       pointer-typed decl — confirm against how by-reference returns are
       expanded.  */
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
4136
4137
4138 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4139 is a problem, otherwise false. */
4140
4141 static bool
4142 verify_gimple_goto (gimple stmt)
4143 {
4144 tree dest = gimple_goto_dest (stmt);
4145
4146 /* ??? We have two canonical forms of direct goto destinations, a
4147 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4148 if (TREE_CODE (dest) != LABEL_DECL
4149 && (!is_gimple_val (dest)
4150 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4151 {
4152 error ("goto destination is neither a label nor a pointer");
4153 return true;
4154 }
4155
4156 return false;
4157 }
4158
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  Checks that the index is an integral
   gimple value, that the first label is the default (no bounds), and
   that the remaining case labels are well-formed, of one common type,
   sorted, and non-overlapping.  */

static bool
verify_gimple_switch (gimple stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is the default case and must have no CASE_LOW/CASE_HIGH.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      /* Every non-default label needs a lower bound.  */
      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A range label must have CASE_LOW strictly below CASE_HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  /* All labels must share the type established by the first one.  */
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  /* First non-default label: record its type and make sure the
	     index type can represent it.  */
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      /* Labels must be sorted and ranges must not overlap.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
4247
4248 /* Verify a gimple debug statement STMT.
4249 Returns true if anything is wrong. */
4250
4251 static bool
4252 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4253 {
4254 /* There isn't much that could be wrong in a gimple debug stmt. A
4255 gimple debug bind stmt, for example, maps a tree, that's usually
4256 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4257 component or member of an aggregate type, to another tree, that
4258 can be an arbitrary expression. These stmts expand into debug
4259 insns, and are converted to debug notes by var-tracking.c. */
4260 return false;
4261 }
4262
4263 /* Verify a gimple label statement STMT.
4264 Returns true if anything is wrong. */
4265
4266 static bool
4267 verify_gimple_label (gimple stmt)
4268 {
4269 tree decl = gimple_label_label (stmt);
4270 int uid;
4271 bool err = false;
4272
4273 if (TREE_CODE (decl) != LABEL_DECL)
4274 return true;
4275 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4276 && DECL_CONTEXT (decl) != current_function_decl)
4277 {
4278 error ("label's context is not the current function decl");
4279 err |= true;
4280 }
4281
4282 uid = LABEL_DECL_UID (decl);
4283 if (cfun->cfg
4284 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4285 {
4286 error ("incorrect entry in label_to_block_map");
4287 err |= true;
4288 }
4289
4290 uid = EH_LANDING_PAD_NR (decl);
4291 if (uid)
4292 {
4293 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4294 if (decl != lp->post_landing_pad)
4295 {
4296 error ("incorrect setting of landing pad number");
4297 err |= true;
4298 }
4299 }
4300
4301 return err;
4302 }
4303
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  Dispatches on the statement code to the
   per-code verifier routines.  */

static bool
verify_gimple_stmt (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return verify_gimple_label (stmt);

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      /* The condition code must be a comparison and the two branch
	 labels, if present, must be LABEL_DECLs.  */
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
	{
	  error ("invalid comparison code in gimple cond");
	  return true;
	}
      if (!(!gimple_cond_true_label (stmt)
	    || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
	  || !(!gimple_cond_false_label (stmt)
	       || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
	{
	  error ("invalid labels in gimple cond");
	  return true;
	}

      /* The comparison itself is checked as if it produced a boolean.  */
      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      /* Asm statements are not verified.  */
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
4379
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  Checks that the result is an SSA name (the
   virtual operand for virtual PHIs) and that every argument agrees with
   the result in virtual-ness and, with ENABLE_TYPES_CHECKING, in type.  */

static bool
verify_gimple_phi (gimple phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  /* The result must be an SSA name; for a virtual PHI it must be based
     on the function's single virtual operand.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* Optional stricter check: each argument type must trivially
	 convert to the result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
4443
/* Verify the GIMPLE statements inside the sequence STMTS.  Recurses
   into the bodies of structured statements (binds, tries, EH handlers,
   transactions) and verifies every leaf statement, dumping the ones
   that fail.  Returns true if any statement was invalid.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (stmt);
	  break;

	default:
	  {
	    /* A leaf statement; dump it if it fails verification.  */
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
4496
4497 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4498 is a problem, otherwise false. */
4499
4500 static bool
4501 verify_gimple_transaction (gimple stmt)
4502 {
4503 tree lab = gimple_transaction_label (stmt);
4504 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4505 return true;
4506 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4507 }
4508
4509
4510 /* Verify the GIMPLE statements inside the statement list STMTS. */
4511
4512 DEBUG_FUNCTION void
4513 verify_gimple_in_seq (gimple_seq stmts)
4514 {
4515 timevar_push (TV_TREE_STMT_VERIFY);
4516 if (verify_gimple_in_seq_2 (stmts))
4517 internal_error ("verify_gimple failed");
4518 timevar_pop (TV_TREE_STMT_VERIFY);
4519 }
4520
4521 /* Return true when the T can be shared. */
4522
4523 static bool
4524 tree_node_can_be_shared (tree t)
4525 {
4526 if (IS_TYPE_OR_DECL_P (t)
4527 || is_gimple_min_invariant (t)
4528 || TREE_CODE (t) == SSA_NAME
4529 || t == error_mark_node
4530 || TREE_CODE (t) == IDENTIFIER_NODE)
4531 return true;
4532
4533 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4534 return true;
4535
4536 if (DECL_P (t))
4537 return true;
4538
4539 return false;
4540 }
4541
4542 /* Called via walk_tree. Verify tree sharing. */
4543
4544 static tree
4545 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4546 {
4547 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4548
4549 if (tree_node_can_be_shared (*tp))
4550 {
4551 *walk_subtrees = false;
4552 return NULL;
4553 }
4554
4555 if (pointer_set_insert (visited, *tp))
4556 return *tp;
4557
4558 return NULL;
4559 }
4560
4561 /* Called via walk_gimple_stmt. Verify tree sharing. */
4562
4563 static tree
4564 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4565 {
4566 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4567 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4568 }
4569
4570 static bool eh_error_found;
4571 static int
4572 verify_eh_throw_stmt_node (void **slot, void *data)
4573 {
4574 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4575 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4576
4577 if (!pointer_set_contains (visited, node->stmt))
4578 {
4579 error ("dead STMT in EH table");
4580 debug_gimple_stmt (node->stmt);
4581 eh_error_found = true;
4582 }
4583 return 1;
4584 }
4585
4586 /* Verify if the location LOCs block is in BLOCKS. */
4587
4588 static bool
4589 verify_location (pointer_set_t *blocks, location_t loc)
4590 {
4591 tree block = LOCATION_BLOCK (loc);
4592 if (block != NULL_TREE
4593 && !pointer_set_contains (blocks, block))
4594 {
4595 error ("location references block not in block tree");
4596 return true;
4597 }
4598 if (block != NULL_TREE)
4599 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4600 return false;
4601 }
4602
4603 /* Called via walk_tree. Verify that expressions have no blocks. */
4604
4605 static tree
4606 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4607 {
4608 if (!EXPR_P (*tp))
4609 {
4610 *walk_subtrees = false;
4611 return NULL;
4612 }
4613
4614 location_t loc = EXPR_LOCATION (*tp);
4615 if (LOCATION_BLOCK (loc) != NULL)
4616 return *tp;
4617
4618 return NULL;
4619 }
4620
4621 /* Called via walk_tree. Verify locations of expressions. */
4622
4623 static tree
4624 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4625 {
4626 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4627
4628 if (TREE_CODE (*tp) == VAR_DECL
4629 && DECL_HAS_DEBUG_EXPR_P (*tp))
4630 {
4631 tree t = DECL_DEBUG_EXPR (*tp);
4632 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4633 if (addr)
4634 return addr;
4635 }
4636 if ((TREE_CODE (*tp) == VAR_DECL
4637 || TREE_CODE (*tp) == PARM_DECL
4638 || TREE_CODE (*tp) == RESULT_DECL)
4639 && DECL_HAS_VALUE_EXPR_P (*tp))
4640 {
4641 tree t = DECL_VALUE_EXPR (*tp);
4642 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4643 if (addr)
4644 return addr;
4645 }
4646
4647 if (!EXPR_P (*tp))
4648 {
4649 *walk_subtrees = false;
4650 return NULL;
4651 }
4652
4653 location_t loc = EXPR_LOCATION (*tp);
4654 if (verify_location (blocks, loc))
4655 return *tp;
4656
4657 return NULL;
4658 }
4659
4660 /* Called via walk_gimple_op. Verify locations of expressions. */
4661
4662 static tree
4663 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4664 {
4665 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4666 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4667 }
4668
4669 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4670
4671 static void
4672 collect_subblocks (pointer_set_t *blocks, tree block)
4673 {
4674 tree t;
4675 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4676 {
4677 pointer_set_insert (blocks, t);
4678 collect_subblocks (blocks, t);
4679 }
4680 }
4681
/* Verify the GIMPLE statements in the CFG of FN.  Checks statement
   well-formedness, tree-node sharing, source-location blocks and the
   consistency of the EH throw table; aborts via internal_error on any
   failure.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn)
{
  basic_block bb;
  bool err = false;
  /* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
     records every statement seen so the EH table can be checked for
     dead entries afterwards; BLOCKS is the set of valid lexical
     blocks for location checking.  */
  struct pointer_set_t *visited, *visited_stmts, *blocks;

  timevar_push (TV_TREE_STMT_VERIFY);
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  blocks = pointer_set_create ();
  if (DECL_INITIAL (fn->decl))
    {
      pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
      collect_subblocks (blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First verify the PHI nodes of the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  bool err2 = false;
	  unsigned i;

	  pointer_set_insert (visited_stmts, phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      /* Virtual PHIs must not carry argument locations.  */
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Then verify the ordinary statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  pointer_set_insert (visited_stmts, stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  error ("statement marked for throw, but doesn%'t");
		  err2 |= true;
		}
	      /* Internal throwers end a basic block, so they may only
		 appear as the last statement.  */
	      else if (lp_nr > 0
		       && !gsi_one_before_end_p (gsi)
		       && stmt_can_throw_internal (stmt))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Every statement recorded in the EH throw table must still be
     present in the IL, otherwise its entry is dead.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  pointer_set_destroy (blocks);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4858
4859
/* Verifies that the flow information is OK: label placement, absence
   of control flow in the middle of blocks, and outgoing-edge flags
   matching each block's terminating statement.  Returns nonzero if a
   problem was found.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The entry and exit blocks must never carry statements or PHIs.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);
	  /* Nonlocal labels must come first in the label sequence.  */
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* Likewise for EH landing pad labels.  */
	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* The label-to-block map must agree with where the label is.  */
	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* Labels may only appear at the start of a block.  */
	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      /* Check the edge invariants imposed by the kind of terminating
	 statement.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    /* A condition must have exactly one true and one false
	       successor, neither fallthru nor abnormal.  */
	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  /* A return (or __builtin_return) must have a single plain
	     successor edge that goes to the exit block.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  The aux field is
	       used as a temporary marker: 1 = case target, 2 = case
	       target with a matching edge seen.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every successor edge must lead to some case target.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (stmt);
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
5170
5171
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The forwarder is FALLTHRU->src, the original
   block FALLTHRU->dest; PHI nodes currently sitting in the forwarder
   are moved down into the original block.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor BB needs no PHI adjustment.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      /* Move the original result to a new PHI in BB, give the
	 forwarder's PHI a fresh SSA name, and feed that name into the
	 new PHI through the fallthru edge.  */
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
5213
5214
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  gimple stmt;

  /* Scan the leading labels for one that is not nonlocal; if found
     later in the sequence, move it to the front so subsequent lookups
     hit it first.  */
  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  /* No usable label exists; create an artificial one at the start of
     the block.  */
  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
5245
5246
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Only a condition or a switch can be deleted and replaced by a
     plain fallthru edge.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
5284
5285
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Updates the statement at
   the end of E->src (switch case labels, asm goto labels, etc.) so
   that it transfers control to DEST.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* Redirecting to where the edge already goes is a no-op failure.  */
  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  /* Try the cheap route first: delete the jump entirely when all
     successors would end up at DEST anyway.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Retarget every case on the chain to the new label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* Slow path: scan the whole case vector for labels that
	       led to the old destination.  */
	    size_t i, n = gimple_switch_num_labels (stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	int i, n = gimple_asm_nlabels (stmt);
	tree label = NULL;

	/* Retarget every asm goto label that pointed at the old
	   destination.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* Deleting the return turns the edge into a plain fallthru.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (stmt, e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (stmt, gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5433
5434 /* Returns true if it is possible to remove edge E by redirecting
5435 it to the destination of the other edge from E->src. */
5436
5437 static bool
5438 gimple_can_remove_branch_p (const_edge e)
5439 {
5440 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5441 return false;
5442
5443 return true;
5444 }
5445
5446 /* Simple wrapper, as we can always redirect fallthru edges. */
5447
5448 static basic_block
5449 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5450 {
5451 e = gimple_redirect_edge_and_branch (e, dest);
5452 gcc_assert (e);
5453
5454 return NULL;
5455 }
5456
5457
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block holding everything after the split
   point.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* A label split point is equivalent to splitting after the labels.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  Advance GSI to
     the first statement that should move: the one after STMT, or the
     first non-label when STMT is NULL.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing follows the split point; the new block stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5515
5516
5517 /* Moves basic block BB after block AFTER. */
5518
5519 static bool
5520 gimple_move_block_after (basic_block bb, basic_block after)
5521 {
5522 if (bb->prev_bb == after)
5523 return true;
5524
5525 unlink_block (bb);
5526 link_block (bb, after);
5527
5528 return true;
5529 }
5530
5531
5532 /* Return TRUE if block BB has no executable statements, otherwise return
5533 FALSE. */
5534
5535 static bool
5536 gimple_empty_block_p (basic_block bb)
5537 {
5538 /* BB must have no executable statements. */
5539 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5540 if (phi_nodes (bb))
5541 return false;
5542 if (gsi_end_p (gsi))
5543 return true;
5544 if (is_gimple_debug (gsi_stmt (gsi)))
5545 gsi_next_nondebug (&gsi);
5546 return gsi_end_p (gsi);
5547 }
5548
5549
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the block holding
   the branch, or NULL when no split was done.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple last, split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  /* Only GIMPLE_COND and GIMPLE_SWITCH terminators are split off.  */
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  gsi_prev_nondebug (&gsi);
  /* NOTE(review): when the branch is the only non-debug statement, GSI
     is now at sequence end and SPLIT_POINT is presumably NULL — verify
     split_block treats a NULL statement as "split after labels".  */
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
5568
5569
/* Return true if basic_block can be duplicated.  In GIMPLE every
   block is duplicatable; this hook exists for IL-independent CFG
   code.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
5577
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is inserted just before the exit
   block; edges must be set up by the caller.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (NULL_TREE, new_bb);
      /* Register a fresh SSA definition for the copied PHI result.  */
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;

      stmt = gsi_stmt (gsi);
      /* Labels are not copied into the duplicate block.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5653
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   Finds the corresponding original edge via the BB_DUPLICATED flags
   and copies each PHI argument (and its location) from it.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* Nothing to do when the destination has no PHI nodes.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source block back to its original (if it is a
     copy at all).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the original and copied PHI sequences in lockstep, copying
     the argument coming in over the original edge E.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5704
5705
5706 /* Basic block BB_COPY was created by code duplication. Add phi node
5707 arguments for edges going out of BB_COPY. The blocks that were
5708 duplicated have BB_DUPLICATED set. */
5709
5710 void
5711 add_phi_args_after_copy_bb (basic_block bb_copy)
5712 {
5713 edge e_copy;
5714 edge_iterator ei;
5715
5716 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5717 {
5718 add_phi_args_after_copy_edge (e_copy);
5719 }
5720 }
5721
5722 /* Blocks in REGION_COPY array of length N_REGION were created by
5723 duplication of basic blocks. Add phi node arguments for edges
5724 going from these blocks. If E_COPY is not NULL, also add
5725 phi node arguments for its destination.*/
5726
5727 void
5728 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5729 edge e_copy)
5730 {
5731 unsigned i;
5732
5733 for (i = 0; i < n_region; i++)
5734 region_copy[i]->flags |= BB_DUPLICATED;
5735
5736 for (i = 0; i < n_region; i++)
5737 add_phi_args_after_copy_bb (region_copy[i]);
5738 if (e_copy)
5739 add_phi_args_after_copy_edge (e_copy);
5740
5741 for (i = 0; i < n_region; i++)
5742 region_copy[i]->flags &= ~BB_DUPLICATED;
5743 }
5744
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  /* Allocate a scratch array for the copies if the caller did not
     provide one; remember to free it on exit.  */
  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  initialize_original_copy_tables ();

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  /* Prefer profile counts when available; otherwise fall back to
     static frequencies.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);

  /* Split the execution weight between the original region and the copy
     in proportion to how often ENTRY is taken.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  /* When header-copying, the old exit source/destination become the new
     latch and header of LOOP.  */
  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5896
5897 /* Checks if BB is part of the region defined by N_REGION BBS. */
5898 static bool
5899 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5900 {
5901 unsigned int n;
5902
5903 for (n = 0; n < n_region; n++)
5904 {
5905 if (bb == bbs[n])
5906 return true;
5907 }
5908 return false;
5909 }
5910
5911 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5912 are stored to REGION_COPY in the same order in that they appear
5913 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5914 the region, EXIT an exit from it. The condition guarding EXIT
5915 is moved to ENTRY. Returns true if duplication succeeds, false
5916 otherwise.
5917
5918 For example,
5919
5920 some_code;
5921 if (cond)
5922 A;
5923 else
5924 B;
5925
5926 is transformed to
5927
5928 if (cond)
5929 {
5930 some_code;
5931 A;
5932 }
5933 else
5934 {
5935 some_code;
5936 B;
5937 }
5938 */
5939
5940 bool
5941 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5942 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5943 basic_block *region_copy ATTRIBUTE_UNUSED)
5944 {
5945 unsigned i;
5946 bool free_region_copy = false;
5947 struct loop *loop = exit->dest->loop_father;
5948 struct loop *orig_loop = entry->dest->loop_father;
5949 basic_block switch_bb, entry_bb, nentry_bb;
5950 vec<basic_block> doms;
5951 int total_freq = 0, exit_freq = 0;
5952 gcov_type total_count = 0, exit_count = 0;
5953 edge exits[2], nexits[2], e;
5954 gimple_stmt_iterator gsi;
5955 gimple cond_stmt;
5956 edge sorig, snew;
5957 basic_block exit_bb;
5958 gimple_stmt_iterator psi;
5959 gimple phi;
5960 tree def;
5961 struct loop *target, *aloop, *cloop;
5962
5963 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5964 exits[0] = exit;
5965 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5966
5967 if (!can_copy_bbs_p (region, n_region))
5968 return false;
5969
5970 initialize_original_copy_tables ();
5971 set_loop_copy (orig_loop, loop);
5972
5973 target= loop;
5974 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5975 {
5976 if (bb_part_of_region_p (aloop->header, region, n_region))
5977 {
5978 cloop = duplicate_loop (aloop, target);
5979 duplicate_subloops (aloop, cloop);
5980 }
5981 }
5982
5983 if (!region_copy)
5984 {
5985 region_copy = XNEWVEC (basic_block, n_region);
5986 free_region_copy = true;
5987 }
5988
5989 gcc_assert (!need_ssa_update_p (cfun));
5990
5991 /* Record blocks outside the region that are dominated by something
5992 inside. */
5993 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5994
5995 if (exit->src->count)
5996 {
5997 total_count = exit->src->count;
5998 exit_count = exit->count;
5999 /* Fix up corner cases, to avoid division by zero or creation of negative
6000 frequencies. */
6001 if (exit_count > total_count)
6002 exit_count = total_count;
6003 }
6004 else
6005 {
6006 total_freq = exit->src->frequency;
6007 exit_freq = EDGE_FREQUENCY (exit);
6008 /* Fix up corner cases, to avoid division by zero or creation of negative
6009 frequencies. */
6010 if (total_freq == 0)
6011 total_freq = 1;
6012 if (exit_freq > total_freq)
6013 exit_freq = total_freq;
6014 }
6015
6016 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6017 split_edge_bb_loc (exit), true);
6018 if (total_count)
6019 {
6020 scale_bbs_frequencies_gcov_type (region, n_region,
6021 total_count - exit_count,
6022 total_count);
6023 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6024 total_count);
6025 }
6026 else
6027 {
6028 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6029 total_freq);
6030 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6031 }
6032
6033 /* Create the switch block, and put the exit condition to it. */
6034 entry_bb = entry->dest;
6035 nentry_bb = get_bb_copy (entry_bb);
6036 if (!last_stmt (entry->src)
6037 || !stmt_ends_bb_p (last_stmt (entry->src)))
6038 switch_bb = entry->src;
6039 else
6040 switch_bb = split_edge (entry);
6041 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6042
6043 gsi = gsi_last_bb (switch_bb);
6044 cond_stmt = last_stmt (exit->src);
6045 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6046 cond_stmt = gimple_copy (cond_stmt);
6047
6048 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6049
6050 sorig = single_succ_edge (switch_bb);
6051 sorig->flags = exits[1]->flags;
6052 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6053
6054 /* Register the new edge from SWITCH_BB in loop exit lists. */
6055 rescan_loop_exit (snew, true, false);
6056
6057 /* Add the PHI node arguments. */
6058 add_phi_args_after_copy (region_copy, n_region, snew);
6059
6060 /* Get rid of now superfluous conditions and associated edges (and phi node
6061 arguments). */
6062 exit_bb = exit->dest;
6063
6064 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6065 PENDING_STMT (e) = NULL;
6066
6067 /* The latch of ORIG_LOOP was copied, and so was the backedge
6068 to the original header. We redirect this backedge to EXIT_BB. */
6069 for (i = 0; i < n_region; i++)
6070 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6071 {
6072 gcc_assert (single_succ_edge (region_copy[i]));
6073 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6074 PENDING_STMT (e) = NULL;
6075 for (psi = gsi_start_phis (exit_bb);
6076 !gsi_end_p (psi);
6077 gsi_next (&psi))
6078 {
6079 phi = gsi_stmt (psi);
6080 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6081 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6082 }
6083 }
6084 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6085 PENDING_STMT (e) = NULL;
6086
6087 /* Anything that is outside of the region, but was dominated by something
6088 inside needs to update dominance info. */
6089 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6090 doms.release ();
6091 /* Update the SSA web. */
6092 update_ssa (TODO_update_ssa);
6093
6094 if (free_region_copy)
6095 free (region_copy);
6096
6097 free_original_copy_tables ();
6098 return true;
6099 }
6100
6101 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6102 adding blocks when the dominator traversal reaches EXIT. This
6103 function silently assumes that ENTRY strictly dominates EXIT. */
6104
6105 void
6106 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6107 vec<basic_block> *bbs_p)
6108 {
6109 basic_block son;
6110
6111 for (son = first_dom_son (CDI_DOMINATORS, entry);
6112 son;
6113 son = next_dom_son (CDI_DOMINATORS, son))
6114 {
6115 bbs_p->safe_push (son);
6116 if (son != exit)
6117 gather_blocks_in_sese_region (son, exit, bbs_p);
6118 }
6119 }
6120
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP; repeated calls for the same
   decl reuse the previously created duplicate.  Decls already belonging
   to TO_CONTEXT are left untouched.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  /* Already in the destination function; nothing to do.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    {
      /* First time we see T: create the duplicate and cache it.  */
      loc = pointer_map_insert (vars_map, t);

      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  /* Register the copy among TO_CONTEXT's local decls.  */
	  add_local_decl (f, new_t);
	}
      else
	{
	  /* Only CONST_DECLs besides SSA vars are expected here.  */
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      *loc = new_t;
    }
  else
    /* Reuse the duplicate created by an earlier call.  */
    new_t = (tree) *loc;

  *tp = new_t;
}
6160
6161
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones; repeated
   calls for the same NAME return the cached replacement.  NAME must not
   be a virtual operand.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
{
  void **loc;
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying decl into TO_CONTEXT first, then
	     build the new SSA name on top of the duplicate.  */
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	  /* Preserve default-def status in the destination function.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (name))
	    set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
				 decl, new_name);
	}
      else
	/* Anonymous SSA name: copy it directly into TO_CONTEXT.  */
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = (tree) *loc;

  return new_name;
}
6200
/* Bookkeeping shared by the move_stmt_r/move_stmt_op callbacks while
   moving statements from one function to another.  */
struct move_stmt_d
{
  tree orig_block;		/* BLOCK the statements currently belong to,
				   or NULL_TREE to match any block.  */
  tree new_block;		/* BLOCK they should belong to afterwards.  */
  tree from_context;		/* FUNCTION_DECL being moved out of.  */
  tree to_context;		/* FUNCTION_DECL being moved into.  */
  struct pointer_map_t *vars_map;	/* Maps old decls/SSA names to their
					   duplicates in TO_CONTEXT.  */
  htab_t new_label_map;		/* Maps old labels to replacement labels.  */
  struct pointer_map_t *eh_map;	/* Maps old EH regions to duplicated ones.  */
  bool remap_decls_p;		/* Whether local decls should be replaced by
				   duplicates (disabled inside OMP bodies).  */
};
6212
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  Used as a
   walk_tree callback; WI->info carries a struct move_stmt_d.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      /* Retarget the expression's block when it matches ORIG_BLOCK, or
	 unconditionally when no specific original block was given.  */
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Any other block must be nested inside ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Remap the label through NEW_LABEL_MAP if one is in effect,
	     then move it to the destination context.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no interesting subtrees to walk.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6278
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.
   P->eh_map must already contain an entry for the region.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number (old_nr);
  slot = pointer_map_contains (p->eh_map, old_r);
  /* SLOT is assumed non-NULL: every region reachable here was recorded
     in EH_MAP by duplicate_eh_regions.  */
  new_r = (eh_region) *slot;

  return new_r->index;
}
6294
6295 /* Similar, but operate on INTEGER_CSTs. */
6296
6297 static tree
6298 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6299 {
6300 int old_nr, new_nr;
6301
6302 old_nr = tree_to_shwi (old_t_nr);
6303 new_nr = move_stmt_eh_region_nr (old_nr, p);
6304
6305 return build_int_cst (integer_type_node, new_nr);
6306 }
6307
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  Also remaps EH region numbers embedded in the statement
   to their duplicates in the destination function.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Retarget the statement's block, mirroring the expression-level
     logic in move_stmt_op.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* This builtin carries a region number in argument 1 as
		 well as argument 0, hence the fallthrough.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
6396
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* The AUX field of the old loop points to the duplicate created
	 for the destination function (set up by the caller).  */
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by an extra 25% to amortize repeated reallocation.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      /* Rewrite the result and every use to SSA names owned by the
	 destination function.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Retarget the BLOCK recorded in each argument's location.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      /* Remap blocks, decls, SSA names and EH regions referenced by
	 the statement (see move_stmt_r / move_stmt_op).  */
      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label from the old function's label-to-block
	     map to the new function's, growing the latter if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move EH information for the statement to the new function.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      /* Likewise for value profile histograms.  */
      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget the BLOCK recorded in each outgoing edge's goto locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6562
6563 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6564 the outermost EH region. Use REGION as the incoming base EH region. */
6565
6566 static eh_region
6567 find_outermost_region_in_block (struct function *src_cfun,
6568 basic_block bb, eh_region region)
6569 {
6570 gimple_stmt_iterator si;
6571
6572 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6573 {
6574 gimple stmt = gsi_stmt (si);
6575 eh_region stmt_region;
6576 int lp_nr;
6577
6578 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6579 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6580 if (stmt_region)
6581 {
6582 if (region == NULL)
6583 region = stmt_region;
6584 else if (stmt_region != region)
6585 {
6586 region = eh_region_outermost (src_cfun, stmt_region, region);
6587 gcc_assert (region != NULL);
6588 }
6589 }
6590 }
6591
6592 return region;
6593 }
6594
6595 static tree
6596 new_label_mapper (tree decl, void *data)
6597 {
6598 htab_t hash = (htab_t) data;
6599 struct tree_map *m;
6600 void **slot;
6601
6602 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6603
6604 m = XNEW (struct tree_map);
6605 m->hash = DECL_UID (decl);
6606 m->base.from = decl;
6607 m->to = create_artificial_label (UNKNOWN_LOCATION);
6608 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6609 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6610 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6611
6612 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6613 gcc_assert (*slot == NULL);
6614
6615 *slot = m;
6616
6617 return m->to;
6618 }
6619
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks, replacing each VAR_DECL/CONST_DECL with its duplicate
   recorded in (or added to) VARS_MAP for TO_CONTEXT.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* TP walks the DECL_CHAIN links so that a replaced decl can be
     spliced into the chain in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry any DECL_VALUE_EXPR over to the duplicate.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain, dropping the original.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6650
6651 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6652 from FN1 to FN2. */
6653
6654 static void
6655 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6656 struct loop *loop)
6657 {
6658 /* Discard it from the old loop array. */
6659 (*get_loops (fn1))[loop->num] = NULL;
6660
6661 /* Place it in the new loop array, assigning it a new number. */
6662 loop->num = number_of_loops (fn2);
6663 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6664
6665 /* Recurse to children. */
6666 for (loop = loop->inner; loop; loop = loop->next)
6667 fixup_loop_arrays_after_move (fn1, fn2, loop);
6668 }
6669
6670 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6671 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6672 single basic block in the original CFG and the new basic block is
6673 returned. DEST_CFUN must not have a CFG yet.
6674
6675 Note that the region need not be a pure SESE region. Blocks inside
6676 the region may contain calls to abort/exit. The only restriction
6677 is that ENTRY_BB should be the only entry point and it must
6678 dominate EXIT_BB.
6679
6680 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6681 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6682 to the new function.
6683
6684 All local variables referenced in the region are assumed to be in
6685 the corresponding BLOCK_VARS and unexpanded variable lists
6686 associated with DEST_CFUN. */
6687
6688 basic_block
6689 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6690 basic_block exit_bb, tree orig_block)
6691 {
6692 vec<basic_block> bbs, dom_bbs;
6693 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6694 basic_block after, bb, *entry_pred, *exit_succ, abb;
6695 struct function *saved_cfun = cfun;
6696 int *entry_flag, *exit_flag;
6697 unsigned *entry_prob, *exit_prob;
6698 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6699 edge e;
6700 edge_iterator ei;
6701 htab_t new_label_map;
6702 struct pointer_map_t *vars_map, *eh_map;
6703 struct loop *loop = entry_bb->loop_father;
6704 struct loop *loop0 = get_loop (saved_cfun, 0);
6705 struct move_stmt_d d;
6706
6707 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6708 region. */
6709 gcc_assert (entry_bb != exit_bb
6710 && (!exit_bb
6711 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6712
6713 /* Collect all the blocks in the region. Manually add ENTRY_BB
6714 because it won't be added by dfs_enumerate_from. */
6715 bbs.create (0);
6716 bbs.safe_push (entry_bb);
6717 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6718
6719 /* The blocks that used to be dominated by something in BBS will now be
6720 dominated by the new block. */
6721 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6722 bbs.address (),
6723 bbs.length ());
6724
6725 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6726 the predecessor edges to ENTRY_BB and the successor edges to
6727 EXIT_BB so that we can re-attach them to the new basic block that
6728 will replace the region. */
6729 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6730 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6731 entry_flag = XNEWVEC (int, num_entry_edges);
6732 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6733 i = 0;
6734 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6735 {
6736 entry_prob[i] = e->probability;
6737 entry_flag[i] = e->flags;
6738 entry_pred[i++] = e->src;
6739 remove_edge (e);
6740 }
6741
6742 if (exit_bb)
6743 {
6744 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6745 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6746 exit_flag = XNEWVEC (int, num_exit_edges);
6747 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6748 i = 0;
6749 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6750 {
6751 exit_prob[i] = e->probability;
6752 exit_flag[i] = e->flags;
6753 exit_succ[i++] = e->dest;
6754 remove_edge (e);
6755 }
6756 }
6757 else
6758 {
6759 num_exit_edges = 0;
6760 exit_succ = NULL;
6761 exit_flag = NULL;
6762 exit_prob = NULL;
6763 }
6764
6765 /* Switch context to the child function to initialize DEST_FN's CFG. */
6766 gcc_assert (dest_cfun->cfg == NULL);
6767 push_cfun (dest_cfun);
6768
6769 init_empty_tree_cfg ();
6770
6771 /* Initialize EH information for the new function. */
6772 eh_map = NULL;
6773 new_label_map = NULL;
6774 if (saved_cfun->eh)
6775 {
6776 eh_region region = NULL;
6777
6778 FOR_EACH_VEC_ELT (bbs, i, bb)
6779 region = find_outermost_region_in_block (saved_cfun, bb, region);
6780
6781 init_eh_for_function ();
6782 if (region != NULL)
6783 {
6784 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6785 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6786 new_label_mapper, new_label_map);
6787 }
6788 }
6789
6790 /* Initialize an empty loop tree. */
6791 struct loops *loops = ggc_alloc_cleared_loops ();
6792 init_loops_structure (dest_cfun, loops, 1);
6793 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6794 set_loops_for_fn (dest_cfun, loops);
6795
6796 /* Move the outlined loop tree part. */
6797 num_nodes = bbs.length ();
6798 FOR_EACH_VEC_ELT (bbs, i, bb)
6799 {
6800 if (bb->loop_father->header == bb)
6801 {
6802 struct loop *this_loop = bb->loop_father;
6803 struct loop *outer = loop_outer (this_loop);
6804 if (outer == loop
6805 /* If the SESE region contains some bbs ending with
6806 a noreturn call, those are considered to belong
6807 to the outermost loop in saved_cfun, rather than
6808 the entry_bb's loop_father. */
6809 || outer == loop0)
6810 {
6811 if (outer != loop)
6812 num_nodes -= this_loop->num_nodes;
6813 flow_loop_tree_node_remove (bb->loop_father);
6814 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6815 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6816 }
6817 }
6818 else if (bb->loop_father == loop0 && loop0 != loop)
6819 num_nodes--;
6820
6821 /* Remove loop exits from the outlined region. */
6822 if (loops_for_fn (saved_cfun)->exits)
6823 FOR_EACH_EDGE (e, ei, bb->succs)
6824 {
6825 void **slot = htab_find_slot_with_hash
6826 (loops_for_fn (saved_cfun)->exits, e,
6827 htab_hash_pointer (e), NO_INSERT);
6828 if (slot)
6829 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6830 }
6831 }
6832
6833
6834 /* Adjust the number of blocks in the tree root of the outlined part. */
6835 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6836
6837 /* Setup a mapping to be used by move_block_to_fn. */
6838 loop->aux = current_loops->tree_root;
6839 loop0->aux = current_loops->tree_root;
6840
6841 pop_cfun ();
6842
6843 /* Move blocks from BBS into DEST_CFUN. */
6844 gcc_assert (bbs.length () >= 2);
6845 after = dest_cfun->cfg->x_entry_block_ptr;
6846 vars_map = pointer_map_create ();
6847
6848 memset (&d, 0, sizeof (d));
6849 d.orig_block = orig_block;
6850 d.new_block = DECL_INITIAL (dest_cfun->decl);
6851 d.from_context = cfun->decl;
6852 d.to_context = dest_cfun->decl;
6853 d.vars_map = vars_map;
6854 d.new_label_map = new_label_map;
6855 d.eh_map = eh_map;
6856 d.remap_decls_p = true;
6857
6858 FOR_EACH_VEC_ELT (bbs, i, bb)
6859 {
6860 /* No need to update edge counts on the last block. It has
6861 already been updated earlier when we detached the region from
6862 the original CFG. */
6863 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6864 after = bb;
6865 }
6866
6867 loop->aux = NULL;
6868 loop0->aux = NULL;
6869 /* Loop sizes are no longer correct, fix them up. */
6870 loop->num_nodes -= num_nodes;
6871 for (struct loop *outer = loop_outer (loop);
6872 outer; outer = loop_outer (outer))
6873 outer->num_nodes -= num_nodes;
6874 loop0->num_nodes -= bbs.length () - num_nodes;
6875
6876 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6877 {
6878 struct loop *aloop;
6879 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6880 if (aloop != NULL)
6881 {
6882 if (aloop->simduid)
6883 {
6884 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6885 d.to_context);
6886 dest_cfun->has_simduid_loops = true;
6887 }
6888 if (aloop->force_vect)
6889 dest_cfun->has_force_vect_loops = true;
6890 }
6891 }
6892
6893 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6894 if (orig_block)
6895 {
6896 tree block;
6897 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6898 == NULL_TREE);
6899 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6900 = BLOCK_SUBBLOCKS (orig_block);
6901 for (block = BLOCK_SUBBLOCKS (orig_block);
6902 block; block = BLOCK_CHAIN (block))
6903 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6904 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6905 }
6906
6907 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6908 vars_map, dest_cfun->decl);
6909
6910 if (new_label_map)
6911 htab_delete (new_label_map);
6912 if (eh_map)
6913 pointer_map_destroy (eh_map);
6914 pointer_map_destroy (vars_map);
6915
6916 /* Rewire the entry and exit blocks. The successor to the entry
6917 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6918 the child function. Similarly, the predecessor of DEST_FN's
6919 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6920 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6921 various CFG manipulation function get to the right CFG.
6922
6923 FIXME, this is silly. The CFG ought to become a parameter to
6924 these helpers. */
6925 push_cfun (dest_cfun);
6926 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
6927 if (exit_bb)
6928 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
6929 pop_cfun ();
6930
6931 /* Back in the original function, the SESE region has disappeared,
6932 create a new basic block in its place. */
6933 bb = create_empty_bb (entry_pred[0]);
6934 if (current_loops)
6935 add_bb_to_loop (bb, loop);
6936 for (i = 0; i < num_entry_edges; i++)
6937 {
6938 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6939 e->probability = entry_prob[i];
6940 }
6941
6942 for (i = 0; i < num_exit_edges; i++)
6943 {
6944 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6945 e->probability = exit_prob[i];
6946 }
6947
6948 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6949 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6950 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6951 dom_bbs.release ();
6952
6953 if (exit_bb)
6954 {
6955 free (exit_prob);
6956 free (exit_flag);
6957 free (exit_succ);
6958 }
6959 free (entry_prob);
6960 free (entry_flag);
6961 free (entry_pred);
6962 bbs.release ();
6963
6964 return bb;
6965 }
6966
6967
6968 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6969 */
6970
void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Temporarily make FNDECL the current function so the printers that
     consult current_function_decl behave; restored before returning.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list, comma-separated, each as "type name".  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  /* Optionally dump the EH region tree alongside the body.  */
  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no GIMPLE body yet, fall back to a raw tree dump
     and return early.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      /* Local declarations first ...  */
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* ... then anonymous SSA names (those without an underlying
	 VAR_DECL), printed as "type name;".  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_function (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND provides its own braces;
	 anything else gets wrapped (unless the topmost bind is being
	 ignored, in which case "{" was already printed above).  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  /* Restore the caller's notion of the current function.  */
  current_function_decl = old_current_fndecl;
}
7125
7126 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7127
DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  /* Convenience wrapper for use from a debugger: dump FN to stderr
     with dump flags FLAGS.  */
  dump_function_to_file (fn, stderr, flags);
}
7133
7134
7135 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7136
7137 static void
7138 print_pred_bbs (FILE *file, basic_block bb)
7139 {
7140 edge e;
7141 edge_iterator ei;
7142
7143 FOR_EACH_EDGE (e, ei, bb->preds)
7144 fprintf (file, "bb_%d ", e->src->index);
7145 }
7146
7147
7148 /* Print on FILE the indexes for the successors of basic_block BB. */
7149
7150 static void
7151 print_succ_bbs (FILE *file, basic_block bb)
7152 {
7153 edge e;
7154 edge_iterator ei;
7155
7156 FOR_EACH_EDGE (e, ei, bb->succs)
7157 fprintf (file, "bb_%d ", e->dest->index);
7158 }
7159
7160 /* Print to FILE the basic block BB following the VERBOSITY level. */
7161
7162 void
7163 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7164 {
7165 char *s_indent = (char *) alloca ((size_t) indent + 1);
7166 memset ((void *) s_indent, ' ', (size_t) indent);
7167 s_indent[indent] = '\0';
7168
7169 /* Print basic_block's header. */
7170 if (verbosity >= 2)
7171 {
7172 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7173 print_pred_bbs (file, bb);
7174 fprintf (file, "}, succs = {");
7175 print_succ_bbs (file, bb);
7176 fprintf (file, "})\n");
7177 }
7178
7179 /* Print basic_block's body. */
7180 if (verbosity >= 3)
7181 {
7182 fprintf (file, "%s {\n", s_indent);
7183 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7184 fprintf (file, "%s }\n", s_indent);
7185 }
7186 }
7187
7188 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7189
7190 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7191 VERBOSITY level this outputs the contents of the loop, or just its
7192 structure. */
7193
7194 static void
7195 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7196 {
7197 char *s_indent;
7198 basic_block bb;
7199
7200 if (loop == NULL)
7201 return;
7202
7203 s_indent = (char *) alloca ((size_t) indent + 1);
7204 memset ((void *) s_indent, ' ', (size_t) indent);
7205 s_indent[indent] = '\0';
7206
7207 /* Print loop's header. */
7208 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7209 if (loop->header)
7210 fprintf (file, "header = %d", loop->header->index);
7211 else
7212 {
7213 fprintf (file, "deleted)\n");
7214 return;
7215 }
7216 if (loop->latch)
7217 fprintf (file, ", latch = %d", loop->latch->index);
7218 else
7219 fprintf (file, ", multiple latches");
7220 fprintf (file, ", niter = ");
7221 print_generic_expr (file, loop->nb_iterations, 0);
7222
7223 if (loop->any_upper_bound)
7224 {
7225 fprintf (file, ", upper_bound = ");
7226 print_decu (loop->nb_iterations_upper_bound, file);
7227 }
7228
7229 if (loop->any_estimate)
7230 {
7231 fprintf (file, ", estimate = ");
7232 print_decu (loop->nb_iterations_estimate, file);
7233 }
7234 fprintf (file, ")\n");
7235
7236 /* Print loop's body. */
7237 if (verbosity >= 1)
7238 {
7239 fprintf (file, "%s{\n", s_indent);
7240 FOR_EACH_BB (bb)
7241 if (bb->loop_father == loop)
7242 print_loops_bb (file, bb, indent, verbosity);
7243
7244 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7245 fprintf (file, "%s}\n", s_indent);
7246 }
7247 }
7248
7249 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7250 spaces. Following VERBOSITY level this outputs the contents of the
7251 loop, or just its structure. */
7252
7253 static void
7254 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7255 int verbosity)
7256 {
7257 if (loop == NULL)
7258 return;
7259
7260 print_loop (file, loop, indent, verbosity);
7261 print_loop_and_siblings (file, loop->next, indent, verbosity);
7262 }
7263
7264 /* Follow a CFG edge from the entry point of the program, and on entry
7265 of a loop, pretty print the loop structure on FILE. */
7266
7267 void
7268 print_loops (FILE *file, int verbosity)
7269 {
7270 basic_block bb;
7271
7272 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7273 if (bb && bb->loop_father)
7274 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7275 }
7276
7277 /* Dump a loop. */
7278
DEBUG_FUNCTION void
debug (struct loop &ref)
{
  /* Print just the loop structure (verbosity 0) to stderr.  */
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7284
7285 DEBUG_FUNCTION void
7286 debug (struct loop *ptr)
7287 {
7288 if (ptr)
7289 debug (*ptr);
7290 else
7291 fprintf (stderr, "<nil>\n");
7292 }
7293
7294 /* Dump a loop verbosely. */
7295
DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  /* Print the loop including its body (verbosity 3) to stderr.  */
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7301
7302 DEBUG_FUNCTION void
7303 debug_verbose (struct loop *ptr)
7304 {
7305 if (ptr)
7306 debug (*ptr);
7307 else
7308 fprintf (stderr, "<nil>\n");
7309 }
7310
7311
7312 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7313
DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  /* Dump the whole loop tree of the current function to stderr.  */
  print_loops (stderr, verbosity);
}
7319
7320 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7321
DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  /* Dump a single loop (and, depending on VERBOSITY, its body) to
     stderr with no indentation.  */
  print_loop (stderr, loop, 0, verbosity);
}
7327
7328 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7329 level. */
7330
DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look the loop up by number in the current function and dump it.  */
  debug_loop (get_loop (cfun, num), verbosity);
}
7336
7337 /* Return true if BB ends with a call, possibly followed by some
7338 instructions that must stay with the call. Return false,
7339 otherwise. */
7340
7341 static bool
7342 gimple_block_ends_with_call_p (basic_block bb)
7343 {
7344 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7345 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7346 }
7347
7348
7349 /* Return true if BB ends with a conditional branch. Return false,
7350 otherwise. */
7351
7352 static bool
7353 gimple_block_ends_with_condjump_p (const_basic_block bb)
7354 {
7355 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7356 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7357 }
7358
7359
7360 /* Return true if we need to add fake edge to exit at statement T.
7361 Helper function for gimple_flow_call_edges_add. */
7362
7363 static bool
7364 need_fake_edge_p (gimple t)
7365 {
7366 tree fndecl = NULL_TREE;
7367 int call_flags = 0;
7368
7369 /* NORETURN and LONGJMP calls already have an edge to exit.
7370 CONST and PURE calls do not need one.
7371 We don't currently check for CONST and PURE here, although
7372 it would be a good idea, because those attributes are
7373 figured out from the RTL in mark_constant_function, and
7374 the counter incrementation code from -fprofile-arcs
7375 leads to different results from -fbranch-probabilities. */
7376 if (is_gimple_call (t))
7377 {
7378 fndecl = gimple_call_fndecl (t);
7379 call_flags = gimple_call_flags (t);
7380 }
7381
7382 if (is_gimple_call (t)
7383 && fndecl
7384 && DECL_BUILT_IN (fndecl)
7385 && (call_flags & ECF_NOTHROW)
7386 && !(call_flags & ECF_RETURNS_TWICE)
7387 /* fork() doesn't really return twice, but the effect of
7388 wrapping it in __gcov_fork() which calls __gcov_flush()
7389 and clears the counters before forking has the same
7390 effect as returning twice. Force a fake edge. */
7391 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7392 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7393 return false;
7394
7395 if (is_gimple_call (t))
7396 {
7397 edge_iterator ei;
7398 edge e;
7399 basic_block bb;
7400
7401 if (!(call_flags & ECF_NORETURN))
7402 return true;
7403
7404 bb = gimple_bb (t);
7405 FOR_EACH_EDGE (e, ei, bb->succs)
7406 if ((e->flags & EDGE_FAKE) == 0)
7407 return true;
7408 }
7409
7410 if (gimple_code (t) == GIMPLE_ASM
7411 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7412 return true;
7413
7414 return false;
7415 }
7416
7417
7418 /* Add fake edges to the function exit for any non constant and non
7419 noreturn calls (or noreturn calls with EH/abnormal edges),
7420 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7421 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7422 that were split.
7423
7424 The goal is to expose cases in which entering a basic block does
7425 not imply that all subsequent instructions must be executed. */
7426
static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count: split_block below may create new blocks,
     and those must not be revisited by the loop.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do for an empty function.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* Inserting the nop forces a block split on commit, so
		 the call no longer ends the block with the edge to
		 EXIT.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      /* Block indices may have holes after CFG manipulation.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements backwards so splits below the current
	 statement do not disturb the iteration.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7538
7539 /* Removes edge E and all the blocks dominated by it, and updates dominance
7540 information. The IL in E->src needs to be updated separately.
7541 If dominance info is not available, only the edge E is removed.*/
7542
void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominator info we cannot compute the affected region;
     just remove the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF := successors of the removed blocks that are themselves
	 outside the removed set (the dominance frontier).  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7664
7665 /* Purge dead EH edges from basic block BB. */
7666
7667 bool
7668 gimple_purge_dead_eh_edges (basic_block bb)
7669 {
7670 bool changed = false;
7671 edge e;
7672 edge_iterator ei;
7673 gimple stmt = last_stmt (bb);
7674
7675 if (stmt && stmt_can_throw_internal (stmt))
7676 return false;
7677
7678 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7679 {
7680 if (e->flags & EDGE_EH)
7681 {
7682 remove_edge_and_dominated_blocks (e);
7683 changed = true;
7684 }
7685 else
7686 ei_next (&ei);
7687 }
7688
7689 return changed;
7690 }
7691
7692 /* Purge dead EH edges from basic block listed in BLOCKS. */
7693
7694 bool
7695 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7696 {
7697 bool changed = false;
7698 unsigned i;
7699 bitmap_iterator bi;
7700
7701 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7702 {
7703 basic_block bb = BASIC_BLOCK (i);
7704
7705 /* Earlier gimple_purge_dead_eh_edges could have removed
7706 this basic block already. */
7707 gcc_assert (bb || changed);
7708 if (bb != NULL)
7709 changed |= gimple_purge_dead_eh_edges (bb);
7710 }
7711
7712 return changed;
7713 }
7714
7715 /* Purge dead abnormal call edges from basic block BB. */
7716
7717 bool
7718 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7719 {
7720 bool changed = false;
7721 edge e;
7722 edge_iterator ei;
7723 gimple stmt = last_stmt (bb);
7724
7725 if (!cfun->has_nonlocal_label
7726 && !cfun->calls_setjmp)
7727 return false;
7728
7729 if (stmt && stmt_can_make_abnormal_goto (stmt))
7730 return false;
7731
7732 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7733 {
7734 if (e->flags & EDGE_ABNORMAL)
7735 {
7736 if (e->flags & EDGE_FALLTHRU)
7737 e->flags &= ~EDGE_ABNORMAL;
7738 else
7739 remove_edge_and_dominated_blocks (e);
7740 changed = true;
7741 }
7742 else
7743 ei_next (&ei);
7744 }
7745
7746 return changed;
7747 }
7748
7749 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7750
7751 bool
7752 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7753 {
7754 bool changed = false;
7755 unsigned i;
7756 bitmap_iterator bi;
7757
7758 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7759 {
7760 basic_block bb = BASIC_BLOCK (i);
7761
7762 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7763 this basic block already. */
7764 gcc_assert (bb || changed);
7765 if (bb != NULL)
7766 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7767 }
7768
7769 return changed;
7770 }
7771
7772 /* This function is called whenever a new edge is created or
7773 redirected. */
7774
7775 static void
7776 gimple_execute_on_growing_pred (edge e)
7777 {
7778 basic_block bb = e->dest;
7779
7780 if (!gimple_seq_empty_p (phi_nodes (bb)))
7781 reserve_phi_args_for_new_edge (bb);
7782 }
7783
7784 /* This function is called immediately before edge E is removed from
7785 the edge vector E->dest->preds. */
7786
7787 static void
7788 gimple_execute_on_shrinking_pred (edge e)
7789 {
7790 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7791 remove_phi_args (e);
7792 }
7793
7794 /*---------------------------------------------------------------------------
7795 Helper functions for Loop versioning
7796 ---------------------------------------------------------------------------*/
7797
7798 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7799 of 'first'. Both of them are dominated by 'new_head' basic block. When
7800 'new_head' was created by 'second's incoming edge it received phi arguments
7801 on the edge by split_edge(). Later, additional edge 'e' was created to
7802 connect 'new_head' and 'first'. Now this routine adds phi args on this
7803 additional edge 'e' that new_head to second edge received as part of edge
7804 splitting. */
7805
7806 static void
7807 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7808 basic_block new_head, edge e)
7809 {
7810 gimple phi1, phi2;
7811 gimple_stmt_iterator psi1, psi2;
7812 tree def;
7813 edge e2 = find_edge (new_head, second);
7814
7815 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7816 edge, we should always have an edge from NEW_HEAD to SECOND. */
7817 gcc_assert (e2 != NULL);
7818
7819 /* Browse all 'second' basic block phi nodes and add phi args to
7820 edge 'e' for 'first' head. PHI args are always in correct order. */
7821
7822 for (psi2 = gsi_start_phis (second),
7823 psi1 = gsi_start_phis (first);
7824 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7825 gsi_next (&psi2), gsi_next (&psi1))
7826 {
7827 phi1 = gsi_stmt (psi1);
7828 phi2 = gsi_stmt (psi2);
7829 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7830 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7831 }
7832 }
7833
7834
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The existing fall-through successor edge
     of COND_BB becomes the FALSE edge of the new condition.
     NOTE(review): the matching TRUE edge is presumably created by the
     caller (the lv_* cfghook machinery) -- confirm against callers.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
7863
7864
7865 /* Do book-keeping of basic block BB for the profile consistency checker.
7866 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7867 then do post-pass accounting. Store the counting in RECORD. */
7868 static void
7869 gimple_account_profile_record (basic_block bb, int after_pass,
7870 struct profile_record *record)
7871 {
7872 gimple_stmt_iterator i;
7873 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7874 {
7875 record->size[after_pass]
7876 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7877 if (profile_status == PROFILE_READ)
7878 record->time[after_pass]
7879 += estimate_num_insns (gsi_stmt (i),
7880 &eni_time_weights) * bb->count;
7881 else if (profile_status == PROFILE_GUESSED)
7882 record->time[after_pass]
7883 += estimate_num_insns (gsi_stmt (i),
7884 &eni_time_weights) * bb->frequency;
7885 }
7886 }
7887
/* GIMPLE instantiation of the CFG manipulation hooks; one entry per
   slot of struct cfg_hooks (see cfghooks.h for each slot's contract).  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, 		/* flush_pending_stmts */  
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record, /* account_profile_record */
};
7924
7925
/* Split all critical edges in the current function.  Also splits some
   non-critical edges that later edge insertion could not handle (see
   below).  Returns 0 (no extra TODO flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	      split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split when the source block ends in a control
		 statement that is not a plain return (or the
		 __builtin_return equivalent).  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
7973
namespace {

/* Pass descriptor for the critical-edge-splitting pass ("crited");
   provides PROP_no_crit_edges for passes that require it.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  /* Cloneable so the pass manager may schedule it more than once.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
8011
8012
8013 /* Build a ternary operation and gimplify it. Emit code before GSI.
8014 Return the gimple_val holding the result. */
8015
8016 tree
8017 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8018 tree type, tree a, tree b, tree c)
8019 {
8020 tree ret;
8021 location_t loc = gimple_location (gsi_stmt (*gsi));
8022
8023 ret = fold_build3_loc (loc, code, type, a, b, c);
8024 STRIP_NOPS (ret);
8025
8026 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8027 GSI_SAME_STMT);
8028 }
8029
8030 /* Build a binary operation and gimplify it. Emit code before GSI.
8031 Return the gimple_val holding the result. */
8032
8033 tree
8034 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8035 tree type, tree a, tree b)
8036 {
8037 tree ret;
8038
8039 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8040 STRIP_NOPS (ret);
8041
8042 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8043 GSI_SAME_STMT);
8044 }
8045
8046 /* Build a unary operation and gimplify it. Emit code before GSI.
8047 Return the gimple_val holding the result. */
8048
8049 tree
8050 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8051 tree a)
8052 {
8053 tree ret;
8054
8055 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8056 STRIP_NOPS (ret);
8057
8058 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8059 GSI_SAME_STMT);
8060 }
8061
8062
8063 \f
/* Emit return warnings: warn when a function marked noreturn can in fact
   return, and (-Wreturn-type) when control reaches the end of a non-void
   function via a value-less "return;".  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  /* Pick the location of the first return-like statement that
	     has one, for a precise diagnostic.
	     NOTE(review): assumes last_stmt is non-NULL for every EXIT
	     predecessor -- confirm no empty block can reach EXIT here.  */
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Suppress any further -Wreturn-type warnings for this
		 function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
8120
8121
8122 /* Given a basic block B which ends with a conditional and has
8123 precisely two successors, determine which of the edges is taken if
8124 the conditional is true and which is taken if the conditional is
8125 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8126
8127 void
8128 extract_true_false_edges_from_block (basic_block b,
8129 edge *true_edge,
8130 edge *false_edge)
8131 {
8132 edge e = EDGE_SUCC (b, 0);
8133
8134 if (e->flags & EDGE_TRUE_VALUE)
8135 {
8136 *true_edge = e;
8137 *false_edge = EDGE_SUCC (b, 1);
8138 }
8139 else
8140 {
8141 *false_edge = e;
8142 *true_edge = EDGE_SUCC (b, 1);
8143 }
8144 }
8145
namespace {

/* Pass descriptor for the return-warning pass; never gated, no TODOs.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8182
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  Recurses into all
   statement containers (binds, tries, catches, EH filters) in SEQ.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  /* The attribute lives on the function type; FDECL may be NULL
	     for indirect calls, hence the two warning variants below.  */
	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
8247
/* Pass entry point: warn over the current function's pre-CFG gimple
   body.  Returns 0 (no TODO flags).  */
static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}
8254
/* Gate: run the pass only when -Wunused-result is enabled.  */
static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}
8260
namespace {

/* Pass descriptor for the warn_unused_result pass; runs on the
   gimplified body (PROP_gimple_any), gated on -Wunused-result.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
8298
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.

   Returns a mask of TODO_* flags describing follow-up work (SSA update,
   CFG cleanup, verification).  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Compute the factor by which all profile counts must be scaled so
     they match the count stored on the function's cgraph node.  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  /* The call was discovered const/pure; abnormal call
		     edges from it may now be dead.  */
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	     }

	  /* A call discovered nothrow may leave dead EH edges behind.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
8403
namespace {

/* Pass descriptor wrapping execute_fixup_cfg for the pass manager.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass manager may schedule it more than once.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
8441
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark the GC-managed fields reachable from edge E.  Which member of
   the insns union is live depends on the current IR (gimple vs. RTL),
   so dispatch on current_ir_type.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
8461
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note for PCH the fields reachable from edge E; mirrors gt_ggc_mx,
   including the IR-dependent choice of insns union member.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
8481
/* Pointer-relocation variant of PCH support for edge_def: apply OP
   (with COOKIE) to each GC-managed pointer field of E, choosing the
   live insns union member by the current IR.  */
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}