/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
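
/* As an illustrative sketch (hypothetical source, not from this file):
   in

     switch (x) { case 1: case 2: goto L1; default: goto L2; }

   cases 1 and 2 share the single CFG edge into L1's block, so
   edge_to_cases maps that one edge to a chain of the two
   CASE_LABEL_EXPRs linked through CASE_CHAIN, and redirecting the
   edge can update both cases at once.  */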

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
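
/* An illustrative sketch of what the two functions above consume
   (SSA names made up for the example): a loop annotated with
   "#pragma GCC ivdep" arrives here with its exit condition guarded by
   an internal ANNOTATE call, roughly

     _1 = .ANNOTATE (_0, annot_expr_ivdep_kind);
     if (_1 != 0) goto <loop body>; else goto <exit>;

   replace_loop_annotate_in_block then records loop->safelen = INT_MAX
   on the enclosing loop and rewrites the call to the plain copy
   "_1 = _0".  */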

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
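
/* For example, "goto *p;" is a computed goto (the destination is a
   variable, not a LABEL_DECL), while "goto done;" is not.  */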

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
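
/* A sketch of the LHS splitting above, with made-up names: a call
   such as

     x = setjmp (env);

   that can make an abnormal goto is rewritten as

     tmp = setjmp (env);   <- ends the current basic block
     x = tmp;              <- first stmt of the fallthru block

   so the old value of "x" is still available on the abnormal edge
   leaving the call, and the SSA life-ranges do not overlap.  */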

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
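
/* Illustrative sketch of the factoring above (made-up names): two
   computed gotos "goto *p;" and "goto *q;" become

     gotovar = p;          gotovar = q;
     goto <dispatcher>;    goto <dispatcher>;

     <dispatcher>:
     goto *gotovar;

   so only the dispatcher block needs an edge to every potential
   target label, instead of each computed-goto block having one.  */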

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls that return
     twice or from forced labels, factor the abnormal edges too, by
     having all abnormal edges from the calls go to a common artificial
     basic block with an ABNORMAL_DISPATCHER internal call and abnormal
     edges from that basic block to all forced labels and calls returning
     twice.  We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
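
/* Hypothetical example: if "if (p) x++; else y++;" sits on a single
   source line, the condition block and both arm blocks share that
   line, so the arms receive distinct discriminators.  A sample-based
   profiler can then attribute its samples to, say, line 42
   discriminator 1 vs. discriminator 2 rather than merging all three
   blocks into one count.  */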

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, base_index, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
		  ? CASE_HIGH (base_case)
		  : CASE_LOW (base_case);
      base_index = i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
			  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  gimple_switch_set_label (stmt, base_index, NULL_TREE);
	  new_size--;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
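
/* Illustrative before/after (hypothetical source): given

     switch (x)
       {
       case 1:
       case 2:
       case 3: foo (); break;
       default: bar ();
       }

   all three cases jump to the same block, so the three consecutive
   entries are merged into a single entry equivalent to
   "case 1 ... 3:", i.e. one CASE_LABEL_EXPR with CASE_HIGH set to 3.  */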
1745
1746 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1747 and scan the sorted vector of cases. Combine the ones jumping to the
1748 same label. */
1749
1750 void
1751 group_case_labels (void)
1752 {
1753 basic_block bb;
1754
1755 FOR_EACH_BB_FN (bb, cfun)
1756 {
1757 gimple *stmt = last_stmt (bb);
1758 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1759 group_case_labels_stmt (as_a <gswitch *> (stmt));
1760 }
1761 }
1762
1763 /* Checks whether we can merge block B into block A. */
1764
1765 static bool
1766 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1767 {
1768 gimple *stmt;
1769
1770 if (!single_succ_p (a))
1771 return false;
1772
1773 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1774 return false;
1775
1776 if (single_succ (a) != b)
1777 return false;
1778
1779 if (!single_pred_p (b))
1780 return false;
1781
1782 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1783 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1784 return false;
1785
1786 /* If A ends by a statement causing exceptions or something similar, we
1787 cannot merge the blocks. */
1788 stmt = last_stmt (a);
1789 if (stmt && stmt_ends_bb_p (stmt))
1790 return false;
1791
1792 /* Do not allow a block with only a non-local label to be merged. */
1793 if (stmt)
1794 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1795 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1796 return false;
1797
1798 /* Examine the labels at the beginning of B. */
1799 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1800 gsi_next (&gsi))
1801 {
1802 tree lab;
1803 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1804 if (!label_stmt)
1805 break;
1806 lab = gimple_label_label (label_stmt);
1807
1808 /* Do not remove user forced labels or for -O0 any user labels. */
1809 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1810 return false;
1811 }
1812
1813 /* Protect simple loop latches. We only want to avoid merging
1814 the latch with the loop header or with a block in another
1815 loop in this case. */
1816 if (current_loops
1817 && b->loop_father->latch == b
1818 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1819 && (b->loop_father->header == a
1820 || b->loop_father != a->loop_father))
1821 return false;
1822
1823 /* It must be possible to eliminate all phi nodes in B. If ssa form
1824 is not up-to-date and a name-mapping is registered, we cannot eliminate
1825 any phis. Symbols marked for renaming are never a problem though. */
1826 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1827 gsi_next (&gsi))
1828 {
1829 gphi *phi = gsi.phi ();
1830 /* Technically only new names matter. */
1831 if (name_registered_for_update_p (PHI_RESULT (phi)))
1832 return false;
1833 }
1834
1835 /* When not optimizing, don't merge if we'd lose goto_locus. */
1836 if (!optimize
1837 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1838 {
1839 location_t goto_locus = single_succ_edge (a)->goto_locus;
1840 gimple_stmt_iterator prev, next;
1841 prev = gsi_last_nondebug_bb (a);
1842 next = gsi_after_labels (b);
1843 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1844 gsi_next_nondebug (&next);
1845 if ((gsi_end_p (prev)
1846 || gimple_location (gsi_stmt (prev)) != goto_locus)
1847 && (gsi_end_p (next)
1848 || gimple_location (gsi_stmt (next)) != goto_locus))
1849 return false;
1850 }
1851
1852 return true;
1853 }
1854
1855 /* Replaces all uses of NAME by VAL. */
1856
1857 void
1858 replace_uses_by (tree name, tree val)
1859 {
1860 imm_use_iterator imm_iter;
1861 use_operand_p use;
1862 gimple *stmt;
1863 edge e;
1864
1865 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1866 {
1867 /* Mark the block if we change the last stmt in it. */
1868 if (cfgcleanup_altered_bbs
1869 && stmt_ends_bb_p (stmt))
1870 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1871
1872 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1873 {
1874 replace_exp (use, val);
1875
1876 if (gimple_code (stmt) == GIMPLE_PHI)
1877 {
1878 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1879 PHI_ARG_INDEX_FROM_USE (use));
1880 if (e->flags & EDGE_ABNORMAL
1881 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1882 {
1883 /* This can only occur for virtual operands, since
1884 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1885 would prevent replacement. */
1886 gcc_checking_assert (virtual_operand_p (name));
1887 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1888 }
1889 }
1890 }
1891
1892 if (gimple_code (stmt) != GIMPLE_PHI)
1893 {
1894 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1895 gimple *orig_stmt = stmt;
1896 size_t i;
1897
1898 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1899 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1900 only change something from non-invariant to invariant, and only
1901 when propagating constants. */
1902 if (is_gimple_min_invariant (val))
1903 for (i = 0; i < gimple_num_ops (stmt); i++)
1904 {
1905 tree op = gimple_op (stmt, i);
1906 /* Operands may be empty here. For example, the labels
1907 of a GIMPLE_COND are nulled out following the creation
1908 of the corresponding CFG edges. */
1909 if (op && TREE_CODE (op) == ADDR_EXPR)
1910 recompute_tree_invariant_for_addr_expr (op);
1911 }
1912
1913 if (fold_stmt (&gsi))
1914 stmt = gsi_stmt (gsi);
1915
1916 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1917 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1918
1919 update_stmt (stmt);
1920 }
1921 }
1922
1923 gcc_checking_assert (has_zero_uses (name));
1924
1925 /* Also update the trees stored in loop structures. */
1926 if (current_loops)
1927 {
1928 struct loop *loop;
1929
1930 FOR_EACH_LOOP (loop, 0)
1931 {
1932 substitute_in_loop_info (loop, name, val);
1933 }
1934 }
1935 }
1936
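/* A minimal standalone sketch of the core replace-uses idea, using a
   toy use list instead of GCC's immediate-use iterators (all names
   below are hypothetical, not GCC API):

     struct toy_use { struct toy_use *next; void **slot; };

     static void
     toy_replace_uses (struct toy_use *uses, void *old_val, void *new_val)
     {
       for (struct toy_use *u = uses; u; u = u->next)
         if (*u->slot == old_val)
           *u->slot = new_val;
     }

   replace_uses_by above additionally refolds each touched statement,
   keeps ADDR_EXPR invariant flags and EH edges up to date, and
   records blocks whose last statement changed for CFG cleanup.  */
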
1937 /* Merge block B into block A. */
1938
1939 static void
1940 gimple_merge_blocks (basic_block a, basic_block b)
1941 {
1942 gimple_stmt_iterator last, gsi;
1943 gphi_iterator psi;
1944
1945 if (dump_file)
1946 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1947
1948 /* Remove all single-valued PHI nodes from block B of the form
1949 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1950 gsi = gsi_last_bb (a);
1951 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1952 {
1953 gimple *phi = gsi_stmt (psi);
1954 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1955 gimple *copy;
1956 bool may_replace_uses = (virtual_operand_p (def)
1957 || may_propagate_copy (def, use));
1958
1959 /* If we maintain loop-closed SSA form, do not propagate arguments
1960 of loop exit phi nodes. */
1961 if (current_loops
1962 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1963 && !virtual_operand_p (def)
1964 && TREE_CODE (use) == SSA_NAME
1965 && a->loop_father != b->loop_father)
1966 may_replace_uses = false;
1967
1968 if (!may_replace_uses)
1969 {
1970 gcc_assert (!virtual_operand_p (def));
1971
1972 /* Note that just emitting the copies is fine -- there is no problem
1973 with ordering of phi nodes. This is because A is the single
1974 predecessor of B, therefore results of the phi nodes cannot
1975 appear as arguments of the phi nodes. */
1976 copy = gimple_build_assign (def, use);
1977 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1978 remove_phi_node (&psi, false);
1979 }
1980 else
1981 {
1982 /* If we deal with a PHI for virtual operands, we can simply
1983 propagate these without fussing with folding or updating
1984 the stmt. */
1985 if (virtual_operand_p (def))
1986 {
1987 imm_use_iterator iter;
1988 use_operand_p use_p;
1989 gimple *stmt;
1990
1991 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1992 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1993 SET_USE (use_p, use);
1994
1995 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1996 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1997 }
1998 else
1999 replace_uses_by (def, use);
2000
2001 remove_phi_node (&psi, true);
2002 }
2003 }
2004
2005 /* Ensure that B follows A. */
2006 move_block_after (b, a);
2007
2008 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2009 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2010
2011 /* Remove labels from B and set gimple_bb to A for other statements. */
2012 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2013 {
2014 gimple *stmt = gsi_stmt (gsi);
2015 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2016 {
2017 tree label = gimple_label_label (label_stmt);
2018 int lp_nr;
2019
2020 gsi_remove (&gsi, false);
2021
2022 /* Now that we can thread computed gotos, we might have
2023 a situation where we have a forced label in block B.
2024 However, the label at the start of block B might still be
2025 used in other ways (think about the runtime checking for
2026 Fortran assigned gotos). So we cannot just delete the
2027 label. Instead we move the label to the start of block A. */
2028 if (FORCED_LABEL (label))
2029 {
2030 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2031 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2032 }
2033 /* Other user labels are kept around in the form of a debug stmt. */
2034 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2035 {
2036 gimple *dbg = gimple_build_debug_bind (label,
2037 integer_zero_node,
2038 stmt);
2039 gimple_debug_bind_reset_value (dbg);
2040 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2041 }
2042
2043 lp_nr = EH_LANDING_PAD_NR (label);
2044 if (lp_nr)
2045 {
2046 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2047 lp->post_landing_pad = NULL;
2048 }
2049 }
2050 else
2051 {
2052 gimple_set_bb (stmt, a);
2053 gsi_next (&gsi);
2054 }
2055 }
2056
2057 /* When merging two BBs, if their counts are different, the larger count
2058 is selected as the new bb count. This is to handle inconsistent
2059 profiles. */
2060 if (a->loop_father == b->loop_father)
2061 {
2062 a->count = MAX (a->count, b->count);
2063 a->frequency = MAX (a->frequency, b->frequency);
2064 }
2065
2066 /* Merge the sequences. */
2067 last = gsi_last_bb (a);
2068 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2069 set_bb_seq (b, NULL);
2070
2071 if (cfgcleanup_altered_bbs)
2072 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2073 }
2074
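/* A sketch (not actual dump output) of the degenerate-PHI handling
   above, where A is the single predecessor of B:

     A:                          A:
       x_1 = ...;                  x_1 = ...;
     B:                     =>     uses of x_2 become uses of x_1
       x_2 = PHI <x_1(A)>;         (or "x_2 = x_1;" is emitted at the
       ... = x_2 + 1;              end of A when propagating x_1 into
                                   the uses is not allowed)  */
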
2075
2076 /* Of BB's two successors, return the one that is not reachable
2077 through a complex edge, if there is one. Otherwise, return BB.
2078 We use this in optimizations that use post-dominators for their
2079 heuristics, to catch the cases in C++ where function calls are involved. */
2080
2081 basic_block
2082 single_noncomplex_succ (basic_block bb)
2083 {
2084 edge e0, e1;
2085 if (EDGE_COUNT (bb->succs) != 2)
2086 return bb;
2087
2088 e0 = EDGE_SUCC (bb, 0);
2089 e1 = EDGE_SUCC (bb, 1);
2090 if (e0->flags & EDGE_COMPLEX)
2091 return e1->dest;
2092 if (e1->flags & EDGE_COMPLEX)
2093 return e0->dest;
2094
2095 return bb;
2096 }
2097
2098 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2099
2100 void
2101 notice_special_calls (gcall *call)
2102 {
2103 int flags = gimple_call_flags (call);
2104
2105 if (flags & ECF_MAY_BE_ALLOCA)
2106 cfun->calls_alloca = true;
2107 if (flags & ECF_RETURNS_TWICE)
2108 cfun->calls_setjmp = true;
2109 }
2110
2111
2112 /* Clear flags set by notice_special_calls. Used by dead code removal
2113 to update the flags. */
2114
2115 void
2116 clear_special_calls (void)
2117 {
2118 cfun->calls_alloca = false;
2119 cfun->calls_setjmp = false;
2120 }
2121
2122 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2123
2124 static void
2125 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2126 {
2127 /* Since this block is no longer reachable, we can just delete all
2128 of its PHI nodes. */
2129 remove_phi_nodes (bb);
2130
2131 /* Remove edges to BB's successors. */
2132 while (EDGE_COUNT (bb->succs) > 0)
2133 remove_edge (EDGE_SUCC (bb, 0));
2134 }
2135
2136
2137 /* Remove statements of basic block BB. */
2138
2139 static void
2140 remove_bb (basic_block bb)
2141 {
2142 gimple_stmt_iterator i;
2143
2144 if (dump_file)
2145 {
2146 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2147 if (dump_flags & TDF_DETAILS)
2148 {
2149 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2150 fprintf (dump_file, "\n");
2151 }
2152 }
2153
2154 if (current_loops)
2155 {
2156 struct loop *loop = bb->loop_father;
2157
2158 /* If a loop gets removed, clean up the information associated
2159 with it. */
2160 if (loop->latch == bb
2161 || loop->header == bb)
2162 free_numbers_of_iterations_estimates_loop (loop);
2163 }
2164
2165 /* Remove all the instructions in the block. */
2166 if (bb_seq (bb) != NULL)
2167 {
2168 /* Walk backwards so as to get a chance to substitute all
2169 released DEFs into debug stmts. See
2170 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2171 details. */
2172 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2173 {
2174 gimple *stmt = gsi_stmt (i);
2175 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2176 if (label_stmt
2177 && (FORCED_LABEL (gimple_label_label (label_stmt))
2178 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2179 {
2180 basic_block new_bb;
2181 gimple_stmt_iterator new_gsi;
2182
2183 /* A non-reachable non-local label may still be referenced.
2184 But it no longer needs to carry the extra semantics of
2185 non-locality. */
2186 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2187 {
2188 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2189 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2190 }
2191
2192 new_bb = bb->prev_bb;
2193 new_gsi = gsi_start_bb (new_bb);
2194 gsi_remove (&i, false);
2195 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2196 }
2197 else
2198 {
2199 /* Release SSA definitions. */
2200 release_defs (stmt);
2201 gsi_remove (&i, true);
2202 }
2203
2204 if (gsi_end_p (i))
2205 i = gsi_last_bb (bb);
2206 else
2207 gsi_prev (&i);
2208 }
2209 }
2210
2211 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2212 bb->il.gimple.seq = NULL;
2213 bb->il.gimple.phi_nodes = NULL;
2214 }
2215
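/* A minimal GNU C illustration (label-as-value extension) of a forced
   label, i.e. one whose address is taken; even if its block becomes
   unreachable, the label itself may still be referenced, so it is
   moved to the preceding block rather than deleted:

     void *
     get_target (void)
     {
     tgt:
       return &&tgt;
     }
*/
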
2216
2217 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2218 predicate VAL, return the edge that will be taken out of the block.
2219 If VAL does not match a unique edge, NULL is returned. */
2220
2221 edge
2222 find_taken_edge (basic_block bb, tree val)
2223 {
2224 gimple *stmt;
2225
2226 stmt = last_stmt (bb);
2227
2228 gcc_assert (stmt);
2229 gcc_assert (is_ctrl_stmt (stmt));
2230
2231 if (val == NULL)
2232 return NULL;
2233
2234 if (!is_gimple_min_invariant (val))
2235 return NULL;
2236
2237 if (gimple_code (stmt) == GIMPLE_COND)
2238 return find_taken_edge_cond_expr (bb, val);
2239
2240 if (gimple_code (stmt) == GIMPLE_SWITCH)
2241 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2242
2243 if (computed_goto_p (stmt))
2244 {
2245 /* Only optimize if the argument is a label; if the argument is
2246 not a label then we cannot construct a proper CFG.
2247
2248 It may be the case that we only need to allow the LABEL_REF to
2249 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2250 appear inside a LABEL_EXPR just to be safe. */
2251 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2252 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2253 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2254 return NULL;
2255 }
2256
2257 gcc_unreachable ();
2258 }
2259
2260 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2261 statement, determine which of the outgoing edges will be taken out of the
2262 block. Return NULL if the taken edge cannot be determined. */
2263
2264 static edge
2265 find_taken_edge_computed_goto (basic_block bb, tree val)
2266 {
2267 basic_block dest;
2268 edge e = NULL;
2269
2270 dest = label_to_block (val);
2271 if (dest)
2272 {
2273 e = find_edge (bb, dest);
2274 gcc_assert (e != NULL);
2275 }
2276
2277 return e;
2278 }
2279
2280 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2281 statement, determine which of the two edges will be taken out of the
2282 block. Return NULL if either edge may be taken. */
2283
2284 static edge
2285 find_taken_edge_cond_expr (basic_block bb, tree val)
2286 {
2287 edge true_edge, false_edge;
2288
2289 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2290
2291 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2292 return (integer_zerop (val) ? false_edge : true_edge);
2293 }
2294
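/* For instance (a sketch, not actual dump output), if BB ends in

     if (a_1 != 0) goto L1; else goto L2;

   and VAL is the constant 0, the false edge (to L2) is returned;
   any nonzero INTEGER_CST selects the true edge.  */
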
2295 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2296 statement, determine which edge will be taken out of the block. Return
2297 NULL if any edge may be taken. */
2298
2299 static edge
2300 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2301 tree val)
2302 {
2303 basic_block dest_bb;
2304 edge e;
2305 tree taken_case;
2306
2307 taken_case = find_case_label_for_value (switch_stmt, val);
2308 dest_bb = label_to_block (CASE_LABEL (taken_case));
2309
2310 e = find_edge (bb, dest_bb);
2311 gcc_assert (e);
2312 return e;
2313 }
2314
2315
2316 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2317 We can make optimal use here of the fact that the case labels are
2318 sorted: We can do a binary search for a case matching VAL. */
2319
2320 static tree
2321 find_case_label_for_value (gswitch *switch_stmt, tree val)
2322 {
2323 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2324 tree default_case = gimple_switch_default_label (switch_stmt);
2325
2326 for (low = 0, high = n; high - low > 1; )
2327 {
2328 size_t i = (high + low) / 2;
2329 tree t = gimple_switch_label (switch_stmt, i);
2330 int cmp;
2331
2332 /* Cache the result of comparing CASE_LOW and val. */
2333 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2334
2335 if (cmp > 0)
2336 high = i;
2337 else
2338 low = i;
2339
2340 if (CASE_HIGH (t) == NULL)
2341 {
2342 /* A single-valued case label. */
2343 if (cmp == 0)
2344 return t;
2345 }
2346 else
2347 {
2348 /* A case range. We can only handle integer ranges. */
2349 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2350 return t;
2351 }
2352 }
2353
2354 return default_case;
2355 }
2356
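/* A minimal standalone sketch of the same binary search over sorted
   [low, high] ranges (toy types, not GCC's tree representation;
   index 0 stands in for the default label and, as above, is never
   examined because the probe index stays >= 1):

     struct toy_case { long low, high; };

     static size_t
     toy_find_case (const struct toy_case *cases, size_t n, long val)
     {
       size_t low = 0, high = n;
       while (high - low > 1)
         {
           size_t i = (high + low) / 2;
           if (cases[i].low > val)
             high = i;
           else
             low = i;
           if (cases[i].low <= val && val <= cases[i].high)
             return i;
         }
       return 0;
     }
*/
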
2357
2358 /* Dump a basic block on stderr. */
2359
2360 void
2361 gimple_debug_bb (basic_block bb)
2362 {
2363 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2364 }
2365
2366
2367 /* Dump basic block with index N on stderr. */
2368
2369 basic_block
2370 gimple_debug_bb_n (int n)
2371 {
2372 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2373 return BASIC_BLOCK_FOR_FN (cfun, n);
2374 }
2375
2376
2377 /* Dump the CFG on stderr.
2378
2379 FLAGS are the same used by the tree dumping functions
2380 (see TDF_* in dumpfile.h). */
2381
2382 void
2383 gimple_debug_cfg (dump_flags_t flags)
2384 {
2385 gimple_dump_cfg (stderr, flags);
2386 }
2387
2388
2389 /* Dump the program showing basic block boundaries on the given FILE.
2390
2391 FLAGS are the same used by the tree dumping functions (see TDF_* in
2392 tree.h). */
2393
2394 void
2395 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2396 {
2397 if (flags & TDF_DETAILS)
2398 {
2399 dump_function_header (file, current_function_decl, flags);
2400 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2401 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2402 last_basic_block_for_fn (cfun));
2403
2404 brief_dump_cfg (file, flags);
2405 fprintf (file, "\n");
2406 }
2407
2408 if (flags & TDF_STATS)
2409 dump_cfg_stats (file);
2410
2411 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2412 }
2413
2414
2415 /* Dump CFG statistics on FILE. */
2416
2417 void
2418 dump_cfg_stats (FILE *file)
2419 {
2420 static long max_num_merged_labels = 0;
2421 unsigned long size, total = 0;
2422 long num_edges;
2423 basic_block bb;
2424 const char * const fmt_str = "%-30s%-13s%12s\n";
2425 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2426 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2427 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2428 const char *funcname = current_function_name ();
2429
2430 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2431
2432 fprintf (file, "---------------------------------------------------------\n");
2433 fprintf (file, fmt_str, "", " Number of ", "Memory");
2434 fprintf (file, fmt_str, "", " instances ", "used ");
2435 fprintf (file, "---------------------------------------------------------\n");
2436
2437 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2438 total += size;
2439 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2440 SCALE (size), LABEL (size));
2441
2442 num_edges = 0;
2443 FOR_EACH_BB_FN (bb, cfun)
2444 num_edges += EDGE_COUNT (bb->succs);
2445 size = num_edges * sizeof (struct edge_def);
2446 total += size;
2447 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2448
2449 fprintf (file, "---------------------------------------------------------\n");
2450 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2451 LABEL (total));
2452 fprintf (file, "---------------------------------------------------------\n");
2453 fprintf (file, "\n");
2454
2455 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2456 max_num_merged_labels = cfg_stats.num_merged_labels;
2457
2458 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2459 cfg_stats.num_merged_labels, max_num_merged_labels);
2460
2461 fprintf (file, "\n");
2462 }
2463
2464
2465 /* Dump CFG statistics on stderr. Keep extern so that it's always
2466 linked in the final executable. */
2467
2468 DEBUG_FUNCTION void
2469 debug_cfg_stats (void)
2470 {
2471 dump_cfg_stats (stderr);
2472 }
2473
2474 /*---------------------------------------------------------------------------
2475 Miscellaneous helpers
2476 ---------------------------------------------------------------------------*/
2477
2478 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2479 flow. Transfers of control flow associated with EH are excluded. */
2480
2481 static bool
2482 call_can_make_abnormal_goto (gimple *t)
2483 {
2484 /* If the function has no non-local labels, then a call cannot make an
2485 abnormal transfer of control. */
2486 if (!cfun->has_nonlocal_label
2487 && !cfun->calls_setjmp)
2488 return false;
2489
2490 /* Likewise if the call has no side effects. */
2491 if (!gimple_has_side_effects (t))
2492 return false;
2493
2494 /* Likewise if the called function is leaf. */
2495 if (gimple_call_flags (t) & ECF_LEAF)
2496 return false;
2497
2498 return true;
2499 }
2500
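/* A minimal standalone illustration of such a call, assuming only the
   standard <setjmp.h> semantics: the call to work () below may return
   control to the setjmp site instead of to its own call site, which
   is exactly the abnormal transfer this predicate is about:

     #include <setjmp.h>

     static jmp_buf env;

     static void
     work (int fail)
     {
       if (fail)
         longjmp (env, 1);
     }

     int
     run (int fail)
     {
       if (setjmp (env))
         return -1;          (reached again via longjmp)
       work (fail);
       return 0;
     }
*/
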
2501
2502 /* Return true if T can make an abnormal transfer of control flow.
2503 Transfers of control flow associated with EH are excluded. */
2504
2505 bool
2506 stmt_can_make_abnormal_goto (gimple *t)
2507 {
2508 if (computed_goto_p (t))
2509 return true;
2510 if (is_gimple_call (t))
2511 return call_can_make_abnormal_goto (t);
2512 return false;
2513 }
2514
2515
2516 /* Return true if T represents a stmt that always transfers control. */
2517
2518 bool
2519 is_ctrl_stmt (gimple *t)
2520 {
2521 switch (gimple_code (t))
2522 {
2523 case GIMPLE_COND:
2524 case GIMPLE_SWITCH:
2525 case GIMPLE_GOTO:
2526 case GIMPLE_RETURN:
2527 case GIMPLE_RESX:
2528 return true;
2529 default:
2530 return false;
2531 }
2532 }
2533
2534
2535 /* Return true if T is a statement that may alter the flow of control
2536 (e.g., a call to a non-returning function). */
2537
2538 bool
2539 is_ctrl_altering_stmt (gimple *t)
2540 {
2541 gcc_assert (t);
2542
2543 switch (gimple_code (t))
2544 {
2545 case GIMPLE_CALL:
2546 /* The per-stmt call flag indicates whether the call could alter
2547 control flow. */
2548 if (gimple_call_ctrl_altering_p (t))
2549 return true;
2550 break;
2551
2552 case GIMPLE_EH_DISPATCH:
2553 /* EH_DISPATCH branches to the individual catch handlers at
2554 this level of a try or allowed-exceptions region. It can
2555 fallthru to the next statement as well. */
2556 return true;
2557
2558 case GIMPLE_ASM:
2559 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2560 return true;
2561 break;
2562
2563 CASE_GIMPLE_OMP:
2564 /* OpenMP directives alter control flow. */
2565 return true;
2566
2567 case GIMPLE_TRANSACTION:
2568 /* A transaction start alters control flow. */
2569 return true;
2570
2571 default:
2572 break;
2573 }
2574
2575 /* If a statement can throw, it alters control flow. */
2576 return stmt_can_throw_internal (t);
2577 }
2578
2579
2580 /* Return true if T is a simple local goto. */
2581
2582 bool
2583 simple_goto_p (gimple *t)
2584 {
2585 return (gimple_code (t) == GIMPLE_GOTO
2586 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2587 }
2588
2589
2590 /* Return true if STMT should start a new basic block. PREV_STMT is
2591 the statement preceding STMT. It is used when STMT is a label or a
2592 case label. Labels should only start a new basic block if their
2593 previous statement wasn't a label. Otherwise, sequences of labels
2594 would generate unnecessary basic blocks that only contain a single
2595 label. */
2596
2597 static inline bool
2598 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2599 {
2600 if (stmt == NULL)
2601 return false;
2602
2603 /* Labels start a new basic block only if the preceding statement
2604 wasn't a label of the same type. This prevents the creation of
2605 consecutive blocks that have nothing but a single label. */
2606 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2607 {
2608 /* Nonlocal and computed GOTO targets always start a new block. */
2609 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2610 || FORCED_LABEL (gimple_label_label (label_stmt)))
2611 return true;
2612
2613 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2614 {
2615 if (DECL_NONLOCAL (gimple_label_label (
2616 as_a <glabel *> (prev_stmt))))
2617 return true;
2618
2619 cfg_stats.num_merged_labels++;
2620 return false;
2621 }
2622 else
2623 return true;
2624 }
2625 else if (gimple_code (stmt) == GIMPLE_CALL)
2626 {
2627 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2628 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2629 start a new block. */
2630 return true;
2631 if (gimple_call_internal_p (stmt, IFN_PHI)
2632 && prev_stmt
2633 && gimple_code (prev_stmt) != GIMPLE_LABEL
2634 && (gimple_code (prev_stmt) != GIMPLE_CALL
2635 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2636 /* PHI nodes start a new block unless preceded by a label
2637 or another PHI. */
2638 return true;
2639 }
2640
2641 return false;
2642 }
2643
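/* For example, in this standalone C function both labels end up
   adjacent in the IL, so only the first one starts a basic block and
   the second is counted as a merged label (a sketch of the intent,
   not of any particular dump):

     int
     f (int x)
     {
       if (x == 1) goto a;
       if (x == 2) goto b;
       return 0;
     a:
     b:
       return 1;
     }
*/
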
2644
2645 /* Return true if T should end a basic block. */
2646
2647 bool
2648 stmt_ends_bb_p (gimple *t)
2649 {
2650 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2651 }
2652
2653 /* Remove block annotations and other data structures. */
2654
2655 void
2656 delete_tree_cfg_annotations (struct function *fn)
2657 {
2658 vec_free (label_to_block_map_for_fn (fn));
2659 }
2660
2661 /* Return the virtual phi in BB. */
2662
2663 gphi *
2664 get_virtual_phi (basic_block bb)
2665 {
2666 for (gphi_iterator gsi = gsi_start_phis (bb);
2667 !gsi_end_p (gsi);
2668 gsi_next (&gsi))
2669 {
2670 gphi *phi = gsi.phi ();
2671
2672 if (virtual_operand_p (PHI_RESULT (phi)))
2673 return phi;
2674 }
2675
2676 return NULL;
2677 }
2678
2679 /* Return the first statement in basic block BB. */
2680
2681 gimple *
2682 first_stmt (basic_block bb)
2683 {
2684 gimple_stmt_iterator i = gsi_start_bb (bb);
2685 gimple *stmt = NULL;
2686
2687 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2688 {
2689 gsi_next (&i);
2690 stmt = NULL;
2691 }
2692 return stmt;
2693 }
2694
2695 /* Return the first non-label statement in basic block BB. */
2696
2697 static gimple *
2698 first_non_label_stmt (basic_block bb)
2699 {
2700 gimple_stmt_iterator i = gsi_start_bb (bb);
2701 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2702 gsi_next (&i);
2703 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2704 }
2705
2706 /* Return the last statement in basic block BB. */
2707
2708 gimple *
2709 last_stmt (basic_block bb)
2710 {
2711 gimple_stmt_iterator i = gsi_last_bb (bb);
2712 gimple *stmt = NULL;
2713
2714 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2715 {
2716 gsi_prev (&i);
2717 stmt = NULL;
2718 }
2719 return stmt;
2720 }
2721
2722 /* Return the last statement of an otherwise empty block. Return NULL
2723 if the block is totally empty, or if it contains more than one
2724 statement. */
2725
2726 gimple *
2727 last_and_only_stmt (basic_block bb)
2728 {
2729 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2730 gimple *last, *prev;
2731
2732 if (gsi_end_p (i))
2733 return NULL;
2734
2735 last = gsi_stmt (i);
2736 gsi_prev_nondebug (&i);
2737 if (gsi_end_p (i))
2738 return last;
2739
2740 /* Empty statements should no longer appear in the instruction stream.
2741 Everything that might have appeared before should be deleted by
2742 remove_useless_stmts, and the optimizers should just gsi_remove
2743 instead of smashing with build_empty_stmt.
2744
2745 Thus the only thing that should appear here in a block containing
2746 one executable statement is a label. */
2747 prev = gsi_stmt (i);
2748 if (gimple_code (prev) == GIMPLE_LABEL)
2749 return last;
2750 else
2751 return NULL;
2752 }
2753
2754 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2755
2756 static void
2757 reinstall_phi_args (edge new_edge, edge old_edge)
2758 {
2759 edge_var_map *vm;
2760 int i;
2761 gphi_iterator phis;
2762
2763 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2764 if (!v)
2765 return;
2766
2767 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2768 v->iterate (i, &vm) && !gsi_end_p (phis);
2769 i++, gsi_next (&phis))
2770 {
2771 gphi *phi = phis.phi ();
2772 tree result = redirect_edge_var_map_result (vm);
2773 tree arg = redirect_edge_var_map_def (vm);
2774
2775 gcc_assert (result == gimple_phi_result (phi));
2776
2777 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2778 }
2779
2780 redirect_edge_var_map_clear (old_edge);
2781 }
2782
2783 /* Returns the basic block after which the new basic block created
2784 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2785 near its "logical" location. This is of most help to humans looking
2786 at debugging dumps. */
2787
2788 basic_block
2789 split_edge_bb_loc (edge edge_in)
2790 {
2791 basic_block dest = edge_in->dest;
2792 basic_block dest_prev = dest->prev_bb;
2793
2794 if (dest_prev)
2795 {
2796 edge e = find_edge (dest_prev, dest);
2797 if (e && !(e->flags & EDGE_COMPLEX))
2798 return edge_in->src;
2799 }
2800 return dest_prev;
2801 }
2802
2803 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2804 Abort on abnormal edges. */
2805
2806 static basic_block
2807 gimple_split_edge (edge edge_in)
2808 {
2809 basic_block new_bb, after_bb, dest;
2810 edge new_edge, e;
2811
2812 /* Abnormal edges cannot be split. */
2813 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2814
2815 dest = edge_in->dest;
2816
2817 after_bb = split_edge_bb_loc (edge_in);
2818
2819 new_bb = create_empty_bb (after_bb);
2820 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2821 new_bb->count = edge_in->count;
2822 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2823 new_edge->probability = REG_BR_PROB_BASE;
2824 new_edge->count = edge_in->count;
2825
2826 e = redirect_edge_and_branch (edge_in, new_bb);
2827 gcc_assert (e == edge_in);
2828 reinstall_phi_args (new_edge, e);
2829
2830 return new_bb;
2831 }
2832
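/* A sketch of the transformation (not actual dump output): splitting
   the edge A->D inserts an empty block N on that edge only, leaving
   D's other incoming edges alone, and reinstall_phi_args moves the
   PHI arguments that were associated with A->D onto the new fallthru
   edge N->D:

     before:   A ---> D        after:   A ---> N ---> D
               B ---> D                 B ----------> D
*/
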
2833
2834 /* Verify properties of the address expression T with base object BASE. */
2835
2836 static tree
2837 verify_address (tree t, tree base)
2838 {
2839 bool old_constant;
2840 bool old_side_effects;
2841 bool new_constant;
2842 bool new_side_effects;
2843
2844 old_constant = TREE_CONSTANT (t);
2845 old_side_effects = TREE_SIDE_EFFECTS (t);
2846
2847 recompute_tree_invariant_for_addr_expr (t);
2848 new_side_effects = TREE_SIDE_EFFECTS (t);
2849 new_constant = TREE_CONSTANT (t);
2850
2851 if (old_constant != new_constant)
2852 {
2853 error ("constant not recomputed when ADDR_EXPR changed");
2854 return t;
2855 }
2856 if (old_side_effects != new_side_effects)
2857 {
2858 error ("side effects not recomputed when ADDR_EXPR changed");
2859 return t;
2860 }
2861
2862 if (!(VAR_P (base)
2863 || TREE_CODE (base) == PARM_DECL
2864 || TREE_CODE (base) == RESULT_DECL))
2865 return NULL_TREE;
2866
2867 if (DECL_GIMPLE_REG_P (base))
2868 {
2869 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2870 return base;
2871 }
2872
2873 return NULL_TREE;
2874 }
2875
2876 /* Callback for walk_tree, check that all elements with address taken are
2877 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2878 inside a PHI node. */
2879
2880 static tree
2881 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2882 {
2883 tree t = *tp, x;
2884
2885 if (TYPE_P (t))
2886 *walk_subtrees = 0;
2887
2888 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2889 #define CHECK_OP(N, MSG) \
2890 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2891 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2892
2893 switch (TREE_CODE (t))
2894 {
2895 case SSA_NAME:
2896 if (SSA_NAME_IN_FREE_LIST (t))
2897 {
2898 error ("SSA name in freelist but still referenced");
2899 return *tp;
2900 }
2901 break;
2902
2903 case PARM_DECL:
2904 case VAR_DECL:
2905 case RESULT_DECL:
2906 {
2907 tree context = decl_function_context (t);
2908 if (context != cfun->decl
2909 && !SCOPE_FILE_SCOPE_P (context)
2910 && !TREE_STATIC (t)
2911 && !DECL_EXTERNAL (t))
2912 {
2913 error ("Local declaration from a different function");
2914 return t;
2915 }
2916 }
2917 break;
2918
2919 case INDIRECT_REF:
2920 error ("INDIRECT_REF in gimple IL");
2921 return t;
2922
2923 case MEM_REF:
2924 x = TREE_OPERAND (t, 0);
2925 if (!POINTER_TYPE_P (TREE_TYPE (x))
2926 || !is_gimple_mem_ref_addr (x))
2927 {
2928 error ("invalid first operand of MEM_REF");
2929 return x;
2930 }
2931 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2932 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2933 {
2934 error ("invalid offset operand of MEM_REF");
2935 return TREE_OPERAND (t, 1);
2936 }
2937 if (TREE_CODE (x) == ADDR_EXPR)
2938 {
2939 tree va = verify_address (x, TREE_OPERAND (x, 0));
2940 if (va)
2941 return va;
2942 x = TREE_OPERAND (x, 0);
2943 }
2944 walk_tree (&x, verify_expr, data, NULL);
2945 *walk_subtrees = 0;
2946 break;
2947
2948 case ASSERT_EXPR:
2949 x = fold (ASSERT_EXPR_COND (t));
2950 if (x == boolean_false_node)
2951 {
2952 error ("ASSERT_EXPR with an always-false condition");
2953 return *tp;
2954 }
2955 break;
2956
2957 case MODIFY_EXPR:
2958 error ("MODIFY_EXPR not expected while having tuples");
2959 return *tp;
2960
2961 case ADDR_EXPR:
2962 {
2963 tree tem;
2964
2965 gcc_assert (is_gimple_address (t));
2966
2967 /* Skip any references (they will be checked when we recurse down the
2968 tree) and ensure that any variable used as a prefix is marked
2969 addressable. */
2970 for (x = TREE_OPERAND (t, 0);
2971 handled_component_p (x);
2972 x = TREE_OPERAND (x, 0))
2973 ;
2974
2975 if ((tem = verify_address (t, x)))
2976 return tem;
2977
2978 if (!(VAR_P (x)
2979 || TREE_CODE (x) == PARM_DECL
2980 || TREE_CODE (x) == RESULT_DECL))
2981 return NULL;
2982
2983 if (!TREE_ADDRESSABLE (x))
2984 {
2985 error ("address taken, but ADDRESSABLE bit not set");
2986 return x;
2987 }
2988
2989 break;
2990 }
2991
2992 case COND_EXPR:
2993 x = COND_EXPR_COND (t);
2994 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2995 {
2996 error ("non-integral used in condition");
2997 return x;
2998 }
2999 if (!is_gimple_condexpr (x))
3000 {
3001 error ("invalid conditional operand");
3002 return x;
3003 }
3004 break;
3005
3006 case NON_LVALUE_EXPR:
3007 case TRUTH_NOT_EXPR:
3008 gcc_unreachable ();
3009
3010 CASE_CONVERT:
3011 case FIX_TRUNC_EXPR:
3012 case FLOAT_EXPR:
3013 case NEGATE_EXPR:
3014 case ABS_EXPR:
3015 case BIT_NOT_EXPR:
3016 CHECK_OP (0, "invalid operand to unary operator");
3017 break;
3018
3019 case REALPART_EXPR:
3020 case IMAGPART_EXPR:
3021 case BIT_FIELD_REF:
3022 if (!is_gimple_reg_type (TREE_TYPE (t)))
3023 {
3024 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3025 return t;
3026 }
3027
3028 if (TREE_CODE (t) == BIT_FIELD_REF)
3029 {
3030 tree t0 = TREE_OPERAND (t, 0);
3031 tree t1 = TREE_OPERAND (t, 1);
3032 tree t2 = TREE_OPERAND (t, 2);
3033 if (!tree_fits_uhwi_p (t1)
3034 || !tree_fits_uhwi_p (t2))
3035 {
3036 error ("invalid position or size operand to BIT_FIELD_REF");
3037 return t;
3038 }
3039 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3040 && (TYPE_PRECISION (TREE_TYPE (t))
3041 != tree_to_uhwi (t1)))
3042 {
3043 error ("integral result type precision does not match "
3044 "field size of BIT_FIELD_REF");
3045 return t;
3046 }
3047 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3048 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3049 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3050 != tree_to_uhwi (t1)))
3051 {
3052 error ("mode size of non-integral result does not "
3053 "match field size of BIT_FIELD_REF");
3054 return t;
3055 }
3056 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3057 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3058 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3059 {
3060 error ("position plus size exceeds size of referenced object in "
3061 "BIT_FIELD_REF");
3062 return t;
3063 }
3064 }
3065 t = TREE_OPERAND (t, 0);
3066
3067 /* Fall-through. */
3068 case COMPONENT_REF:
3069 case ARRAY_REF:
3070 case ARRAY_RANGE_REF:
3071 case VIEW_CONVERT_EXPR:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3075 the subtrees. */
3076 while (handled_component_p (t))
3077 {
3078 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3080 else if (TREE_CODE (t) == ARRAY_REF
3081 || TREE_CODE (t) == ARRAY_RANGE_REF)
3082 {
3083 CHECK_OP (1, "invalid array index");
3084 if (TREE_OPERAND (t, 2))
3085 CHECK_OP (2, "invalid array lower bound");
3086 if (TREE_OPERAND (t, 3))
3087 CHECK_OP (3, "invalid array stride");
3088 }
3089 else if (TREE_CODE (t) == BIT_FIELD_REF
3090 || TREE_CODE (t) == REALPART_EXPR
3091 || TREE_CODE (t) == IMAGPART_EXPR)
3092 {
3093 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3094 "REALPART_EXPR");
3095 return t;
3096 }
3097
3098 t = TREE_OPERAND (t, 0);
3099 }
3100
3101 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3102 {
3103 error ("invalid reference prefix");
3104 return t;
3105 }
3106 walk_tree (&t, verify_expr, data, NULL);
3107 *walk_subtrees = 0;
3108 break;
3109 case PLUS_EXPR:
3110 case MINUS_EXPR:
3111 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3112 arithmetic should be done using POINTER_PLUS_EXPR. */
3113 if (POINTER_TYPE_P (TREE_TYPE (t)))
3114 {
3115 error ("invalid operand to plus/minus, type is a pointer");
3116 return t;
3117 }
3118 CHECK_OP (0, "invalid operand to binary operator");
3119 CHECK_OP (1, "invalid operand to binary operator");
3120 break;
3121
3122 case POINTER_PLUS_EXPR:
3123 /* Check to make sure the first operand is a pointer or reference type. */
3124 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3125 {
3126 error ("invalid operand to pointer plus, first operand is not a pointer");
3127 return t;
3128 }
3129 /* Check to make sure the second operand is a ptrofftype. */
3130 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3131 {
3132 error ("invalid operand to pointer plus, second operand is not an "
3133 "integer type of appropriate width");
3134 return t;
3135 }
3136 /* FALLTHROUGH */
3137 case LT_EXPR:
3138 case LE_EXPR:
3139 case GT_EXPR:
3140 case GE_EXPR:
3141 case EQ_EXPR:
3142 case NE_EXPR:
3143 case UNORDERED_EXPR:
3144 case ORDERED_EXPR:
3145 case UNLT_EXPR:
3146 case UNLE_EXPR:
3147 case UNGT_EXPR:
3148 case UNGE_EXPR:
3149 case UNEQ_EXPR:
3150 case LTGT_EXPR:
3151 case MULT_EXPR:
3152 case TRUNC_DIV_EXPR:
3153 case CEIL_DIV_EXPR:
3154 case FLOOR_DIV_EXPR:
3155 case ROUND_DIV_EXPR:
3156 case TRUNC_MOD_EXPR:
3157 case CEIL_MOD_EXPR:
3158 case FLOOR_MOD_EXPR:
3159 case ROUND_MOD_EXPR:
3160 case RDIV_EXPR:
3161 case EXACT_DIV_EXPR:
3162 case MIN_EXPR:
3163 case MAX_EXPR:
3164 case LSHIFT_EXPR:
3165 case RSHIFT_EXPR:
3166 case LROTATE_EXPR:
3167 case RROTATE_EXPR:
3168 case BIT_IOR_EXPR:
3169 case BIT_XOR_EXPR:
3170 case BIT_AND_EXPR:
3171 CHECK_OP (0, "invalid operand to binary operator");
3172 CHECK_OP (1, "invalid operand to binary operator");
3173 break;
3174
3175 case CONSTRUCTOR:
3176 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3177 *walk_subtrees = 0;
3178 break;
3179
3180 case CASE_LABEL_EXPR:
3181 if (CASE_CHAIN (t))
3182 {
3183 error ("invalid CASE_CHAIN");
3184 return t;
3185 }
3186 break;
3187
3188 default:
3189 break;
3190 }
3191 return NULL;
3192
3193 #undef CHECK_OP
3194 }
3195
3196
3197 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3198 Returns true if there is an error, otherwise false. */
3199
3200 static bool
3201 verify_types_in_gimple_min_lval (tree expr)
3202 {
3203 tree op;
3204
3205 if (is_gimple_id (expr))
3206 return false;
3207
3208 if (TREE_CODE (expr) != TARGET_MEM_REF
3209 && TREE_CODE (expr) != MEM_REF)
3210 {
3211 error ("invalid expression for min lvalue");
3212 return true;
3213 }
3214
3215 /* TARGET_MEM_REFs are strange beasts. */
3216 if (TREE_CODE (expr) == TARGET_MEM_REF)
3217 return false;
3218
3219 op = TREE_OPERAND (expr, 0);
3220 if (!is_gimple_val (op))
3221 {
3222 error ("invalid operand in indirect reference");
3223 debug_generic_stmt (op);
3224 return true;
3225 }
3226 /* Memory references now generally can involve a value conversion. */
3227
3228 return false;
3229 }
3230
3231 /* Verify if EXPR is a valid GIMPLE reference expression. If
3232 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3233 if there is an error, otherwise false. */
3234
3235 static bool
3236 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3237 {
3238 while (handled_component_p (expr))
3239 {
3240 tree op = TREE_OPERAND (expr, 0);
3241
3242 if (TREE_CODE (expr) == ARRAY_REF
3243 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3244 {
3245 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3246 || (TREE_OPERAND (expr, 2)
3247 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3248 || (TREE_OPERAND (expr, 3)
3249 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3250 {
3251 error ("invalid operands to array reference");
3252 debug_generic_stmt (expr);
3253 return true;
3254 }
3255 }
3256
3257 /* Verify if the reference array element types are compatible. */
3258 if (TREE_CODE (expr) == ARRAY_REF
3259 && !useless_type_conversion_p (TREE_TYPE (expr),
3260 TREE_TYPE (TREE_TYPE (op))))
3261 {
3262 error ("type mismatch in array reference");
3263 debug_generic_stmt (TREE_TYPE (expr));
3264 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3265 return true;
3266 }
3267 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3268 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3269 TREE_TYPE (TREE_TYPE (op))))
3270 {
3271 error ("type mismatch in array range reference");
3272 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3273 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3274 return true;
3275 }
3276
3277 if ((TREE_CODE (expr) == REALPART_EXPR
3278 || TREE_CODE (expr) == IMAGPART_EXPR)
3279 && !useless_type_conversion_p (TREE_TYPE (expr),
3280 TREE_TYPE (TREE_TYPE (op))))
3281 {
3282 error ("type mismatch in real/imagpart reference");
3283 debug_generic_stmt (TREE_TYPE (expr));
3284 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3285 return true;
3286 }
3287
3288 if (TREE_CODE (expr) == COMPONENT_REF
3289 && !useless_type_conversion_p (TREE_TYPE (expr),
3290 TREE_TYPE (TREE_OPERAND (expr, 1))))
3291 {
3292 error ("type mismatch in component reference");
3293 debug_generic_stmt (TREE_TYPE (expr));
3294 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3295 return true;
3296 }
3297
3298 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3299 {
3300 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3301 that their operand is not an SSA name or an invariant when
3302 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3303 bug). Otherwise there is nothing to verify, gross mismatches at
3304 most invoke undefined behavior. */
3305 if (require_lvalue
3306 && (TREE_CODE (op) == SSA_NAME
3307 || is_gimple_min_invariant (op)))
3308 {
3309 error ("conversion of an SSA_NAME on the left hand side");
3310 debug_generic_stmt (expr);
3311 return true;
3312 }
3313 else if (TREE_CODE (op) == SSA_NAME
3314 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3315 {
3316 error ("conversion of register to a different size");
3317 debug_generic_stmt (expr);
3318 return true;
3319 }
3320 else if (!handled_component_p (op))
3321 return false;
3322 }
3323
3324 expr = op;
3325 }
3326
3327 if (TREE_CODE (expr) == MEM_REF)
3328 {
3329 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3330 {
3331 error ("invalid address operand in MEM_REF");
3332 debug_generic_stmt (expr);
3333 return true;
3334 }
3335 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3336 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3337 {
3338 error ("invalid offset operand in MEM_REF");
3339 debug_generic_stmt (expr);
3340 return true;
3341 }
3342 }
3343 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3344 {
3345 if (!TMR_BASE (expr)
3346 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3347 {
3348 error ("invalid address operand in TARGET_MEM_REF");
3349 return true;
3350 }
3351 if (!TMR_OFFSET (expr)
3352 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3353 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3354 {
3355 error ("invalid offset operand in TARGET_MEM_REF");
3356 debug_generic_stmt (expr);
3357 return true;
3358 }
3359 }
3360
3361 return ((require_lvalue || !is_gimple_min_invariant (expr))
3362 && verify_types_in_gimple_min_lval (expr));
3363 }
3364
3365 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3366 list of pointer-to types that is trivially convertible to DEST. */
3367
3368 static bool
3369 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3370 {
3371 tree src;
3372
3373 if (!TYPE_POINTER_TO (src_obj))
3374 return true;
3375
3376 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3377 if (useless_type_conversion_p (dest, src))
3378 return true;
3379
3380 return false;
3381 }
3382
3383 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3384 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3385
3386 static bool
3387 valid_fixed_convert_types_p (tree type1, tree type2)
3388 {
3389 return (FIXED_POINT_TYPE_P (type1)
3390 && (INTEGRAL_TYPE_P (type2)
3391 || SCALAR_FLOAT_TYPE_P (type2)
3392 || FIXED_POINT_TYPE_P (type2)));
3393 }
3394
3395 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3396 is a problem, otherwise false. */
3397
3398 static bool
3399 verify_gimple_call (gcall *stmt)
3400 {
3401 tree fn = gimple_call_fn (stmt);
3402 tree fntype, fndecl;
3403 unsigned i;
3404
3405 if (gimple_call_internal_p (stmt))
3406 {
3407 if (fn)
3408 {
3409 error ("gimple call has two targets");
3410 debug_generic_stmt (fn);
3411 return true;
3412 }
3413 /* FIXME: for passing a label as an arg to the internal fn PHI from the GIMPLE FE. */
3414 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3415 {
3416 return false;
3417 }
3418 }
3419 else
3420 {
3421 if (!fn)
3422 {
3423 error ("gimple call has no target");
3424 return true;
3425 }
3426 }
3427
3428 if (fn && !is_gimple_call_addr (fn))
3429 {
3430 error ("invalid function in gimple call");
3431 debug_generic_stmt (fn);
3432 return true;
3433 }
3434
3435 if (fn
3436 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3437 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3438 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3439 {
3440 error ("non-function in gimple call");
3441 return true;
3442 }
3443
3444 fndecl = gimple_call_fndecl (stmt);
3445 if (fndecl
3446 && TREE_CODE (fndecl) == FUNCTION_DECL
3447 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3448 && !DECL_PURE_P (fndecl)
3449 && !TREE_READONLY (fndecl))
3450 {
3451 error ("invalid pure const state for function");
3452 return true;
3453 }
3454
3455 tree lhs = gimple_call_lhs (stmt);
3456 if (lhs
3457 && (!is_gimple_lvalue (lhs)
3458 || verify_types_in_gimple_reference (lhs, true)))
3459 {
3460 error ("invalid LHS in gimple call");
3461 return true;
3462 }
3463
3464 if (gimple_call_ctrl_altering_p (stmt)
3465 && gimple_call_noreturn_p (stmt)
3466 && should_remove_lhs_p (lhs))
3467 {
3468 error ("LHS in noreturn call");
3469 return true;
3470 }
3471
3472 fntype = gimple_call_fntype (stmt);
3473 if (fntype
3474 && lhs
3475 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3476 /* ??? At least C++ misses conversions at assignments from
3477 void * call results.
3478 ??? Java is completely off. Especially with functions
3479 returning java.lang.Object.
3480 For now simply allow arbitrary pointer type conversions. */
3481 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3482 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3483 {
3484 error ("invalid conversion in gimple call");
3485 debug_generic_stmt (TREE_TYPE (lhs));
3486 debug_generic_stmt (TREE_TYPE (fntype));
3487 return true;
3488 }
3489
3490 if (gimple_call_chain (stmt)
3491 && !is_gimple_val (gimple_call_chain (stmt)))
3492 {
3493 error ("invalid static chain in gimple call");
3494 debug_generic_stmt (gimple_call_chain (stmt));
3495 return true;
3496 }
3497
3498 /* If there is a static chain argument, the call should either be
3499 indirect, or the decl should have DECL_STATIC_CHAIN set (sketch below). */
3500 if (gimple_call_chain (stmt)
3501 && fndecl
3502 && !DECL_STATIC_CHAIN (fndecl))
3503 {
3504 error ("static chain with function that doesn%'t use one");
3505 return true;
3506 }
3507
3508 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3509 {
3510 switch (DECL_FUNCTION_CODE (fndecl))
3511 {
3512 case BUILT_IN_UNREACHABLE:
3513 case BUILT_IN_TRAP:
3514 if (gimple_call_num_args (stmt) > 0)
3515 {
3516 /* Built-in unreachable with parameters might not be caught by
3517 the undefined behavior sanitizer. Front ends check that users do
3518 not call them that way, but we also produce calls to
3519 __builtin_unreachable internally, for example when IPA figures
3520 out a call cannot happen in a legal program. In such cases,
3521 we must make sure arguments are stripped off. */
3522 error ("__builtin_unreachable or __builtin_trap call with "
3523 "arguments");
3524 return true;
3525 }
3526 break;
3527 default:
3528 break;
3529 }
3530 }
3531
3532 /* ??? The C frontend passes unpromoted arguments in case it
3533 didn't see a function declaration before the call. So for now
3534 leave the call arguments mostly unverified. Once we gimplify
3535 unit-at-a-time we have a chance to fix this. */
3536
3537 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3538 {
3539 tree arg = gimple_call_arg (stmt, i);
3540 if ((is_gimple_reg_type (TREE_TYPE (arg))
3541 && !is_gimple_val (arg))
3542 || (!is_gimple_reg_type (TREE_TYPE (arg))
3543 && !is_gimple_lvalue (arg)))
3544 {
3545 error ("invalid argument to gimple call");
3546 debug_generic_expr (arg);
3547 return true;
3548 }
3549 }
3550
3551 return false;
3552 }
3553
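/* A minimal GNU C illustration of the static-chain rule above (nested
   functions are a GNU extension): get () refers to its enclosing
   frame, so its decl has DECL_STATIC_CHAIN set and the direct call
   carries a static chain argument:

     int
     outer (int x)
     {
       int get (void) { return x; }
       return get ();
     }
*/
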
3554 /* Verifies the gimple comparison with the result type TYPE and
3555 the operands OP0 and OP1, comparison code is CODE. */
3556
3557 static bool
3558 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3559 {
3560 tree op0_type = TREE_TYPE (op0);
3561 tree op1_type = TREE_TYPE (op1);
3562
3563 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3564 {
3565 error ("invalid operands in gimple comparison");
3566 return true;
3567 }
3568
3569 /* For comparisons we do not have the operation's type as the
3570 effective type the comparison is carried out in. Instead
3571 we require that either the first operand is trivially
3572 convertible into the second, or the other way around.
3573 Because we special-case pointers to void we allow
3574 comparisons of pointers with the same mode as well. */
3575 if (!useless_type_conversion_p (op0_type, op1_type)
3576 && !useless_type_conversion_p (op1_type, op0_type)
3577 && (!POINTER_TYPE_P (op0_type)
3578 || !POINTER_TYPE_P (op1_type)
3579 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3580 {
3581 error ("mismatching comparison operand types");
3582 debug_generic_expr (op0_type);
3583 debug_generic_expr (op1_type);
3584 return true;
3585 }
3586
3587 /* The resulting type of a comparison may be an effective boolean type. */
3588 if (INTEGRAL_TYPE_P (type)
3589 && (TREE_CODE (type) == BOOLEAN_TYPE
3590 || TYPE_PRECISION (type) == 1))
3591 {
3592 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3593 || TREE_CODE (op1_type) == VECTOR_TYPE)
3594 && code != EQ_EXPR && code != NE_EXPR
3595 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3596 && !VECTOR_INTEGER_TYPE_P (op0_type))
3597 {
3598 error ("unsupported operation or type for vector comparison"
3599 " returning a boolean");
3600 debug_generic_expr (op0_type);
3601 debug_generic_expr (op1_type);
3602 return true;
3603 }
3604 }
3605 /* Or a boolean vector type with the same element count
3606 as the comparison operand types. */
3607 else if (TREE_CODE (type) == VECTOR_TYPE
3608 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3609 {
3610 if (TREE_CODE (op0_type) != VECTOR_TYPE
3611 || TREE_CODE (op1_type) != VECTOR_TYPE)
3612 {
3613 error ("non-vector operands in vector comparison");
3614 debug_generic_expr (op0_type);
3615 debug_generic_expr (op1_type);
3616 return true;
3617 }
3618
3619 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3620 {
3621 error ("invalid vector comparison resulting type");
3622 debug_generic_expr (type);
3623 return true;
3624 }
3625 }
3626 else
3627 {
3628 error ("bogus comparison result type");
3629 debug_generic_expr (type);
3630 return true;
3631 }
3632
3633 return false;
3634 }
3635
3636 /* Verify a gimple assignment statement STMT with an unary rhs.
3637 Returns true if anything is wrong. */
3638
3639 static bool
3640 verify_gimple_assign_unary (gassign *stmt)
3641 {
3642 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3643 tree lhs = gimple_assign_lhs (stmt);
3644 tree lhs_type = TREE_TYPE (lhs);
3645 tree rhs1 = gimple_assign_rhs1 (stmt);
3646 tree rhs1_type = TREE_TYPE (rhs1);
3647
3648 if (!is_gimple_reg (lhs))
3649 {
3650 error ("non-register as LHS of unary operation");
3651 return true;
3652 }
3653
3654 if (!is_gimple_val (rhs1))
3655 {
3656 error ("invalid operand in unary operation");
3657 return true;
3658 }
3659
3660 /* First handle conversions. */
3661 switch (rhs_code)
3662 {
3663 CASE_CONVERT:
3664 {
3665 /* Allow conversions from pointer type to integral type only if
3666 there is no sign or zero extension involved.
3667 For targets where the precision of ptrofftype doesn't match that
3668 of pointers we need to allow arbitrary conversions to ptrofftype. */
3669 if ((POINTER_TYPE_P (lhs_type)
3670 && INTEGRAL_TYPE_P (rhs1_type))
3671 || (POINTER_TYPE_P (rhs1_type)
3672 && INTEGRAL_TYPE_P (lhs_type)
3673 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3674 || ptrofftype_p (sizetype))))
3675 return false;
3676
3677 /* Allow conversion from integral to offset type and vice versa. */
3678 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3679 && INTEGRAL_TYPE_P (rhs1_type))
3680 || (INTEGRAL_TYPE_P (lhs_type)
3681 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3682 return false;
3683
3684 /* Otherwise assert we are converting between types of the
3685 same kind. */
3686 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3687 {
3688 error ("invalid types in nop conversion");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 return true;
3692 }
3693
3694 return false;
3695 }
3696
3697 case ADDR_SPACE_CONVERT_EXPR:
3698 {
3699 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3700 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3701 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3702 {
3703 error ("invalid types in address space conversion");
3704 debug_generic_expr (lhs_type);
3705 debug_generic_expr (rhs1_type);
3706 return true;
3707 }
3708
3709 return false;
3710 }
3711
3712 case FIXED_CONVERT_EXPR:
3713 {
3714 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3715 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3716 {
3717 error ("invalid types in fixed-point conversion");
3718 debug_generic_expr (lhs_type);
3719 debug_generic_expr (rhs1_type);
3720 return true;
3721 }
3722
3723 return false;
3724 }
3725
3726 case FLOAT_EXPR:
3727 {
3728 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3729 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3730 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3731 {
3732 error ("invalid types in conversion to floating point");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 return true;
3736 }
3737
3738 return false;
3739 }
3740
3741 case FIX_TRUNC_EXPR:
3742 {
3743 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3744 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3745 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3746 {
3747 error ("invalid types in conversion to integer");
3748 debug_generic_expr (lhs_type);
3749 debug_generic_expr (rhs1_type);
3750 return true;
3751 }
3752
3753 return false;
3754 }
3755 case REDUC_MAX_EXPR:
3756 case REDUC_MIN_EXPR:
3757 case REDUC_PLUS_EXPR:
3758 if (!VECTOR_TYPE_P (rhs1_type)
3759 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3760 {
3761 error ("reduction should convert from vector to element type");
3762 debug_generic_expr (lhs_type);
3763 debug_generic_expr (rhs1_type);
3764 return true;
3765 }
3766 return false;
3767
3768 case VEC_UNPACK_HI_EXPR:
3769 case VEC_UNPACK_LO_EXPR:
3770 case VEC_UNPACK_FLOAT_HI_EXPR:
3771 case VEC_UNPACK_FLOAT_LO_EXPR:
3772 /* FIXME. */
3773 return false;
3774
3775 case NEGATE_EXPR:
3776 case ABS_EXPR:
3777 case BIT_NOT_EXPR:
3778 case PAREN_EXPR:
3779 case CONJ_EXPR:
3780 break;
3781
3782 default:
3783 gcc_unreachable ();
3784 }
3785
3786 /* For the remaining codes assert there is no conversion involved. */
3787 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3788 {
3789 error ("non-trivial conversion in unary operation");
3790 debug_generic_expr (lhs_type);
3791 debug_generic_expr (rhs1_type);
3792 return true;
3793 }
3794
3795 return false;
3796 }
3797
3798 /* Verify a gimple assignment statement STMT with a binary rhs.
3799 Returns true if anything is wrong. */
3800
3801 static bool
3802 verify_gimple_assign_binary (gassign *stmt)
3803 {
3804 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3805 tree lhs = gimple_assign_lhs (stmt);
3806 tree lhs_type = TREE_TYPE (lhs);
3807 tree rhs1 = gimple_assign_rhs1 (stmt);
3808 tree rhs1_type = TREE_TYPE (rhs1);
3809 tree rhs2 = gimple_assign_rhs2 (stmt);
3810 tree rhs2_type = TREE_TYPE (rhs2);
3811
3812 if (!is_gimple_reg (lhs))
3813 {
3814 error ("non-register as LHS of binary operation");
3815 return true;
3816 }
3817
3818 if (!is_gimple_val (rhs1)
3819 || !is_gimple_val (rhs2))
3820 {
3821 error ("invalid operands in binary operation");
3822 return true;
3823 }
3824
3825 /* First handle operations that involve different types. */
3826 switch (rhs_code)
3827 {
3828 case COMPLEX_EXPR:
3829 {
3830 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3831 || !(INTEGRAL_TYPE_P (rhs1_type)
3832 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3833 || !(INTEGRAL_TYPE_P (rhs2_type)
3834 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3835 {
3836 error ("type mismatch in complex expression");
3837 debug_generic_expr (lhs_type);
3838 debug_generic_expr (rhs1_type);
3839 debug_generic_expr (rhs2_type);
3840 return true;
3841 }
3842
3843 return false;
3844 }
3845
3846 case LSHIFT_EXPR:
3847 case RSHIFT_EXPR:
3848 case LROTATE_EXPR:
3849 case RROTATE_EXPR:
3850 {
3851 /* Shifts and rotates are ok on integral types, fixed point
3852 types and integer vector types. */
3853 if ((!INTEGRAL_TYPE_P (rhs1_type)
3854 && !FIXED_POINT_TYPE_P (rhs1_type)
3855 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3856 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3857 || (!INTEGRAL_TYPE_P (rhs2_type)
3858 /* Vector shifts of vectors are also ok. */
3859 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3860 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3861 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3862 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3863 || !useless_type_conversion_p (lhs_type, rhs1_type))
3864 {
3865 error ("type mismatch in shift expression");
3866 debug_generic_expr (lhs_type);
3867 debug_generic_expr (rhs1_type);
3868 debug_generic_expr (rhs2_type);
3869 return true;
3870 }
3871
3872 return false;
3873 }
3874
3875 case WIDEN_LSHIFT_EXPR:
3876 {
3877 if (!INTEGRAL_TYPE_P (lhs_type)
3878 || !INTEGRAL_TYPE_P (rhs1_type)
3879 || TREE_CODE (rhs2) != INTEGER_CST
3880 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3881 {
3882 error ("type mismatch in widening vector shift expression");
3883 debug_generic_expr (lhs_type);
3884 debug_generic_expr (rhs1_type);
3885 debug_generic_expr (rhs2_type);
3886 return true;
3887 }
3888
3889 return false;
3890 }
3891
3892 case VEC_WIDEN_LSHIFT_HI_EXPR:
3893 case VEC_WIDEN_LSHIFT_LO_EXPR:
3894 {
3895 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3896 || TREE_CODE (lhs_type) != VECTOR_TYPE
3897 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3898 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3899 || TREE_CODE (rhs2) != INTEGER_CST
3900 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3901 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3902 {
3903 error ("type mismatch in widening vector shift expression");
3904 debug_generic_expr (lhs_type);
3905 debug_generic_expr (rhs1_type);
3906 debug_generic_expr (rhs2_type);
3907 return true;
3908 }
3909
3910 return false;
3911 }
3912
3913 case PLUS_EXPR:
3914 case MINUS_EXPR:
3915 {
3916 tree lhs_etype = lhs_type;
3917 tree rhs1_etype = rhs1_type;
3918 tree rhs2_etype = rhs2_type;
3919 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3920 {
3921 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3922 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3923 {
3924 error ("invalid non-vector operands to vector valued plus");
3925 return true;
3926 }
3927 lhs_etype = TREE_TYPE (lhs_type);
3928 rhs1_etype = TREE_TYPE (rhs1_type);
3929 rhs2_etype = TREE_TYPE (rhs2_type);
3930 }
3931 if (POINTER_TYPE_P (lhs_etype)
3932 || POINTER_TYPE_P (rhs1_etype)
3933 || POINTER_TYPE_P (rhs2_etype))
3934 {
3935 error ("invalid (pointer) operands to plus/minus");
3936 return true;
3937 }
3938
3939 /* Continue with generic binary expression handling. */
3940 break;
3941 }
3942
3943 case POINTER_PLUS_EXPR:
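/* E.g. ptr_2 = ptr_1 + off_3, where PTR_1 has the result's
   pointer type and OFF_3 has the target's pointer offset type.  */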
3944 {
3945 if (!POINTER_TYPE_P (rhs1_type)
3946 || !useless_type_conversion_p (lhs_type, rhs1_type)
3947 || !ptrofftype_p (rhs2_type))
3948 {
3949 error ("type mismatch in pointer plus expression");
3950 debug_generic_stmt (lhs_type);
3951 debug_generic_stmt (rhs1_type);
3952 debug_generic_stmt (rhs2_type);
3953 return true;
3954 }
3955
3956 return false;
3957 }
3958
3959 case TRUTH_ANDIF_EXPR:
3960 case TRUTH_ORIF_EXPR:
3961 case TRUTH_AND_EXPR:
3962 case TRUTH_OR_EXPR:
3963 case TRUTH_XOR_EXPR:
3964
3965 gcc_unreachable ();
3966
3967 case LT_EXPR:
3968 case LE_EXPR:
3969 case GT_EXPR:
3970 case GE_EXPR:
3971 case EQ_EXPR:
3972 case NE_EXPR:
3973 case UNORDERED_EXPR:
3974 case ORDERED_EXPR:
3975 case UNLT_EXPR:
3976 case UNLE_EXPR:
3977 case UNGT_EXPR:
3978 case UNGE_EXPR:
3979 case UNEQ_EXPR:
3980 case LTGT_EXPR:
3981 /* Comparisons are also binary, but the result type is not
3982 connected to the operand types. */
3983 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3984
3985 case WIDEN_MULT_EXPR:
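/* The result must have room for the full product: the operand
   precisions must be equal and at most half of the LHS precision,
   as when two ints are widen-multiplied into a long long.  */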
3986 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3987 return true;
3988 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3989 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3990
3991 case WIDEN_SUM_EXPR:
3992 case VEC_WIDEN_MULT_HI_EXPR:
3993 case VEC_WIDEN_MULT_LO_EXPR:
3994 case VEC_WIDEN_MULT_EVEN_EXPR:
3995 case VEC_WIDEN_MULT_ODD_EXPR:
3996 case VEC_PACK_TRUNC_EXPR:
3997 case VEC_PACK_SAT_EXPR:
3998 case VEC_PACK_FIX_TRUNC_EXPR:
3999 /* FIXME. */
4000 return false;
4001
4002 case MULT_EXPR:
4003 case MULT_HIGHPART_EXPR:
4004 case TRUNC_DIV_EXPR:
4005 case CEIL_DIV_EXPR:
4006 case FLOOR_DIV_EXPR:
4007 case ROUND_DIV_EXPR:
4008 case TRUNC_MOD_EXPR:
4009 case CEIL_MOD_EXPR:
4010 case FLOOR_MOD_EXPR:
4011 case ROUND_MOD_EXPR:
4012 case RDIV_EXPR:
4013 case EXACT_DIV_EXPR:
4014 case MIN_EXPR:
4015 case MAX_EXPR:
4016 case BIT_IOR_EXPR:
4017 case BIT_XOR_EXPR:
4018 case BIT_AND_EXPR:
4019 /* Continue with generic binary expression handling. */
4020 break;
4021
4022 default:
4023 gcc_unreachable ();
4024 }
4025
4026 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4027 || !useless_type_conversion_p (lhs_type, rhs2_type))
4028 {
4029 error ("type mismatch in binary expression");
4030 debug_generic_stmt (lhs_type);
4031 debug_generic_stmt (rhs1_type);
4032 debug_generic_stmt (rhs2_type);
4033 return true;
4034 }
4035
4036 return false;
4037 }
4038
4039 /* Verify a gimple assignment statement STMT with a ternary rhs.
4040 Returns true if anything is wrong. */
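/* Illustrative ternary right-hand sides include
   COND_EXPR <c_1, a_2, b_3>, FMA_EXPR <a_1, b_2, c_3> and
   VEC_PERM_EXPR <v_1, v_2, mask_3>.  */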
4041
4042 static bool
4043 verify_gimple_assign_ternary (gassign *stmt)
4044 {
4045 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4046 tree lhs = gimple_assign_lhs (stmt);
4047 tree lhs_type = TREE_TYPE (lhs);
4048 tree rhs1 = gimple_assign_rhs1 (stmt);
4049 tree rhs1_type = TREE_TYPE (rhs1);
4050 tree rhs2 = gimple_assign_rhs2 (stmt);
4051 tree rhs2_type = TREE_TYPE (rhs2);
4052 tree rhs3 = gimple_assign_rhs3 (stmt);
4053 tree rhs3_type = TREE_TYPE (rhs3);
4054
4055 if (!is_gimple_reg (lhs))
4056 {
4057 error ("non-register as LHS of ternary operation");
4058 return true;
4059 }
4060
4061 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4062 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4063 || !is_gimple_val (rhs2)
4064 || !is_gimple_val (rhs3))
4065 {
4066 error ("invalid operands in ternary operation");
4067 return true;
4068 }
4069
4070 /* First handle operations that involve different types. */
4071 switch (rhs_code)
4072 {
4073 case WIDEN_MULT_PLUS_EXPR:
4074 case WIDEN_MULT_MINUS_EXPR:
4075 if ((!INTEGRAL_TYPE_P (rhs1_type)
4076 && !FIXED_POINT_TYPE_P (rhs1_type))
4077 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4078 || !useless_type_conversion_p (lhs_type, rhs3_type)
4079 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4080 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4081 {
4082 error ("type mismatch in widening multiply-accumulate expression");
4083 debug_generic_expr (lhs_type);
4084 debug_generic_expr (rhs1_type);
4085 debug_generic_expr (rhs2_type);
4086 debug_generic_expr (rhs3_type);
4087 return true;
4088 }
4089 break;
4090
4091 case FMA_EXPR:
4092 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4093 || !useless_type_conversion_p (lhs_type, rhs2_type)
4094 || !useless_type_conversion_p (lhs_type, rhs3_type))
4095 {
4096 error ("type mismatch in fused multiply-add expression");
4097 debug_generic_expr (lhs_type);
4098 debug_generic_expr (rhs1_type);
4099 debug_generic_expr (rhs2_type);
4100 debug_generic_expr (rhs3_type);
4101 return true;
4102 }
4103 break;
4104
4105 case VEC_COND_EXPR:
4106 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4107 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4108 != TYPE_VECTOR_SUBPARTS (lhs_type))
4109 {
4110 error ("the first argument of a VEC_COND_EXPR must be of a "
4111 "boolean vector type of the same number of elements "
4112 "as the result");
4113 debug_generic_expr (lhs_type);
4114 debug_generic_expr (rhs1_type);
4115 return true;
4116 }
4117 /* Fallthrough. */
4118 case COND_EXPR:
4119 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4120 || !useless_type_conversion_p (lhs_type, rhs3_type))
4121 {
4122 error ("type mismatch in conditional expression");
4123 debug_generic_expr (lhs_type);
4124 debug_generic_expr (rhs2_type);
4125 debug_generic_expr (rhs3_type);
4126 return true;
4127 }
4128 break;
4129
4130 case VEC_PERM_EXPR:
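/* E.g. v4_4 = VEC_PERM_EXPR <v4_1, v4_2, {0, 4, 1, 5}>: all three
   operands must be vectors of the same element count, the first
   two of the result type, and the mask an integral vector whose
   elements are as wide as the data elements.  */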
4131 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4132 || !useless_type_conversion_p (lhs_type, rhs2_type))
4133 {
4134 error ("type mismatch in vector permute expression");
4135 debug_generic_expr (lhs_type);
4136 debug_generic_expr (rhs1_type);
4137 debug_generic_expr (rhs2_type);
4138 debug_generic_expr (rhs3_type);
4139 return true;
4140 }
4141
4142 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4143 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4144 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4145 {
4146 error ("vector types expected in vector permute expression");
4147 debug_generic_expr (lhs_type);
4148 debug_generic_expr (rhs1_type);
4149 debug_generic_expr (rhs2_type);
4150 debug_generic_expr (rhs3_type);
4151 return true;
4152 }
4153
4154 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4155 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4156 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4157 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4158 != TYPE_VECTOR_SUBPARTS (lhs_type))
4159 {
4160 error ("vectors with different element number found "
4161 "in vector permute expression");
4162 debug_generic_expr (lhs_type);
4163 debug_generic_expr (rhs1_type);
4164 debug_generic_expr (rhs2_type);
4165 debug_generic_expr (rhs3_type);
4166 return true;
4167 }
4168
4169 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4170 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4171 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4172 {
4173 error ("invalid mask type in vector permute expression");
4174 debug_generic_expr (lhs_type);
4175 debug_generic_expr (rhs1_type);
4176 debug_generic_expr (rhs2_type);
4177 debug_generic_expr (rhs3_type);
4178 return true;
4179 }
4180
4181 return false;
4182
4183 case SAD_EXPR:
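/* SAD_EXPR <v1, v2, acc> sums absolute differences of the elements
   of V1 and V2 into the accumulator ACC, whose elements must be at
   least twice as wide as those of the inputs.  */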
4184 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4185 || !useless_type_conversion_p (lhs_type, rhs3_type)
4186 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4187 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4188 {
4189 error ("type mismatch in sad expression");
4190 debug_generic_expr (lhs_type);
4191 debug_generic_expr (rhs1_type);
4192 debug_generic_expr (rhs2_type);
4193 debug_generic_expr (rhs3_type);
4194 return true;
4195 }
4196
4197 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4198 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4199 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4200 {
4201 error ("vector types expected in sad expression");
4202 debug_generic_expr (lhs_type);
4203 debug_generic_expr (rhs1_type);
4204 debug_generic_expr (rhs2_type);
4205 debug_generic_expr (rhs3_type);
4206 return true;
4207 }
4208
4209 return false;
4210
4211 case BIT_INSERT_EXPR:
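/* BIT_INSERT_EXPR <container, replacement, position> yields the
   container with TYPE_SIZE (TREE_TYPE (replacement)) bits replaced
   at the constant bit POSITION; for vector containers the
   insertion must fall on an element boundary.  */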
4212 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4213 {
4214 error ("type mismatch in BIT_INSERT_EXPR");
4215 debug_generic_expr (lhs_type);
4216 debug_generic_expr (rhs1_type);
4217 return true;
4218 }
4219 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4220 && INTEGRAL_TYPE_P (rhs2_type))
4221 || (VECTOR_TYPE_P (rhs1_type)
4222 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4223 {
4224 error ("not allowed type combination in BIT_INSERT_EXPR");
4225 debug_generic_expr (rhs1_type);
4226 debug_generic_expr (rhs2_type);
4227 return true;
4228 }
4229 if (! tree_fits_uhwi_p (rhs3)
4230 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4231 {
4232 error ("invalid position or size in BIT_INSERT_EXPR");
4233 return true;
4234 }
4235 if (INTEGRAL_TYPE_P (rhs1_type))
4236 {
4237 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4238 if (bitpos >= TYPE_PRECISION (rhs1_type)
4239 || (bitpos + TYPE_PRECISION (rhs2_type)
4240 > TYPE_PRECISION (rhs1_type)))
4241 {
4242 error ("insertion out of range in BIT_INSERT_EXPR");
4243 return true;
4244 }
4245 }
4246 else if (VECTOR_TYPE_P (rhs1_type))
4247 {
4248 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4249 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4250 if (bitpos % bitsize != 0)
4251 {
4252 error ("vector insertion not at element boundary");
4253 return true;
4254 }
4255 }
4256 return false;
4257
4258 case DOT_PROD_EXPR:
4259 case REALIGN_LOAD_EXPR:
4260 /* FIXME. */
4261 return false;
4262
4263 default:
4264 gcc_unreachable ();
4265 }
4266 return false;
4267 }
4268
4269 /* Verify a gimple assignment statement STMT with a single rhs.
4270 Returns true if anything is wrong. */
4271
4272 static bool
4273 verify_gimple_assign_single (gassign *stmt)
4274 {
4275 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4276 tree lhs = gimple_assign_lhs (stmt);
4277 tree lhs_type = TREE_TYPE (lhs);
4278 tree rhs1 = gimple_assign_rhs1 (stmt);
4279 tree rhs1_type = TREE_TYPE (rhs1);
4280 bool res = false;
4281
4282 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4283 {
4284 error ("non-trivial conversion at assignment");
4285 debug_generic_expr (lhs_type);
4286 debug_generic_expr (rhs1_type);
4287 return true;
4288 }
4289
4290 if (gimple_clobber_p (stmt)
4291 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4292 {
4293 error ("non-decl/MEM_REF LHS in clobber statement");
4294 debug_generic_expr (lhs);
4295 return true;
4296 }
4297
4298 if (handled_component_p (lhs)
4299 || TREE_CODE (lhs) == MEM_REF
4300 || TREE_CODE (lhs) == TARGET_MEM_REF)
4301 res |= verify_types_in_gimple_reference (lhs, true);
4302
4303 /* Special codes we cannot handle via their class. */
4304 switch (rhs_code)
4305 {
4306 case ADDR_EXPR:
4307 {
4308 tree op = TREE_OPERAND (rhs1, 0);
4309 if (!is_gimple_addressable (op))
4310 {
4311 error ("invalid operand in unary expression");
4312 return true;
4313 }
4314
4315 /* Technically there is no longer a need for matching types, but
4316 gimple hygiene asks for this check. In LTO we can end up
4317 combining incompatible units and thus get addresses of
4318 globals that change their type to a common one. */
4319 if (!in_lto_p
4320 && !types_compatible_p (TREE_TYPE (op),
4321 TREE_TYPE (TREE_TYPE (rhs1)))
4322 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4323 TREE_TYPE (op)))
4324 {
4325 error ("type mismatch in address expression");
4326 debug_generic_stmt (TREE_TYPE (rhs1));
4327 debug_generic_stmt (TREE_TYPE (op));
4328 return true;
4329 }
4330
4331 return verify_types_in_gimple_reference (op, true);
4332 }
4333
4334 /* tcc_reference */
4335 case INDIRECT_REF:
4336 error ("INDIRECT_REF in gimple IL");
4337 return true;
4338
4339 case COMPONENT_REF:
4340 case BIT_FIELD_REF:
4341 case ARRAY_REF:
4342 case ARRAY_RANGE_REF:
4343 case VIEW_CONVERT_EXPR:
4344 case REALPART_EXPR:
4345 case IMAGPART_EXPR:
4346 case TARGET_MEM_REF:
4347 case MEM_REF:
4348 if (!is_gimple_reg (lhs)
4349 && is_gimple_reg_type (TREE_TYPE (lhs)))
4350 {
4351 error ("invalid rhs for gimple memory store");
4352 debug_generic_stmt (lhs);
4353 debug_generic_stmt (rhs1);
4354 return true;
4355 }
4356 return res || verify_types_in_gimple_reference (rhs1, false);
4357
4358 /* tcc_constant */
4359 case SSA_NAME:
4360 case INTEGER_CST:
4361 case REAL_CST:
4362 case FIXED_CST:
4363 case COMPLEX_CST:
4364 case VECTOR_CST:
4365 case STRING_CST:
4366 return res;
4367
4368 /* tcc_declaration */
4369 case CONST_DECL:
4370 return res;
4371 case VAR_DECL:
4372 case PARM_DECL:
4373 if (!is_gimple_reg (lhs)
4374 && !is_gimple_reg (rhs1)
4375 && is_gimple_reg_type (TREE_TYPE (lhs)))
4376 {
4377 error ("invalid rhs for gimple memory store");
4378 debug_generic_stmt (lhs);
4379 debug_generic_stmt (rhs1);
4380 return true;
4381 }
4382 return res;
4383
4384 case CONSTRUCTOR:
4385 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4386 {
4387 unsigned int i;
4388 tree elt_i, elt_v, elt_t = NULL_TREE;
4389
4390 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4391 return res;
4392 /* For vector CONSTRUCTORs we require that either the CONSTRUCTOR
4393 is empty, or that it is a CONSTRUCTOR of smaller vector elements
4394 (then the element count must be correct to cover the whole
4395 outer vector and the index must be NULL on all elements), or
4396 that it is a CONSTRUCTOR of scalar elements, where as an
4397 exception we allow a smaller number of elements (assuming zero
4398 filling) and indexes that are either NULL or equal to the
4399 element position (such CONSTRUCTORs can appear in the IL from FEs). */
4400 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4401 {
4402 if (elt_t == NULL_TREE)
4403 {
4404 elt_t = TREE_TYPE (elt_v);
4405 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4406 {
4407 /* Here ELT_T is the vector type of the element value. */
4408 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4409 TREE_TYPE (elt_t)))
4410 {
4411 error ("incorrect type of vector CONSTRUCTOR"
4412 " elements");
4413 debug_generic_stmt (rhs1);
4414 return true;
4415 }
4416 else if (CONSTRUCTOR_NELTS (rhs1)
4417 * TYPE_VECTOR_SUBPARTS (elt_t)
4418 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4419 {
4420 error ("incorrect number of vector CONSTRUCTOR"
4421 " elements");
4422 debug_generic_stmt (rhs1);
4423 return true;
4424 }
4425 }
4426 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4427 elt_t))
4428 {
4429 error ("incorrect type of vector CONSTRUCTOR elements");
4430 debug_generic_stmt (rhs1);
4431 return true;
4432 }
4433 else if (CONSTRUCTOR_NELTS (rhs1)
4434 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4435 {
4436 error ("incorrect number of vector CONSTRUCTOR elements");
4437 debug_generic_stmt (rhs1);
4438 return true;
4439 }
4440 }
4441 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4442 {
4443 error ("incorrect type of vector CONSTRUCTOR elements");
4444 debug_generic_stmt (rhs1);
4445 return true;
4446 }
4447 if (elt_i != NULL_TREE
4448 && (TREE_CODE (elt_t) == VECTOR_TYPE
4449 || TREE_CODE (elt_i) != INTEGER_CST
4450 || compare_tree_int (elt_i, i) != 0))
4451 {
4452 error ("vector CONSTRUCTOR with non-NULL element index");
4453 debug_generic_stmt (rhs1);
4454 return true;
4455 }
4456 if (!is_gimple_val (elt_v))
4457 {
4458 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4459 debug_generic_stmt (rhs1);
4460 return true;
4461 }
4462 }
4463 }
4464 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4465 {
4466 error ("non-vector CONSTRUCTOR with elements");
4467 debug_generic_stmt (rhs1);
4468 return true;
4469 }
4470 return res;
4471 case OBJ_TYPE_REF:
4472 case ASSERT_EXPR:
4473 case WITH_SIZE_EXPR:
4474 /* FIXME. */
4475 return res;
4476
4477 default:;
4478 }
4479
4480 return res;
4481 }
4482
4483 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4484 is a problem, otherwise false. */
4485
4486 static bool
4487 verify_gimple_assign (gassign *stmt)
4488 {
4489 switch (gimple_assign_rhs_class (stmt))
4490 {
4491 case GIMPLE_SINGLE_RHS:
4492 return verify_gimple_assign_single (stmt);
4493
4494 case GIMPLE_UNARY_RHS:
4495 return verify_gimple_assign_unary (stmt);
4496
4497 case GIMPLE_BINARY_RHS:
4498 return verify_gimple_assign_binary (stmt);
4499
4500 case GIMPLE_TERNARY_RHS:
4501 return verify_gimple_assign_ternary (stmt);
4502
4503 default:
4504 gcc_unreachable ();
4505 }
4506 }
4507
4508 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4509 is a problem, otherwise false. */
4510
4511 static bool
4512 verify_gimple_return (greturn *stmt)
4513 {
4514 tree op = gimple_return_retval (stmt);
4515 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4516
4517 /* We cannot test for present return values as we do not fix up missing
4518 return values from the original source. */
4519 if (op == NULL)
4520 return false;
4521
4522 if (!is_gimple_val (op)
4523 && TREE_CODE (op) != RESULT_DECL)
4524 {
4525 error ("invalid operand in return statement");
4526 debug_generic_stmt (op);
4527 return true;
4528 }
4529
4530 if ((TREE_CODE (op) == RESULT_DECL
4531 && DECL_BY_REFERENCE (op))
4532 || (TREE_CODE (op) == SSA_NAME
4533 && SSA_NAME_VAR (op)
4534 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4535 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4536 op = TREE_TYPE (op);
4537
4538 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4539 {
4540 error ("invalid conversion in return statement");
4541 debug_generic_stmt (restype);
4542 debug_generic_stmt (TREE_TYPE (op));
4543 return true;
4544 }
4545
4546 return false;
4547 }
4548
4549
4550 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4551 is a problem, otherwise false. */
4552
4553 static bool
4554 verify_gimple_goto (ggoto *stmt)
4555 {
4556 tree dest = gimple_goto_dest (stmt);
4557
4558 /* ??? We have two canonical forms of direct goto destinations, a
4559 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4560 if (TREE_CODE (dest) != LABEL_DECL
4561 && (!is_gimple_val (dest)
4562 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4563 {
4564 error ("goto destination is neither a label nor a pointer");
4565 return true;
4566 }
4567
4568 return false;
4569 }
4570
4571 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4572 is a problem, otherwise false. */
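/* The checks below require the first label to be the default case
   (no CASE_LOW), the remaining labels to be sorted by CASE_LOW with
   non-empty ranges, and all labels to share one type that is no
   wider than the switch index type.  */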
4573
4574 static bool
4575 verify_gimple_switch (gswitch *stmt)
4576 {
4577 unsigned int i, n;
4578 tree elt, prev_upper_bound = NULL_TREE;
4579 tree index_type, elt_type = NULL_TREE;
4580
4581 if (!is_gimple_val (gimple_switch_index (stmt)))
4582 {
4583 error ("invalid operand to switch statement");
4584 debug_generic_stmt (gimple_switch_index (stmt));
4585 return true;
4586 }
4587
4588 index_type = TREE_TYPE (gimple_switch_index (stmt));
4589 if (! INTEGRAL_TYPE_P (index_type))
4590 {
4591 error ("non-integral type switch statement");
4592 debug_generic_expr (index_type);
4593 return true;
4594 }
4595
4596 elt = gimple_switch_label (stmt, 0);
4597 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4598 {
4599 error ("invalid default case label in switch statement");
4600 debug_generic_expr (elt);
4601 return true;
4602 }
4603
4604 n = gimple_switch_num_labels (stmt);
4605 for (i = 1; i < n; i++)
4606 {
4607 elt = gimple_switch_label (stmt, i);
4608
4609 if (! CASE_LOW (elt))
4610 {
4611 error ("invalid case label in switch statement");
4612 debug_generic_expr (elt);
4613 return true;
4614 }
4615 if (CASE_HIGH (elt)
4616 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4617 {
4618 error ("invalid case range in switch statement");
4619 debug_generic_expr (elt);
4620 return true;
4621 }
4622
4623 if (elt_type)
4624 {
4625 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4626 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4627 {
4628 error ("type mismatch for case label in switch statement");
4629 debug_generic_expr (elt);
4630 return true;
4631 }
4632 }
4633 else
4634 {
4635 elt_type = TREE_TYPE (CASE_LOW (elt));
4636 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4637 {
4638 error ("type precision mismatch in switch statement");
4639 return true;
4640 }
4641 }
4642
4643 if (prev_upper_bound)
4644 {
4645 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4646 {
4647 error ("case labels not sorted in switch statement");
4648 return true;
4649 }
4650 }
4651
4652 prev_upper_bound = CASE_HIGH (elt);
4653 if (! prev_upper_bound)
4654 prev_upper_bound = CASE_LOW (elt);
4655 }
4656
4657 return false;
4658 }
4659
4660 /* Verify a gimple debug statement STMT.
4661 Returns true if anything is wrong. */
4662
4663 static bool
4664 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4665 {
4666 /* There isn't much that could be wrong in a gimple debug stmt. A
4667 gimple debug bind stmt, for example, maps a tree (usually a
4668 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4669 member of an aggregate type) to another tree that can be an
4670 arbitrary expression. These stmts expand into debug insns, and
4671 are converted to debug notes by var-tracking.c. */
4672 return false;
4673 }
4674
4675 /* Verify a gimple label statement STMT.
4676 Returns true if anything is wrong. */
4677
4678 static bool
4679 verify_gimple_label (glabel *stmt)
4680 {
4681 tree decl = gimple_label_label (stmt);
4682 int uid;
4683 bool err = false;
4684
4685 if (TREE_CODE (decl) != LABEL_DECL)
4686 return true;
4687 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4688 && DECL_CONTEXT (decl) != current_function_decl)
4689 {
4690 error ("label's context is not the current function decl");
4691 err |= true;
4692 }
4693
4694 uid = LABEL_DECL_UID (decl);
4695 if (cfun->cfg
4696 && (uid == -1
4697 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4698 {
4699 error ("incorrect entry in label_to_block_map");
4700 err |= true;
4701 }
4702
4703 uid = EH_LANDING_PAD_NR (decl);
4704 if (uid)
4705 {
4706 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4707 if (decl != lp->post_landing_pad)
4708 {
4709 error ("incorrect setting of landing pad number");
4710 err |= true;
4711 }
4712 }
4713
4714 return err;
4715 }
4716
4717 /* Verify a gimple cond statement STMT.
4718 Returns true if anything is wrong. */
4719
4720 static bool
4721 verify_gimple_cond (gcond *stmt)
4722 {
4723 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4724 {
4725 error ("invalid comparison code in gimple cond");
4726 return true;
4727 }
4728 if (!(!gimple_cond_true_label (stmt)
4729 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4730 || !(!gimple_cond_false_label (stmt)
4731 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4732 {
4733 error ("invalid labels in gimple cond");
4734 return true;
4735 }
4736
4737 return verify_gimple_comparison (boolean_type_node,
4738 gimple_cond_lhs (stmt),
4739 gimple_cond_rhs (stmt),
4740 gimple_cond_code (stmt));
4741 }
4742
4743 /* Verify the GIMPLE statement STMT. Returns true if there is an
4744 error, otherwise false. */
4745
4746 static bool
4747 verify_gimple_stmt (gimple *stmt)
4748 {
4749 switch (gimple_code (stmt))
4750 {
4751 case GIMPLE_ASSIGN:
4752 return verify_gimple_assign (as_a <gassign *> (stmt));
4753
4754 case GIMPLE_LABEL:
4755 return verify_gimple_label (as_a <glabel *> (stmt));
4756
4757 case GIMPLE_CALL:
4758 return verify_gimple_call (as_a <gcall *> (stmt));
4759
4760 case GIMPLE_COND:
4761 return verify_gimple_cond (as_a <gcond *> (stmt));
4762
4763 case GIMPLE_GOTO:
4764 return verify_gimple_goto (as_a <ggoto *> (stmt));
4765
4766 case GIMPLE_SWITCH:
4767 return verify_gimple_switch (as_a <gswitch *> (stmt));
4768
4769 case GIMPLE_RETURN:
4770 return verify_gimple_return (as_a <greturn *> (stmt));
4771
4772 case GIMPLE_ASM:
4773 return false;
4774
4775 case GIMPLE_TRANSACTION:
4776 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4777
4778 /* Tuples that do not have tree operands. */
4779 case GIMPLE_NOP:
4780 case GIMPLE_PREDICT:
4781 case GIMPLE_RESX:
4782 case GIMPLE_EH_DISPATCH:
4783 case GIMPLE_EH_MUST_NOT_THROW:
4784 return false;
4785
4786 CASE_GIMPLE_OMP:
4787 /* OpenMP directives are validated by the FE and never operated
4788 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4789 non-gimple expressions when the main index variable has had
4790 its address taken. This does not affect the loop itself
4791 because the header of a GIMPLE_OMP_FOR is merely used to determine
4792 how to set up the parallel iteration. */
4793 return false;
4794
4795 case GIMPLE_DEBUG:
4796 return verify_gimple_debug (stmt);
4797
4798 default:
4799 gcc_unreachable ();
4800 }
4801 }
4802
4803 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4804 and false otherwise. */
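/* E.g. for x_3 = PHI <x_1(2), x_2(4)> the result must be an
   SSA_NAME and every argument a GIMPLE value of the same kind,
   virtual or real, as the result.  */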
4805
4806 static bool
4807 verify_gimple_phi (gimple *phi)
4808 {
4809 bool err = false;
4810 unsigned i;
4811 tree phi_result = gimple_phi_result (phi);
4812 bool virtual_p;
4813
4814 if (!phi_result)
4815 {
4816 error ("invalid PHI result");
4817 return true;
4818 }
4819
4820 virtual_p = virtual_operand_p (phi_result);
4821 if (TREE_CODE (phi_result) != SSA_NAME
4822 || (virtual_p
4823 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4824 {
4825 error ("invalid PHI result");
4826 err = true;
4827 }
4828
4829 for (i = 0; i < gimple_phi_num_args (phi); i++)
4830 {
4831 tree t = gimple_phi_arg_def (phi, i);
4832
4833 if (!t)
4834 {
4835 error ("missing PHI def");
4836 err |= true;
4837 continue;
4838 }
4839 /* Addressable variables do have SSA_NAMEs but they
4840 are not considered gimple values. */
4841 else if ((TREE_CODE (t) == SSA_NAME
4842 && virtual_p != virtual_operand_p (t))
4843 || (virtual_p
4844 && (TREE_CODE (t) != SSA_NAME
4845 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4846 || (!virtual_p
4847 && !is_gimple_val (t)))
4848 {
4849 error ("invalid PHI argument");
4850 debug_generic_expr (t);
4851 err |= true;
4852 }
4853 #ifdef ENABLE_TYPES_CHECKING
4854 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4855 {
4856 error ("incompatible types in PHI argument %u", i);
4857 debug_generic_stmt (TREE_TYPE (phi_result));
4858 debug_generic_stmt (TREE_TYPE (t));
4859 err |= true;
4860 }
4861 #endif
4862 }
4863
4864 return err;
4865 }
4866
4867 /* Verify the GIMPLE statements inside the sequence STMTS. */
4868
4869 static bool
4870 verify_gimple_in_seq_2 (gimple_seq stmts)
4871 {
4872 gimple_stmt_iterator ittr;
4873 bool err = false;
4874
4875 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4876 {
4877 gimple *stmt = gsi_stmt (ittr);
4878
4879 switch (gimple_code (stmt))
4880 {
4881 case GIMPLE_BIND:
4882 err |= verify_gimple_in_seq_2 (
4883 gimple_bind_body (as_a <gbind *> (stmt)));
4884 break;
4885
4886 case GIMPLE_TRY:
4887 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4888 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4889 break;
4890
4891 case GIMPLE_EH_FILTER:
4892 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4893 break;
4894
4895 case GIMPLE_EH_ELSE:
4896 {
4897 geh_else *eh_else = as_a <geh_else *> (stmt);
4898 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4899 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4900 }
4901 break;
4902
4903 case GIMPLE_CATCH:
4904 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4905 as_a <gcatch *> (stmt)));
4906 break;
4907
4908 case GIMPLE_TRANSACTION:
4909 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4910 break;
4911
4912 default:
4913 {
4914 bool err2 = verify_gimple_stmt (stmt);
4915 if (err2)
4916 debug_gimple_stmt (stmt);
4917 err |= err2;
4918 }
4919 }
4920 }
4921
4922 return err;
4923 }
4924
4925 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4926 is a problem, otherwise false. */
4927
4928 static bool
4929 verify_gimple_transaction (gtransaction *stmt)
4930 {
4931 tree lab;
4932
4933 lab = gimple_transaction_label_norm (stmt);
4934 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4935 return true;
4936 lab = gimple_transaction_label_uninst (stmt);
4937 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4938 return true;
4939 lab = gimple_transaction_label_over (stmt);
4940 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4941 return true;
4942
4943 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4944 }
4945
4946
4947 /* Verify the GIMPLE statements inside the statement list STMTS. */
4948
4949 DEBUG_FUNCTION void
4950 verify_gimple_in_seq (gimple_seq stmts)
4951 {
4952 timevar_push (TV_TREE_STMT_VERIFY);
4953 if (verify_gimple_in_seq_2 (stmts))
4954 internal_error ("verify_gimple failed");
4955 timevar_pop (TV_TREE_STMT_VERIFY);
4956 }
4957
4958 /* Return true when T can be shared. */
4959
4960 static bool
4961 tree_node_can_be_shared (tree t)
4962 {
4963 if (IS_TYPE_OR_DECL_P (t)
4964 || is_gimple_min_invariant (t)
4965 || TREE_CODE (t) == SSA_NAME
4966 || t == error_mark_node
4967 || TREE_CODE (t) == IDENTIFIER_NODE)
4968 return true;
4969
4970 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4971 return true;
4972
4973 if (DECL_P (t))
4974 return true;
4975
4976 return false;
4977 }
4978
4979 /* Called via walk_tree. Verify tree sharing. */
4980
4981 static tree
4982 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4983 {
4984 hash_set<void *> *visited = (hash_set<void *> *) data;
4985
4986 if (tree_node_can_be_shared (*tp))
4987 {
4988 *walk_subtrees = false;
4989 return NULL;
4990 }
4991
4992 if (visited->add (*tp))
4993 return *tp;
4994
4995 return NULL;
4996 }
4997
4998 /* Called via walk_gimple_stmt. Verify tree sharing. */
4999
5000 static tree
5001 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5002 {
5003 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5004 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5005 }
5006
5007 static bool eh_error_found;
5008 bool
5009 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5010 hash_set<gimple *> *visited)
5011 {
5012 if (!visited->contains (stmt))
5013 {
5014 error ("dead STMT in EH table");
5015 debug_gimple_stmt (stmt);
5016 eh_error_found = true;
5017 }
5018 return true;
5019 }
5020
5021 /* Verify that the block of location LOC, if any, is in BLOCKS. */
5022
5023 static bool
5024 verify_location (hash_set<tree> *blocks, location_t loc)
5025 {
5026 tree block = LOCATION_BLOCK (loc);
5027 if (block != NULL_TREE
5028 && !blocks->contains (block))
5029 {
5030 error ("location references block not in block tree");
5031 return true;
5032 }
5033 if (block != NULL_TREE)
5034 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5035 return false;
5036 }
5037
5038 /* Called via walk_tree. Verify that expressions have no blocks. */
5039
5040 static tree
5041 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5042 {
5043 if (!EXPR_P (*tp))
5044 {
5045 *walk_subtrees = false;
5046 return NULL;
5047 }
5048
5049 location_t loc = EXPR_LOCATION (*tp);
5050 if (LOCATION_BLOCK (loc) != NULL)
5051 return *tp;
5052
5053 return NULL;
5054 }
5055
5056 /* Called via walk_tree. Verify locations of expressions. */
5057
5058 static tree
5059 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5060 {
5061 hash_set<tree> *blocks = (hash_set<tree> *) data;
5062
5063 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5064 {
5065 tree t = DECL_DEBUG_EXPR (*tp);
5066 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5067 if (addr)
5068 return addr;
5069 }
5070 if ((VAR_P (*tp)
5071 || TREE_CODE (*tp) == PARM_DECL
5072 || TREE_CODE (*tp) == RESULT_DECL)
5073 && DECL_HAS_VALUE_EXPR_P (*tp))
5074 {
5075 tree t = DECL_VALUE_EXPR (*tp);
5076 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5077 if (addr)
5078 return addr;
5079 }
5080
5081 if (!EXPR_P (*tp))
5082 {
5083 *walk_subtrees = false;
5084 return NULL;
5085 }
5086
5087 location_t loc = EXPR_LOCATION (*tp);
5088 if (verify_location (blocks, loc))
5089 return *tp;
5090
5091 return NULL;
5092 }
5093
5094 /* Called via walk_gimple_op. Verify locations of expressions. */
5095
5096 static tree
5097 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5098 {
5099 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5100 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5101 }
5102
5103 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5104
5105 static void
5106 collect_subblocks (hash_set<tree> *blocks, tree block)
5107 {
5108 tree t;
5109 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5110 {
5111 blocks->add (t);
5112 collect_subblocks (blocks, t);
5113 }
5114 }
5115
5116 /* Verify the GIMPLE statements in the CFG of FN. If VERIFY_NOTHROW
is set, also diagnose statements marked as throwing that cannot. */
5117
5118 DEBUG_FUNCTION void
5119 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5120 {
5121 basic_block bb;
5122 bool err = false;
5123
5124 timevar_push (TV_TREE_STMT_VERIFY);
5125 hash_set<void *> visited;
5126 hash_set<gimple *> visited_stmts;
5127
5128 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5129 hash_set<tree> blocks;
5130 if (DECL_INITIAL (fn->decl))
5131 {
5132 blocks.add (DECL_INITIAL (fn->decl));
5133 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5134 }
5135
5136 FOR_EACH_BB_FN (bb, fn)
5137 {
5138 gimple_stmt_iterator gsi;
5139
5140 for (gphi_iterator gpi = gsi_start_phis (bb);
5141 !gsi_end_p (gpi);
5142 gsi_next (&gpi))
5143 {
5144 gphi *phi = gpi.phi ();
5145 bool err2 = false;
5146 unsigned i;
5147
5148 visited_stmts.add (phi);
5149
5150 if (gimple_bb (phi) != bb)
5151 {
5152 error ("gimple_bb (phi) is set to a wrong basic block");
5153 err2 = true;
5154 }
5155
5156 err2 |= verify_gimple_phi (phi);
5157
5158 /* Only PHI arguments have locations. */
5159 if (gimple_location (phi) != UNKNOWN_LOCATION)
5160 {
5161 error ("PHI node with location");
5162 err2 = true;
5163 }
5164
5165 for (i = 0; i < gimple_phi_num_args (phi); i++)
5166 {
5167 tree arg = gimple_phi_arg_def (phi, i);
5168 tree addr = walk_tree (&arg, verify_node_sharing_1,
5169 &visited, NULL);
5170 if (addr)
5171 {
5172 error ("incorrect sharing of tree nodes");
5173 debug_generic_expr (addr);
5174 err2 |= true;
5175 }
5176 location_t loc = gimple_phi_arg_location (phi, i);
5177 if (virtual_operand_p (gimple_phi_result (phi))
5178 && loc != UNKNOWN_LOCATION)
5179 {
5180 error ("virtual PHI with argument locations");
5181 err2 = true;
5182 }
5183 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5184 if (addr)
5185 {
5186 debug_generic_expr (addr);
5187 err2 = true;
5188 }
5189 err2 |= verify_location (&blocks, loc);
5190 }
5191
5192 if (err2)
5193 debug_gimple_stmt (phi);
5194 err |= err2;
5195 }
5196
5197 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5198 {
5199 gimple *stmt = gsi_stmt (gsi);
5200 bool err2 = false;
5201 struct walk_stmt_info wi;
5202 tree addr;
5203 int lp_nr;
5204
5205 visited_stmts.add (stmt);
5206
5207 if (gimple_bb (stmt) != bb)
5208 {
5209 error ("gimple_bb (stmt) is set to a wrong basic block");
5210 err2 = true;
5211 }
5212
5213 err2 |= verify_gimple_stmt (stmt);
5214 err2 |= verify_location (&blocks, gimple_location (stmt));
5215
5216 memset (&wi, 0, sizeof (wi));
5217 wi.info = (void *) &visited;
5218 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5219 if (addr)
5220 {
5221 error ("incorrect sharing of tree nodes");
5222 debug_generic_expr (addr);
5223 err2 |= true;
5224 }
5225
5226 memset (&wi, 0, sizeof (wi));
5227 wi.info = (void *) &blocks;
5228 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5229 if (addr)
5230 {
5231 debug_generic_expr (addr);
5232 err2 |= true;
5233 }
5234
5235 /* ??? Instead of not checking these stmts at all the walker
5236 should know its context via wi. */
5237 if (!is_gimple_debug (stmt)
5238 && !is_gimple_omp (stmt))
5239 {
5240 memset (&wi, 0, sizeof (wi));
5241 addr = walk_gimple_op (stmt, verify_expr, &wi);
5242 if (addr)
5243 {
5244 debug_generic_expr (addr);
5245 inform (gimple_location (stmt), "in statement");
5246 err2 |= true;
5247 }
5248 }
5249
5250 /* If the statement is marked as part of an EH region, then it is
5251 expected that the statement could throw. Verify that when
5252 optimizations simplify a statement such that we can prove it
5253 cannot throw, we also update the other data structures
5254 to match. */
5255 lp_nr = lookup_stmt_eh_lp (stmt);
5256 if (lp_nr > 0)
5257 {
5258 if (!stmt_could_throw_p (stmt))
5259 {
5260 if (verify_nothrow)
5261 {
5262 error ("statement marked for throw, but doesn%'t");
5263 err2 |= true;
5264 }
5265 }
5266 else if (!gsi_one_before_end_p (gsi))
5267 {
5268 error ("statement marked for throw in middle of block");
5269 err2 |= true;
5270 }
5271 }
5272
5273 if (err2)
5274 debug_gimple_stmt (stmt);
5275 err |= err2;
5276 }
5277 }
5278
5279 eh_error_found = false;
5280 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5281 if (eh_table)
5282 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5283 (&visited_stmts);
5284
5285 if (err || eh_error_found)
5286 internal_error ("verify_gimple failed");
5287
5288 verify_histograms ();
5289 timevar_pop (TV_TREE_STMT_VERIFY);
5290 }
5291
5292
5293 /* Verifies that the flow information is OK. */
5294
5295 static int
5296 gimple_verify_flow_info (void)
5297 {
5298 int err = 0;
5299 basic_block bb;
5300 gimple_stmt_iterator gsi;
5301 gimple *stmt;
5302 edge e;
5303 edge_iterator ei;
5304
5305 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5306 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5307 {
5308 error ("ENTRY_BLOCK has IL associated with it");
5309 err = 1;
5310 }
5311
5312 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5313 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5314 {
5315 error ("EXIT_BLOCK has IL associated with it");
5316 err = 1;
5317 }
5318
5319 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5320 if (e->flags & EDGE_FALLTHRU)
5321 {
5322 error ("fallthru to exit from bb %d", e->src->index);
5323 err = 1;
5324 }
5325
5326 FOR_EACH_BB_FN (bb, cfun)
5327 {
5328 bool found_ctrl_stmt = false;
5329
5330 stmt = NULL;
5331
5332 /* Skip labels on the start of basic block. */
5333 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5334 {
5335 tree label;
5336 gimple *prev_stmt = stmt;
5337
5338 stmt = gsi_stmt (gsi);
5339
5340 if (gimple_code (stmt) != GIMPLE_LABEL)
5341 break;
5342
5343 label = gimple_label_label (as_a <glabel *> (stmt));
5344 if (prev_stmt && DECL_NONLOCAL (label))
5345 {
5346 error ("nonlocal label ");
5347 print_generic_expr (stderr, label);
5348 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5349 bb->index);
5350 err = 1;
5351 }
5352
5353 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5354 {
5355 error ("EH landing pad label ");
5356 print_generic_expr (stderr, label);
5357 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5358 bb->index);
5359 err = 1;
5360 }
5361
5362 if (label_to_block (label) != bb)
5363 {
5364 error ("label ");
5365 print_generic_expr (stderr, label);
5366 fprintf (stderr, " to block does not match in bb %d",
5367 bb->index);
5368 err = 1;
5369 }
5370
5371 if (decl_function_context (label) != current_function_decl)
5372 {
5373 error ("label ");
5374 print_generic_expr (stderr, label);
5375 fprintf (stderr, " has incorrect context in bb %d",
5376 bb->index);
5377 err = 1;
5378 }
5379 }
5380
5381 /* Verify that body of basic block BB is free of control flow. */
5382 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5383 {
5384 gimple *stmt = gsi_stmt (gsi);
5385
5386 if (found_ctrl_stmt)
5387 {
5388 error ("control flow in the middle of basic block %d",
5389 bb->index);
5390 err = 1;
5391 }
5392
5393 if (stmt_ends_bb_p (stmt))
5394 found_ctrl_stmt = true;
5395
5396 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5397 {
5398 error ("label ");
5399 print_generic_expr (stderr, gimple_label_label (label_stmt));
5400 fprintf (stderr, " in the middle of basic block %d", bb->index);
5401 err = 1;
5402 }
5403 }
5404
5405 gsi = gsi_last_bb (bb);
5406 if (gsi_end_p (gsi))
5407 continue;
5408
5409 stmt = gsi_stmt (gsi);
5410
5411 if (gimple_code (stmt) == GIMPLE_LABEL)
5412 continue;
5413
5414 err |= verify_eh_edges (stmt);
5415
5416 if (is_ctrl_stmt (stmt))
5417 {
5418 FOR_EACH_EDGE (e, ei, bb->succs)
5419 if (e->flags & EDGE_FALLTHRU)
5420 {
5421 error ("fallthru edge after a control statement in bb %d",
5422 bb->index);
5423 err = 1;
5424 }
5425 }
5426
5427 if (gimple_code (stmt) != GIMPLE_COND)
5428 {
5429 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5430 after anything other than a GIMPLE_COND. */
5431 FOR_EACH_EDGE (e, ei, bb->succs)
5432 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5433 {
5434 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5435 bb->index);
5436 err = 1;
5437 }
5438 }
5439
5440 switch (gimple_code (stmt))
5441 {
5442 case GIMPLE_COND:
5443 {
5444 edge true_edge;
5445 edge false_edge;
5446
5447 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5448
5449 if (!true_edge
5450 || !false_edge
5451 || !(true_edge->flags & EDGE_TRUE_VALUE)
5452 || !(false_edge->flags & EDGE_FALSE_VALUE)
5453 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5454 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5455 || EDGE_COUNT (bb->succs) >= 3)
5456 {
5457 error ("wrong outgoing edge flags at end of bb %d",
5458 bb->index);
5459 err = 1;
5460 }
5461 }
5462 break;
5463
5464 case GIMPLE_GOTO:
5465 if (simple_goto_p (stmt))
5466 {
5467 error ("explicit goto at end of bb %d", bb->index);
5468 err = 1;
5469 }
5470 else
5471 {
5472 /* FIXME. We should double check that the labels in the
5473 destination blocks have their address taken. */
5474 FOR_EACH_EDGE (e, ei, bb->succs)
5475 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5476 | EDGE_FALSE_VALUE))
5477 || !(e->flags & EDGE_ABNORMAL))
5478 {
5479 error ("wrong outgoing edge flags at end of bb %d",
5480 bb->index);
5481 err = 1;
5482 }
5483 }
5484 break;
5485
5486 case GIMPLE_CALL:
5487 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5488 break;
5489 /* fallthru */
5490 case GIMPLE_RETURN:
5491 if (!single_succ_p (bb)
5492 || (single_succ_edge (bb)->flags
5493 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5494 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5495 {
5496 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5497 err = 1;
5498 }
5499 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5500 {
5501 error ("return edge does not point to exit in bb %d",
5502 bb->index);
5503 err = 1;
5504 }
5505 break;
5506
5507 case GIMPLE_SWITCH:
5508 {
5509 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5510 tree prev;
5511 edge e;
5512 size_t i, n;
5513
5514 n = gimple_switch_num_labels (switch_stmt);
5515
5516 /* Mark all the destination basic blocks. */
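/* The aux field serves as a scratch mark: 1 while the block is a
   known case destination, 2 once a matching outgoing edge has
   been seen, and cleared again before moving on.  */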
5517 for (i = 0; i < n; ++i)
5518 {
5519 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5520 basic_block label_bb = label_to_block (lab);
5521 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5522 label_bb->aux = (void *)1;
5523 }
5524
5525 /* Verify that the case labels are sorted. */
5526 prev = gimple_switch_label (switch_stmt, 0);
5527 for (i = 1; i < n; ++i)
5528 {
5529 tree c = gimple_switch_label (switch_stmt, i);
5530 if (!CASE_LOW (c))
5531 {
5532 error ("found default case not at the start of "
5533 "case vector");
5534 err = 1;
5535 continue;
5536 }
5537 if (CASE_LOW (prev)
5538 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5539 {
5540 error ("case labels not sorted: ");
5541 print_generic_expr (stderr, prev);
5542 fprintf (stderr, " is greater than ");
5543 print_generic_expr (stderr, c);
5544 fprintf (stderr, " but comes before it.\n");
5545 err = 1;
5546 }
5547 prev = c;
5548 }
5549 /* VRP will remove the default case if it can prove it will
5550 never be executed. So do not verify there always exists
5551 a default case here. */
5552
5553 FOR_EACH_EDGE (e, ei, bb->succs)
5554 {
5555 if (!e->dest->aux)
5556 {
5557 error ("extra outgoing edge %d->%d",
5558 bb->index, e->dest->index);
5559 err = 1;
5560 }
5561
5562 e->dest->aux = (void *)2;
5563 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5564 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5565 {
5566 error ("wrong outgoing edge flags at end of bb %d",
5567 bb->index);
5568 err = 1;
5569 }
5570 }
5571
5572 /* Check that we have all of them. */
5573 for (i = 0; i < n; ++i)
5574 {
5575 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5576 basic_block label_bb = label_to_block (lab);
5577
5578 if (label_bb->aux != (void *)2)
5579 {
5580 error ("missing edge %i->%i", bb->index, label_bb->index);
5581 err = 1;
5582 }
5583 }
5584
5585 FOR_EACH_EDGE (e, ei, bb->succs)
5586 e->dest->aux = (void *)0;
5587 }
5588 break;
5589
5590 case GIMPLE_EH_DISPATCH:
5591 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5592 break;
5593
5594 default:
5595 break;
5596 }
5597 }
5598
5599 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5600 verify_dominators (CDI_DOMINATORS);
5601
5602 return err;
5603 }
5604
5605
5606 /* Updates phi nodes after creating a forwarder block joined
5607 by edge FALLTHRU. */
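/* The original PHIs stay in the forwarder block (FALLTHRU->src) but
   receive fresh result names; a new PHI in FALLTHRU->dest keeps the
   original result and collects the renamed value over FALLTHRU,
   while the remaining predecessors' arguments are flushed below.  */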
5608
5609 static void
5610 gimple_make_forwarder_block (edge fallthru)
5611 {
5612 edge e;
5613 edge_iterator ei;
5614 basic_block dummy, bb;
5615 tree var;
5616 gphi_iterator gsi;
5617
5618 dummy = fallthru->src;
5619 bb = fallthru->dest;
5620
5621 if (single_pred_p (bb))
5622 return;
5623
5624 /* If we redirected a branch we must create new PHI nodes at the
5625 start of BB. */
5626 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5627 {
5628 gphi *phi, *new_phi;
5629
5630 phi = gsi.phi ();
5631 var = gimple_phi_result (phi);
5632 new_phi = create_phi_node (var, bb);
5633 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5634 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5635 UNKNOWN_LOCATION);
5636 }
5637
5638 /* Add the arguments we have stored on edges. */
5639 FOR_EACH_EDGE (e, ei, bb->preds)
5640 {
5641 if (e == fallthru)
5642 continue;
5643
5644 flush_pending_stmts (e);
5645 }
5646 }
5647
5648
5649 /* Return a non-special label at the head of basic block BB.
5650 Create one if it doesn't exist. */
5651
5652 tree
5653 gimple_block_label (basic_block bb)
5654 {
5655 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5656 bool first = true;
5657 tree label;
5658 glabel *stmt;
5659
5660 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5661 {
5662 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5663 if (!stmt)
5664 break;
5665 label = gimple_label_label (stmt);
5666 if (!DECL_NONLOCAL (label))
5667 {
5668 if (!first)
5669 gsi_move_before (&i, &s);
5670 return label;
5671 }
5672 }
5673
5674 label = create_artificial_label (UNKNOWN_LOCATION);
5675 stmt = gimple_build_label (label);
5676 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5677 return label;
5678 }
5679
5680
5681 /* Attempt to perform edge redirection by replacing a possibly complex
5682 jump instruction by a goto or by removing the jump completely.
5683 This can apply only if all edges now point to the same block. The
5684 parameters and return values are equivalent to
5685 redirect_edge_and_branch. */
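/* E.g. a GIMPLE_COND or GIMPLE_SWITCH whose successors all reach
   TARGET can simply be removed, turning E into a fallthru edge.  */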
5686
5687 static edge
5688 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5689 {
5690 basic_block src = e->src;
5691 gimple_stmt_iterator i;
5692 gimple *stmt;
5693
5694 /* We can replace or remove a complex jump only when we have exactly
5695 two edges. */
5696 if (EDGE_COUNT (src->succs) != 2
5697 /* Verify that all targets will be TARGET. Specifically, the
5698 edge that is not E must also go to TARGET. */
5699 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5700 return NULL;
5701
5702 i = gsi_last_bb (src);
5703 if (gsi_end_p (i))
5704 return NULL;
5705
5706 stmt = gsi_stmt (i);
5707
5708 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5709 {
5710 gsi_remove (&i, true);
5711 e = ssa_redirect_edge (e, target);
5712 e->flags = EDGE_FALLTHRU;
5713 return e;
5714 }
5715
5716 return NULL;
5717 }
5718
5719
5720 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5721 edge representing the redirected branch. */
5722
5723 static edge
5724 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5725 {
5726 basic_block bb = e->src;
5727 gimple_stmt_iterator gsi;
5728 edge ret;
5729 gimple *stmt;
5730
5731 if (e->flags & EDGE_ABNORMAL)
5732 return NULL;
5733
5734 if (e->dest == dest)
5735 return NULL;
5736
5737 if (e->flags & EDGE_EH)
5738 return redirect_eh_edge (e, dest);
5739
5740 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5741 {
5742 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5743 if (ret)
5744 return ret;
5745 }
5746
5747 gsi = gsi_last_bb (bb);
5748 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5749
5750 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5751 {
5752 case GIMPLE_COND:
5753 /* For COND_EXPR, we only need to redirect the edge. */
5754 break;
5755
5756 case GIMPLE_GOTO:
5757 /* No non-abnormal edges should lead from a non-simple goto, and
5758 simple ones should be represented implicitly. */
5759 gcc_unreachable ();
5760
5761 case GIMPLE_SWITCH:
5762 {
5763 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5764 tree label = gimple_block_label (dest);
5765 tree cases = get_cases_for_edge (e, switch_stmt);
5766
5767 /* If we have a list of cases associated with E, then use it
5768 as it's a lot faster than walking the entire case vector. */
5769 if (cases)
5770 {
5771 edge e2 = find_edge (e->src, dest);
5772 tree last, first;
5773
5774 first = cases;
5775 while (cases)
5776 {
5777 last = cases;
5778 CASE_LABEL (cases) = label;
5779 cases = CASE_CHAIN (cases);
5780 }
5781
5782 /* If there was already an edge in the CFG, then we need
5783 to move all the cases associated with E to E2. */
5784 if (e2)
5785 {
5786 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5787
5788 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5789 CASE_CHAIN (cases2) = first;
5790 }
5791 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5792 }
5793 else
5794 {
5795 size_t i, n = gimple_switch_num_labels (switch_stmt);
5796
5797 for (i = 0; i < n; i++)
5798 {
5799 tree elt = gimple_switch_label (switch_stmt, i);
5800 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5801 CASE_LABEL (elt) = label;
5802 }
5803 }
5804 }
5805 break;
5806
5807 case GIMPLE_ASM:
5808 {
5809 gasm *asm_stmt = as_a <gasm *> (stmt);
5810 int i, n = gimple_asm_nlabels (asm_stmt);
5811 tree label = NULL;
5812
5813 for (i = 0; i < n; ++i)
5814 {
5815 tree cons = gimple_asm_label_op (asm_stmt, i);
5816 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5817 {
5818 if (!label)
5819 label = gimple_block_label (dest);
5820 TREE_VALUE (cons) = label;
5821 }
5822 }
5823
5824 /* If we didn't find any label matching the former edge in the
5825 asm labels, we must be redirecting the fallthrough
5826 edge. */
5827 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5828 }
5829 break;
5830
5831 case GIMPLE_RETURN:
5832 gsi_remove (&gsi, true);
5833 e->flags |= EDGE_FALLTHRU;
5834 break;
5835
5836 case GIMPLE_OMP_RETURN:
5837 case GIMPLE_OMP_CONTINUE:
5838 case GIMPLE_OMP_SECTIONS_SWITCH:
5839 case GIMPLE_OMP_FOR:
5840 /* The edges from OMP constructs can be simply redirected. */
5841 break;
5842
5843 case GIMPLE_EH_DISPATCH:
5844 if (!(e->flags & EDGE_FALLTHRU))
5845 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5846 break;
5847
5848 case GIMPLE_TRANSACTION:
5849 if (e->flags & EDGE_TM_ABORT)
5850 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5851 gimple_block_label (dest));
5852 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5853 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5854 gimple_block_label (dest));
5855 else
5856 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5857 gimple_block_label (dest));
5858 break;
5859
5860 default:
5861 /* Otherwise it must be a fallthru edge, and we don't need to
5862 do anything besides redirecting it. */
5863 gcc_assert (e->flags & EDGE_FALLTHRU);
5864 break;
5865 }
5866
5867 /* Update/insert PHI nodes as necessary. */
5868
5869 /* Now update the edges in the CFG. */
5870 e = ssa_redirect_edge (e, dest);
5871
5872 return e;
5873 }
5874
5875 /* Returns true if it is possible to remove edge E by redirecting
5876 it to the destination of the other edge from E->src. */
5877
5878 static bool
5879 gimple_can_remove_branch_p (const_edge e)
5880 {
5881 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5882 return false;
5883
5884 return true;
5885 }
5886
5887 /* Simple wrapper, as we can always redirect fallthru edges. */
5888
5889 static basic_block
5890 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5891 {
5892 e = gimple_redirect_edge_and_branch (e, dest);
5893 gcc_assert (e);
5894
5895 return NULL;
5896 }
5897
5898
5899 /* Splits basic block BB after statement STMT (but at least after the
5900 labels). If STMT is NULL, BB is split just after the labels. */
5901
5902 static basic_block
5903 gimple_split_block (basic_block bb, void *stmt)
5904 {
5905 gimple_stmt_iterator gsi;
5906 gimple_stmt_iterator gsi_tgt;
5907 gimple_seq list;
5908 basic_block new_bb;
5909 edge e;
5910 edge_iterator ei;
5911
5912 new_bb = create_empty_bb (bb);
5913
5914 /* Redirect the outgoing edges. */
5915 new_bb->succs = bb->succs;
5916 bb->succs = NULL;
5917 FOR_EACH_EDGE (e, ei, new_bb->succs)
5918 e->src = new_bb;
5919
5920 /* Get a stmt iterator pointing to the first stmt to move. */
5921 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5922 gsi = gsi_after_labels (bb);
5923 else
5924 {
5925 gsi = gsi_for_stmt ((gimple *) stmt);
5926 gsi_next (&gsi);
5927 }
5928
5929 /* Move everything from GSI to the new basic block. */
5930 if (gsi_end_p (gsi))
5931 return new_bb;
5932
5933 /* Split the statement list - avoid re-creating new containers as this
5934 brings ugly quadratic memory consumption in the inliner.
5935 (We are still quadratic since we need to update stmt BB pointers,
5936 sadly.) */
5937 gsi_split_seq_before (&gsi, &list);
5938 set_bb_seq (new_bb, list);
5939 for (gsi_tgt = gsi_start (list);
5940 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5941 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5942
5943 return new_bb;
5944 }
5945
5946
5947 /* Moves basic block BB after block AFTER. */
5948
5949 static bool
5950 gimple_move_block_after (basic_block bb, basic_block after)
5951 {
5952 if (bb->prev_bb == after)
5953 return true;
5954
5955 unlink_block (bb);
5956 link_block (bb, after);
5957
5958 return true;
5959 }
5960
5961
5962 /* Return TRUE if block BB has no executable statements, otherwise return
5963 FALSE. */
5964
5965 static bool
5966 gimple_empty_block_p (basic_block bb)
5967 {
5968 /* BB must have no executable statements. */
5969 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5970 if (phi_nodes (bb))
5971 return false;
5972 if (gsi_end_p (gsi))
5973 return true;
5974 if (is_gimple_debug (gsi_stmt (gsi)))
5975 gsi_next_nondebug (&gsi);
5976 return gsi_end_p (gsi);
5977 }
5978
5979
5980 /* Split a basic block if it ends with a conditional branch and if the
5981 other part of the block is not empty. */
5982
5983 static basic_block
5984 gimple_split_block_before_cond_jump (basic_block bb)
5985 {
5986 gimple *last, *split_point;
5987 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5988 if (gsi_end_p (gsi))
5989 return NULL;
5990 last = gsi_stmt (gsi);
5991 if (gimple_code (last) != GIMPLE_COND
5992 && gimple_code (last) != GIMPLE_SWITCH)
5993 return NULL;
5994 gsi_prev (&gsi);
5995 split_point = gsi_stmt (gsi);
5996 return split_block (bb, split_point)->dest;
5997 }
5998
5999
6000 /* Return true if basic_block can be duplicated. */
6001
6002 static bool
6003 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6004 {
6005 return true;
6006 }
6007
6008 /* Create a duplicate of the basic block BB. NOTE: This does not
6009 preserve SSA form. */
6010
6011 static basic_block
6012 gimple_duplicate_bb (basic_block bb)
6013 {
6014 basic_block new_bb;
6015 gimple_stmt_iterator gsi_tgt;
6016
6017 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6018
6019 /* Copy the PHI nodes. We ignore PHI node arguments here because
6020 the incoming edges have not been set up yet. */
6021 for (gphi_iterator gpi = gsi_start_phis (bb);
6022 !gsi_end_p (gpi);
6023 gsi_next (&gpi))
6024 {
6025 gphi *phi, *copy;
6026 phi = gpi.phi ();
6027 copy = create_phi_node (NULL_TREE, new_bb);
6028 create_new_def_for (gimple_phi_result (phi), copy,
6029 gimple_phi_result_ptr (copy));
6030 gimple_set_uid (copy, gimple_uid (phi));
6031 }
6032
6033 gsi_tgt = gsi_start_bb (new_bb);
6034 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6035 !gsi_end_p (gsi);
6036 gsi_next (&gsi))
6037 {
6038 def_operand_p def_p;
6039 ssa_op_iter op_iter;
6040 tree lhs;
6041 gimple *stmt, *copy;
6042
6043 stmt = gsi_stmt (gsi);
6044 if (gimple_code (stmt) == GIMPLE_LABEL)
6045 continue;
6046
6047 /* Don't duplicate label debug stmts. */
6048 if (gimple_debug_bind_p (stmt)
6049 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6050 == LABEL_DECL)
6051 continue;
6052
6053 /* Create a new copy of STMT and duplicate STMT's virtual
6054 operands. */
6055 copy = gimple_copy (stmt);
6056 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6057
6058 maybe_duplicate_eh_stmt (copy, stmt);
6059 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6060
6061 /* When copying around a stmt writing into a local non-user
6062 aggregate, make sure it won't share a stack slot with other
6063 vars. */
6064 lhs = gimple_get_lhs (stmt);
6065 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6066 {
6067 tree base = get_base_address (lhs);
6068 if (base
6069 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6070 && DECL_IGNORED_P (base)
6071 && !TREE_STATIC (base)
6072 && !DECL_EXTERNAL (base)
6073 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6074 DECL_NONSHAREABLE (base) = 1;
6075 }
6076
6077 /* Create new names for all the definitions created by COPY and
6078 add replacement mappings for each new name. */
6079 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6080 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6081 }
6082
6083 return new_bb;
6084 }
6085
6086 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6087
6088 static void
6089 add_phi_args_after_copy_edge (edge e_copy)
6090 {
6091 basic_block bb, bb_copy = e_copy->src, dest;
6092 edge e;
6093 edge_iterator ei;
6094 gphi *phi, *phi_copy;
6095 tree def;
6096 gphi_iterator psi, psi_copy;
6097
6098 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6099 return;
6100
6101 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6102
6103 if (e_copy->dest->flags & BB_DUPLICATED)
6104 dest = get_bb_original (e_copy->dest);
6105 else
6106 dest = e_copy->dest;
6107
6108 e = find_edge (bb, dest);
6109 if (!e)
6110 {
6111 /* During loop unrolling the target of the latch edge is copied.
6112 In this case we are not looking for the edge to dest, but for
6113 the edge to the duplicated block whose original was dest. */
6114 FOR_EACH_EDGE (e, ei, bb->succs)
6115 {
6116 if ((e->dest->flags & BB_DUPLICATED)
6117 && get_bb_original (e->dest) == dest)
6118 break;
6119 }
6120
6121 gcc_assert (e != NULL);
6122 }
6123
6124 for (psi = gsi_start_phis (e->dest),
6125 psi_copy = gsi_start_phis (e_copy->dest);
6126 !gsi_end_p (psi);
6127 gsi_next (&psi), gsi_next (&psi_copy))
6128 {
6129 phi = psi.phi ();
6130 phi_copy = psi_copy.phi ();
6131 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6132 add_phi_arg (phi_copy, def, e_copy,
6133 gimple_phi_arg_location_from_edge (phi, e));
6134 }
6135 }
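
/* For instance (an illustrative PHI), if DEST contains
   x_1 = PHI <a_3(BB), b_4(OTHER)> and E is the edge BB->DEST, then the
   copied edge E_COPY receives the same argument a_3, with its source
   location, appended to the matching PHI in the copy of DEST.  */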
6136
6137
6138 /* Basic block BB_COPY was created by code duplication. Add phi node
6139 arguments for edges going out of BB_COPY. The blocks that were
6140 duplicated have BB_DUPLICATED set. */
6141
6142 void
6143 add_phi_args_after_copy_bb (basic_block bb_copy)
6144 {
6145 edge e_copy;
6146 edge_iterator ei;
6147
6148 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6149 {
6150 add_phi_args_after_copy_edge (e_copy);
6151 }
6152 }
6153
6154 /* Blocks in REGION_COPY array of length N_REGION were created by
6155 duplication of basic blocks. Add phi node arguments for edges
6156 going from these blocks. If E_COPY is not NULL, also add
6157 phi node arguments for its destination. */
6158
6159 void
6160 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6161 edge e_copy)
6162 {
6163 unsigned i;
6164
6165 for (i = 0; i < n_region; i++)
6166 region_copy[i]->flags |= BB_DUPLICATED;
6167
6168 for (i = 0; i < n_region; i++)
6169 add_phi_args_after_copy_bb (region_copy[i]);
6170 if (e_copy)
6171 add_phi_args_after_copy_edge (e_copy);
6172
6173 for (i = 0; i < n_region; i++)
6174 region_copy[i]->flags &= ~BB_DUPLICATED;
6175 }
6176
6177 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6178 important exit edge EXIT. By important we mean that no SSA name defined
6179 inside the region is live over the other exit edges of the region. All entry
6180 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6181 to the duplicate of the region. Dominance and loop information is
6182 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6183 UPDATE_DOMINANCE is false then we assume that the caller will update the
6184 dominance information after calling this function. The new basic
6185 blocks are stored to REGION_COPY in the same order as they had in REGION,
6186 provided that REGION_COPY is not NULL.
6187 The function returns false if it is unable to copy the region,
6188 true otherwise. */
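
/* For the loop header copying case this performs, roughly (an
   illustrative sketch only):

     while (cond)          becomes          if (cond)
       body;                                  do
                                                body;
                                              while (cond);

   the duplicated header provides the initial guard and the original
   header test ends up acting as the latch test.  */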
6189
6190 bool
6191 gimple_duplicate_sese_region (edge entry, edge exit,
6192 basic_block *region, unsigned n_region,
6193 basic_block *region_copy,
6194 bool update_dominance)
6195 {
6196 unsigned i;
6197 bool free_region_copy = false, copying_header = false;
6198 struct loop *loop = entry->dest->loop_father;
6199 edge exit_copy;
6200 vec<basic_block> doms;
6201 edge redirected;
6202 int total_freq = 0, entry_freq = 0;
6203 profile_count total_count = profile_count::uninitialized ();
6204 profile_count entry_count = profile_count::uninitialized ();
6205
6206 if (!can_copy_bbs_p (region, n_region))
6207 return false;
6208
6209 /* Some sanity checking. Note that we do not check for all possible
6210 misuses of the functions. I.e. if you ask to copy something weird,
6211 it will work, but the state of structures probably will not be
6212 correct. */
6213 for (i = 0; i < n_region; i++)
6214 {
6215 /* We do not handle subloops, i.e. all the blocks must belong to the
6216 same loop. */
6217 if (region[i]->loop_father != loop)
6218 return false;
6219
6220 if (region[i] != entry->dest
6221 && region[i] == loop->header)
6222 return false;
6223 }
6224
6225 /* In case the function is used for loop header copying (which is the primary
6226 use), ensure that EXIT and its copy will be the new latch and entry edges.
6227 if (loop->header == entry->dest)
6228 {
6229 copying_header = true;
6230
6231 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6232 return false;
6233
6234 for (i = 0; i < n_region; i++)
6235 if (region[i] != exit->src
6236 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6237 return false;
6238 }
6239
6240 initialize_original_copy_tables ();
6241
6242 if (copying_header)
6243 set_loop_copy (loop, loop_outer (loop));
6244 else
6245 set_loop_copy (loop, loop);
6246
6247 if (!region_copy)
6248 {
6249 region_copy = XNEWVEC (basic_block, n_region);
6250 free_region_copy = true;
6251 }
6252
6253 /* Record blocks outside the region that are dominated by something
6254 inside. */
6255 if (update_dominance)
6256 {
6257 doms.create (0);
6258 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6259 }
6260
6261 if (entry->dest->count.initialized_p ())
6262 {
6263 total_count = entry->dest->count;
6264 entry_count = entry->count;
6265 /* Fix up corner cases, to avoid division by zero or creation of negative
6266 frequencies. */
6267 if (entry_count > total_count)
6268 entry_count = total_count;
6269 }
6270 if (!(total_count > 0) || !(entry_count > 0))
6271 {
6272 total_freq = entry->dest->frequency;
6273 entry_freq = EDGE_FREQUENCY (entry);
6274 /* Fix up corner cases, to avoid division by zero or creation of negative
6275 frequencies. */
6276 if (total_freq == 0)
6277 total_freq = 1;
6278 else if (entry_freq > total_freq)
6279 entry_freq = total_freq;
6280 }
6281
6282 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6283 split_edge_bb_loc (entry), update_dominance);
6284 if (total_count > 0 && entry_count > 0)
6285 {
6286 scale_bbs_frequencies_profile_count (region, n_region,
6287 total_count - entry_count,
6288 total_count);
6289 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6290 total_count);
6291 }
6292 else
6293 {
6294 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6295 total_freq);
6296 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6297 }
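
/* A worked example of the scaling above (illustrative numbers): if
   ENTRY->dest had frequency 1000 and the entry edge contributed 400,
   the original region is scaled by 600/1000 and the copy by 400/1000,
   so the two copies together still account for the original profile.  */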
6298
6299 if (copying_header)
6300 {
6301 loop->header = exit->dest;
6302 loop->latch = exit->src;
6303 }
6304
6305 /* Redirect the entry and add the phi node arguments. */
6306 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6307 gcc_assert (redirected != NULL);
6308 flush_pending_stmts (entry);
6309
6310 /* Concerning updating of dominators: We must recount dominators
6311 for the entry block and its copy. Anything that is outside of the
6312 region, but was dominated by something inside needs recounting as
6313 well. */
6314 if (update_dominance)
6315 {
6316 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6317 doms.safe_push (get_bb_original (entry->dest));
6318 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6319 doms.release ();
6320 }
6321
6322 /* Add the other PHI node arguments. */
6323 add_phi_args_after_copy (region_copy, n_region, NULL);
6324
6325 if (free_region_copy)
6326 free (region_copy);
6327
6328 free_original_copy_tables ();
6329 return true;
6330 }
6331
6332 /* Checks if BB is part of the region defined by N_REGION BBS. */
6333 static bool
6334 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6335 {
6336 unsigned int n;
6337
6338 for (n = 0; n < n_region; n++)
6339 {
6340 if (bb == bbs[n])
6341 return true;
6342 }
6343 return false;
6344 }
6345
6346 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6347 are stored to REGION_COPY in the same order in which they appear
6348 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6349 the region, EXIT an exit from it. The condition guarding EXIT
6350 is moved to ENTRY. Returns true if duplication succeeds, false
6351 otherwise.
6352
6353 For example,
6354
6355 some_code;
6356 if (cond)
6357 A;
6358 else
6359 B;
6360
6361 is transformed to
6362
6363 if (cond)
6364 {
6365 some_code;
6366 A;
6367 }
6368 else
6369 {
6370 some_code;
6371 B;
6372 }
6373 */
6374
6375 bool
6376 gimple_duplicate_sese_tail (edge entry, edge exit,
6377 basic_block *region, unsigned n_region,
6378 basic_block *region_copy)
6379 {
6380 unsigned i;
6381 bool free_region_copy = false;
6382 struct loop *loop = exit->dest->loop_father;
6383 struct loop *orig_loop = entry->dest->loop_father;
6384 basic_block switch_bb, entry_bb, nentry_bb;
6385 vec<basic_block> doms;
6386 int total_freq = 0, exit_freq = 0;
6387 profile_count total_count = profile_count::uninitialized (),
6388 exit_count = profile_count::uninitialized ();
6389 edge exits[2], nexits[2], e;
6390 gimple_stmt_iterator gsi;
6391 gimple *cond_stmt;
6392 edge sorig, snew;
6393 basic_block exit_bb;
6394 gphi_iterator psi;
6395 gphi *phi;
6396 tree def;
6397 struct loop *target, *aloop, *cloop;
6398
6399 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6400 exits[0] = exit;
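/* The comparison result below is 0 or 1, so this selects whichever
   successor of EXIT->src is not EXIT itself.  */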
6401 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6402
6403 if (!can_copy_bbs_p (region, n_region))
6404 return false;
6405
6406 initialize_original_copy_tables ();
6407 set_loop_copy (orig_loop, loop);
6408
6409 target = loop;
6410 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6411 {
6412 if (bb_part_of_region_p (aloop->header, region, n_region))
6413 {
6414 cloop = duplicate_loop (aloop, target);
6415 duplicate_subloops (aloop, cloop);
6416 }
6417 }
6418
6419 if (!region_copy)
6420 {
6421 region_copy = XNEWVEC (basic_block, n_region);
6422 free_region_copy = true;
6423 }
6424
6425 gcc_assert (!need_ssa_update_p (cfun));
6426
6427 /* Record blocks outside the region that are dominated by something
6428 inside. */
6429 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6430
6431 if (exit->src->count > 0)
6432 {
6433 total_count = exit->src->count;
6434 exit_count = exit->count;
6435 /* Fix up corner cases, to avoid division by zero or creation of negative
6436 frequencies. */
6437 if (exit_count > total_count)
6438 exit_count = total_count;
6439 }
6440 else
6441 {
6442 total_freq = exit->src->frequency;
6443 exit_freq = EDGE_FREQUENCY (exit);
6444 /* Fix up corner cases, to avoid division by zero or creation of negative
6445 frequencies. */
6446 if (total_freq == 0)
6447 total_freq = 1;
6448 if (exit_freq > total_freq)
6449 exit_freq = total_freq;
6450 }
6451
6452 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6453 split_edge_bb_loc (exit), true);
6454 if (total_count.initialized_p ())
6455 {
6456 scale_bbs_frequencies_profile_count (region, n_region,
6457 total_count - exit_count,
6458 total_count);
6459 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6460 total_count);
6461 }
6462 else
6463 {
6464 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6465 total_freq);
6466 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6467 }
6468
6469 /* Create the switch block, and put the exit condition into it. */
6470 entry_bb = entry->dest;
6471 nentry_bb = get_bb_copy (entry_bb);
6472 if (!last_stmt (entry->src)
6473 || !stmt_ends_bb_p (last_stmt (entry->src)))
6474 switch_bb = entry->src;
6475 else
6476 switch_bb = split_edge (entry);
6477 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6478
6479 gsi = gsi_last_bb (switch_bb);
6480 cond_stmt = last_stmt (exit->src);
6481 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6482 cond_stmt = gimple_copy (cond_stmt);
6483
6484 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6485
6486 sorig = single_succ_edge (switch_bb);
6487 sorig->flags = exits[1]->flags;
6488 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6489
6490 /* Register the new edge from SWITCH_BB in loop exit lists. */
6491 rescan_loop_exit (snew, true, false);
6492
6493 /* Add the PHI node arguments. */
6494 add_phi_args_after_copy (region_copy, n_region, snew);
6495
6496 /* Get rid of now superfluous conditions and associated edges (and phi node
6497 arguments). */
6498 exit_bb = exit->dest;
6499
6500 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6501 PENDING_STMT (e) = NULL;
6502
6503 /* The latch of ORIG_LOOP was copied, and so was the backedge
6504 to the original header. We redirect this backedge to EXIT_BB. */
6505 for (i = 0; i < n_region; i++)
6506 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6507 {
6508 gcc_assert (single_succ_edge (region_copy[i]));
6509 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6510 PENDING_STMT (e) = NULL;
6511 for (psi = gsi_start_phis (exit_bb);
6512 !gsi_end_p (psi);
6513 gsi_next (&psi))
6514 {
6515 phi = psi.phi ();
6516 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6517 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6518 }
6519 }
6520 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6521 PENDING_STMT (e) = NULL;
6522
6523 /* Anything outside of the region that was dominated by something
6524 inside needs its dominance info updated. */
6525 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6526 doms.release ();
6527 /* Update the SSA web. */
6528 update_ssa (TODO_update_ssa);
6529
6530 if (free_region_copy)
6531 free (region_copy);
6532
6533 free_original_copy_tables ();
6534 return true;
6535 }
6536
6537 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6538 adding blocks when the dominator traversal reaches EXIT. This
6539 function silently assumes that ENTRY strictly dominates EXIT. */
6540
6541 void
6542 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6543 vec<basic_block> *bbs_p)
6544 {
6545 basic_block son;
6546
6547 for (son = first_dom_son (CDI_DOMINATORS, entry);
6548 son;
6549 son = next_dom_son (CDI_DOMINATORS, son))
6550 {
6551 bbs_p->safe_push (son);
6552 if (son != exit)
6553 gather_blocks_in_sese_region (son, exit, bbs_p);
6554 }
6555 }
6556
6557 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6558 The duplicates are recorded in VARS_MAP. */
6559
6560 static void
6561 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6562 tree to_context)
6563 {
6564 tree t = *tp, new_t;
6565 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6566
6567 if (DECL_CONTEXT (t) == to_context)
6568 return;
6569
6570 bool existed;
6571 tree &loc = vars_map->get_or_insert (t, &existed);
6572
6573 if (!existed)
6574 {
6575 if (SSA_VAR_P (t))
6576 {
6577 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6578 add_local_decl (f, new_t);
6579 }
6580 else
6581 {
6582 gcc_assert (TREE_CODE (t) == CONST_DECL);
6583 new_t = copy_node (t);
6584 }
6585 DECL_CONTEXT (new_t) = to_context;
6586
6587 loc = new_t;
6588 }
6589 else
6590 new_t = loc;
6591
6592 *tp = new_t;
6593 }
6594
6595
6596 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6597 VARS_MAP maps old ssa names and var_decls to the new ones. */
6598
6599 static tree
6600 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6601 tree to_context)
6602 {
6603 tree new_name;
6604
6605 gcc_assert (!virtual_operand_p (name));
6606
6607 tree *loc = vars_map->get (name);
6608
6609 if (!loc)
6610 {
6611 tree decl = SSA_NAME_VAR (name);
6612 if (decl)
6613 {
6614 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6615 replace_by_duplicate_decl (&decl, vars_map, to_context);
6616 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6617 decl, SSA_NAME_DEF_STMT (name));
6618 }
6619 else
6620 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6621 name, SSA_NAME_DEF_STMT (name));
6622
6623 /* Now that we've used the def stmt to define new_name, make sure it
6624 doesn't define name anymore. */
6625 SSA_NAME_DEF_STMT (name) = NULL;
6626
6627 vars_map->put (name, new_name);
6628 }
6629 else
6630 new_name = *loc;
6631
6632 return new_name;
6633 }
6634
6635 struct move_stmt_d
6636 {
6637 tree orig_block;
6638 tree new_block;
6639 tree from_context;
6640 tree to_context;
6641 hash_map<tree, tree> *vars_map;
6642 htab_t new_label_map;
6643 hash_map<void *, void *> *eh_map;
6644 bool remap_decls_p;
6645 };
6646
6647 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6648 contained in *TP that was previously ORIG_BLOCK, and change the
6649 DECL_CONTEXT of every local variable referenced in *TP. */
6650
6651 static tree
6652 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6653 {
6654 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6655 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6656 tree t = *tp;
6657
6658 if (EXPR_P (t))
6659 {
6660 tree block = TREE_BLOCK (t);
6661 if (block == NULL_TREE)
6662 ;
6663 else if (block == p->orig_block
6664 || p->orig_block == NULL_TREE)
6665 TREE_SET_BLOCK (t, p->new_block);
6666 else if (flag_checking)
6667 {
6668 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6669 block = BLOCK_SUPERCONTEXT (block);
6670 gcc_assert (block == p->orig_block);
6671 }
6672 }
6673 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6674 {
6675 if (TREE_CODE (t) == SSA_NAME)
6676 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6677 else if (TREE_CODE (t) == PARM_DECL
6678 && gimple_in_ssa_p (cfun))
6679 *tp = *(p->vars_map->get (t));
6680 else if (TREE_CODE (t) == LABEL_DECL)
6681 {
6682 if (p->new_label_map)
6683 {
6684 struct tree_map in, *out;
6685 in.base.from = t;
6686 out = (struct tree_map *)
6687 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6688 if (out)
6689 *tp = t = out->to;
6690 }
6691
6692 DECL_CONTEXT (t) = p->to_context;
6693 }
6694 else if (p->remap_decls_p)
6695 {
6696 /* Replace T with its duplicate. T should no longer appear in the
6697 parent function, so this looks wasteful; however, it may appear
6698 in referenced_vars, and more importantly, as virtual operands of
6699 statements, and in alias lists of other variables. It would be
6700 quite difficult to expunge it from all those places. ??? It might
6701 suffice to do this for addressable variables. */
6702 if ((VAR_P (t) && !is_global_var (t))
6703 || TREE_CODE (t) == CONST_DECL)
6704 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6705 }
6706 *walk_subtrees = 0;
6707 }
6708 else if (TYPE_P (t))
6709 *walk_subtrees = 0;
6710
6711 return NULL_TREE;
6712 }
6713
6714 /* Helper for move_stmt_r. Given an EH region number for the source
6715 function, map that to the duplicate EH region number in the dest. */
6716
6717 static int
6718 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6719 {
6720 eh_region old_r, new_r;
6721
6722 old_r = get_eh_region_from_number (old_nr);
6723 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6724
6725 return new_r->index;
6726 }
6727
6728 /* Similar, but operate on INTEGER_CSTs. */
6729
6730 static tree
6731 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6732 {
6733 int old_nr, new_nr;
6734
6735 old_nr = tree_to_shwi (old_t_nr);
6736 new_nr = move_stmt_eh_region_nr (old_nr, p);
6737
6738 return build_int_cst (integer_type_node, new_nr);
6739 }
6740
6741 /* Like move_stmt_op, but for gimple statements.
6742
6743 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6744 contained in the current statement in *GSI_P and change the
6745 DECL_CONTEXT of every local variable referenced in the current
6746 statement. */
6747
6748 static tree
6749 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6750 struct walk_stmt_info *wi)
6751 {
6752 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6753 gimple *stmt = gsi_stmt (*gsi_p);
6754 tree block = gimple_block (stmt);
6755
6756 if (block == p->orig_block
6757 || (p->orig_block == NULL_TREE
6758 && block != NULL_TREE))
6759 gimple_set_block (stmt, p->new_block);
6760
6761 switch (gimple_code (stmt))
6762 {
6763 case GIMPLE_CALL:
6764 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6765 {
6766 tree r, fndecl = gimple_call_fndecl (stmt);
6767 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6768 switch (DECL_FUNCTION_CODE (fndecl))
6769 {
6770 case BUILT_IN_EH_COPY_VALUES:
6771 r = gimple_call_arg (stmt, 1);
6772 r = move_stmt_eh_region_tree_nr (r, p);
6773 gimple_call_set_arg (stmt, 1, r);
6774 /* FALLTHRU */
6775
6776 case BUILT_IN_EH_POINTER:
6777 case BUILT_IN_EH_FILTER:
6778 r = gimple_call_arg (stmt, 0);
6779 r = move_stmt_eh_region_tree_nr (r, p);
6780 gimple_call_set_arg (stmt, 0, r);
6781 break;
6782
6783 default:
6784 break;
6785 }
6786 }
6787 break;
6788
6789 case GIMPLE_RESX:
6790 {
6791 gresx *resx_stmt = as_a <gresx *> (stmt);
6792 int r = gimple_resx_region (resx_stmt);
6793 r = move_stmt_eh_region_nr (r, p);
6794 gimple_resx_set_region (resx_stmt, r);
6795 }
6796 break;
6797
6798 case GIMPLE_EH_DISPATCH:
6799 {
6800 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6801 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6802 r = move_stmt_eh_region_nr (r, p);
6803 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6804 }
6805 break;
6806
6807 case GIMPLE_OMP_RETURN:
6808 case GIMPLE_OMP_CONTINUE:
6809 break;
6810 default:
6811 if (is_gimple_omp (stmt))
6812 {
6813 /* Do not remap variables inside OMP directives. Variables
6814 referenced in clauses and directive header belong to the
6815 parent function and should not be moved into the child
6816 function. */
6817 bool save_remap_decls_p = p->remap_decls_p;
6818 p->remap_decls_p = false;
6819 *handled_ops_p = true;
6820
6821 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6822 move_stmt_op, wi);
6823
6824 p->remap_decls_p = save_remap_decls_p;
6825 }
6826 break;
6827 }
6828
6829 return NULL_TREE;
6830 }
6831
6832 /* Move basic block BB from function CFUN to function DEST_FN. The
6833 block is moved out of the original linked list and placed after
6834 block AFTER in the new list. Also, the block is removed from the
6835 original array of blocks and placed in DEST_FN's array of blocks.
6836 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6837 updated to reflect the moved edges.
6838
6839 The local variables are remapped to new instances, VARS_MAP is used
6840 to record the mapping. */
6841
6842 static void
6843 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6844 basic_block after, bool update_edge_count_p,
6845 struct move_stmt_d *d)
6846 {
6847 struct control_flow_graph *cfg;
6848 edge_iterator ei;
6849 edge e;
6850 gimple_stmt_iterator si;
6851 unsigned old_len, new_len;
6852
6853 /* Remove BB from dominance structures. */
6854 delete_from_dominance_info (CDI_DOMINATORS, bb);
6855
6856 /* Move BB from its current loop to the copy in the new function. */
6857 if (current_loops)
6858 {
6859 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6860 if (new_loop)
6861 bb->loop_father = new_loop;
6862 }
6863
6864 /* Link BB to the new linked list. */
6865 move_block_after (bb, after);
6866
6867 /* Update the edge count in the corresponding flowgraphs. */
6868 if (update_edge_count_p)
6869 FOR_EACH_EDGE (e, ei, bb->succs)
6870 {
6871 cfun->cfg->x_n_edges--;
6872 dest_cfun->cfg->x_n_edges++;
6873 }
6874
6875 /* Remove BB from the original basic block array. */
6876 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6877 cfun->cfg->x_n_basic_blocks--;
6878
6879 /* Grow DEST_CFUN's basic block array if needed. */
6880 cfg = dest_cfun->cfg;
6881 cfg->x_n_basic_blocks++;
6882 if (bb->index >= cfg->x_last_basic_block)
6883 cfg->x_last_basic_block = bb->index + 1;
6884
6885 old_len = vec_safe_length (cfg->x_basic_block_info);
6886 if ((unsigned) cfg->x_last_basic_block >= old_len)
6887 {
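/* Grow by about 25% past what is currently needed, to amortize
   repeated block moves (new_len = x + (x + 3) / 4 is roughly 1.25x).  */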
6888 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6889 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6890 }
6891
6892 (*cfg->x_basic_block_info)[bb->index] = bb;
6893
6894 /* Remap the variables in phi nodes. */
6895 for (gphi_iterator psi = gsi_start_phis (bb);
6896 !gsi_end_p (psi); )
6897 {
6898 gphi *phi = psi.phi ();
6899 use_operand_p use;
6900 tree op = PHI_RESULT (phi);
6901 ssa_op_iter oi;
6902 unsigned i;
6903
6904 if (virtual_operand_p (op))
6905 {
6906 /* Remove the phi nodes for virtual operands (alias analysis will be
6907 run for the new function, anyway). */
6908 remove_phi_node (&psi, true);
6909 continue;
6910 }
6911
6912 SET_PHI_RESULT (phi,
6913 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6914 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6915 {
6916 op = USE_FROM_PTR (use);
6917 if (TREE_CODE (op) == SSA_NAME)
6918 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6919 }
6920
6921 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6922 {
6923 location_t locus = gimple_phi_arg_location (phi, i);
6924 tree block = LOCATION_BLOCK (locus);
6925
6926 if (locus == UNKNOWN_LOCATION)
6927 continue;
6928 if (d->orig_block == NULL_TREE || block == d->orig_block)
6929 {
6930 locus = set_block (locus, d->new_block);
6931 gimple_phi_arg_set_location (phi, i, locus);
6932 }
6933 }
6934
6935 gsi_next (&psi);
6936 }
6937
6938 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6939 {
6940 gimple *stmt = gsi_stmt (si);
6941 struct walk_stmt_info wi;
6942
6943 memset (&wi, 0, sizeof (wi));
6944 wi.info = d;
6945 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6946
6947 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6948 {
6949 tree label = gimple_label_label (label_stmt);
6950 int uid = LABEL_DECL_UID (label);
6951
6952 gcc_assert (uid > -1);
6953
6954 old_len = vec_safe_length (cfg->x_label_to_block_map);
6955 if (old_len <= (unsigned) uid)
6956 {
6957 new_len = 3 * uid / 2 + 1;
6958 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6959 }
6960
6961 (*cfg->x_label_to_block_map)[uid] = bb;
6962 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6963
6964 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6965
6966 if (uid >= dest_cfun->cfg->last_label_uid)
6967 dest_cfun->cfg->last_label_uid = uid + 1;
6968 }
6969
6970 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6971 remove_stmt_from_eh_lp_fn (cfun, stmt);
6972
6973 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6974 gimple_remove_stmt_histograms (cfun, stmt);
6975
6976 /* We cannot leave any operands allocated from the operand caches of
6977 the current function. */
6978 free_stmt_operands (cfun, stmt);
6979 push_cfun (dest_cfun);
6980 update_stmt (stmt);
6981 pop_cfun ();
6982 }
6983
6984 FOR_EACH_EDGE (e, ei, bb->succs)
6985 if (e->goto_locus != UNKNOWN_LOCATION)
6986 {
6987 tree block = LOCATION_BLOCK (e->goto_locus);
6988 if (d->orig_block == NULL_TREE
6989 || block == d->orig_block)
6990 e->goto_locus = set_block (e->goto_locus, d->new_block);
6991 }
6992 }
6993
6994 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6995 the outermost EH region. Use REGION as the incoming base EH region. */
6996
6997 static eh_region
6998 find_outermost_region_in_block (struct function *src_cfun,
6999 basic_block bb, eh_region region)
7000 {
7001 gimple_stmt_iterator si;
7002
7003 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7004 {
7005 gimple *stmt = gsi_stmt (si);
7006 eh_region stmt_region;
7007 int lp_nr;
7008
7009 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7010 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7011 if (stmt_region)
7012 {
7013 if (region == NULL)
7014 region = stmt_region;
7015 else if (stmt_region != region)
7016 {
7017 region = eh_region_outermost (src_cfun, stmt_region, region);
7018 gcc_assert (region != NULL);
7019 }
7020 }
7021 }
7022
7023 return region;
7024 }
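
/* Callback passed to duplicate_eh_regions: create a fresh artificial
   label in the destination function for label DECL and record the
   mapping in the tree_map hash table DATA.  */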
7025
7026 static tree
7027 new_label_mapper (tree decl, void *data)
7028 {
7029 htab_t hash = (htab_t) data;
7030 struct tree_map *m;
7031 void **slot;
7032
7033 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7034
7035 m = XNEW (struct tree_map);
7036 m->hash = DECL_UID (decl);
7037 m->base.from = decl;
7038 m->to = create_artificial_label (UNKNOWN_LOCATION);
7039 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7040 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7041 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7042
7043 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7044 gcc_assert (*slot == NULL);
7045
7046 *slot = m;
7047
7048 return m->to;
7049 }
7050
7051 /* Tree walker to replace the decls used inside value expressions by
7052 duplicates. */
7053
7054 static tree
7055 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7056 {
7057 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7058
7059 switch (TREE_CODE (*tp))
7060 {
7061 case VAR_DECL:
7062 case PARM_DECL:
7063 case RESULT_DECL:
7064 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7065 break;
7066 default:
7067 break;
7068 }
7069
7070 if (IS_TYPE_OR_DECL_P (*tp))
7071 *walk_subtrees = false;
7072
7073 return NULL;
7074 }
7075
7076 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7077 subblocks. */
7078
7079 static void
7080 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7081 tree to_context)
7082 {
7083 tree *tp, t;
7084
7085 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7086 {
7087 t = *tp;
7088 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7089 continue;
7090 replace_by_duplicate_decl (&t, vars_map, to_context);
7091 if (t != *tp)
7092 {
7093 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7094 {
7095 tree x = DECL_VALUE_EXPR (*tp);
7096 struct replace_decls_d rd = { vars_map, to_context };
7097 x = unshare_expr (x);
7098 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7099 SET_DECL_VALUE_EXPR (t, x);
7100 DECL_HAS_VALUE_EXPR_P (t) = 1;
7101 }
7102 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7103 *tp = t;
7104 }
7105 }
7106
7107 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7108 replace_block_vars_by_duplicates (block, vars_map, to_context);
7109 }
7110
7111 /* Fix up the loop arrays and numbers after moving LOOP and its subloops
7112 from FN1 to FN2. */
7113
7114 static void
7115 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7116 struct loop *loop)
7117 {
7118 /* Discard it from the old loop array. */
7119 (*get_loops (fn1))[loop->num] = NULL;
7120
7121 /* Place it in the new loop array, assigning it a new number. */
7122 loop->num = number_of_loops (fn2);
7123 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7124
7125 /* Recurse to children. */
7126 for (loop = loop->inner; loop; loop = loop->next)
7127 fixup_loop_arrays_after_move (fn1, fn2, loop);
7128 }
7129
7130 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7131 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7132
7133 DEBUG_FUNCTION void
7134 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7135 {
7136 basic_block bb;
7137 edge_iterator ei;
7138 edge e;
7139 bitmap bbs = BITMAP_ALLOC (NULL);
7140 int i;
7141
7142 gcc_assert (entry != NULL);
7143 gcc_assert (entry != exit);
7144 gcc_assert (bbs_p != NULL);
7145
7146 gcc_assert (bbs_p->length () > 0);
7147
7148 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7149 bitmap_set_bit (bbs, bb->index);
7150
7151 gcc_assert (bitmap_bit_p (bbs, entry->index));
7152 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7153
7154 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7155 {
7156 if (bb == entry)
7157 {
7158 gcc_assert (single_pred_p (entry));
7159 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7160 }
7161 else
7162 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7163 {
7164 e = ei_edge (ei);
7165 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7166 }
7167
7168 if (bb == exit)
7169 {
7170 gcc_assert (single_succ_p (exit));
7171 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7172 }
7173 else
7174 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7175 {
7176 e = ei_edge (ei);
7177 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7178 }
7179 }
7180
7181 BITMAP_FREE (bbs);
7182 }
7183
7184 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7185
7186 bool
7187 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7188 {
7189 bitmap release_names = (bitmap)data;
7190
7191 if (TREE_CODE (from) != SSA_NAME)
7192 return true;
7193
7194 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7195 return true;
7196 }
7197
7198 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7199 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7200 single basic block in the original CFG and the new basic block is
7201 returned. DEST_CFUN must not have a CFG yet.
7202
7203 Note that the region need not be a pure SESE region. Blocks inside
7204 the region may contain calls to abort/exit. The only restriction
7205 is that ENTRY_BB should be the only entry point and it must
7206 dominate EXIT_BB.
7207
7208 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7209 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7210 to the new function.
7211
7212 All local variables referenced in the region are assumed to be in
7213 the corresponding BLOCK_VARS and unexpanded variable lists
7214 associated with DEST_CFUN.
7215
7216 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7217 reimplement move_sese_region_to_fn by duplicating the region rather than
7218 moving it. */
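
/* An illustrative call pattern (modeled on the OMP expanders; the
   surrounding declarations are assumed): once the region for an
   outlined child function CHILD_FN has been delimited by ENTRY_BB and
   EXIT_BB,

     basic_block new_bb
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                 entry_bb, exit_bb, block);

   leaves NEW_BB in the parent CFG where the region used to be.  */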
7219
7220 basic_block
7221 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7222 basic_block exit_bb, tree orig_block)
7223 {
7224 vec<basic_block> bbs, dom_bbs;
7225 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7226 basic_block after, bb, *entry_pred, *exit_succ, abb;
7227 struct function *saved_cfun = cfun;
7228 int *entry_flag, *exit_flag;
7229 unsigned *entry_prob, *exit_prob;
7230 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7231 edge e;
7232 edge_iterator ei;
7233 htab_t new_label_map;
7234 hash_map<void *, void *> *eh_map;
7235 struct loop *loop = entry_bb->loop_father;
7236 struct loop *loop0 = get_loop (saved_cfun, 0);
7237 struct move_stmt_d d;
7238
7239 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7240 region. */
7241 gcc_assert (entry_bb != exit_bb
7242 && (!exit_bb
7243 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7244
7245 /* Collect all the blocks in the region. Manually add ENTRY_BB
7246 because it won't be added by gather_blocks_in_sese_region. */
7247 bbs.create (0);
7248 bbs.safe_push (entry_bb);
7249 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7250
7251 if (flag_checking)
7252 verify_sese (entry_bb, exit_bb, &bbs);
7253
7254 /* The blocks that used to be dominated by something in BBS will now be
7255 dominated by the new block. */
7256 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7257 bbs.address (),
7258 bbs.length ());
7259
7260 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7261 the predecessor edges to ENTRY_BB and the successor edges to
7262 EXIT_BB so that we can re-attach them to the new basic block that
7263 will replace the region. */
7264 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7265 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7266 entry_flag = XNEWVEC (int, num_entry_edges);
7267 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7268 i = 0;
7269 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7270 {
7271 entry_prob[i] = e->probability;
7272 entry_flag[i] = e->flags;
7273 entry_pred[i++] = e->src;
7274 remove_edge (e);
7275 }
7276
7277 if (exit_bb)
7278 {
7279 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7280 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7281 exit_flag = XNEWVEC (int, num_exit_edges);
7282 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7283 i = 0;
7284 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7285 {
7286 exit_prob[i] = e->probability;
7287 exit_flag[i] = e->flags;
7288 exit_succ[i++] = e->dest;
7289 remove_edge (e);
7290 }
7291 }
7292 else
7293 {
7294 num_exit_edges = 0;
7295 exit_succ = NULL;
7296 exit_flag = NULL;
7297 exit_prob = NULL;
7298 }
7299
7300 /* Switch context to the child function to initialize DEST_FN's CFG. */
7301 gcc_assert (dest_cfun->cfg == NULL);
7302 push_cfun (dest_cfun);
7303
7304 init_empty_tree_cfg ();
7305
7306 /* Initialize EH information for the new function. */
7307 eh_map = NULL;
7308 new_label_map = NULL;
7309 if (saved_cfun->eh)
7310 {
7311 eh_region region = NULL;
7312
7313 FOR_EACH_VEC_ELT (bbs, i, bb)
7314 region = find_outermost_region_in_block (saved_cfun, bb, region);
7315
7316 init_eh_for_function ();
7317 if (region != NULL)
7318 {
7319 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7320 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7321 new_label_mapper, new_label_map);
7322 }
7323 }
7324
7325 /* Initialize an empty loop tree. */
7326 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7327 init_loops_structure (dest_cfun, loops, 1);
7328 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7329 set_loops_for_fn (dest_cfun, loops);
7330
7331 /* Move the outlined loop tree part. */
7332 num_nodes = bbs.length ();
7333 FOR_EACH_VEC_ELT (bbs, i, bb)
7334 {
7335 if (bb->loop_father->header == bb)
7336 {
7337 struct loop *this_loop = bb->loop_father;
7338 struct loop *outer = loop_outer (this_loop);
7339 if (outer == loop
7340 /* If the SESE region contains some bbs ending with
7341 a noreturn call, those are considered to belong
7342 to the outermost loop in saved_cfun, rather than
7343 the entry_bb's loop_father. */
7344 || outer == loop0)
7345 {
7346 if (outer != loop)
7347 num_nodes -= this_loop->num_nodes;
7348 flow_loop_tree_node_remove (bb->loop_father);
7349 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7350 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7351 }
7352 }
7353 else if (bb->loop_father == loop0 && loop0 != loop)
7354 num_nodes--;
7355
7356 /* Remove loop exits from the outlined region. */
7357 if (loops_for_fn (saved_cfun)->exits)
7358 FOR_EACH_EDGE (e, ei, bb->succs)
7359 {
7360 struct loops *l = loops_for_fn (saved_cfun);
7361 loop_exit **slot
7362 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7363 NO_INSERT);
7364 if (slot)
7365 l->exits->clear_slot (slot);
7366 }
7367 }
7368
7369
7370 /* Adjust the number of blocks in the tree root of the outlined part. */
7371 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7372
7373 /* Setup a mapping to be used by move_block_to_fn. */
7374 loop->aux = current_loops->tree_root;
7375 loop0->aux = current_loops->tree_root;
7376
7377 pop_cfun ();
7378
7379 /* Move blocks from BBS into DEST_CFUN. */
7380 gcc_assert (bbs.length () >= 2);
7381 after = dest_cfun->cfg->x_entry_block_ptr;
7382 hash_map<tree, tree> vars_map;
7383
7384 memset (&d, 0, sizeof (d));
7385 d.orig_block = orig_block;
7386 d.new_block = DECL_INITIAL (dest_cfun->decl);
7387 d.from_context = cfun->decl;
7388 d.to_context = dest_cfun->decl;
7389 d.vars_map = &vars_map;
7390 d.new_label_map = new_label_map;
7391 d.eh_map = eh_map;
7392 d.remap_decls_p = true;
7393
7394 if (gimple_in_ssa_p (cfun))
7395 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7396 {
7397 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7398 set_ssa_default_def (dest_cfun, arg, narg);
7399 vars_map.put (arg, narg);
7400 }
7401
7402 FOR_EACH_VEC_ELT (bbs, i, bb)
7403 {
7404 /* No need to update edge counts on the last block. They were
7405 already updated earlier when we detached the region from
7406 the original CFG. */
7407 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7408 after = bb;
7409 }
7410
7411 loop->aux = NULL;
7412 loop0->aux = NULL;
7413 /* Loop sizes are no longer correct, fix them up. */
7414 loop->num_nodes -= num_nodes;
7415 for (struct loop *outer = loop_outer (loop);
7416 outer; outer = loop_outer (outer))
7417 outer->num_nodes -= num_nodes;
7418 loop0->num_nodes -= bbs.length () - num_nodes;
7419
7420 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7421 {
7422 struct loop *aloop;
7423 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7424 if (aloop != NULL)
7425 {
7426 if (aloop->simduid)
7427 {
7428 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7429 d.to_context);
7430 dest_cfun->has_simduid_loops = true;
7431 }
7432 if (aloop->force_vectorize)
7433 dest_cfun->has_force_vectorize_loops = true;
7434 }
7435 }
7436
7437 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7438 if (orig_block)
7439 {
7440 tree block;
7441 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7442 == NULL_TREE);
7443 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7444 = BLOCK_SUBBLOCKS (orig_block);
7445 for (block = BLOCK_SUBBLOCKS (orig_block);
7446 block; block = BLOCK_CHAIN (block))
7447 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7448 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7449 }
7450
7451 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7452 &vars_map, dest_cfun->decl);
7453
7454 if (new_label_map)
7455 htab_delete (new_label_map);
7456 if (eh_map)
7457 delete eh_map;
7458
7459 if (gimple_in_ssa_p (cfun))
7460 {
7461 /* We need to release ssa-names in a defined order, so first find them,
7462 and then iterate in ascending version order. */
7463 bitmap release_names = BITMAP_ALLOC (NULL);
7464 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7465 bitmap_iterator bi;
7466 unsigned i;
7467 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7468 release_ssa_name (ssa_name (i));
7469 BITMAP_FREE (release_names);
7470 }
7471
7472 /* Rewire the entry and exit blocks. The successor to the entry
7473 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7474 the child function. Similarly, the predecessor of DEST_FN's
7475 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7476 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7477 various CFG manipulation functions get to the right CFG.
7478
7479 FIXME, this is silly. The CFG ought to become a parameter to
7480 these helpers. */
7481 push_cfun (dest_cfun);
7482 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7483 if (exit_bb)
7484 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7485 pop_cfun ();
7486
7487 /* Back in the original function, the SESE region has disappeared;
7488 create a new basic block in its place. */
7489 bb = create_empty_bb (entry_pred[0]);
7490 if (current_loops)
7491 add_bb_to_loop (bb, loop);
7492 for (i = 0; i < num_entry_edges; i++)
7493 {
7494 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7495 e->probability = entry_prob[i];
7496 }
7497
7498 for (i = 0; i < num_exit_edges; i++)
7499 {
7500 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7501 e->probability = exit_prob[i];
7502 }
7503
7504 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7505 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7506 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7507 dom_bbs.release ();
7508
7509 if (exit_bb)
7510 {
7511 free (exit_prob);
7512 free (exit_flag);
7513 free (exit_succ);
7514 }
7515 free (entry_prob);
7516 free (entry_flag);
7517 free (entry_pred);
7518 bbs.release ();
7519
7520 return bb;
7521 }
7522
7523 /* Dump default def DEF to file FILE using FLAGS and indentation
7524 SPC. */
7525
7526 static void
7527 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7528 {
7529 for (int i = 0; i < spc; ++i)
7530 fprintf (file, " ");
7531 dump_ssaname_info_to_file (file, def, spc);
7532
7533 print_generic_expr (file, TREE_TYPE (def), flags);
7534 fprintf (file, " ");
7535 print_generic_expr (file, def, flags);
7536 fprintf (file, " = ");
7537 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7538 fprintf (file, ";\n");
7539 }
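
/* For the default definition of an int parameter n this emits a line
   of the form (exact rendering depends on FLAGS):

     int n_1(D) = n;  */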
7540
7541 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7542 dumpfile.h). */
7543
7544 void
7545 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7546 {
7547 tree arg, var, old_current_fndecl = current_function_decl;
7548 struct function *dsf;
7549 bool ignore_topmost_bind = false, any_var = false;
7550 basic_block bb;
7551 tree chain;
7552 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7553 && decl_is_tm_clone (fndecl));
7554 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7555
7556 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7557 {
7558 fprintf (file, "__attribute__((");
7559
7560 bool first = true;
7561 tree chain;
7562 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7563 first = false, chain = TREE_CHAIN (chain))
7564 {
7565 if (!first)
7566 fprintf (file, ", ");
7567
7568 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7569 if (TREE_VALUE (chain) != NULL_TREE)
7570 {
7571 fprintf (file, " (");
7572 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7573 fprintf (file, ")");
7574 }
7575 }
7576
7577 fprintf (file, "))\n");
7578 }
7579
7580 current_function_decl = fndecl;
7581 if (flags & TDF_GIMPLE)
7582 {
7583 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7584 dump_flags | TDF_SLIM);
7585 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7586 }
7587 else
7588 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7589
7590 arg = DECL_ARGUMENTS (fndecl);
7591 while (arg)
7592 {
7593 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7594 fprintf (file, " ");
7595 print_generic_expr (file, arg, dump_flags);
7596 if (DECL_CHAIN (arg))
7597 fprintf (file, ", ");
7598 arg = DECL_CHAIN (arg);
7599 }
7600 fprintf (file, ")\n");
7601
7602 dsf = DECL_STRUCT_FUNCTION (fndecl);
7603 if (dsf && (flags & TDF_EH))
7604 dump_eh_tree (file, dsf);
7605
7606 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7607 {
7608 dump_node (fndecl, TDF_SLIM | flags, file);
7609 current_function_decl = old_current_fndecl;
7610 return;
7611 }
7612
7613 /* When GIMPLE is lowered, the variables are no longer available in
7614 BIND_EXPRs, so display them separately. */
7615 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7616 {
7617 unsigned ix;
7618 ignore_topmost_bind = true;
7619
7620 fprintf (file, "{\n");
7621 if (gimple_in_ssa_p (fun)
7622 && (flags & TDF_ALIAS))
7623 {
7624 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7625 arg = DECL_CHAIN (arg))
7626 {
7627 tree def = ssa_default_def (fun, arg);
7628 if (def)
7629 dump_default_def (file, def, 2, flags);
7630 }
7631
7632 tree res = DECL_RESULT (fun->decl);
7633 if (res != NULL_TREE
7634 && DECL_BY_REFERENCE (res))
7635 {
7636 tree def = ssa_default_def (fun, res);
7637 if (def)
7638 dump_default_def (file, def, 2, flags);
7639 }
7640
7641 tree static_chain = fun->static_chain_decl;
7642 if (static_chain != NULL_TREE)
7643 {
7644 tree def = ssa_default_def (fun, static_chain);
7645 if (def)
7646 dump_default_def (file, def, 2, flags);
7647 }
7648 }
7649
7650 if (!vec_safe_is_empty (fun->local_decls))
7651 FOR_EACH_LOCAL_DECL (fun, ix, var)
7652 {
7653 print_generic_decl (file, var, flags);
7654 fprintf (file, "\n");
7655
7656 any_var = true;
7657 }
7658
7659 tree name;
7660
7661 if (gimple_in_ssa_p (cfun))
7662 FOR_EACH_SSA_NAME (ix, name, cfun)
7663 {
7664 if (!SSA_NAME_VAR (name))
7665 {
7666 fprintf (file, " ");
7667 print_generic_expr (file, TREE_TYPE (name), flags);
7668 fprintf (file, " ");
7669 print_generic_expr (file, name, flags);
7670 fprintf (file, ";\n");
7671
7672 any_var = true;
7673 }
7674 }
7675 }
7676
7677 if (fun && fun->decl == fndecl
7678 && fun->cfg
7679 && basic_block_info_for_fn (fun))
7680 {
7681 /* If the CFG has been built, emit a CFG-based dump. */
7682 if (!ignore_topmost_bind)
7683 fprintf (file, "{\n");
7684
7685 if (any_var && n_basic_blocks_for_fn (fun))
7686 fprintf (file, "\n");
7687
7688 FOR_EACH_BB_FN (bb, fun)
7689 dump_bb (file, bb, 2, flags);
7690
7691 fprintf (file, "}\n");
7692 }
7693 else if (fun->curr_properties & PROP_gimple_any)
7694 {
7695 /* The function is now in GIMPLE form but the CFG has not been
7696 built yet. Emit the single sequence of GIMPLE statements
7697 that make up its body. */
7698 gimple_seq body = gimple_body (fndecl);
7699
7700 if (gimple_seq_first_stmt (body)
7701 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7702 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7703 print_gimple_seq (file, body, 0, flags);
7704 else
7705 {
7706 if (!ignore_topmost_bind)
7707 fprintf (file, "{\n");
7708
7709 if (any_var)
7710 fprintf (file, "\n");
7711
7712 print_gimple_seq (file, body, 2, flags);
7713 fprintf (file, "}\n");
7714 }
7715 }
7716 else
7717 {
7718 int indent;
7719
7720 /* Make a tree based dump. */
7721 chain = DECL_SAVED_TREE (fndecl);
7722 if (chain && TREE_CODE (chain) == BIND_EXPR)
7723 {
7724 if (ignore_topmost_bind)
7725 {
7726 chain = BIND_EXPR_BODY (chain);
7727 indent = 2;
7728 }
7729 else
7730 indent = 0;
7731 }
7732 else
7733 {
7734 if (!ignore_topmost_bind)
7735 {
7736 fprintf (file, "{\n");
7737 /* No topmost bind, pretend it's ignored for later. */
7738 ignore_topmost_bind = true;
7739 }
7740 indent = 2;
7741 }
7742
7743 if (any_var)
7744 fprintf (file, "\n");
7745
7746 print_generic_stmt_indented (file, chain, flags, indent);
7747 if (ignore_topmost_bind)
7748 fprintf (file, "}\n");
7749 }
7750
7751 if (flags & TDF_ENUMERATE_LOCALS)
7752 dump_enumerated_decls (file, flags);
7753 fprintf (file, "\n\n");
7754
7755 current_function_decl = old_current_fndecl;
7756 }
7757
7758 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7759
7760 DEBUG_FUNCTION void
7761 debug_function (tree fn, dump_flags_t flags)
7762 {
7763 dump_function_to_file (fn, stderr, flags);
7764 }
7765
7766
7767 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7768
7769 static void
7770 print_pred_bbs (FILE *file, basic_block bb)
7771 {
7772 edge e;
7773 edge_iterator ei;
7774
7775 FOR_EACH_EDGE (e, ei, bb->preds)
7776 fprintf (file, "bb_%d ", e->src->index);
7777 }
7778
7779
7780 /* Print on FILE the indexes for the successors of basic_block BB. */
7781
7782 static void
7783 print_succ_bbs (FILE *file, basic_block bb)
7784 {
7785 edge e;
7786 edge_iterator ei;
7787
7788 FOR_EACH_EDGE (e, ei, bb->succs)
7789 fprintf (file, "bb_%d ", e->dest->index);
7790 }
7791
7792 /* Print basic block BB to FILE, according to the VERBOSITY level. */
7793
7794 void
7795 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7796 {
7797 char *s_indent = (char *) alloca ((size_t) indent + 1);
7798 memset ((void *) s_indent, ' ', (size_t) indent);
7799 s_indent[indent] = '\0';
7800
7801 /* Print basic_block's header. */
7802 if (verbosity >= 2)
7803 {
7804 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7805 print_pred_bbs (file, bb);
7806 fprintf (file, "}, succs = {");
7807 print_succ_bbs (file, bb);
7808 fprintf (file, "})\n");
7809 }
7810
7811 /* Print basic_block's body. */
7812 if (verbosity >= 3)
7813 {
7814 fprintf (file, "%s {\n", s_indent);
7815 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7816 fprintf (file, "%s }\n", s_indent);
7817 }
7818 }
7819
7820 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7821
7822 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7823 VERBOSITY level this outputs the contents of the loop, or just its
7824 structure. */
7825
7826 static void
7827 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7828 {
7829 char *s_indent;
7830 basic_block bb;
7831
7832 if (loop == NULL)
7833 return;
7834
7835 s_indent = (char *) alloca ((size_t) indent + 1);
7836 memset ((void *) s_indent, ' ', (size_t) indent);
7837 s_indent[indent] = '\0';
7838
7839 /* Print loop's header. */
7840 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7841 if (loop->header)
7842 fprintf (file, "header = %d", loop->header->index);
7843 else
7844 {
7845 fprintf (file, "deleted)\n");
7846 return;
7847 }
7848 if (loop->latch)
7849 fprintf (file, ", latch = %d", loop->latch->index);
7850 else
7851 fprintf (file, ", multiple latches");
7852 fprintf (file, ", niter = ");
7853 print_generic_expr (file, loop->nb_iterations);
7854
7855 if (loop->any_upper_bound)
7856 {
7857 fprintf (file, ", upper_bound = ");
7858 print_decu (loop->nb_iterations_upper_bound, file);
7859 }
7860 if (loop->any_likely_upper_bound)
7861 {
7862 fprintf (file, ", likely_upper_bound = ");
7863 print_decu (loop->nb_iterations_likely_upper_bound, file);
7864 }
7865
7866 if (loop->any_estimate)
7867 {
7868 fprintf (file, ", estimate = ");
7869 print_decu (loop->nb_iterations_estimate, file);
7870 }
7871 fprintf (file, ")\n");
7872
7873 /* Print loop's body. */
7874 if (verbosity >= 1)
7875 {
7876 fprintf (file, "%s{\n", s_indent);
7877 FOR_EACH_BB_FN (bb, cfun)
7878 if (bb->loop_father == loop)
7879 print_loops_bb (file, bb, indent, verbosity);
7880
7881 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7882 fprintf (file, "%s}\n", s_indent);
7883 }
7884 }
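
/* At verbosity 0 only the header line is printed, e.g. (an
   illustrative line):

     loop_1 (header = 3, latch = 4, niter = n_7, upper_bound = 99)  */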
7885
7886 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7887 spaces. Depending on the VERBOSITY level this outputs the contents of the
7888 loop, or just its structure. */
7889
7890 static void
7891 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7892 int verbosity)
7893 {
7894 if (loop == NULL)
7895 return;
7896
7897 print_loop (file, loop, indent, verbosity);
7898 print_loop_and_siblings (file, loop->next, indent, verbosity);
7899 }
7900
7901 /* Starting from the entry point of the current function, pretty print
7902 on FILE the structure of every loop, at the given VERBOSITY level. */
7903
7904 void
7905 print_loops (FILE *file, int verbosity)
7906 {
7907 basic_block bb;
7908
7909 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7910 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7911 if (bb && bb->loop_father)
7912 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7913 }
7914
7915 /* Dump a loop. */
7916
7917 DEBUG_FUNCTION void
7918 debug (struct loop &ref)
7919 {
7920 print_loop (stderr, &ref, 0, /*verbosity*/0);
7921 }
7922
7923 DEBUG_FUNCTION void
7924 debug (struct loop *ptr)
7925 {
7926 if (ptr)
7927 debug (*ptr);
7928 else
7929 fprintf (stderr, "<nil>\n");
7930 }
7931
7932 /* Dump a loop verbosely. */
7933
7934 DEBUG_FUNCTION void
7935 debug_verbose (struct loop &ref)
7936 {
7937 print_loop (stderr, &ref, 0, /*verbosity*/3);
7938 }
7939
7940 DEBUG_FUNCTION void
7941 debug_verbose (struct loop *ptr)
7942 {
7943 if (ptr)
7944 debug_verbose (*ptr);
7945 else
7946 fprintf (stderr, "<nil>\n");
7947 }
7948
7949
7950 /* Debug the loop structure at tree level, at some VERBOSITY level. */
7951
7952 DEBUG_FUNCTION void
7953 debug_loops (int verbosity)
7954 {
7955 print_loops (stderr, verbosity);
7956 }
7957
7958 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7959
7960 DEBUG_FUNCTION void
7961 debug_loop (struct loop *loop, int verbosity)
7962 {
7963 print_loop (stderr, loop, 0, verbosity);
7964 }
7965
7966 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7967 level. */
7968
7969 DEBUG_FUNCTION void
7970 debug_loop_num (unsigned num, int verbosity)
7971 {
7972 debug_loop (get_loop (cfun, num), verbosity);
7973 }
7974
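/* These DEBUG_FUNCTION wrappers are intended for interactive use from a
   debugger rather than from compiler code proper; a hypothetical gdb
   session might use them as:

       (gdb) call debug_loops (2)
       (gdb) call debug_loop_num (1, 3)  */
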
7975 /* Return true if BB ends with a call, possibly followed by some
7976 instructions that must stay with the call. Return false
7977 otherwise. */
7978
7979 static bool
7980 gimple_block_ends_with_call_p (basic_block bb)
7981 {
7982 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7983 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7984 }
7985
7986
7987 /* Return true if BB ends with a conditional branch. Return false
7988 otherwise. */
7989
7990 static bool
7991 gimple_block_ends_with_condjump_p (const_basic_block bb)
7992 {
7993 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7994 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7995 }
7996
7997
7998 /* Return true if statement T may terminate execution of BB in ways not
7999 explicitly represented in the CFG. */
8000
8001 bool
8002 stmt_can_terminate_bb_p (gimple *t)
8003 {
8004 tree fndecl = NULL_TREE;
8005 int call_flags = 0;
8006
8007 /* An EH exception not handled internally terminates execution of the
8008 whole function. */
8009 if (stmt_can_throw_external (t))
8010 return true;
8011
8012 /* NORETURN and LONGJMP calls already have an edge to exit.
8013 CONST and PURE calls do not need one.
8014 We don't currently check for CONST and PURE here, although
8015 it would be a good idea, because those attributes are
8016 figured out from the RTL in mark_constant_function, and
8017 the counter incrementation code from -fprofile-arcs
8018 leads to different results from -fbranch-probabilities. */
8019 if (is_gimple_call (t))
8020 {
8021 fndecl = gimple_call_fndecl (t);
8022 call_flags = gimple_call_flags (t);
8023 }
8024
8025 if (is_gimple_call (t)
8026 && fndecl
8027 && DECL_BUILT_IN (fndecl)
8028 && (call_flags & ECF_NOTHROW)
8029 && !(call_flags & ECF_RETURNS_TWICE)
8030 /* fork() doesn't really return twice, but the effect of
8031 wrapping it in __gcov_fork() which calls __gcov_flush()
8032 and clears the counters before forking has the same
8033 effect as returning twice. Force a fake edge. */
8034 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8035 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8036 return false;
8037
8038 if (is_gimple_call (t))
8039 {
8040 edge_iterator ei;
8041 edge e;
8042 basic_block bb;
8043
8044 if (call_flags & (ECF_PURE | ECF_CONST)
8045 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8046 return false;
8047
8048 /* A function call may do longjmp, terminate the program or do other
8049 things. Special-case noreturn calls that have non-abnormal edges out,
8050 since there the lack of edges out of T already represents the fact. */
8051 if (!(call_flags & ECF_NORETURN))
8052 return true;
8053
8054 bb = gimple_bb (t);
8055 FOR_EACH_EDGE (e, ei, bb->succs)
8056 if ((e->flags & EDGE_FAKE) == 0)
8057 return true;
8058 }
8059
8060 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8061 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8062 return true;
8063
8064 return false;
8065 }
8066
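/* Two illustrative readings of the checks above: a call to an unknown
   external function "foo ()" makes this predicate true, since foo may
   throw externally, call exit, or longjmp; whereas a nothrow const or
   pure call that is not ECF_LOOPING_CONST_OR_PURE keeps it false, as it
   can neither throw nor fail to return.  */
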
8067
8068 /* Add fake edges to the function exit for any non-constant and
8069 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
8070 for volatile inline assembly, in the bitmap of blocks specified by
8071 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number of
8072 blocks that were split.
8073
8074 The goal is to expose cases in which entering a basic block does
8075 not imply that all subsequent instructions must be executed. */
8076
8077 static int
8078 gimple_flow_call_edges_add (sbitmap blocks)
8079 {
8080 int i;
8081 int blocks_split = 0;
8082 int last_bb = last_basic_block_for_fn (cfun);
8083 bool check_last_block = false;
8084
8085 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8086 return 0;
8087
8088 if (! blocks)
8089 check_last_block = true;
8090 else
8091 check_last_block = bitmap_bit_p (blocks,
8092 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8093
8094 /* In the last basic block, before epilogue generation, there will be
8095 a fallthru edge to EXIT. Special care is required if the last insn
8096 of the last basic block is a call because make_edge folds duplicate
8097 edges, which would result in the fallthru edge also being marked
8098 fake, which would result in the fallthru edge being removed by
8099 remove_fake_edges, which would result in an invalid CFG.
8100
8101 Moreover, we can't elide the outgoing fake edge, since the block
8102 profiler needs to take this into account in order to solve the minimal
8103 spanning tree in the case that the call doesn't return.
8104
8105 Handle this by adding a dummy instruction in a new last basic block. */
8106 if (check_last_block)
8107 {
8108 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8109 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8110 gimple *t = NULL;
8111
8112 if (!gsi_end_p (gsi))
8113 t = gsi_stmt (gsi);
8114
8115 if (t && stmt_can_terminate_bb_p (t))
8116 {
8117 edge e;
8118
8119 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8120 if (e)
8121 {
8122 gsi_insert_on_edge (e, gimple_build_nop ());
8123 gsi_commit_edge_inserts ();
8124 }
8125 }
8126 }
8127
8128 /* Now add fake edges to the function exit for any non-constant
8129 calls since there is no way that we can determine if they will
8130 return or not... */
8131 for (i = 0; i < last_bb; i++)
8132 {
8133 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8134 gimple_stmt_iterator gsi;
8135 gimple *stmt, *last_stmt;
8136
8137 if (!bb)
8138 continue;
8139
8140 if (blocks && !bitmap_bit_p (blocks, i))
8141 continue;
8142
8143 gsi = gsi_last_nondebug_bb (bb);
8144 if (!gsi_end_p (gsi))
8145 {
8146 last_stmt = gsi_stmt (gsi);
8147 do
8148 {
8149 stmt = gsi_stmt (gsi);
8150 if (stmt_can_terminate_bb_p (stmt))
8151 {
8152 edge e;
8153
8154 /* The handling above of the final block before the
8155 epilogue should be enough to verify that there is
8156 no edge to the exit block in CFG already.
8157 Calling make_edge in such case would cause us to
8158 mark that edge as fake and remove it later. */
8159 if (flag_checking && stmt == last_stmt)
8160 {
8161 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8162 gcc_assert (e == NULL);
8163 }
8164
8165 /* Note that the following may create a new basic block
8166 and renumber the existing basic blocks. */
8167 if (stmt != last_stmt)
8168 {
8169 e = split_block (bb, stmt);
8170 if (e)
8171 blocks_split++;
8172 }
8173 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8174 }
8175 gsi_prev (&gsi);
8176 }
8177 while (!gsi_end_p (gsi));
8178 }
8179 }
8180
8181 if (blocks_split)
8182 verify_flow_info ();
8183
8184 return blocks_split;
8185 }
8186
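/* Sketch of the transformation performed above: for a block

       a = foo ();
       b = a + 1;

   where foo may not return, the block is split after the call and the
   half ending in the call gets an EDGE_FAKE edge to EXIT, so the CFG no
   longer implies that "b = a + 1" executes whenever the block is
   entered.  */
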
8187 /* Removes edge E and all the blocks dominated by it, and updates dominance
8188 information. The IL in E->src needs to be updated separately.
8189 If dominance info is not available, only the edge E is removed. */
8190
8191 void
8192 remove_edge_and_dominated_blocks (edge e)
8193 {
8194 vec<basic_block> bbs_to_remove = vNULL;
8195 vec<basic_block> bbs_to_fix_dom = vNULL;
8196 edge f;
8197 edge_iterator ei;
8198 bool none_removed = false;
8199 unsigned i;
8200 basic_block bb, dbb;
8201 bitmap_iterator bi;
8202
8203 /* If we are removing a path inside a non-root loop, that may change
8204 loop ownership of blocks or remove loops; mark loops for fixup. */
8205 if (current_loops
8206 && loop_outer (e->src->loop_father) != NULL
8207 && e->src->loop_father == e->dest->loop_father)
8208 loops_state_set (LOOPS_NEED_FIXUP);
8209
8210 if (!dom_info_available_p (CDI_DOMINATORS))
8211 {
8212 remove_edge (e);
8213 return;
8214 }
8215
8216 /* No updating is needed for edges to exit. */
8217 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8218 {
8219 if (cfgcleanup_altered_bbs)
8220 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8221 remove_edge (e);
8222 return;
8223 }
8224
8225 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8226 that is not dominated by E->dest, then this set is empty. Otherwise,
8227 all the basic blocks dominated by E->dest are removed.
8228
8229 Also, to DF_IDOM we store the immediate dominators of the blocks in
8230 the dominance frontier of E (i.e., of the successors of the
8231 removed blocks, if there are any, and of E->dest otherwise). */
8232 FOR_EACH_EDGE (f, ei, e->dest->preds)
8233 {
8234 if (f == e)
8235 continue;
8236
8237 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8238 {
8239 none_removed = true;
8240 break;
8241 }
8242 }
8243
8244 auto_bitmap df, df_idom;
8245 if (none_removed)
8246 bitmap_set_bit (df_idom,
8247 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8248 else
8249 {
8250 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8251 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8252 {
8253 FOR_EACH_EDGE (f, ei, bb->succs)
8254 {
8255 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8256 bitmap_set_bit (df, f->dest->index);
8257 }
8258 }
8259 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8260 bitmap_clear_bit (df, bb->index);
8261
8262 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8263 {
8264 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8265 bitmap_set_bit (df_idom,
8266 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8267 }
8268 }
8269
8270 if (cfgcleanup_altered_bbs)
8271 {
8272 /* Record the set of the altered basic blocks. */
8273 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8274 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8275 }
8276
8277 /* Remove E and the cancelled blocks. */
8278 if (none_removed)
8279 remove_edge (e);
8280 else
8281 {
8282 /* Walk backwards so as to get a chance to substitute all
8283 released DEFs into debug stmts. See
8284 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8285 details. */
8286 for (i = bbs_to_remove.length (); i-- > 0; )
8287 delete_basic_block (bbs_to_remove[i]);
8288 }
8289
8290 /* Update the dominance information. The immediate dominator may change only
8291 for blocks whose immediate dominator belongs to DF_IDOM:
8292
8293 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8294 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8295 Z dominates X after the removal. Before removal, there exists a path P
8296 from Y to X that avoids Z. Let F be the last edge on P that is
8297 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8298 dominates W, and because of P, Z does not dominate W), and W belongs to
8299 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8300 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8301 {
8302 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8303 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8304 dbb;
8305 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8306 bbs_to_fix_dom.safe_push (dbb);
8307 }
8308
8309 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8310
8311 bbs_to_remove.release ();
8312 bbs_to_fix_dom.release ();
8313 }
8314
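/* A small worked example of the removal logic above: in the CFG

       A -> B, A -> C, B -> C

   removing edge A->B leaves B with no predecessor that is not dominated
   by B, so B (and anything it dominated) is deleted; C ends up in the
   dominance frontier bitmap DF and its immediate dominator is
   recomputed (here it was A and remains A).  */
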
8315 /* Purge dead EH edges from basic block BB. */
8316
8317 bool
8318 gimple_purge_dead_eh_edges (basic_block bb)
8319 {
8320 bool changed = false;
8321 edge e;
8322 edge_iterator ei;
8323 gimple *stmt = last_stmt (bb);
8324
8325 if (stmt && stmt_can_throw_internal (stmt))
8326 return false;
8327
8328 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8329 {
8330 if (e->flags & EDGE_EH)
8331 {
8332 remove_edge_and_dominated_blocks (e);
8333 changed = true;
8334 }
8335 else
8336 ei_next (&ei);
8337 }
8338
8339 return changed;
8340 }
8341
8342 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8343
8344 bool
8345 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8346 {
8347 bool changed = false;
8348 unsigned i;
8349 bitmap_iterator bi;
8350
8351 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8352 {
8353 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8354
8355 /* Earlier gimple_purge_dead_eh_edges could have removed
8356 this basic block already. */
8357 gcc_assert (bb || changed);
8358 if (bb != NULL)
8359 changed |= gimple_purge_dead_eh_edges (bb);
8360 }
8361
8362 return changed;
8363 }
8364
8365 /* Purge dead abnormal call edges from basic block BB. */
8366
8367 bool
8368 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8369 {
8370 bool changed = false;
8371 edge e;
8372 edge_iterator ei;
8373 gimple *stmt = last_stmt (bb);
8374
8375 if (!cfun->has_nonlocal_label
8376 && !cfun->calls_setjmp)
8377 return false;
8378
8379 if (stmt && stmt_can_make_abnormal_goto (stmt))
8380 return false;
8381
8382 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8383 {
8384 if (e->flags & EDGE_ABNORMAL)
8385 {
8386 if (e->flags & EDGE_FALLTHRU)
8387 e->flags &= ~EDGE_ABNORMAL;
8388 else
8389 remove_edge_and_dominated_blocks (e);
8390 changed = true;
8391 }
8392 else
8393 ei_next (&ei);
8394 }
8395
8396 return changed;
8397 }
8398
8399 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8400
8401 bool
8402 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8403 {
8404 bool changed = false;
8405 unsigned i;
8406 bitmap_iterator bi;
8407
8408 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8409 {
8410 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8411
8412 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8413 this basic block already. */
8414 gcc_assert (bb || changed);
8415 if (bb != NULL)
8416 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8417 }
8418
8419 return changed;
8420 }
8421
8422 /* This function is called whenever a new edge is created or
8423 redirected. */
8424
8425 static void
8426 gimple_execute_on_growing_pred (edge e)
8427 {
8428 basic_block bb = e->dest;
8429
8430 if (!gimple_seq_empty_p (phi_nodes (bb)))
8431 reserve_phi_args_for_new_edge (bb);
8432 }
8433
8434 /* This function is called immediately before edge E is removed from
8435 the edge vector E->dest->preds. */
8436
8437 static void
8438 gimple_execute_on_shrinking_pred (edge e)
8439 {
8440 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8441 remove_phi_args (e);
8442 }
8443
8444 /*---------------------------------------------------------------------------
8445 Helper functions for Loop versioning
8446 ---------------------------------------------------------------------------*/
8447
8448 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8449 of 'first'. Both of them are dominated by 'new_head' basic block. When
8450 'new_head' was created by splitting 'second's incoming edge, split_edge()
8451 gave the new_head->second edge its phi arguments. Later, an additional
8452 edge 'e' was created to connect 'new_head' and 'first'. This routine now
8453 adds, on this additional edge 'e', the same phi args that the
8454 new_head->second edge received as part of the edge splitting. */
8455
8456 static void
8457 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8458 basic_block new_head, edge e)
8459 {
8460 gphi *phi1, *phi2;
8461 gphi_iterator psi1, psi2;
8462 tree def;
8463 edge e2 = find_edge (new_head, second);
8464
8465 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8466 edge, we should always have an edge from NEW_HEAD to SECOND. */
8467 gcc_assert (e2 != NULL);
8468
8469 /* Browse all 'second' basic block phi nodes and add phi args to
8470 edge 'e' for 'first' head. PHI args are always in correct order. */
8471
8472 for (psi2 = gsi_start_phis (second),
8473 psi1 = gsi_start_phis (first);
8474 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8475 gsi_next (&psi2), gsi_next (&psi1))
8476 {
8477 phi1 = psi1.phi ();
8478 phi2 = psi2.phi ();
8479 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8480 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8481 }
8482 }
8483
8484
8485 /* Add an if-else statement to COND_BB with condition COND_EXPR.
8486 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8487 the destination of the ELSE part. */
8488
8489 static void
8490 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8491 basic_block second_head ATTRIBUTE_UNUSED,
8492 basic_block cond_bb, void *cond_e)
8493 {
8494 gimple_stmt_iterator gsi;
8495 gimple *new_cond_expr;
8496 tree cond_expr = (tree) cond_e;
8497 edge e0;
8498
8499 /* Build the new conditional expr. */
8500 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8501 NULL_TREE, NULL_TREE);
8502
8503 /* Add new cond in cond_bb. */
8504 gsi = gsi_last_bb (cond_bb);
8505 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8506
8507 /* Adjust edges appropriately to connect new head with first head
8508 as well as second head. */
8509 e0 = single_succ_edge (cond_bb);
8510 e0->flags &= ~EDGE_FALLTHRU;
8511 e0->flags |= EDGE_FALSE_VALUE;
8512 }
8513
8514
8515 /* Do book-keeping of basic block BB for the profile consistency checker.
8516 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1, do
8517 post-pass accounting. Store the counts in RECORD. */
8518 static void
8519 gimple_account_profile_record (basic_block bb, int after_pass,
8520 struct profile_record *record)
8521 {
8522 gimple_stmt_iterator i;
8523 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8524 {
8525 record->size[after_pass]
8526 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8527 if (bb->count.initialized_p ())
8528 record->time[after_pass]
8529 += estimate_num_insns (gsi_stmt (i),
8530 &eni_time_weights) * bb->count.to_gcov_type ();
8531 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8532 record->time[after_pass]
8533 += estimate_num_insns (gsi_stmt (i),
8534 &eni_time_weights) * bb->frequency;
8535 }
8536 }
8537
8538 struct cfg_hooks gimple_cfg_hooks = {
8539 "gimple",
8540 gimple_verify_flow_info,
8541 gimple_dump_bb, /* dump_bb */
8542 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8543 create_bb, /* create_basic_block */
8544 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8545 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8546 gimple_can_remove_branch_p, /* can_remove_branch_p */
8547 remove_bb, /* delete_basic_block */
8548 gimple_split_block, /* split_block */
8549 gimple_move_block_after, /* move_block_after */
8550 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8551 gimple_merge_blocks, /* merge_blocks */
8552 gimple_predict_edge, /* predict_edge */
8553 gimple_predicted_by_p, /* predicted_by_p */
8554 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8555 gimple_duplicate_bb, /* duplicate_block */
8556 gimple_split_edge, /* split_edge */
8557 gimple_make_forwarder_block, /* make_forwarder_block */
8558 NULL, /* tidy_fallthru_edge */
8559 NULL, /* force_nonfallthru */
8560 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8561 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8562 gimple_flow_call_edges_add, /* flow_call_edges_add */
8563 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8564 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8565 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8566 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8567 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8568 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8569 flush_pending_stmts, /* flush_pending_stmts */
8570 gimple_empty_block_p, /* block_empty_p */
8571 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8572 gimple_account_profile_record,
8573 };
8574
8575
8576 /* Split all critical edges. */
8577
8578 unsigned int
8579 split_critical_edges (void)
8580 {
8581 basic_block bb;
8582 edge e;
8583 edge_iterator ei;
8584
8585 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8586 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8587 mappings around the calls to split_edge. */
8588 start_recording_case_labels ();
8589 FOR_ALL_BB_FN (bb, cfun)
8590 {
8591 FOR_EACH_EDGE (e, ei, bb->succs)
8592 {
8593 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8594 split_edge (e);
8595 /* PRE inserts statements on edges and expects that,
8596 since split_critical_edges was done beforehand, committing edge
8597 insertions will not split more edges. In addition to critical
8598 edges we must split edges whose source block has multiple successors
8599 and ends with a control flow statement, such as RESX.
8600 Go ahead and split them too. This matches the logic in
8601 gimple_find_edge_insert_loc. */
8602 else if ((!single_pred_p (e->dest)
8603 || !gimple_seq_empty_p (phi_nodes (e->dest))
8604 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8605 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8606 && !(e->flags & EDGE_ABNORMAL))
8607 {
8608 gimple_stmt_iterator gsi;
8609
8610 gsi = gsi_last_bb (e->src);
8611 if (!gsi_end_p (gsi)
8612 && stmt_ends_bb_p (gsi_stmt (gsi))
8613 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8614 && !gimple_call_builtin_p (gsi_stmt (gsi),
8615 BUILT_IN_RETURN)))
8616 split_edge (e);
8617 }
8618 }
8619 }
8620 end_recording_case_labels ();
8621 return 0;
8622 }
8623
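/* Reminder: an edge is critical when its source block has more than one
   successor and its destination has more than one predecessor (see
   EDGE_CRITICAL_P in basic-block.h); code cannot be inserted on such an
   edge without creating a new block, which is why PRE wants them split
   up front.  */
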
8624 namespace {
8625
8626 const pass_data pass_data_split_crit_edges =
8627 {
8628 GIMPLE_PASS, /* type */
8629 "crited", /* name */
8630 OPTGROUP_NONE, /* optinfo_flags */
8631 TV_TREE_SPLIT_EDGES, /* tv_id */
8632 PROP_cfg, /* properties_required */
8633 PROP_no_crit_edges, /* properties_provided */
8634 0, /* properties_destroyed */
8635 0, /* todo_flags_start */
8636 0, /* todo_flags_finish */
8637 };
8638
8639 class pass_split_crit_edges : public gimple_opt_pass
8640 {
8641 public:
8642 pass_split_crit_edges (gcc::context *ctxt)
8643 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8644 {}
8645
8646 /* opt_pass methods: */
8647 virtual unsigned int execute (function *) { return split_critical_edges (); }
8648
8649 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8650 }; // class pass_split_crit_edges
8651
8652 } // anon namespace
8653
8654 gimple_opt_pass *
8655 make_pass_split_crit_edges (gcc::context *ctxt)
8656 {
8657 return new pass_split_crit_edges (ctxt);
8658 }
8659
8660
8661 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8662 in basic block BB, splitting the block as appropriate
8663 and creating a new conditionally executed basic block.
8664 Return the created basic block. */
8665 basic_block
8666 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8667 {
8668 edge fall = split_block (bb, stmt);
8669 gimple_stmt_iterator iter = gsi_last_bb (bb);
8670 basic_block new_bb;
8671
8672 /* Insert cond statement. */
8673 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8674 if (gsi_end_p (iter))
8675 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8676 else
8677 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8678
8679 /* Create conditionally executed block. */
8680 new_bb = create_empty_bb (bb);
8681 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8682 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8683
8684 /* Fix edge for split bb. */
8685 fall->flags = EDGE_FALSE_VALUE;
8686
8687 /* Update dominance info. */
8688 if (dom_info_available_p (CDI_DOMINATORS))
8689 {
8690 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8691 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8692 }
8693
8694 /* Update loop info. */
8695 if (current_loops)
8696 add_bb_to_loop (new_bb, bb->loop_father);
8697
8698 return new_bb;
8699 }
8700
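#if 0
  /* A minimal usage sketch (not compiled; VAL, BB and STMT are
     hypothetical locals of the caller): make the statements that the
     caller subsequently adds to THEN_BB execute only when VAL != 0.  */
  gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                   NULL_TREE, NULL_TREE);
  basic_block then_bb = insert_cond_bb (bb, stmt, cond);
#endif
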
8701 /* Build a ternary operation and gimplify it. Emit code before GSI.
8702 Return the gimple_val holding the result. */
8703
8704 tree
8705 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8706 tree type, tree a, tree b, tree c)
8707 {
8708 tree ret;
8709 location_t loc = gimple_location (gsi_stmt (*gsi));
8710
8711 ret = fold_build3_loc (loc, code, type, a, b, c);
8712 STRIP_NOPS (ret);
8713
8714 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8715 GSI_SAME_STMT);
8716 }
8717
8718 /* Build a binary operation and gimplify it. Emit code before GSI.
8719 Return the gimple_val holding the result. */
8720
8721 tree
8722 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8723 tree type, tree a, tree b)
8724 {
8725 tree ret;
8726
8727 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8728 STRIP_NOPS (ret);
8729
8730 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8731 GSI_SAME_STMT);
8732 }
8733
8734 /* Build a unary operation and gimplify it. Emit code before GSI.
8735 Return the gimple_val holding the result. */
8736
8737 tree
8738 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8739 tree a)
8740 {
8741 tree ret;
8742
8743 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8744 STRIP_NOPS (ret);
8745
8746 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8747 GSI_SAME_STMT);
8748 }
8749
8750
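#if 0
  /* A minimal usage sketch (not compiled; TYPE, A, B and C are
     hypothetical gimple values of the caller): emit t = (a + b) * c
     before the statement at *GSI.  */
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  tree t = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
#endif
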
8751 \f
8752 /* Given a basic block B which ends with a conditional and has
8753 precisely two successors, determine which of the edges is taken if
8754 the conditional is true and which is taken if the conditional is
8755 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8756
8757 void
8758 extract_true_false_edges_from_block (basic_block b,
8759 edge *true_edge,
8760 edge *false_edge)
8761 {
8762 edge e = EDGE_SUCC (b, 0);
8763
8764 if (e->flags & EDGE_TRUE_VALUE)
8765 {
8766 *true_edge = e;
8767 *false_edge = EDGE_SUCC (b, 1);
8768 }
8769 else
8770 {
8771 *false_edge = e;
8772 *true_edge = EDGE_SUCC (b, 1);
8773 }
8774 }
8775
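#if 0
  /* A minimal usage sketch (not compiled; COND_BB is a hypothetical
     block ending in a GIMPLE_COND): find which successor runs when the
     condition holds.  */
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  /* TRUE_EDGE->dest is executed when the condition evaluates to true. */
#endif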
8776
8777 /* From a controlling predicate in the immediate dominator DOM of
8778 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8779 predicate evaluates to true and false and store them to
8780 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8781 they are non-NULL. Returns true if the edges can be determined,
8782 else return false. */
8783
8784 bool
8785 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8786 edge *true_controlled_edge,
8787 edge *false_controlled_edge)
8788 {
8789 basic_block bb = phiblock;
8790 edge true_edge, false_edge, tem;
8791 edge e0 = NULL, e1 = NULL;
8792
8793 /* We have to verify that one edge into the PHI node is dominated
8794 by the true edge of the predicate block and the other edge
8795 dominated by the false edge. This ensures that the PHI argument
8796 we are going to take is completely determined by the path we
8797 take from the predicate block.
8798 We can only use BB dominance checks below if the destination of
8799 the true/false edges are dominated by their edge, thus only
8800 have a single predecessor. */
8801 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8802 tem = EDGE_PRED (bb, 0);
8803 if (tem == true_edge
8804 || (single_pred_p (true_edge->dest)
8805 && (tem->src == true_edge->dest
8806 || dominated_by_p (CDI_DOMINATORS,
8807 tem->src, true_edge->dest))))
8808 e0 = tem;
8809 else if (tem == false_edge
8810 || (single_pred_p (false_edge->dest)
8811 && (tem->src == false_edge->dest
8812 || dominated_by_p (CDI_DOMINATORS,
8813 tem->src, false_edge->dest))))
8814 e1 = tem;
8815 else
8816 return false;
8817 tem = EDGE_PRED (bb, 1);
8818 if (tem == true_edge
8819 || (single_pred_p (true_edge->dest)
8820 && (tem->src == true_edge->dest
8821 || dominated_by_p (CDI_DOMINATORS,
8822 tem->src, true_edge->dest))))
8823 e0 = tem;
8824 else if (tem == false_edge
8825 || (single_pred_p (false_edge->dest)
8826 && (tem->src == false_edge->dest
8827 || dominated_by_p (CDI_DOMINATORS,
8828 tem->src, false_edge->dest))))
8829 e1 = tem;
8830 else
8831 return false;
8832 if (!e0 || !e1)
8833 return false;
8834
8835 if (true_controlled_edge)
8836 *true_controlled_edge = e0;
8837 if (false_controlled_edge)
8838 *false_controlled_edge = e1;
8839
8840 return true;
8841 }
8842
8843
8844
8845 /* Emit return warnings. */
8846
8847 namespace {
8848
8849 const pass_data pass_data_warn_function_return =
8850 {
8851 GIMPLE_PASS, /* type */
8852 "*warn_function_return", /* name */
8853 OPTGROUP_NONE, /* optinfo_flags */
8854 TV_NONE, /* tv_id */
8855 PROP_cfg, /* properties_required */
8856 0, /* properties_provided */
8857 0, /* properties_destroyed */
8858 0, /* todo_flags_start */
8859 0, /* todo_flags_finish */
8860 };
8861
8862 class pass_warn_function_return : public gimple_opt_pass
8863 {
8864 public:
8865 pass_warn_function_return (gcc::context *ctxt)
8866 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8867 {}
8868
8869 /* opt_pass methods: */
8870 virtual unsigned int execute (function *);
8871
8872 }; // class pass_warn_function_return
8873
8874 unsigned int
8875 pass_warn_function_return::execute (function *fun)
8876 {
8877 source_location location;
8878 gimple *last;
8879 edge e;
8880 edge_iterator ei;
8881
8882 if (!targetm.warn_func_return (fun->decl))
8883 return 0;
8884
8885 /* If we have a path to EXIT, then we do return. */
8886 if (TREE_THIS_VOLATILE (fun->decl)
8887 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8888 {
8889 location = UNKNOWN_LOCATION;
8890 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8891 {
8892 last = last_stmt (e->src);
8893 if ((gimple_code (last) == GIMPLE_RETURN
8894 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8895 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8896 break;
8897 }
8898 if (location == UNKNOWN_LOCATION)
8899 location = cfun->function_end_locus;
8900 warning_at (location, 0, "%<noreturn%> function does return");
8901 }
8902
8903 /* If we see "return;" in some basic block, then we do reach the end
8904 without returning a value. */
8905 else if (warn_return_type
8906 && !TREE_NO_WARNING (fun->decl)
8907 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8908 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8909 {
8910 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8911 {
8912 gimple *last = last_stmt (e->src);
8913 greturn *return_stmt = dyn_cast <greturn *> (last);
8914 if (return_stmt
8915 && gimple_return_retval (return_stmt) == NULL
8916 && !gimple_no_warning_p (last))
8917 {
8918 location = gimple_location (last);
8919 if (location == UNKNOWN_LOCATION)
8920 location = fun->function_end_locus;
8921 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8922 TREE_NO_WARNING (fun->decl) = 1;
8923 break;
8924 }
8925 }
8926 }
8927 return 0;
8928 }
8929
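/* Hypothetical source diagnosed by the first branch above:

       __attribute__ ((noreturn)) void f (int x)
       {
         if (!x)
           abort ();
       }

   When X is nonzero, control falls off the end, giving F a path to
   EXIT and triggering "'noreturn' function does return".  */
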
8930 } // anon namespace
8931
8932 gimple_opt_pass *
8933 make_pass_warn_function_return (gcc::context *ctxt)
8934 {
8935 return new pass_warn_function_return (ctxt);
8936 }
8937
8938 /* Walk a gimplified function and warn for functions whose return value is
8939 ignored and attribute((warn_unused_result)) is set. This is done before
8940 inlining, so we don't have to worry about that. */
8941
8942 static void
8943 do_warn_unused_result (gimple_seq seq)
8944 {
8945 tree fdecl, ftype;
8946 gimple_stmt_iterator i;
8947
8948 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8949 {
8950 gimple *g = gsi_stmt (i);
8951
8952 switch (gimple_code (g))
8953 {
8954 case GIMPLE_BIND:
8955 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8956 break;
8957 case GIMPLE_TRY:
8958 do_warn_unused_result (gimple_try_eval (g));
8959 do_warn_unused_result (gimple_try_cleanup (g));
8960 break;
8961 case GIMPLE_CATCH:
8962 do_warn_unused_result (gimple_catch_handler (
8963 as_a <gcatch *> (g)));
8964 break;
8965 case GIMPLE_EH_FILTER:
8966 do_warn_unused_result (gimple_eh_filter_failure (g));
8967 break;
8968
8969 case GIMPLE_CALL:
8970 if (gimple_call_lhs (g))
8971 break;
8972 if (gimple_call_internal_p (g))
8973 break;
8974
8975 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8976 LHS. All calls whose value is ignored should be
8977 represented like this. Look for the attribute. */
8978 fdecl = gimple_call_fndecl (g);
8979 ftype = gimple_call_fntype (g);
8980
8981 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8982 {
8983 location_t loc = gimple_location (g);
8984
8985 if (fdecl)
8986 warning_at (loc, OPT_Wunused_result,
8987 "ignoring return value of %qD, "
8988 "declared with attribute warn_unused_result",
8989 fdecl);
8990 else
8991 warning_at (loc, OPT_Wunused_result,
8992 "ignoring return value of function "
8993 "declared with attribute warn_unused_result");
8994 }
8995 break;
8996
8997 default:
8998 /* Not a container, not a call, or a call whose value is used. */
8999 break;
9000 }
9001 }
9002 }
9003
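/* Hypothetical source diagnosed by the GIMPLE_CALL case above:

       __attribute__ ((warn_unused_result)) int must_check (void);
       void g (void) { must_check (); }

   The naked call has no LHS and is not internal, so the attribute
   lookup fires and we warn about the ignored return value.  */
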
9004 namespace {
9005
9006 const pass_data pass_data_warn_unused_result =
9007 {
9008 GIMPLE_PASS, /* type */
9009 "*warn_unused_result", /* name */
9010 OPTGROUP_NONE, /* optinfo_flags */
9011 TV_NONE, /* tv_id */
9012 PROP_gimple_any, /* properties_required */
9013 0, /* properties_provided */
9014 0, /* properties_destroyed */
9015 0, /* todo_flags_start */
9016 0, /* todo_flags_finish */
9017 };
9018
9019 class pass_warn_unused_result : public gimple_opt_pass
9020 {
9021 public:
9022 pass_warn_unused_result (gcc::context *ctxt)
9023 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9024 {}
9025
9026 /* opt_pass methods: */
9027 virtual bool gate (function *) { return flag_warn_unused_result; }
9028 virtual unsigned int execute (function *)
9029 {
9030 do_warn_unused_result (gimple_body (current_function_decl));
9031 return 0;
9032 }
9033
9034 }; // class pass_warn_unused_result
9035
9036 } // anon namespace
9037
9038 gimple_opt_pass *
9039 make_pass_warn_unused_result (gcc::context *ctxt)
9040 {
9041 return new pass_warn_unused_result (ctxt);
9042 }
9043
9044 /* IPA passes, compilation of earlier functions or inlining
9045 might have changed some properties, such as marking functions nothrow,
9046 pure, const or noreturn.
9047 Remove redundant edges and basic blocks, and create new ones if necessary.
9048
9049 This pass can't be executed as a standalone pass from the pass manager,
9050 because in between inlining and this fixup verify_flow_info would fail. */
9051
9052 unsigned int
9053 execute_fixup_cfg (void)
9054 {
9055 basic_block bb;
9056 gimple_stmt_iterator gsi;
9057 int todo = 0;
9058 edge e;
9059 edge_iterator ei;
9060 cgraph_node *node = cgraph_node::get (current_function_decl);
9061 profile_count num = node->count;
9062 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9063 bool scale = num.initialized_p () && den.initialized_p () && !(num == den);
9064
9065 if (scale)
9066 {
9067 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9068 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9069 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9070
9071 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9072 e->count = e->count.apply_scale (num, den);
9073 }
9074
9075 FOR_EACH_BB_FN (bb, cfun)
9076 {
9077 if (scale)
9078 bb->count = bb->count.apply_scale (num, den);
9079 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9080 {
9081 gimple *stmt = gsi_stmt (gsi);
9082 tree decl = is_gimple_call (stmt)
9083 ? gimple_call_fndecl (stmt)
9084 : NULL;
9085 if (decl)
9086 {
9087 int flags = gimple_call_flags (stmt);
9088 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9089 {
9090 if (gimple_purge_dead_abnormal_call_edges (bb))
9091 todo |= TODO_cleanup_cfg;
9092
9093 if (gimple_in_ssa_p (cfun))
9094 {
9095 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9096 update_stmt (stmt);
9097 }
9098 }
9099
9100 if (flags & ECF_NORETURN
9101 && fixup_noreturn_call (stmt))
9102 todo |= TODO_cleanup_cfg;
9103 }
9104
9105 /* Remove stores to variables we marked write-only.
9106 Keep the access when the store has a side effect, i.e. when the
9107 source is volatile. */
9108 if (gimple_store_p (stmt)
9109 && !gimple_has_side_effects (stmt))
9110 {
9111 tree lhs = get_base_address (gimple_get_lhs (stmt));
9112
9113 if (VAR_P (lhs)
9114 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9115 && varpool_node::get (lhs)->writeonly)
9116 {
9117 unlink_stmt_vdef (stmt);
9118 gsi_remove (&gsi, true);
9119 release_defs (stmt);
9120 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9121 continue;
9122 }
9123 }
9124 /* For calls we can simply remove LHS when it is known
9125 to be write-only. */
9126 if (is_gimple_call (stmt)
9127 && gimple_get_lhs (stmt))
9128 {
9129 tree lhs = get_base_address (gimple_get_lhs (stmt));
9130
9131 if (VAR_P (lhs)
9132 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9133 && varpool_node::get (lhs)->writeonly)
9134 {
9135 gimple_call_set_lhs (stmt, NULL);
9136 update_stmt (stmt);
9137 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9138 }
9139 }
9140
9141 if (maybe_clean_eh_stmt (stmt)
9142 && gimple_purge_dead_eh_edges (bb))
9143 todo |= TODO_cleanup_cfg;
9144 gsi_next (&gsi);
9145 }
9146
9147 if (scale)
9148 FOR_EACH_EDGE (e, ei, bb->succs)
9149 e->count = e->count.apply_scale (num, den);
9150
9151 /* If we have a basic block with no successors that does not
9152 end with a control statement or a noreturn call end it with
9153 a call to __builtin_unreachable. This situation can occur
9154 when inlining a noreturn call that does in fact return. */
9155 if (EDGE_COUNT (bb->succs) == 0)
9156 {
9157 gimple *stmt = last_stmt (bb);
9158 if (!stmt
9159 || (!is_ctrl_stmt (stmt)
9160 && (!is_gimple_call (stmt)
9161 || !gimple_call_noreturn_p (stmt))))
9162 {
9163 if (stmt && is_gimple_call (stmt))
9164 gimple_call_set_ctrl_altering (stmt, false);
9165 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9166 stmt = gimple_build_call (fndecl, 0);
9167 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9168 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9169 if (!cfun->after_inlining)
9170 {
9171 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9172 int freq
9173 = compute_call_stmt_bb_frequency (current_function_decl,
9174 bb);
9175 node->create_edge (cgraph_node::get_create (fndecl),
9176 call_stmt, bb->count, freq);
9177 }
9178 }
9179 }
9180 }
9181 if (scale)
9182 compute_function_frequency ();
9183
9184 if (current_loops
9185 && (todo & TODO_cleanup_cfg))
9186 loops_state_set (LOOPS_NEED_FIXUP);
9187
9188 return todo;
9189 }
9190
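/* Hypothetical source exercising the __builtin_unreachable fixup above:

       __attribute__ ((noreturn)) static void die (void) { }
       int f (void) { die (); return 0; }

   If DIE is inlined, the path through its body reaches a block with no
   successors that ends in neither a control statement nor a noreturn
   call, so it is terminated with a call to __builtin_unreachable.  */
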
9191 namespace {
9192
9193 const pass_data pass_data_fixup_cfg =
9194 {
9195 GIMPLE_PASS, /* type */
9196 "fixup_cfg", /* name */
9197 OPTGROUP_NONE, /* optinfo_flags */
9198 TV_NONE, /* tv_id */
9199 PROP_cfg, /* properties_required */
9200 0, /* properties_provided */
9201 0, /* properties_destroyed */
9202 0, /* todo_flags_start */
9203 0, /* todo_flags_finish */
9204 };
9205
9206 class pass_fixup_cfg : public gimple_opt_pass
9207 {
9208 public:
9209 pass_fixup_cfg (gcc::context *ctxt)
9210 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9211 {}
9212
9213 /* opt_pass methods: */
9214 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9215 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9216
9217 }; // class pass_fixup_cfg
9218
9219 } // anon namespace
9220
9221 gimple_opt_pass *
9222 make_pass_fixup_cfg (gcc::context *ctxt)
9223 {
9224 return new pass_fixup_cfg (ctxt);
9225 }
9226
9227 /* Garbage collection support for edge_def. */
9228
9229 extern void gt_ggc_mx (tree&);
9230 extern void gt_ggc_mx (gimple *&);
9231 extern void gt_ggc_mx (rtx&);
9232 extern void gt_ggc_mx (basic_block&);
9233
9234 static void
9235 gt_ggc_mx (rtx_insn *& x)
9236 {
9237 if (x)
9238 gt_ggc_mx_rtx_def ((void *) x);
9239 }
9240
9241 void
9242 gt_ggc_mx (edge_def *e)
9243 {
9244 tree block = LOCATION_BLOCK (e->goto_locus);
9245 gt_ggc_mx (e->src);
9246 gt_ggc_mx (e->dest);
9247 if (current_ir_type () == IR_GIMPLE)
9248 gt_ggc_mx (e->insns.g);
9249 else
9250 gt_ggc_mx (e->insns.r);
9251 gt_ggc_mx (block);
9252 }
9253
9254 /* PCH support for edge_def. */
9255
9256 extern void gt_pch_nx (tree&);
9257 extern void gt_pch_nx (gimple *&);
9258 extern void gt_pch_nx (rtx&);
9259 extern void gt_pch_nx (basic_block&);
9260
9261 static void
9262 gt_pch_nx (rtx_insn *& x)
9263 {
9264 if (x)
9265 gt_pch_nx_rtx_def ((void *) x);
9266 }
9267
9268 void
9269 gt_pch_nx (edge_def *e)
9270 {
9271 tree block = LOCATION_BLOCK (e->goto_locus);
9272 gt_pch_nx (e->src);
9273 gt_pch_nx (e->dest);
9274 if (current_ir_type () == IR_GIMPLE)
9275 gt_pch_nx (e->insns.g);
9276 else
9277 gt_pch_nx (e->insns.r);
9278 gt_pch_nx (block);
9279 }
9280
9281 void
9282 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9283 {
9284 tree block = LOCATION_BLOCK (e->goto_locus);
9285 op (&(e->src), cookie);
9286 op (&(e->dest), cookie);
9287 if (current_ir_type () == IR_GIMPLE)
9288 op (&(e->insns.g), cookie);
9289 else
9290 op (&(e->insns.r), cookie);
9291 op (&(block), cookie);
9292 }
9293
9294 #if CHECKING_P
9295
9296 namespace selftest {
9297
9298 /* Helper function for CFG selftests: create a dummy function decl
9299 and push it as cfun. */
9300
9301 static tree
9302 push_fndecl (const char *name)
9303 {
9304 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9305 /* FIXME: this uses input_location: */
9306 tree fndecl = build_fn_decl (name, fn_type);
9307 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9308 NULL_TREE, integer_type_node);
9309 DECL_RESULT (fndecl) = retval;
9310 push_struct_function (fndecl);
9311 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9312 ASSERT_TRUE (fun != NULL);
9313 init_empty_tree_cfg_for_function (fun);
9314 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9315 ASSERT_EQ (0, n_edges_for_fn (fun));
9316 return fndecl;
9317 }
9318
9319 /* These tests directly create CFGs.
9320 Compare with the static fns within tree-cfg.c:
9321 - build_gimple_cfg
9322 - make_blocks: calls create_basic_block (seq, bb);
9323 - make_edges. */
9324
9325 /* Verify a simple cfg of the form:
9326 ENTRY -> A -> B -> C -> EXIT. */
9327
9328 static void
9329 test_linear_chain ()
9330 {
9331 gimple_register_cfg_hooks ();
9332
9333 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9334 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9335
9336 /* Create some empty blocks. */
9337 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9338 basic_block bb_b = create_empty_bb (bb_a);
9339 basic_block bb_c = create_empty_bb (bb_b);
9340
9341 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9342 ASSERT_EQ (0, n_edges_for_fn (fun));
9343
9344 /* Create some edges: a simple linear chain of BBs. */
9345 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9346 make_edge (bb_a, bb_b, 0);
9347 make_edge (bb_b, bb_c, 0);
9348 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9349
9350 /* Verify the edges. */
9351 ASSERT_EQ (4, n_edges_for_fn (fun));
9352 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9353 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9354 ASSERT_EQ (1, bb_a->preds->length ());
9355 ASSERT_EQ (1, bb_a->succs->length ());
9356 ASSERT_EQ (1, bb_b->preds->length ());
9357 ASSERT_EQ (1, bb_b->succs->length ());
9358 ASSERT_EQ (1, bb_c->preds->length ());
9359 ASSERT_EQ (1, bb_c->succs->length ());
9360 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9361 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9362
9363 /* Verify the dominance information.
9364 Each BB in our simple chain should be dominated by the one before
9365 it. */
9366 calculate_dominance_info (CDI_DOMINATORS);
9367 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9368 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9369 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9370 ASSERT_EQ (1, dom_by_b.length ());
9371 ASSERT_EQ (bb_c, dom_by_b[0]);
9372 free_dominance_info (CDI_DOMINATORS);
9373 dom_by_b.release ();
9374
9375 /* Similarly for post-dominance: each BB in our chain is post-dominated
9376 by the one after it. */
9377 calculate_dominance_info (CDI_POST_DOMINATORS);
9378 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9379 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9380 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9381 ASSERT_EQ (1, postdom_by_b.length ());
9382 ASSERT_EQ (bb_a, postdom_by_b[0]);
9383 free_dominance_info (CDI_POST_DOMINATORS);
9384 postdom_by_b.release ();
9385
9386 pop_cfun ();
9387 }
9388
9389 /* Verify a simple CFG of the form:
9390 ENTRY
9391 |
9392 A
9393 / \
9394 /t \f
9395 B C
9396 \ /
9397 \ /
9398 D
9399 |
9400 EXIT. */
9401
9402 static void
9403 test_diamond ()
9404 {
9405 gimple_register_cfg_hooks ();
9406
9407 tree fndecl = push_fndecl ("cfg_test_diamond");
9408 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9409
9410 /* Create some empty blocks. */
9411 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9412 basic_block bb_b = create_empty_bb (bb_a);
9413 basic_block bb_c = create_empty_bb (bb_a);
9414 basic_block bb_d = create_empty_bb (bb_b);
9415
9416 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9417 ASSERT_EQ (0, n_edges_for_fn (fun));
9418
9419 /* Create the edges. */
9420 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9421 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9422 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9423 make_edge (bb_b, bb_d, 0);
9424 make_edge (bb_c, bb_d, 0);
9425 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9426
9427 /* Verify the edges. */
9428 ASSERT_EQ (6, n_edges_for_fn (fun));
9429 ASSERT_EQ (1, bb_a->preds->length ());
9430 ASSERT_EQ (2, bb_a->succs->length ());
9431 ASSERT_EQ (1, bb_b->preds->length ());
9432 ASSERT_EQ (1, bb_b->succs->length ());
9433 ASSERT_EQ (1, bb_c->preds->length ());
9434 ASSERT_EQ (1, bb_c->succs->length ());
9435 ASSERT_EQ (2, bb_d->preds->length ());
9436 ASSERT_EQ (1, bb_d->succs->length ());
9437
9438 /* Verify the dominance information. */
9439 calculate_dominance_info (CDI_DOMINATORS);
9440 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9441 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9442 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9443 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9444 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9445 dom_by_a.release ();
9446 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9447 ASSERT_EQ (0, dom_by_b.length ());
9448 dom_by_b.release ();
9449 free_dominance_info (CDI_DOMINATORS);
9450
9451 /* Similarly for post-dominance. */
9452 calculate_dominance_info (CDI_POST_DOMINATORS);
9453 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9454 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9455 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9456 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9457 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9458 postdom_by_d.release ();
9459 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9460 ASSERT_EQ (0, postdom_by_b.length ());
9461 postdom_by_b.release ();
9462 free_dominance_info (CDI_POST_DOMINATORS);
9463
9464 pop_cfun ();
9465 }
9466
9467 /* Verify that we can handle a CFG containing a "complete" aka
9468 fully-connected subgraph (where each of A, B, C, D below has an edge
9469 to every node in the subgraph, including itself).
9470 e.g.:
9471 ENTRY EXIT
9472 | ^
9473 | /
9474 | /
9475 | /
9476 V/
9477 A<--->B
9478 ^^ ^^
9479 | \ / |
9480 | X |
9481 | / \ |
9482 VV VV
9483 C<--->D
9484 */
9485
9486 static void
9487 test_fully_connected ()
9488 {
9489 gimple_register_cfg_hooks ();
9490
9491 tree fndecl = push_fndecl ("cfg_fully_connected");
9492 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9493
9494 const int n = 4;
9495
9496 /* Create some empty blocks. */
9497 auto_vec <basic_block> subgraph_nodes;
9498 for (int i = 0; i < n; i++)
9499 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9500
9501 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9502 ASSERT_EQ (0, n_edges_for_fn (fun));
9503
9504 /* Create the edges. */
9505 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9506 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9507 for (int i = 0; i < n; i++)
9508 for (int j = 0; j < n; j++)
9509 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9510
9511 /* Verify the edges. */
9512 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9513 /* The first one is linked to ENTRY/EXIT as well as itself and
9514 everything else. */
9515 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9516 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9517 /* The other ones in the subgraph are linked to everything in
9518 the subgraph (including themselves). */
9519 for (int i = 1; i < n; i++)
9520 {
9521 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9522 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9523 }
9524
9525 /* Verify the dominance information. */
9526 calculate_dominance_info (CDI_DOMINATORS);
9527 /* The initial block in the subgraph should be dominated by ENTRY. */
9528 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9529 get_immediate_dominator (CDI_DOMINATORS,
9530 subgraph_nodes[0]));
9531 /* Every other block in the subgraph should be dominated by the
9532 initial block. */
9533 for (int i = 1; i < n; i++)
9534 ASSERT_EQ (subgraph_nodes[0],
9535 get_immediate_dominator (CDI_DOMINATORS,
9536 subgraph_nodes[i]));
9537 free_dominance_info (CDI_DOMINATORS);
9538
9539 /* Similarly for post-dominance. */
9540 calculate_dominance_info (CDI_POST_DOMINATORS);
9541 /* The initial block in the subgraph should be postdominated by EXIT. */
9542 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9543 get_immediate_dominator (CDI_POST_DOMINATORS,
9544 subgraph_nodes[0]));
9545 /* Every other block in the subgraph should be postdominated by the
9546 initial block, since that leads to EXIT. */
9547 for (int i = 1; i < n; i++)
9548 ASSERT_EQ (subgraph_nodes[0],
9549 get_immediate_dominator (CDI_POST_DOMINATORS,
9550 subgraph_nodes[i]));
9551 free_dominance_info (CDI_POST_DOMINATORS);
9552
9553 pop_cfun ();
9554 }
9555
9556 /* Run all of the selftests within this file. */
9557
9558 void
9559 tree_cfg_c_tests ()
9560 {
9561 test_linear_chain ();
9562 test_diamond ();
9563 test_fully_connected ();
9564 }
9565
9566 } // namespace selftest
9567
9568 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9569 - loop
9570 - nested loops
9571 - switch statement (a block with many out-edges)
9572 - something that jumps to itself
9573 - etc */
9574
9575 #endif /* CHECKING_P */