/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store the last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map *value_type;
  typedef locus_discrim_map *compare_type;
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */
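
/* For instance (an illustrative sketch, not a dump from a testcase):
   a loop guarded by "#pragma GCC ivdep" reaches this point with the
   loop condition preceded by an internal call roughly of the form

     annotated_cond = IFN_ANNOTATE (cond, annot_expr_ivdep_kind);
     if (annotated_cond != 0) ...

   Below we record the annotation on LOOP (here, loop->safelen = INT_MAX)
   and rewrite the call into the plain copy "annotated_cond = cond".  */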

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
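
/* For illustration (GNU C source, not taken from this file): a computed
   goto jumps through a label address,

     void *dest = i ? &&l1 : &&l2;
     goto *dest;

   so gimple_goto_dest is a pointer-valued expression rather than a
   LABEL_DECL, whereas a plain "goto l1;" is not computed.  */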

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */
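
/* For instance (illustrative): calls to abort () or longjmp () carry
   ECF_NORETURN, and a call that might perform a nonlocal goto (say, a
   call to a nested function containing one) can make an abnormal goto;
   either way the call is marked control-altering and will terminate
   its basic block in make_blocks_1 below.  */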

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
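          /* For example (an illustrative sketch, not from a testcase):

               x = setjmp (env);

             becomes

               tmp = setjmp (env);
               x = tmp;

             so the pre-call value of x stays live on the abnormal edge.  */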
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gcond *cond_stmt = as_a <gcond *> (stmt);
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (cond_stmt),
                                  gimple_cond_rhs (cond_stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (cond_stmt);
          else if (onep)
            gimple_cond_make_true (cond_stmt);
        }
    }
}

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Create outgoing edges for BB.  Return 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, otherwise return 0.
   Update *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        tree abort_label
          = gimple_transaction_label (as_a <gtransaction *> (last));
        if (abort_label)
          make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
        fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that
     return twice or to forced labels, factor the abnormal edges too,
     by having all abnormal edges from the calls go to a common
     artificial basic block with an ABNORMAL_DISPATCHER internal call
     and abnormal edges from that basic block to all forced labels
     and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
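
  /* An illustrative before/after sketch of the factoring (assumed block
     shapes, not dump output):

       <bb 3>: goto *p_1;              <bb 3>: gotovar = p_1;
       <bb 5>: goto *p_2;       ==>            goto <factored>;
                                       <bb 5>: gotovar = p_2;
                                               goto <factored>;
                                       <factored>: goto *gotovar;

     so only the factored block needs an edge to every possible
     destination label.  */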
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as
   described in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
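
/* For example (illustrative): in "if (c) x = 1; else x = 2;" written on
   one source line, the then- and else-blocks share that line's locus;
   giving them distinct discriminators lets a sample-based profiler
   attribute execution counts to each block separately.  */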

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the
     dataflow and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
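
/* For example (illustrative): if a block starts with

     L1: L2: x = 1;

   and L2 is referenced only by a goto, step 2 redirects that goto to L1
   and step 3 deletes L2, provided L2 is artificial and neither forced
   nor non-local.  */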

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
            tree label = gimple_transaction_label (trans_stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (trans_stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::add (base_high, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::eq_p (CASE_LOW (merge_case), bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
        return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
        break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
          || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}
1837
1838 /* Replaces all uses of NAME by VAL. */
1839
1840 void
1841 replace_uses_by (tree name, tree val)
1842 {
1843 imm_use_iterator imm_iter;
1844 use_operand_p use;
1845 gimple stmt;
1846 edge e;
1847
1848 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1849 {
1850 /* Mark the block if we change the last stmt in it. */
1851 if (cfgcleanup_altered_bbs
1852 && stmt_ends_bb_p (stmt))
1853 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1854
1855 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1856 {
1857 replace_exp (use, val);
1858
1859 if (gimple_code (stmt) == GIMPLE_PHI)
1860 {
1861 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1862 PHI_ARG_INDEX_FROM_USE (use));
1863 if (e->flags & EDGE_ABNORMAL
1864 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1865 {
1866 /* This can only occur for virtual operands, since
1867 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1868 would prevent replacement. */
1869 gcc_checking_assert (virtual_operand_p (name));
1870 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1871 }
1872 }
1873 }
1874
1875 if (gimple_code (stmt) != GIMPLE_PHI)
1876 {
1877 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1878 gimple orig_stmt = stmt;
1879 size_t i;
1880
1881 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1882 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1883 only change sth from non-invariant to invariant, and only
1884 when propagating constants. */
1885 if (is_gimple_min_invariant (val))
1886 for (i = 0; i < gimple_num_ops (stmt); i++)
1887 {
1888 tree op = gimple_op (stmt, i);
1889 /* Operands may be empty here. For example, the labels
1890 of a GIMPLE_COND are nulled out following the creation
1891 of the corresponding CFG edges. */
1892 if (op && TREE_CODE (op) == ADDR_EXPR)
1893 recompute_tree_invariant_for_addr_expr (op);
1894 }
1895
1896 if (fold_stmt (&gsi))
1897 stmt = gsi_stmt (gsi);
1898
1899 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1900 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1901
1902 update_stmt (stmt);
1903 }
1904 }
1905
1906 gcc_checking_assert (has_zero_uses (name));
1907
1908 /* Also update the trees stored in loop structures. */
1909 if (current_loops)
1910 {
1911 struct loop *loop;
1912
1913 FOR_EACH_LOOP (loop, 0)
1914 {
1915 substitute_in_loop_info (loop, name, val);
1916 }
1917 }
1918 }
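
/* A minimal illustration (hypothetical GIMPLE), assuming x_2 is being
   replaced by the constant 5:

     y_3 = x_2 + 1;         becomes   y_3 = 5 + 1;   (then folded)
     if (x_2 != 0) ...      becomes   if (5 != 0) ...

   Each immediate use of NAME is rewritten, the touched statements are
   folded and updated, and any block whose last statement changed is
   recorded in cfgcleanup_altered_bbs.  */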
1919
1920 /* Merge block B into block A. */
1921
1922 static void
1923 gimple_merge_blocks (basic_block a, basic_block b)
1924 {
1925 gimple_stmt_iterator last, gsi;
1926 gphi_iterator psi;
1927
1928 if (dump_file)
1929 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1930
1931 /* Remove all single-valued PHI nodes from block B of the form
1932 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1933 gsi = gsi_last_bb (a);
1934 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1935 {
1936 gimple phi = gsi_stmt (psi);
1937 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1938 gimple copy;
1939 bool may_replace_uses = (virtual_operand_p (def)
1940 || may_propagate_copy (def, use));
1941
1942 /* In case we maintain loop closed ssa form, do not propagate arguments
1943 of loop exit phi nodes. */
1944 if (current_loops
1945 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1946 && !virtual_operand_p (def)
1947 && TREE_CODE (use) == SSA_NAME
1948 && a->loop_father != b->loop_father)
1949 may_replace_uses = false;
1950
1951 if (!may_replace_uses)
1952 {
1953 gcc_assert (!virtual_operand_p (def));
1954
1955 /* Note that just emitting the copies is fine -- there is no problem
1956 with ordering of phi nodes. This is because A is the single
1957 predecessor of B; therefore the results of the phi nodes cannot
1958 appear as arguments of the phi nodes. */
1959 copy = gimple_build_assign (def, use);
1960 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1961 remove_phi_node (&psi, false);
1962 }
1963 else
1964 {
1965 /* If we deal with a PHI for virtual operands, we can simply
1966 propagate these without fussing with folding or updating
1967 the stmt. */
1968 if (virtual_operand_p (def))
1969 {
1970 imm_use_iterator iter;
1971 use_operand_p use_p;
1972 gimple stmt;
1973
1974 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1975 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1976 SET_USE (use_p, use);
1977
1978 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1979 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1980 }
1981 else
1982 replace_uses_by (def, use);
1983
1984 remove_phi_node (&psi, true);
1985 }
1986 }
1987
1988 /* Ensure that B follows A. */
1989 move_block_after (b, a);
1990
1991 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1992 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1993
1994 /* Remove labels from B and set gimple_bb to A for other statements. */
1995 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1996 {
1997 gimple stmt = gsi_stmt (gsi);
1998 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1999 {
2000 tree label = gimple_label_label (label_stmt);
2001 int lp_nr;
2002
2003 gsi_remove (&gsi, false);
2004
2005 /* Now that we can thread computed gotos, we might have
2006 a situation where we have a forced label in block B.
2007 However, the label at the start of block B might still be
2008 used in other ways (think about the runtime checking for
2009 Fortran assigned gotos). So we cannot just delete the
2010 label. Instead we move the label to the start of block A. */
2011 if (FORCED_LABEL (label))
2012 {
2013 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2014 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2015 }
2016 /* Other user labels are kept around in the form of a debug stmt. */
2017 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2018 {
2019 gimple dbg = gimple_build_debug_bind (label,
2020 integer_zero_node,
2021 stmt);
2022 gimple_debug_bind_reset_value (dbg);
2023 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2024 }
2025
2026 lp_nr = EH_LANDING_PAD_NR (label);
2027 if (lp_nr)
2028 {
2029 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2030 lp->post_landing_pad = NULL;
2031 }
2032 }
2033 else
2034 {
2035 gimple_set_bb (stmt, a);
2036 gsi_next (&gsi);
2037 }
2038 }
2039
2040 /* When merging two BBs, if their counts are different, the larger count
2041 is selected as the new bb count. This is to handle inconsistent
2042 profiles. */
2043 if (a->loop_father == b->loop_father)
2044 {
2045 a->count = MAX (a->count, b->count);
2046 a->frequency = MAX (a->frequency, b->frequency);
2047 }
2048
2049 /* Merge the sequences. */
2050 last = gsi_last_bb (a);
2051 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2052 set_bb_seq (b, NULL);
2053
2054 if (cfgcleanup_altered_bbs)
2055 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2056 }
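
/* A sketch of the transformation, assuming A is the single predecessor
   of B (hypothetical GIMPLE):

     <bb A>:                          <bb A>:
     x_1 = a_2 + 1;                   x_1 = a_2 + 1;
                              ==>     z_4 = x_1 * 2;
     <bb B>:
     # y_3 = PHI <x_1(A)>
     z_4 = y_3 * 2;

   The single-argument PHI for y_3 is propagated (or turned into a copy
   at the end of A when propagation is not possible), B's labels are
   removed or moved, and B's remaining statements are appended to A.  */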
2057
2058
2059 /* Of the two successors of BB, return the one that is not reachable
2060 by a complex edge, if there is one. Else, return BB. We use
2061 this in optimizations that use post-dominators for their heuristics,
2062 to catch the cases in C++ where function calls are involved. */
2063
2064 basic_block
2065 single_noncomplex_succ (basic_block bb)
2066 {
2067 edge e0, e1;
2068 if (EDGE_COUNT (bb->succs) != 2)
2069 return bb;
2070
2071 e0 = EDGE_SUCC (bb, 0);
2072 e1 = EDGE_SUCC (bb, 1);
2073 if (e0->flags & EDGE_COMPLEX)
2074 return e1->dest;
2075 if (e1->flags & EDGE_COMPLEX)
2076 return e0->dest;
2077
2078 return bb;
2079 }
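
/* E.g., for a hypothetical block ending in a call that can throw

     bb 2: t_1 = foo ();    succs: bb 3 (fallthru), bb 9 (EH, complex)

   this returns bb 3.  If BB does not have exactly two successors, or
   if neither edge is complex, BB itself is returned.  */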
2080
2081 /* CALL is a GIMPLE call statement. Set current_function_calls_* flags. */
2082
2083 void
2084 notice_special_calls (gcall *call)
2085 {
2086 int flags = gimple_call_flags (call);
2087
2088 if (flags & ECF_MAY_BE_ALLOCA)
2089 cfun->calls_alloca = true;
2090 if (flags & ECF_RETURNS_TWICE)
2091 cfun->calls_setjmp = true;
2092 }
2093
2094
2095 /* Clear flags set by notice_special_calls. Used by dead code removal
2096 to update the flags. */
2097
2098 void
2099 clear_special_calls (void)
2100 {
2101 cfun->calls_alloca = false;
2102 cfun->calls_setjmp = false;
2103 }
2104
2105 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2106
2107 static void
2108 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2109 {
2110 /* Since this block is no longer reachable, we can just delete all
2111 of its PHI nodes. */
2112 remove_phi_nodes (bb);
2113
2114 /* Remove edges to BB's successors. */
2115 while (EDGE_COUNT (bb->succs) > 0)
2116 remove_edge (EDGE_SUCC (bb, 0));
2117 }
2118
2119
2120 /* Remove statements of basic block BB. */
2121
2122 static void
2123 remove_bb (basic_block bb)
2124 {
2125 gimple_stmt_iterator i;
2126
2127 if (dump_file)
2128 {
2129 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2130 if (dump_flags & TDF_DETAILS)
2131 {
2132 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2133 fprintf (dump_file, "\n");
2134 }
2135 }
2136
2137 if (current_loops)
2138 {
2139 struct loop *loop = bb->loop_father;
2140
2141 /* If a loop gets removed, clean up the information associated
2142 with it. */
2143 if (loop->latch == bb
2144 || loop->header == bb)
2145 free_numbers_of_iterations_estimates_loop (loop);
2146 }
2147
2148 /* Remove all the instructions in the block. */
2149 if (bb_seq (bb) != NULL)
2150 {
2151 /* Walk backwards so as to get a chance to substitute all
2152 released DEFs into debug stmts. See
2153 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2154 details. */
2155 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2156 {
2157 gimple stmt = gsi_stmt (i);
2158 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2159 if (label_stmt
2160 && (FORCED_LABEL (gimple_label_label (label_stmt))
2161 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2162 {
2163 basic_block new_bb;
2164 gimple_stmt_iterator new_gsi;
2165
2166 /* A non-reachable non-local label may still be referenced.
2167 But it no longer needs to carry the extra semantics of
2168 non-locality. */
2169 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2170 {
2171 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2172 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2173 }
2174
2175 new_bb = bb->prev_bb;
2176 new_gsi = gsi_start_bb (new_bb);
2177 gsi_remove (&i, false);
2178 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2179 }
2180 else
2181 {
2182 /* Release SSA definitions if we are in SSA. Note that we
2183 may be called when not in SSA. For example,
2184 final_cleanup calls this function via
2185 cleanup_tree_cfg. */
2186 if (gimple_in_ssa_p (cfun))
2187 release_defs (stmt);
2188
2189 gsi_remove (&i, true);
2190 }
2191
2192 if (gsi_end_p (i))
2193 i = gsi_last_bb (bb);
2194 else
2195 gsi_prev (&i);
2196 }
2197 }
2198
2199 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2200 bb->il.gimple.seq = NULL;
2201 bb->il.gimple.phi_nodes = NULL;
2202 }
2203
2204
2205 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2206 predicate VAL, return the edge that will be taken out of the block.
2207 If VAL does not match a unique edge, NULL is returned. */
2208
2209 edge
2210 find_taken_edge (basic_block bb, tree val)
2211 {
2212 gimple stmt;
2213
2214 stmt = last_stmt (bb);
2215
2216 gcc_assert (stmt);
2217 gcc_assert (is_ctrl_stmt (stmt));
2218
2219 if (val == NULL)
2220 return NULL;
2221
2222 if (!is_gimple_min_invariant (val))
2223 return NULL;
2224
2225 if (gimple_code (stmt) == GIMPLE_COND)
2226 return find_taken_edge_cond_expr (bb, val);
2227
2228 if (gimple_code (stmt) == GIMPLE_SWITCH)
2229 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2230
2231 if (computed_goto_p (stmt))
2232 {
2233 /* Only optimize if the argument is a label; if the argument is
2234 not a label then we cannot construct a proper CFG.
2235
2236 It may be the case that we only need to allow the LABEL_REF to
2237 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2238 appear inside a LABEL_EXPR just to be safe. */
2239 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2240 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2241 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2242 return NULL;
2243 }
2244
2245 gcc_unreachable ();
2246 }
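
/* For instance, given a hypothetical block ending in

     if (x_1 > 10) goto <bb 4>; else goto <bb 5>;

   passing VAL = 1 returns the edge to bb 4 and VAL = 0 the edge to
   bb 5; a VAL that is NULL or not a gimple invariant yields NULL.  */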
2247
2248 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2249 statement, determine which of the outgoing edges will be taken out of the
2250 block. Return NULL if either edge may be taken. */
2251
2252 static edge
2253 find_taken_edge_computed_goto (basic_block bb, tree val)
2254 {
2255 basic_block dest;
2256 edge e = NULL;
2257
2258 dest = label_to_block (val);
2259 if (dest)
2260 {
2261 e = find_edge (bb, dest);
2262 gcc_assert (e != NULL);
2263 }
2264
2265 return e;
2266 }
2267
2268 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2269 statement, determine which of the two edges will be taken out of the
2270 block. Return NULL if either edge may be taken. */
2271
2272 static edge
2273 find_taken_edge_cond_expr (basic_block bb, tree val)
2274 {
2275 edge true_edge, false_edge;
2276
2277 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2278
2279 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2280 return (integer_zerop (val) ? false_edge : true_edge);
2281 }
2282
2283 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2284 statement, determine which edge will be taken out of the block. Return
2285 NULL if any edge may be taken. */
2286
2287 static edge
2288 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2289 tree val)
2290 {
2291 basic_block dest_bb;
2292 edge e;
2293 tree taken_case;
2294
2295 taken_case = find_case_label_for_value (switch_stmt, val);
2296 dest_bb = label_to_block (CASE_LABEL (taken_case));
2297
2298 e = find_edge (bb, dest_bb);
2299 gcc_assert (e);
2300 return e;
2301 }
2302
2303
2304 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2305 We can make optimal use here of the fact that the case labels are
2306 sorted: We can do a binary search for a case matching VAL. */
2307
2308 static tree
2309 find_case_label_for_value (gswitch *switch_stmt, tree val)
2310 {
2311 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2312 tree default_case = gimple_switch_default_label (switch_stmt);
2313
2314 for (low = 0, high = n; high - low > 1; )
2315 {
2316 size_t i = (high + low) / 2;
2317 tree t = gimple_switch_label (switch_stmt, i);
2318 int cmp;
2319
2320 /* Cache the result of comparing CASE_LOW and val. */
2321 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2322
2323 if (cmp > 0)
2324 high = i;
2325 else
2326 low = i;
2327
2328 if (CASE_HIGH (t) == NULL)
2329 {
2330 /* A single-valued case label. */
2331 if (cmp == 0)
2332 return t;
2333 }
2334 else
2335 {
2336 /* A case range. We can only handle integer ranges. */
2337 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2338 return t;
2339 }
2340 }
2341
2342 return default_case;
2343 }
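
/* A worked example (hypothetical): for

     switch (x_1)  { case 1: ...  case 5 ... 9: ...  default: ... }

   the label vector is [default, 1, 5...9], with the case labels sorted
   by CASE_LOW.  Searching for VAL = 7 narrows onto the range label;
   since 5 <= 7 and 7 <= 9 that label is returned.  VAL = 3 matches no
   label and the default label is returned instead.  */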
2344
2345
2346 /* Dump a basic block on stderr. */
2347
2348 void
2349 gimple_debug_bb (basic_block bb)
2350 {
2351 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2352 }
2353
2354
2355 /* Dump basic block with index N on stderr. */
2356
2357 basic_block
2358 gimple_debug_bb_n (int n)
2359 {
2360 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2361 return BASIC_BLOCK_FOR_FN (cfun, n);
2362 }
2363
2364
2365 /* Dump the CFG on stderr.
2366
2367 FLAGS are the same as those used by the tree dumping functions
2368 (see TDF_* in dumpfile.h). */
2369
2370 void
2371 gimple_debug_cfg (int flags)
2372 {
2373 gimple_dump_cfg (stderr, flags);
2374 }
2375
2376
2377 /* Dump the program showing basic block boundaries on the given FILE.
2378
2379 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2380 dumpfile.h). */
2381
2382 void
2383 gimple_dump_cfg (FILE *file, int flags)
2384 {
2385 if (flags & TDF_DETAILS)
2386 {
2387 dump_function_header (file, current_function_decl, flags);
2388 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2389 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2390 last_basic_block_for_fn (cfun));
2391
2392 brief_dump_cfg (file, flags | TDF_COMMENT);
2393 fprintf (file, "\n");
2394 }
2395
2396 if (flags & TDF_STATS)
2397 dump_cfg_stats (file);
2398
2399 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2400 }
2401
2402
2403 /* Dump CFG statistics on FILE. */
2404
2405 void
2406 dump_cfg_stats (FILE *file)
2407 {
2408 static long max_num_merged_labels = 0;
2409 unsigned long size, total = 0;
2410 long num_edges;
2411 basic_block bb;
2412 const char * const fmt_str = "%-30s%-13s%12s\n";
2413 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2414 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2415 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2416 const char *funcname = current_function_name ();
2417
2418 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2419
2420 fprintf (file, "---------------------------------------------------------\n");
2421 fprintf (file, fmt_str, "", " Number of ", "Memory");
2422 fprintf (file, fmt_str, "", " instances ", "used ");
2423 fprintf (file, "---------------------------------------------------------\n");
2424
2425 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2426 total += size;
2427 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2428 SCALE (size), LABEL (size));
2429
2430 num_edges = 0;
2431 FOR_EACH_BB_FN (bb, cfun)
2432 num_edges += EDGE_COUNT (bb->succs);
2433 size = num_edges * sizeof (struct edge_def);
2434 total += size;
2435 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2436
2437 fprintf (file, "---------------------------------------------------------\n");
2438 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2439 LABEL (total));
2440 fprintf (file, "---------------------------------------------------------\n");
2441 fprintf (file, "\n");
2442
2443 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2444 max_num_merged_labels = cfg_stats.num_merged_labels;
2445
2446 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2447 cfg_stats.num_merged_labels, max_num_merged_labels);
2448
2449 fprintf (file, "\n");
2450 }
2451
2452
2453 /* Dump CFG statistics on stderr. Keep extern so that it's always
2454 linked in the final executable. */
2455
2456 DEBUG_FUNCTION void
2457 debug_cfg_stats (void)
2458 {
2459 dump_cfg_stats (stderr);
2460 }
2461
2462 /*---------------------------------------------------------------------------
2463 Miscellaneous helpers
2464 ---------------------------------------------------------------------------*/
2465
2466 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2467 flow. Transfers of control flow associated with EH are excluded. */
2468
2469 static bool
2470 call_can_make_abnormal_goto (gimple t)
2471 {
2472 /* If the function has no non-local labels, then a call cannot make an
2473 abnormal transfer of control. */
2474 if (!cfun->has_nonlocal_label
2475 && !cfun->calls_setjmp)
2476 return false;
2477
2478 /* Likewise if the call has no side effects. */
2479 if (!gimple_has_side_effects (t))
2480 return false;
2481
2482 /* Likewise if the called function is leaf. */
2483 if (gimple_call_flags (t) & ECF_LEAF)
2484 return false;
2485
2486 return true;
2487 }
2488
2489
2490 /* Return true if T can make an abnormal transfer of control flow.
2491 Transfers of control flow associated with EH are excluded. */
2492
2493 bool
2494 stmt_can_make_abnormal_goto (gimple t)
2495 {
2496 if (computed_goto_p (t))
2497 return true;
2498 if (is_gimple_call (t))
2499 return call_can_make_abnormal_goto (t);
2500 return false;
2501 }
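
/* E.g., in a function that calls setjmp, a statement such as
   (hypothetical)

     t_1 = bar ();

   can make an abnormal transfer of control, because bar might call
   longjmp back to the setjmp receiver.  A call with no side effects or
   one marked ECF_LEAF cannot, and neither can non-call statements
   other than computed gotos.  */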
2502
2503
2504 /* Return true if T represents a stmt that always transfers control. */
2505
2506 bool
2507 is_ctrl_stmt (gimple t)
2508 {
2509 switch (gimple_code (t))
2510 {
2511 case GIMPLE_COND:
2512 case GIMPLE_SWITCH:
2513 case GIMPLE_GOTO:
2514 case GIMPLE_RETURN:
2515 case GIMPLE_RESX:
2516 return true;
2517 default:
2518 return false;
2519 }
2520 }
2521
2522
2523 /* Return true if T is a statement that may alter the flow of control
2524 (e.g., a call to a non-returning function). */
2525
2526 bool
2527 is_ctrl_altering_stmt (gimple t)
2528 {
2529 gcc_assert (t);
2530
2531 switch (gimple_code (t))
2532 {
2533 case GIMPLE_CALL:
2534 /* The per-stmt call flag indicates whether the call could alter
2535 control flow. */
2536 if (gimple_call_ctrl_altering_p (t))
2537 return true;
2538 break;
2539
2540 case GIMPLE_EH_DISPATCH:
2541 /* EH_DISPATCH branches to the individual catch handlers at
2542 this level of a try or allowed-exceptions region. It can
2543 fallthru to the next statement as well. */
2544 return true;
2545
2546 case GIMPLE_ASM:
2547 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2548 return true;
2549 break;
2550
2551 CASE_GIMPLE_OMP:
2552 /* OpenMP directives alter control flow. */
2553 return true;
2554
2555 case GIMPLE_TRANSACTION:
2556 /* A transaction start alters control flow. */
2557 return true;
2558
2559 default:
2560 break;
2561 }
2562
2563 /* If a statement can throw, it alters control flow. */
2564 return stmt_can_throw_internal (t);
2565 }
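
/* For example, a call whose ctrl-altering flag is set (say, a noreturn
   call to abort ()), an asm with goto labels, an OpenMP directive, or
   any statement that can throw internally all end their basic block;
   a plain arithmetic assignment does not.  */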
2566
2567
2568 /* Return true if T is a simple local goto. */
2569
2570 bool
2571 simple_goto_p (gimple t)
2572 {
2573 return (gimple_code (t) == GIMPLE_GOTO
2574 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2575 }
2576
2577
2578 /* Return true if STMT should start a new basic block. PREV_STMT is
2579 the statement preceding STMT. It is used when STMT is a label or a
2580 case label. Labels should only start a new basic block if their
2581 previous statement wasn't a label. Otherwise, sequences of labels
2582 would generate unnecessary basic blocks that only contain a single
2583 label. */
2584
2585 static inline bool
2586 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2587 {
2588 if (stmt == NULL)
2589 return false;
2590
2591 /* Labels start a new basic block only if the preceding statement
2592 wasn't a label of the same type. This prevents the creation of
2593 consecutive blocks that have nothing but a single label. */
2594 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2595 {
2596 /* Nonlocal and computed GOTO targets always start a new block. */
2597 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2598 || FORCED_LABEL (gimple_label_label (label_stmt)))
2599 return true;
2600
2601 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2602 {
2603 if (DECL_NONLOCAL (gimple_label_label (
2604 as_a <glabel *> (prev_stmt))))
2605 return true;
2606
2607 cfg_stats.num_merged_labels++;
2608 return false;
2609 }
2610 else
2611 return true;
2612 }
2613 else if (gimple_code (stmt) == GIMPLE_CALL
2614 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2615 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2616 start a new block. */
2617 return true;
2618
2619 return false;
2620 }
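
/* For instance (hypothetical GIMPLE), in

     L1:
     L2:
     x_1 = 1;

   only L1 starts a new block; L2 is merged into it and counted in
   cfg_stats.num_merged_labels.  A DECL_NONLOCAL or FORCED_LABEL label
   always starts its own block, as does a call to setjmp.  */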
2621
2622
2623 /* Return true if T should end a basic block. */
2624
2625 bool
2626 stmt_ends_bb_p (gimple t)
2627 {
2628 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2629 }
2630
2631 /* Remove block annotations and other data structures. */
2632
2633 void
2634 delete_tree_cfg_annotations (void)
2635 {
2636 vec_free (label_to_block_map_for_fn (cfun));
2637 }
2638
2639
2640 /* Return the first statement in basic block BB. */
2641
2642 gimple
2643 first_stmt (basic_block bb)
2644 {
2645 gimple_stmt_iterator i = gsi_start_bb (bb);
2646 gimple stmt = NULL;
2647
2648 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2649 {
2650 gsi_next (&i);
2651 stmt = NULL;
2652 }
2653 return stmt;
2654 }
2655
2656 /* Return the first non-label statement in basic block BB. */
2657
2658 static gimple
2659 first_non_label_stmt (basic_block bb)
2660 {
2661 gimple_stmt_iterator i = gsi_start_bb (bb);
2662 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2663 gsi_next (&i);
2664 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2665 }
2666
2667 /* Return the last statement in basic block BB. */
2668
2669 gimple
2670 last_stmt (basic_block bb)
2671 {
2672 gimple_stmt_iterator i = gsi_last_bb (bb);
2673 gimple stmt = NULL;
2674
2675 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2676 {
2677 gsi_prev (&i);
2678 stmt = NULL;
2679 }
2680 return stmt;
2681 }
2682
2683 /* Return the last statement of an otherwise empty block. Return NULL
2684 if the block is totally empty, or if it contains more than one
2685 statement. */
2686
2687 gimple
2688 last_and_only_stmt (basic_block bb)
2689 {
2690 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2691 gimple last, prev;
2692
2693 if (gsi_end_p (i))
2694 return NULL;
2695
2696 last = gsi_stmt (i);
2697 gsi_prev_nondebug (&i);
2698 if (gsi_end_p (i))
2699 return last;
2700
2701 /* Empty statements should no longer appear in the instruction stream.
2702 Everything that might have appeared before should be deleted by
2703 remove_useless_stmts, and the optimizers should just gsi_remove
2704 instead of smashing with build_empty_stmt.
2705
2706 Thus the only thing that should appear here in a block containing
2707 one executable statement is a label. */
2708 prev = gsi_stmt (i);
2709 if (gimple_code (prev) == GIMPLE_LABEL)
2710 return last;
2711 else
2712 return NULL;
2713 }
2714
2715 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2716
2717 static void
2718 reinstall_phi_args (edge new_edge, edge old_edge)
2719 {
2720 edge_var_map *vm;
2721 int i;
2722 gphi_iterator phis;
2723
2724 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2725 if (!v)
2726 return;
2727
2728 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2729 v->iterate (i, &vm) && !gsi_end_p (phis);
2730 i++, gsi_next (&phis))
2731 {
2732 gphi *phi = phis.phi ();
2733 tree result = redirect_edge_var_map_result (vm);
2734 tree arg = redirect_edge_var_map_def (vm);
2735
2736 gcc_assert (result == gimple_phi_result (phi));
2737
2738 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2739 }
2740
2741 redirect_edge_var_map_clear (old_edge);
2742 }
2743
2744 /* Returns the basic block after which the new basic block created
2745 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2746 near its "logical" location. This is of most help to humans looking
2747 at debugging dumps. */
2748
2749 basic_block
2750 split_edge_bb_loc (edge edge_in)
2751 {
2752 basic_block dest = edge_in->dest;
2753 basic_block dest_prev = dest->prev_bb;
2754
2755 if (dest_prev)
2756 {
2757 edge e = find_edge (dest_prev, dest);
2758 if (e && !(e->flags & EDGE_COMPLEX))
2759 return edge_in->src;
2760 }
2761 return dest_prev;
2762 }
2763
2764 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2765 Abort on abnormal edges. */
2766
2767 static basic_block
2768 gimple_split_edge (edge edge_in)
2769 {
2770 basic_block new_bb, after_bb, dest;
2771 edge new_edge, e;
2772
2773 /* Abnormal edges cannot be split. */
2774 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2775
2776 dest = edge_in->dest;
2777
2778 after_bb = split_edge_bb_loc (edge_in);
2779
2780 new_bb = create_empty_bb (after_bb);
2781 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2782 new_bb->count = edge_in->count;
2783 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2784 new_edge->probability = REG_BR_PROB_BASE;
2785 new_edge->count = edge_in->count;
2786
2787 e = redirect_edge_and_branch (edge_in, new_bb);
2788 gcc_assert (e == edge_in);
2789 reinstall_phi_args (new_edge, e);
2790
2791 return new_bb;
2792 }
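
/* A sketch of splitting a critical edge (hypothetical CFG):

     bb 2 --> bb 5                 bb 2 --> bb N --> bb 5
     bb 2 --> bb 6        ==>      bb 2 --> bb 6

   The new block N inherits EDGE_IN's count and frequency, its single
   fallthru edge to the old destination gets probability
   REG_BR_PROB_BASE, and PHI arguments queued on the redirected edge
   are reinstalled on the new edge by reinstall_phi_args.  */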
2793
2794
2795 /* Verify properties of the address expression T with base object BASE. */
2796
2797 static tree
2798 verify_address (tree t, tree base)
2799 {
2800 bool old_constant;
2801 bool old_side_effects;
2802 bool new_constant;
2803 bool new_side_effects;
2804
2805 old_constant = TREE_CONSTANT (t);
2806 old_side_effects = TREE_SIDE_EFFECTS (t);
2807
2808 recompute_tree_invariant_for_addr_expr (t);
2809 new_side_effects = TREE_SIDE_EFFECTS (t);
2810 new_constant = TREE_CONSTANT (t);
2811
2812 if (old_constant != new_constant)
2813 {
2814 error ("constant not recomputed when ADDR_EXPR changed");
2815 return t;
2816 }
2817 if (old_side_effects != new_side_effects)
2818 {
2819 error ("side effects not recomputed when ADDR_EXPR changed");
2820 return t;
2821 }
2822
2823 if (!(TREE_CODE (base) == VAR_DECL
2824 || TREE_CODE (base) == PARM_DECL
2825 || TREE_CODE (base) == RESULT_DECL))
2826 return NULL_TREE;
2827
2828 if (DECL_GIMPLE_REG_P (base))
2829 {
2830 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2831 return base;
2832 }
2833
2834 return NULL_TREE;
2835 }
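
/* E.g. (illustrative): if constant propagation rewrites &a[i_1] to
   &a[3] without calling recompute_tree_invariant_for_addr_expr, the
   stale TREE_CONSTANT flag is reported here; likewise a variable
   whose address is taken but which still has DECL_GIMPLE_REG_P set.  */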
2836
2837 /* Callback for walk_tree; check that all elements with address taken are
2838 properly noticed as such. DATA is unused. */
2840
2841 static tree
2842 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2843 {
2844 tree t = *tp, x;
2845
2846 if (TYPE_P (t))
2847 *walk_subtrees = 0;
2848
2849 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2850 #define CHECK_OP(N, MSG) \
2851 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2852 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2853
2854 switch (TREE_CODE (t))
2855 {
2856 case SSA_NAME:
2857 if (SSA_NAME_IN_FREE_LIST (t))
2858 {
2859 error ("SSA name in freelist but still referenced");
2860 return *tp;
2861 }
2862 break;
2863
2864 case INDIRECT_REF:
2865 error ("INDIRECT_REF in gimple IL");
2866 return t;
2867
2868 case MEM_REF:
2869 x = TREE_OPERAND (t, 0);
2870 if (!POINTER_TYPE_P (TREE_TYPE (x))
2871 || !is_gimple_mem_ref_addr (x))
2872 {
2873 error ("invalid first operand of MEM_REF");
2874 return x;
2875 }
2876 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2877 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2878 {
2879 error ("invalid offset operand of MEM_REF");
2880 return TREE_OPERAND (t, 1);
2881 }
2882 if (TREE_CODE (x) == ADDR_EXPR
2883 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2884 return x;
2885 *walk_subtrees = 0;
2886 break;
2887
2888 case ASSERT_EXPR:
2889 x = fold (ASSERT_EXPR_COND (t));
2890 if (x == boolean_false_node)
2891 {
2892 error ("ASSERT_EXPR with an always-false condition");
2893 return *tp;
2894 }
2895 break;
2896
2897 case MODIFY_EXPR:
2898 error ("MODIFY_EXPR not expected while having tuples");
2899 return *tp;
2900
2901 case ADDR_EXPR:
2902 {
2903 tree tem;
2904
2905 gcc_assert (is_gimple_address (t));
2906
2907 /* Skip any references (they will be checked when we recurse down the
2908 tree) and ensure that any variable used as a prefix is marked
2909 addressable. */
2910 for (x = TREE_OPERAND (t, 0);
2911 handled_component_p (x);
2912 x = TREE_OPERAND (x, 0))
2913 ;
2914
2915 if ((tem = verify_address (t, x)))
2916 return tem;
2917
2918 if (!(TREE_CODE (x) == VAR_DECL
2919 || TREE_CODE (x) == PARM_DECL
2920 || TREE_CODE (x) == RESULT_DECL))
2921 return NULL;
2922
2923 if (!TREE_ADDRESSABLE (x))
2924 {
2925 error ("address taken, but ADDRESSABLE bit not set");
2926 return x;
2927 }
2928
2929 break;
2930 }
2931
2932 case COND_EXPR:
2933 x = COND_EXPR_COND (t);
2934 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2935 {
2936 error ("non-integral used in condition");
2937 return x;
2938 }
2939 if (!is_gimple_condexpr (x))
2940 {
2941 error ("invalid conditional operand");
2942 return x;
2943 }
2944 break;
2945
2946 case NON_LVALUE_EXPR:
2947 case TRUTH_NOT_EXPR:
2948 gcc_unreachable ();
2949
2950 CASE_CONVERT:
2951 case FIX_TRUNC_EXPR:
2952 case FLOAT_EXPR:
2953 case NEGATE_EXPR:
2954 case ABS_EXPR:
2955 case BIT_NOT_EXPR:
2956 CHECK_OP (0, "invalid operand to unary operator");
2957 break;
2958
2959 case REALPART_EXPR:
2960 case IMAGPART_EXPR:
2961 case BIT_FIELD_REF:
2962 if (!is_gimple_reg_type (TREE_TYPE (t)))
2963 {
2964 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2965 return t;
2966 }
2967
2968 if (TREE_CODE (t) == BIT_FIELD_REF)
2969 {
2970 tree t0 = TREE_OPERAND (t, 0);
2971 tree t1 = TREE_OPERAND (t, 1);
2972 tree t2 = TREE_OPERAND (t, 2);
2973 if (!tree_fits_uhwi_p (t1)
2974 || !tree_fits_uhwi_p (t2))
2975 {
2976 error ("invalid position or size operand to BIT_FIELD_REF");
2977 return t;
2978 }
2979 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2980 && (TYPE_PRECISION (TREE_TYPE (t))
2981 != tree_to_uhwi (t1)))
2982 {
2983 error ("integral result type precision does not match "
2984 "field size of BIT_FIELD_REF");
2985 return t;
2986 }
2987 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2988 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2989 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2990 != tree_to_uhwi (t1)))
2991 {
2992 error ("mode precision of non-integral result does not "
2993 "match field size of BIT_FIELD_REF");
2994 return t;
2995 }
2996 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2997 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2998 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2999 {
3000 error ("position plus size exceeds size of referenced object in "
3001 "BIT_FIELD_REF");
3002 return t;
3003 }
3004 }
3005 t = TREE_OPERAND (t, 0);
3006
3007 /* Fall-through. */
3008 case COMPONENT_REF:
3009 case ARRAY_REF:
3010 case ARRAY_RANGE_REF:
3011 case VIEW_CONVERT_EXPR:
3012 /* We have a nest of references. Verify that each of the operands
3013 that determine where to reference is either a constant or a variable,
3014 verify that the base is valid, and then show we've already checked
3015 the subtrees. */
3016 while (handled_component_p (t))
3017 {
3018 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3019 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3020 else if (TREE_CODE (t) == ARRAY_REF
3021 || TREE_CODE (t) == ARRAY_RANGE_REF)
3022 {
3023 CHECK_OP (1, "invalid array index");
3024 if (TREE_OPERAND (t, 2))
3025 CHECK_OP (2, "invalid array lower bound");
3026 if (TREE_OPERAND (t, 3))
3027 CHECK_OP (3, "invalid array stride");
3028 }
3029 else if (TREE_CODE (t) == BIT_FIELD_REF
3030 || TREE_CODE (t) == REALPART_EXPR
3031 || TREE_CODE (t) == IMAGPART_EXPR)
3032 {
3033 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3034 "REALPART_EXPR");
3035 return t;
3036 }
3037
3038 t = TREE_OPERAND (t, 0);
3039 }
3040
3041 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3042 {
3043 error ("invalid reference prefix");
3044 return t;
3045 }
3046 *walk_subtrees = 0;
3047 break;
3048 case PLUS_EXPR:
3049 case MINUS_EXPR:
3050 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3051 should be done using POINTER_PLUS_EXPR. */
3052 if (POINTER_TYPE_P (TREE_TYPE (t)))
3053 {
3054 error ("invalid operand to plus/minus, type is a pointer");
3055 return t;
3056 }
3057 CHECK_OP (0, "invalid operand to binary operator");
3058 CHECK_OP (1, "invalid operand to binary operator");
3059 break;
3060
3061 case POINTER_PLUS_EXPR:
3062 /* Check to make sure the first operand is a pointer or reference type. */
3063 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3064 {
3065 error ("invalid operand to pointer plus, first operand is not a pointer");
3066 return t;
3067 }
3068 /* Check to make sure the second operand is a ptrofftype. */
3069 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3070 {
3071 error ("invalid operand to pointer plus, second operand is not an "
3072 "integer type of appropriate width");
3073 return t;
3074 }
3075 /* FALLTHROUGH */
3076 case LT_EXPR:
3077 case LE_EXPR:
3078 case GT_EXPR:
3079 case GE_EXPR:
3080 case EQ_EXPR:
3081 case NE_EXPR:
3082 case UNORDERED_EXPR:
3083 case ORDERED_EXPR:
3084 case UNLT_EXPR:
3085 case UNLE_EXPR:
3086 case UNGT_EXPR:
3087 case UNGE_EXPR:
3088 case UNEQ_EXPR:
3089 case LTGT_EXPR:
3090 case MULT_EXPR:
3091 case TRUNC_DIV_EXPR:
3092 case CEIL_DIV_EXPR:
3093 case FLOOR_DIV_EXPR:
3094 case ROUND_DIV_EXPR:
3095 case TRUNC_MOD_EXPR:
3096 case CEIL_MOD_EXPR:
3097 case FLOOR_MOD_EXPR:
3098 case ROUND_MOD_EXPR:
3099 case RDIV_EXPR:
3100 case EXACT_DIV_EXPR:
3101 case MIN_EXPR:
3102 case MAX_EXPR:
3103 case LSHIFT_EXPR:
3104 case RSHIFT_EXPR:
3105 case LROTATE_EXPR:
3106 case RROTATE_EXPR:
3107 case BIT_IOR_EXPR:
3108 case BIT_XOR_EXPR:
3109 case BIT_AND_EXPR:
3110 CHECK_OP (0, "invalid operand to binary operator");
3111 CHECK_OP (1, "invalid operand to binary operator");
3112 break;
3113
3114 case CONSTRUCTOR:
3115 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3116 *walk_subtrees = 0;
3117 break;
3118
3119 case CASE_LABEL_EXPR:
3120 if (CASE_CHAIN (t))
3121 {
3122 error ("invalid CASE_CHAIN");
3123 return t;
3124 }
3125 break;
3126
3127 default:
3128 break;
3129 }
3130 return NULL;
3131
3132 #undef CHECK_OP
3133 }
3134
3135
3136 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3137 Returns true if there is an error, otherwise false. */
3138
3139 static bool
3140 verify_types_in_gimple_min_lval (tree expr)
3141 {
3142 tree op;
3143
3144 if (is_gimple_id (expr))
3145 return false;
3146
3147 if (TREE_CODE (expr) != TARGET_MEM_REF
3148 && TREE_CODE (expr) != MEM_REF)
3149 {
3150 error ("invalid expression for min lvalue");
3151 return true;
3152 }
3153
3154 /* TARGET_MEM_REFs are strange beasts. */
3155 if (TREE_CODE (expr) == TARGET_MEM_REF)
3156 return false;
3157
3158 op = TREE_OPERAND (expr, 0);
3159 if (!is_gimple_val (op))
3160 {
3161 error ("invalid operand in indirect reference");
3162 debug_generic_stmt (op);
3163 return true;
3164 }
3165 /* Memory references now generally can involve a value conversion. */
3166
3167 return false;
3168 }
3169
3170 /* Verify if EXPR is a valid GIMPLE reference expression. If
3171 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3172 if there is an error, otherwise false. */
3173
3174 static bool
3175 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3176 {
3177 while (handled_component_p (expr))
3178 {
3179 tree op = TREE_OPERAND (expr, 0);
3180
3181 if (TREE_CODE (expr) == ARRAY_REF
3182 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3183 {
3184 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3185 || (TREE_OPERAND (expr, 2)
3186 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3187 || (TREE_OPERAND (expr, 3)
3188 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3189 {
3190 error ("invalid operands to array reference");
3191 debug_generic_stmt (expr);
3192 return true;
3193 }
3194 }
3195
3196 /* Verify if the reference array element types are compatible. */
3197 if (TREE_CODE (expr) == ARRAY_REF
3198 && !useless_type_conversion_p (TREE_TYPE (expr),
3199 TREE_TYPE (TREE_TYPE (op))))
3200 {
3201 error ("type mismatch in array reference");
3202 debug_generic_stmt (TREE_TYPE (expr));
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3204 return true;
3205 }
3206 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3207 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3208 TREE_TYPE (TREE_TYPE (op))))
3209 {
3210 error ("type mismatch in array range reference");
3211 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3212 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3213 return true;
3214 }
3215
3216 if ((TREE_CODE (expr) == REALPART_EXPR
3217 || TREE_CODE (expr) == IMAGPART_EXPR)
3218 && !useless_type_conversion_p (TREE_TYPE (expr),
3219 TREE_TYPE (TREE_TYPE (op))))
3220 {
3221 error ("type mismatch in real/imagpart reference");
3222 debug_generic_stmt (TREE_TYPE (expr));
3223 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3224 return true;
3225 }
3226
3227 if (TREE_CODE (expr) == COMPONENT_REF
3228 && !useless_type_conversion_p (TREE_TYPE (expr),
3229 TREE_TYPE (TREE_OPERAND (expr, 1))))
3230 {
3231 error ("type mismatch in component reference");
3232 debug_generic_stmt (TREE_TYPE (expr));
3233 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3234 return true;
3235 }
3236
3237 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3238 {
3239 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3240 that their operand is not an SSA name or an invariant when
3241 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3242 bug). Otherwise there is nothing to verify, gross mismatches at
3243 most invoke undefined behavior. */
3244 if (require_lvalue
3245 && (TREE_CODE (op) == SSA_NAME
3246 || is_gimple_min_invariant (op)))
3247 {
3248 error ("conversion of an SSA_NAME on the left hand side");
3249 debug_generic_stmt (expr);
3250 return true;
3251 }
3252 else if (TREE_CODE (op) == SSA_NAME
3253 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3254 {
3255 error ("conversion of register to a different size");
3256 debug_generic_stmt (expr);
3257 return true;
3258 }
3259 else if (!handled_component_p (op))
3260 return false;
3261 }
3262
3263 expr = op;
3264 }
3265
3266 if (TREE_CODE (expr) == MEM_REF)
3267 {
3268 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3269 {
3270 error ("invalid address operand in MEM_REF");
3271 debug_generic_stmt (expr);
3272 return true;
3273 }
3274 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3275 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3276 {
3277 error ("invalid offset operand in MEM_REF");
3278 debug_generic_stmt (expr);
3279 return true;
3280 }
3281 }
3282 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3283 {
3284 if (!TMR_BASE (expr)
3285 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3286 {
3287 error ("invalid address operand in TARGET_MEM_REF");
3288 return true;
3289 }
3290 if (!TMR_OFFSET (expr)
3291 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3292 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3293 {
3294 error ("invalid offset operand in TARGET_MEM_REF");
3295 debug_generic_stmt (expr);
3296 return true;
3297 }
3298 }
3299
3300 return ((require_lvalue || !is_gimple_min_invariant (expr))
3301 && verify_types_in_gimple_min_lval (expr));
3302 }
3303
3304 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3305 list of pointer-to types that is trivially convertible to DEST. */
3306
3307 static bool
3308 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3309 {
3310 tree src;
3311
3312 if (!TYPE_POINTER_TO (src_obj))
3313 return true;
3314
3315 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3316 if (useless_type_conversion_p (dest, src))
3317 return true;
3318
3319 return false;
3320 }
3321
3322 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3323 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3324
3325 static bool
3326 valid_fixed_convert_types_p (tree type1, tree type2)
3327 {
3328 return (FIXED_POINT_TYPE_P (type1)
3329 && (INTEGRAL_TYPE_P (type2)
3330 || SCALAR_FLOAT_TYPE_P (type2)
3331 || FIXED_POINT_TYPE_P (type2)));
3332 }
3333
3334 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3335 is a problem, otherwise false. */
3336
3337 static bool
3338 verify_gimple_call (gcall *stmt)
3339 {
3340 tree fn = gimple_call_fn (stmt);
3341 tree fntype, fndecl;
3342 unsigned i;
3343
3344 if (gimple_call_internal_p (stmt))
3345 {
3346 if (fn)
3347 {
3348 error ("gimple call has two targets");
3349 debug_generic_stmt (fn);
3350 return true;
3351 }
3352 }
3353 else
3354 {
3355 if (!fn)
3356 {
3357 error ("gimple call has no target");
3358 return true;
3359 }
3360 }
3361
3362 if (fn && !is_gimple_call_addr (fn))
3363 {
3364 error ("invalid function in gimple call");
3365 debug_generic_stmt (fn);
3366 return true;
3367 }
3368
3369 if (fn
3370 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3371 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3372 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3373 {
3374 error ("non-function in gimple call");
3375 return true;
3376 }
3377
3378 fndecl = gimple_call_fndecl (stmt);
3379 if (fndecl
3380 && TREE_CODE (fndecl) == FUNCTION_DECL
3381 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3382 && !DECL_PURE_P (fndecl)
3383 && !TREE_READONLY (fndecl))
3384 {
3385 error ("invalid pure const state for function");
3386 return true;
3387 }
3388
3389 tree lhs = gimple_call_lhs (stmt);
3390 if (lhs
3391 && (!is_gimple_lvalue (lhs)
3392 || verify_types_in_gimple_reference (lhs, true)))
3393 {
3394 error ("invalid LHS in gimple call");
3395 return true;
3396 }
3397
3398 if (lhs
3399 && gimple_call_ctrl_altering_p (stmt)
3400 && gimple_call_noreturn_p (stmt)
3401 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3402 {
3403 error ("LHS in noreturn call");
3404 return true;
3405 }
3406
3407 fntype = gimple_call_fntype (stmt);
3408 if (fntype
3409 && lhs
3410 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3411 /* ??? At least C++ misses conversions at assignments from
3412 void * call results.
3413 ??? Java is completely off. Especially with functions
3414 returning java.lang.Object.
3415 For now simply allow arbitrary pointer type conversions. */
3416 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3417 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3418 {
3419 error ("invalid conversion in gimple call");
3420 debug_generic_stmt (TREE_TYPE (lhs));
3421 debug_generic_stmt (TREE_TYPE (fntype));
3422 return true;
3423 }
3424
3425 if (gimple_call_chain (stmt)
3426 && !is_gimple_val (gimple_call_chain (stmt)))
3427 {
3428 error ("invalid static chain in gimple call");
3429 debug_generic_stmt (gimple_call_chain (stmt));
3430 return true;
3431 }
3432
3433 /* If there is a static chain argument, the call should either be
3434 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3435 if (gimple_call_chain (stmt)
3436 && fndecl
3437 && !DECL_STATIC_CHAIN (fndecl))
3438 {
3439 error ("static chain with function that doesn%'t use one");
3440 return true;
3441 }
3442
3443 /* ??? The C frontend passes unpromoted arguments in case it
3444 didn't see a function declaration before the call. So for now
3445 leave the call arguments mostly unverified. Once we gimplify
3446 unit-at-a-time we have a chance to fix this. */
3447
3448 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3449 {
3450 tree arg = gimple_call_arg (stmt, i);
3451 if ((is_gimple_reg_type (TREE_TYPE (arg))
3452 && !is_gimple_val (arg))
3453 || (!is_gimple_reg_type (TREE_TYPE (arg))
3454 && !is_gimple_lvalue (arg)))
3455 {
3456 error ("invalid argument to gimple call");
3457 debug_generic_expr (arg);
3458 return true;
3459 }
3460 }
3461
3462 return false;
3463 }
3464
3465 /* Verifies the gimple comparison with the result type TYPE and
3466 the operands OP0 and OP1. */
3467
3468 static bool
3469 verify_gimple_comparison (tree type, tree op0, tree op1)
3470 {
3471 tree op0_type = TREE_TYPE (op0);
3472 tree op1_type = TREE_TYPE (op1);
3473
3474 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3475 {
3476 error ("invalid operands in gimple comparison");
3477 return true;
3478 }
3479
3480 /* For comparisons we do not have the operation's type as the
3481 effective type the comparison is carried out in. Instead
3482 we require that either the first operand is trivially
3483 convertible into the second, or the other way around.
3484 Because we special-case pointers to void we allow
3485 comparisons of pointers with the same mode as well. */
3486 if (!useless_type_conversion_p (op0_type, op1_type)
3487 && !useless_type_conversion_p (op1_type, op0_type)
3488 && (!POINTER_TYPE_P (op0_type)
3489 || !POINTER_TYPE_P (op1_type)
3490 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3491 {
3492 error ("mismatching comparison operand types");
3493 debug_generic_expr (op0_type);
3494 debug_generic_expr (op1_type);
3495 return true;
3496 }
3497
3498 /* The resulting type of a comparison may be an effective boolean type. */
3499 if (INTEGRAL_TYPE_P (type)
3500 && (TREE_CODE (type) == BOOLEAN_TYPE
3501 || TYPE_PRECISION (type) == 1))
3502 {
3503 if (TREE_CODE (op0_type) == VECTOR_TYPE
3504 || TREE_CODE (op1_type) == VECTOR_TYPE)
3505 {
3506 error ("vector comparison returning a boolean");
3507 debug_generic_expr (op0_type);
3508 debug_generic_expr (op1_type);
3509 return true;
3510 }
3511 }
3512 /* Or an integer vector type with the same size and element count
3513 as the comparison operand types. */
3514 else if (TREE_CODE (type) == VECTOR_TYPE
3515 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3516 {
3517 if (TREE_CODE (op0_type) != VECTOR_TYPE
3518 || TREE_CODE (op1_type) != VECTOR_TYPE)
3519 {
3520 error ("non-vector operands in vector comparison");
3521 debug_generic_expr (op0_type);
3522 debug_generic_expr (op1_type);
3523 return true;
3524 }
3525
3526 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3527 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3528 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3529 /* The result of a vector comparison is of signed
3530 integral type. */
3531 || TYPE_UNSIGNED (TREE_TYPE (type)))
3532 {
3533 error ("invalid vector comparison resulting type");
3534 debug_generic_expr (type);
3535 return true;
3536 }
3537 }
3538 else
3539 {
3540 error ("bogus comparison result type");
3541 debug_generic_expr (type);
3542 return true;
3543 }
3544
3545 return false;
3546 }
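
/* Illustrative examples, assuming v_5 and w_6 are 4-element float
   vectors (v4sf) and v4si is the matching signed integer vector type:

     _Bool ok_1  = a_2 == b_3;     scalar compare, boolean result: OK
     v4si mask_4 = v_5 < w_6;      signed integer vector of the same
                                   size and element count: OK
     _Bool bad_7 = v_5 < w_6;      vector comparison returning a
                                   boolean: rejected  */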
3547
3548 /* Verify a gimple assignment statement STMT with an unary rhs.
3549 Returns true if anything is wrong. */
3550
3551 static bool
3552 verify_gimple_assign_unary (gassign *stmt)
3553 {
3554 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3555 tree lhs = gimple_assign_lhs (stmt);
3556 tree lhs_type = TREE_TYPE (lhs);
3557 tree rhs1 = gimple_assign_rhs1 (stmt);
3558 tree rhs1_type = TREE_TYPE (rhs1);
3559
3560 if (!is_gimple_reg (lhs))
3561 {
3562 error ("non-register as LHS of unary operation");
3563 return true;
3564 }
3565
3566 if (!is_gimple_val (rhs1))
3567 {
3568 error ("invalid operand in unary operation");
3569 return true;
3570 }
3571
3572 /* First handle conversions. */
3573 switch (rhs_code)
3574 {
3575 CASE_CONVERT:
3576 {
3577 /* Allow conversions from pointer type to integral type only if
3578 there is no sign or zero extension involved.
3579 For targets where the precision of ptrofftype doesn't match that
3580 of pointers we need to allow arbitrary conversions to ptrofftype. */
3581 if ((POINTER_TYPE_P (lhs_type)
3582 && INTEGRAL_TYPE_P (rhs1_type))
3583 || (POINTER_TYPE_P (rhs1_type)
3584 && INTEGRAL_TYPE_P (lhs_type)
3585 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3586 || ptrofftype_p (sizetype))))
3587 return false;
3588
3589 /* Allow conversion from integral to offset type and vice versa. */
3590 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3591 && INTEGRAL_TYPE_P (rhs1_type))
3592 || (INTEGRAL_TYPE_P (lhs_type)
3593 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3594 return false;
3595
3596 /* Otherwise assert we are converting between types of the
3597 same kind. */
3598 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3599 {
3600 error ("invalid types in nop conversion");
3601 debug_generic_expr (lhs_type);
3602 debug_generic_expr (rhs1_type);
3603 return true;
3604 }
3605
3606 return false;
3607 }
3608
3609 case ADDR_SPACE_CONVERT_EXPR:
3610 {
3611 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3612 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3613 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3614 {
3615 error ("invalid types in address space conversion");
3616 debug_generic_expr (lhs_type);
3617 debug_generic_expr (rhs1_type);
3618 return true;
3619 }
3620
3621 return false;
3622 }
3623
3624 case FIXED_CONVERT_EXPR:
3625 {
3626 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3627 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3628 {
3629 error ("invalid types in fixed-point conversion");
3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3632 return true;
3633 }
3634
3635 return false;
3636 }
3637
3638 case FLOAT_EXPR:
3639 {
3640 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3641 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3642 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3643 {
3644 error ("invalid types in conversion to floating point");
3645 debug_generic_expr (lhs_type);
3646 debug_generic_expr (rhs1_type);
3647 return true;
3648 }
3649
3650 return false;
3651 }
3652
3653 case FIX_TRUNC_EXPR:
3654 {
3655 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3656 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3657 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3658 {
3659 error ("invalid types in conversion to integer");
3660 debug_generic_expr (lhs_type);
3661 debug_generic_expr (rhs1_type);
3662 return true;
3663 }
3664
3665 return false;
3666 }
3667 case REDUC_MAX_EXPR:
3668 case REDUC_MIN_EXPR:
3669 case REDUC_PLUS_EXPR:
3670 if (!VECTOR_TYPE_P (rhs1_type)
3671 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3672 {
3673 error ("reduction should convert from vector to element type");
3674 debug_generic_expr (lhs_type);
3675 debug_generic_expr (rhs1_type);
3676 return true;
3677 }
3678 return false;
3679
3680 case VEC_UNPACK_HI_EXPR:
3681 case VEC_UNPACK_LO_EXPR:
3682 case VEC_UNPACK_FLOAT_HI_EXPR:
3683 case VEC_UNPACK_FLOAT_LO_EXPR:
3684 /* FIXME. */
3685 return false;
3686
3687 case NEGATE_EXPR:
3688 case ABS_EXPR:
3689 case BIT_NOT_EXPR:
3690 case PAREN_EXPR:
3691 case CONJ_EXPR:
3692 break;
3693
3694 default:
3695 gcc_unreachable ();
3696 }
3697
3698 /* For the remaining codes assert there is no conversion involved. */
3699 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3700 {
3701 error ("non-trivial conversion in unary operation");
3702 debug_generic_expr (lhs_type);
3703 debug_generic_expr (rhs1_type);
3704 return true;
3705 }
3706
3707 return false;
3708 }
3709
3710 /* Verify a gimple assignment statement STMT with a binary rhs.
3711 Returns true if anything is wrong. */
3712
3713 static bool
3714 verify_gimple_assign_binary (gassign *stmt)
3715 {
3716 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3717 tree lhs = gimple_assign_lhs (stmt);
3718 tree lhs_type = TREE_TYPE (lhs);
3719 tree rhs1 = gimple_assign_rhs1 (stmt);
3720 tree rhs1_type = TREE_TYPE (rhs1);
3721 tree rhs2 = gimple_assign_rhs2 (stmt);
3722 tree rhs2_type = TREE_TYPE (rhs2);
3723
3724 if (!is_gimple_reg (lhs))
3725 {
3726 error ("non-register as LHS of binary operation");
3727 return true;
3728 }
3729
3730 if (!is_gimple_val (rhs1)
3731 || !is_gimple_val (rhs2))
3732 {
3733 error ("invalid operands in binary operation");
3734 return true;
3735 }
3736
3737 /* First handle operations that involve different types. */
3738 switch (rhs_code)
3739 {
3740 case COMPLEX_EXPR:
3741 {
3742 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3743 || !(INTEGRAL_TYPE_P (rhs1_type)
3744 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3745 || !(INTEGRAL_TYPE_P (rhs2_type)
3746 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3747 {
3748 error ("type mismatch in complex expression");
3749 debug_generic_expr (lhs_type);
3750 debug_generic_expr (rhs1_type);
3751 debug_generic_expr (rhs2_type);
3752 return true;
3753 }
3754
3755 return false;
3756 }
3757
3758 case LSHIFT_EXPR:
3759 case RSHIFT_EXPR:
3760 case LROTATE_EXPR:
3761 case RROTATE_EXPR:
3762 {
3763 /* Shifts and rotates are ok on integral types, fixed point
3764 types and integer vector types. */
3765 if ((!INTEGRAL_TYPE_P (rhs1_type)
3766 && !FIXED_POINT_TYPE_P (rhs1_type)
3767 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3768 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3769 || (!INTEGRAL_TYPE_P (rhs2_type)
3770 /* Vector shifts of vectors are also ok. */
3771 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3772 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3773 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3774 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3775 || !useless_type_conversion_p (lhs_type, rhs1_type))
3776 {
3777 error ("type mismatch in shift expression");
3778 debug_generic_expr (lhs_type);
3779 debug_generic_expr (rhs1_type);
3780 debug_generic_expr (rhs2_type);
3781 return true;
3782 }
3783
3784 return false;
3785 }
3786
3787 case WIDEN_LSHIFT_EXPR:
3788 {
3789 if (!INTEGRAL_TYPE_P (lhs_type)
3790 || !INTEGRAL_TYPE_P (rhs1_type)
3791 || TREE_CODE (rhs2) != INTEGER_CST
3792 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3793 {
3794 error ("type mismatch in widening vector shift expression");
3795 debug_generic_expr (lhs_type);
3796 debug_generic_expr (rhs1_type);
3797 debug_generic_expr (rhs2_type);
3798 return true;
3799 }
3800
3801 return false;
3802 }
3803
3804 case VEC_WIDEN_LSHIFT_HI_EXPR:
3805 case VEC_WIDEN_LSHIFT_LO_EXPR:
3806 {
3807 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3808 || TREE_CODE (lhs_type) != VECTOR_TYPE
3809 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3810 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3811 || TREE_CODE (rhs2) != INTEGER_CST
3812 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3813 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3814 {
3815 error ("type mismatch in widening vector shift expression");
3816 debug_generic_expr (lhs_type);
3817 debug_generic_expr (rhs1_type);
3818 debug_generic_expr (rhs2_type);
3819 return true;
3820 }
3821
3822 return false;
3823 }
3824
3825 case PLUS_EXPR:
3826 case MINUS_EXPR:
3827 {
3828 tree lhs_etype = lhs_type;
3829 tree rhs1_etype = rhs1_type;
3830 tree rhs2_etype = rhs2_type;
3831 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3832 {
3833 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3834 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3835 {
3836 error ("invalid non-vector operands to vector valued plus");
3837 return true;
3838 }
3839 lhs_etype = TREE_TYPE (lhs_type);
3840 rhs1_etype = TREE_TYPE (rhs1_type);
3841 rhs2_etype = TREE_TYPE (rhs2_type);
3842 }
3843 if (POINTER_TYPE_P (lhs_etype)
3844 || POINTER_TYPE_P (rhs1_etype)
3845 || POINTER_TYPE_P (rhs2_etype))
3846 {
3847 error ("invalid (pointer) operands to plus/minus");
3848 return true;
3849 }
3850
3851 /* Continue with generic binary expression handling. */
3852 break;
3853 }
3854
3855 case POINTER_PLUS_EXPR:
3856 {
3857 if (!POINTER_TYPE_P (rhs1_type)
3858 || !useless_type_conversion_p (lhs_type, rhs1_type)
3859 || !ptrofftype_p (rhs2_type))
3860 {
3861 error ("type mismatch in pointer plus expression");
3862 debug_generic_stmt (lhs_type);
3863 debug_generic_stmt (rhs1_type);
3864 debug_generic_stmt (rhs2_type);
3865 return true;
3866 }
3867
3868 return false;
3869 }
3870
3871 case TRUTH_ANDIF_EXPR:
3872 case TRUTH_ORIF_EXPR:
3873 case TRUTH_AND_EXPR:
3874 case TRUTH_OR_EXPR:
3875 case TRUTH_XOR_EXPR:
3876
3877 gcc_unreachable ();
3878
3879 case LT_EXPR:
3880 case LE_EXPR:
3881 case GT_EXPR:
3882 case GE_EXPR:
3883 case EQ_EXPR:
3884 case NE_EXPR:
3885 case UNORDERED_EXPR:
3886 case ORDERED_EXPR:
3887 case UNLT_EXPR:
3888 case UNLE_EXPR:
3889 case UNGT_EXPR:
3890 case UNGE_EXPR:
3891 case UNEQ_EXPR:
3892 case LTGT_EXPR:
3893 /* Comparisons are also binary, but the result type is not
3894 connected to the operand types. */
3895 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3896
3897 case WIDEN_MULT_EXPR:
3898 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3899 return true;
3900 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3901 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3902
3903 case WIDEN_SUM_EXPR:
3904 case VEC_WIDEN_MULT_HI_EXPR:
3905 case VEC_WIDEN_MULT_LO_EXPR:
3906 case VEC_WIDEN_MULT_EVEN_EXPR:
3907 case VEC_WIDEN_MULT_ODD_EXPR:
3908 case VEC_PACK_TRUNC_EXPR:
3909 case VEC_PACK_SAT_EXPR:
3910 case VEC_PACK_FIX_TRUNC_EXPR:
3911 /* FIXME. */
3912 return false;
3913
3914 case MULT_EXPR:
3915 case MULT_HIGHPART_EXPR:
3916 case TRUNC_DIV_EXPR:
3917 case CEIL_DIV_EXPR:
3918 case FLOOR_DIV_EXPR:
3919 case ROUND_DIV_EXPR:
3920 case TRUNC_MOD_EXPR:
3921 case CEIL_MOD_EXPR:
3922 case FLOOR_MOD_EXPR:
3923 case ROUND_MOD_EXPR:
3924 case RDIV_EXPR:
3925 case EXACT_DIV_EXPR:
3926 case MIN_EXPR:
3927 case MAX_EXPR:
3928 case BIT_IOR_EXPR:
3929 case BIT_XOR_EXPR:
3930 case BIT_AND_EXPR:
3931 /* Continue with generic binary expression handling. */
3932 break;
3933
3934 default:
3935 gcc_unreachable ();
3936 }
3937
3938 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3939 || !useless_type_conversion_p (lhs_type, rhs2_type))
3940 {
3941 error ("type mismatch in binary expression");
3942 debug_generic_stmt (lhs_type);
3943 debug_generic_stmt (rhs1_type);
3944 debug_generic_stmt (rhs2_type);
3945 return true;
3946 }
3947
3948 return false;
3949 }
3950
3951 /* Verify a gimple assignment statement STMT with a ternary rhs.
3952 Returns true if anything is wrong. */
3953
3954 static bool
3955 verify_gimple_assign_ternary (gassign *stmt)
3956 {
3957 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3958 tree lhs = gimple_assign_lhs (stmt);
3959 tree lhs_type = TREE_TYPE (lhs);
3960 tree rhs1 = gimple_assign_rhs1 (stmt);
3961 tree rhs1_type = TREE_TYPE (rhs1);
3962 tree rhs2 = gimple_assign_rhs2 (stmt);
3963 tree rhs2_type = TREE_TYPE (rhs2);
3964 tree rhs3 = gimple_assign_rhs3 (stmt);
3965 tree rhs3_type = TREE_TYPE (rhs3);
3966
3967 if (!is_gimple_reg (lhs))
3968 {
3969 error ("non-register as LHS of ternary operation");
3970 return true;
3971 }
3972
3973 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3974 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3975 || !is_gimple_val (rhs2)
3976 || !is_gimple_val (rhs3))
3977 {
3978 error ("invalid operands in ternary operation");
3979 return true;
3980 }
3981
3982 /* First handle operations that involve different types. */
3983 switch (rhs_code)
3984 {
3985 case WIDEN_MULT_PLUS_EXPR:
3986 case WIDEN_MULT_MINUS_EXPR:
3987 if ((!INTEGRAL_TYPE_P (rhs1_type)
3988 && !FIXED_POINT_TYPE_P (rhs1_type))
3989 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3990 || !useless_type_conversion_p (lhs_type, rhs3_type)
3991 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3992 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3993 {
3994 error ("type mismatch in widening multiply-accumulate expression");
3995 debug_generic_expr (lhs_type);
3996 debug_generic_expr (rhs1_type);
3997 debug_generic_expr (rhs2_type);
3998 debug_generic_expr (rhs3_type);
3999 return true;
4000 }
4001 break;
4002
4003 case FMA_EXPR:
4004 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4005 || !useless_type_conversion_p (lhs_type, rhs2_type)
4006 || !useless_type_conversion_p (lhs_type, rhs3_type))
4007 {
4008 error ("type mismatch in fused multiply-add expression");
4009 debug_generic_expr (lhs_type);
4010 debug_generic_expr (rhs1_type);
4011 debug_generic_expr (rhs2_type);
4012 debug_generic_expr (rhs3_type);
4013 return true;
4014 }
4015 break;
4016
4017 case COND_EXPR:
4018 case VEC_COND_EXPR:
4019 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4020 || !useless_type_conversion_p (lhs_type, rhs3_type))
4021 {
4022 error ("type mismatch in conditional expression");
4023 debug_generic_expr (lhs_type);
4024 debug_generic_expr (rhs2_type);
4025 debug_generic_expr (rhs3_type);
4026 return true;
4027 }
4028 break;
4029
4030 case VEC_PERM_EXPR:
4031 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4032 || !useless_type_conversion_p (lhs_type, rhs2_type))
4033 {
4034 error ("type mismatch in vector permute expression");
4035 debug_generic_expr (lhs_type);
4036 debug_generic_expr (rhs1_type);
4037 debug_generic_expr (rhs2_type);
4038 debug_generic_expr (rhs3_type);
4039 return true;
4040 }
4041
4042 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4043 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4044 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4045 {
4046 error ("vector types expected in vector permute expression");
4047 debug_generic_expr (lhs_type);
4048 debug_generic_expr (rhs1_type);
4049 debug_generic_expr (rhs2_type);
4050 debug_generic_expr (rhs3_type);
4051 return true;
4052 }
4053
4054 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4055 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4056 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4057 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4058 != TYPE_VECTOR_SUBPARTS (lhs_type))
4059 {
4060 error ("vectors with different element number found "
4061 "in vector permute expression");
4062 debug_generic_expr (lhs_type);
4063 debug_generic_expr (rhs1_type);
4064 debug_generic_expr (rhs2_type);
4065 debug_generic_expr (rhs3_type);
4066 return true;
4067 }
4068
4069 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4070 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4071 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4072 {
4073 error ("invalid mask type in vector permute expression");
4074 debug_generic_expr (lhs_type);
4075 debug_generic_expr (rhs1_type);
4076 debug_generic_expr (rhs2_type);
4077 debug_generic_expr (rhs3_type);
4078 return true;
4079 }
4080
4081 return false;
4082
4083 case SAD_EXPR:
4084 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4085 || !useless_type_conversion_p (lhs_type, rhs3_type)
4086 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4087 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4088 > GET_MODE_BITSIZE (GET_MODE_INNER
4089 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4090 {
4091 error ("type mismatch in sad expression");
4092 debug_generic_expr (lhs_type);
4093 debug_generic_expr (rhs1_type);
4094 debug_generic_expr (rhs2_type);
4095 debug_generic_expr (rhs3_type);
4096 return true;
4097 }
4098
4099 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4100 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4101 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4102 {
4103 error ("vector types expected in sad expression");
4104 debug_generic_expr (lhs_type);
4105 debug_generic_expr (rhs1_type);
4106 debug_generic_expr (rhs2_type);
4107 debug_generic_expr (rhs3_type);
4108 return true;
4109 }
4110
4111 return false;
4112
4113 case DOT_PROD_EXPR:
4114 case REALIGN_LOAD_EXPR:
4115 /* FIXME. */
4116 return false;
4117
4118 default:
4119 gcc_unreachable ();
4120 }
4121 return false;
4122 }
4123
4124 /* Verify a gimple assignment statement STMT with a single rhs.
4125 Returns true if anything is wrong. */
4126
4127 static bool
4128 verify_gimple_assign_single (gassign *stmt)
4129 {
4130 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4131 tree lhs = gimple_assign_lhs (stmt);
4132 tree lhs_type = TREE_TYPE (lhs);
4133 tree rhs1 = gimple_assign_rhs1 (stmt);
4134 tree rhs1_type = TREE_TYPE (rhs1);
4135 bool res = false;
4136
4137 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4138 {
4139 error ("non-trivial conversion at assignment");
4140 debug_generic_expr (lhs_type);
4141 debug_generic_expr (rhs1_type);
4142 return true;
4143 }
4144
4145 if (gimple_clobber_p (stmt)
4146 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4147 {
4148 error ("non-decl/MEM_REF LHS in clobber statement");
4149 debug_generic_expr (lhs);
4150 return true;
4151 }
4152
4153 if (handled_component_p (lhs)
4154 || TREE_CODE (lhs) == MEM_REF
4155 || TREE_CODE (lhs) == TARGET_MEM_REF)
4156 res |= verify_types_in_gimple_reference (lhs, true);
4157
4158 /* Special codes we cannot handle via their class. */
4159 switch (rhs_code)
4160 {
4161 case ADDR_EXPR:
4162 {
4163 tree op = TREE_OPERAND (rhs1, 0);
4164 if (!is_gimple_addressable (op))
4165 {
4166 error ("invalid operand in unary expression");
4167 return true;
4168 }
4169
4170 /* Technically there is no longer a need for matching types, but
4171 gimple hygiene asks for this check. In LTO we can end up
4172 combining incompatible units and thus end up with addresses
4173 of globals that change their type to a common one. */
4174 if (!in_lto_p
4175 && !types_compatible_p (TREE_TYPE (op),
4176 TREE_TYPE (TREE_TYPE (rhs1)))
4177 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4178 TREE_TYPE (op)))
4179 {
4180 error ("type mismatch in address expression");
4181 debug_generic_stmt (TREE_TYPE (rhs1));
4182 debug_generic_stmt (TREE_TYPE (op));
4183 return true;
4184 }
4185
4186 return verify_types_in_gimple_reference (op, true);
4187 }
4188
4189 /* tcc_reference */
4190 case INDIRECT_REF:
4191 error ("INDIRECT_REF in gimple IL");
4192 return true;
4193
4194 case COMPONENT_REF:
4195 case BIT_FIELD_REF:
4196 case ARRAY_REF:
4197 case ARRAY_RANGE_REF:
4198 case VIEW_CONVERT_EXPR:
4199 case REALPART_EXPR:
4200 case IMAGPART_EXPR:
4201 case TARGET_MEM_REF:
4202 case MEM_REF:
4203 if (!is_gimple_reg (lhs)
4204 && is_gimple_reg_type (TREE_TYPE (lhs)))
4205 {
4206 error ("invalid rhs for gimple memory store");
4207 debug_generic_stmt (lhs);
4208 debug_generic_stmt (rhs1);
4209 return true;
4210 }
4211 return res || verify_types_in_gimple_reference (rhs1, false);
4212
4213 /* tcc_constant */
4214 case SSA_NAME:
4215 case INTEGER_CST:
4216 case REAL_CST:
4217 case FIXED_CST:
4218 case COMPLEX_CST:
4219 case VECTOR_CST:
4220 case STRING_CST:
4221 return res;
4222
4223 /* tcc_declaration */
4224 case CONST_DECL:
4225 return res;
4226 case VAR_DECL:
4227 case PARM_DECL:
4228 if (!is_gimple_reg (lhs)
4229 && !is_gimple_reg (rhs1)
4230 && is_gimple_reg_type (TREE_TYPE (lhs)))
4231 {
4232 error ("invalid rhs for gimple memory store");
4233 debug_generic_stmt (lhs);
4234 debug_generic_stmt (rhs1);
4235 return true;
4236 }
4237 return res;
4238
4239 case CONSTRUCTOR:
4240 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4241 {
4242 unsigned int i;
4243 tree elt_i, elt_v, elt_t = NULL_TREE;
4244
4245 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4246 return res;
4247 /* For vector CONSTRUCTORs we require that either the CONSTRUCTOR
4248 is empty, or it is a CONSTRUCTOR of smaller vector elements
4249 (then the element count must be correct to cover the whole
4250 outer vector and the index must be NULL on all elements), or it
4251 is a CONSTRUCTOR of scalar elements, where as an exception we
4252 allow a smaller number of elements (assuming zero filling) and
4253 consecutive indexes as compared to NULL indexes (such
4254 CONSTRUCTORs can appear in the IL from FEs). */
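/* A sketch, assuming a V4SI destination: _5 = {_1, _2, _3, _4}
   uses four SI scalars with NULL indexes, while _8 = {_6, _7}
   uses two V2SI halves; a scalar-element CONSTRUCTOR may also
   have fewer than four elements, the tail being implicitly
   zero. */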
4255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4256 {
4257 if (elt_t == NULL_TREE)
4258 {
4259 elt_t = TREE_TYPE (elt_v);
4260 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4261 {
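/* Note: the inner elt_t below intentionally shadows the outer
   one; here it is the element's own vector type, so
   TREE_TYPE (elt_t) is that element's scalar component type. */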
4262 tree elt_t = TREE_TYPE (elt_v);
4263 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4264 TREE_TYPE (elt_t)))
4265 {
4266 error ("incorrect type of vector CONSTRUCTOR"
4267 " elements");
4268 debug_generic_stmt (rhs1);
4269 return true;
4270 }
4271 else if (CONSTRUCTOR_NELTS (rhs1)
4272 * TYPE_VECTOR_SUBPARTS (elt_t)
4273 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4274 {
4275 error ("incorrect number of vector CONSTRUCTOR"
4276 " elements");
4277 debug_generic_stmt (rhs1);
4278 return true;
4279 }
4280 }
4281 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4282 elt_t))
4283 {
4284 error ("incorrect type of vector CONSTRUCTOR elements");
4285 debug_generic_stmt (rhs1);
4286 return true;
4287 }
4288 else if (CONSTRUCTOR_NELTS (rhs1)
4289 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4290 {
4291 error ("incorrect number of vector CONSTRUCTOR elements");
4292 debug_generic_stmt (rhs1);
4293 return true;
4294 }
4295 }
4296 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4297 {
4298 error ("incorrect type of vector CONSTRUCTOR elements");
4299 debug_generic_stmt (rhs1);
4300 return true;
4301 }
4302 if (elt_i != NULL_TREE
4303 && (TREE_CODE (elt_t) == VECTOR_TYPE
4304 || TREE_CODE (elt_i) != INTEGER_CST
4305 || compare_tree_int (elt_i, i) != 0))
4306 {
4307 error ("vector CONSTRUCTOR with non-NULL element index");
4308 debug_generic_stmt (rhs1);
4309 return true;
4310 }
4311 if (!is_gimple_val (elt_v))
4312 {
4313 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4314 debug_generic_stmt (rhs1);
4315 return true;
4316 }
4317 }
4318 }
4319 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4320 {
4321 error ("non-vector CONSTRUCTOR with elements");
4322 debug_generic_stmt (rhs1);
4323 return true;
4324 }
4325 return res;
4326 case OBJ_TYPE_REF:
4327 case ASSERT_EXPR:
4328 case WITH_SIZE_EXPR:
4329 /* FIXME. */
4330 return res;
4331
4332 default:;
4333 }
4334
4335 return res;
4336 }
4337
4338 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4339 is a problem, otherwise false. */
4340
4341 static bool
4342 verify_gimple_assign (gassign *stmt)
4343 {
4344 switch (gimple_assign_rhs_class (stmt))
4345 {
4346 case GIMPLE_SINGLE_RHS:
4347 return verify_gimple_assign_single (stmt);
4348
4349 case GIMPLE_UNARY_RHS:
4350 return verify_gimple_assign_unary (stmt);
4351
4352 case GIMPLE_BINARY_RHS:
4353 return verify_gimple_assign_binary (stmt);
4354
4355 case GIMPLE_TERNARY_RHS:
4356 return verify_gimple_assign_ternary (stmt);
4357
4358 default:
4359 gcc_unreachable ();
4360 }
4361 }
4362
4363 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4364 is a problem, otherwise false. */
4365
4366 static bool
4367 verify_gimple_return (greturn *stmt)
4368 {
4369 tree op = gimple_return_retval (stmt);
4370 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4371
4372 /* We cannot require a return value to be present, as we do not
4373 fix up missing return values from the original source. */
4374 if (op == NULL)
4375 return false;
4376
4377 if (!is_gimple_val (op)
4378 && TREE_CODE (op) != RESULT_DECL)
4379 {
4380 error ("invalid operand in return statement");
4381 debug_generic_stmt (op);
4382 return true;
4383 }
4384
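/* If the result is returned by invisible reference, OP has pointer
   type; replace OP by that type so the TREE_TYPE (op) check below
   compares the pointed-to type against RESTYPE. */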
4385 if ((TREE_CODE (op) == RESULT_DECL
4386 && DECL_BY_REFERENCE (op))
4387 || (TREE_CODE (op) == SSA_NAME
4388 && SSA_NAME_VAR (op)
4389 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4390 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4391 op = TREE_TYPE (op);
4392
4393 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4394 {
4395 error ("invalid conversion in return statement");
4396 debug_generic_stmt (restype);
4397 debug_generic_stmt (TREE_TYPE (op));
4398 return true;
4399 }
4400
4401 return false;
4402 }
4403
4404
4405 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4406 is a problem, otherwise false. */
4407
4408 static bool
4409 verify_gimple_goto (ggoto *stmt)
4410 {
4411 tree dest = gimple_goto_dest (stmt);
4412
4413 /* ??? We have two canonical forms of direct goto destinations, a
4414 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
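/* E.g., a sketch: a direct "goto lab;" carries the bare LABEL_DECL,
   while a computed "goto *p_1;" carries a pointer-valued GIMPLE
   value. */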
4415 if (TREE_CODE (dest) != LABEL_DECL
4416 && (!is_gimple_val (dest)
4417 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4418 {
4419 error ("goto destination is neither a label nor a pointer");
4420 return true;
4421 }
4422
4423 return false;
4424 }
4425
4426 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4427 is a problem, otherwise false. */
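/* E.g., a sketch in GIMPLE dump form:
   switch (i_2) <default: <L3>, case 1: <L0>, case 4 ... 6: <L1>>
   where the default label comes first and the remaining case
   labels must be sorted and non-overlapping. */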
4428
4429 static bool
4430 verify_gimple_switch (gswitch *stmt)
4431 {
4432 unsigned int i, n;
4433 tree elt, prev_upper_bound = NULL_TREE;
4434 tree index_type, elt_type = NULL_TREE;
4435
4436 if (!is_gimple_val (gimple_switch_index (stmt)))
4437 {
4438 error ("invalid operand to switch statement");
4439 debug_generic_stmt (gimple_switch_index (stmt));
4440 return true;
4441 }
4442
4443 index_type = TREE_TYPE (gimple_switch_index (stmt));
4444 if (! INTEGRAL_TYPE_P (index_type))
4445 {
4446 error ("non-integral type switch statement");
4447 debug_generic_expr (index_type);
4448 return true;
4449 }
4450
4451 elt = gimple_switch_label (stmt, 0);
4452 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4453 {
4454 error ("invalid default case label in switch statement");
4455 debug_generic_expr (elt);
4456 return true;
4457 }
4458
4459 n = gimple_switch_num_labels (stmt);
4460 for (i = 1; i < n; i++)
4461 {
4462 elt = gimple_switch_label (stmt, i);
4463
4464 if (! CASE_LOW (elt))
4465 {
4466 error ("invalid case label in switch statement");
4467 debug_generic_expr (elt);
4468 return true;
4469 }
4470 if (CASE_HIGH (elt)
4471 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4472 {
4473 error ("invalid case range in switch statement");
4474 debug_generic_expr (elt);
4475 return true;
4476 }
4477
4478 if (elt_type)
4479 {
4480 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4481 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4482 {
4483 error ("type mismatch for case label in switch statement");
4484 debug_generic_expr (elt);
4485 return true;
4486 }
4487 }
4488 else
4489 {
4490 elt_type = TREE_TYPE (CASE_LOW (elt));
4491 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4492 {
4493 error ("type precision mismatch in switch statement");
4494 return true;
4495 }
4496 }
4497
4498 if (prev_upper_bound)
4499 {
4500 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4501 {
4502 error ("case labels not sorted in switch statement");
4503 return true;
4504 }
4505 }
4506
4507 prev_upper_bound = CASE_HIGH (elt);
4508 if (! prev_upper_bound)
4509 prev_upper_bound = CASE_LOW (elt);
4510 }
4511
4512 return false;
4513 }
4514
4515 /* Verify a gimple debug statement STMT.
4516 Returns true if anything is wrong. */
4517
4518 static bool
4519 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4520 {
4521 /* There isn't much that could be wrong in a gimple debug stmt. A
4522 gimple debug bind stmt, for example, maps a tree (usually
4523 a VAR_DECL or a PARM_DECL, but possibly some scalarized
4524 component or member of an aggregate type) to another tree that
4525 can be an arbitrary expression. These stmts expand into debug
4526 insns, and are converted to debug notes by var-tracking.c. */
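/* E.g., a debug bind appears in dumps roughly as:
   # DEBUG x => y_1 + 1 */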
4527 return false;
4528 }
4529
4530 /* Verify a gimple label statement STMT.
4531 Returns true if anything is wrong. */
4532
4533 static bool
4534 verify_gimple_label (glabel *stmt)
4535 {
4536 tree decl = gimple_label_label (stmt);
4537 int uid;
4538 bool err = false;
4539
4540 if (TREE_CODE (decl) != LABEL_DECL)
4541 return true;
4542 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4543 && DECL_CONTEXT (decl) != current_function_decl)
4544 {
4545 error ("label's context is not the current function decl");
4546 err |= true;
4547 }
4548
4549 uid = LABEL_DECL_UID (decl);
4550 if (cfun->cfg
4551 && (uid == -1
4552 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4553 {
4554 error ("incorrect entry in label_to_block_map");
4555 err |= true;
4556 }
4557
4558 uid = EH_LANDING_PAD_NR (decl);
4559 if (uid)
4560 {
4561 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4562 if (decl != lp->post_landing_pad)
4563 {
4564 error ("incorrect setting of landing pad number");
4565 err |= true;
4566 }
4567 }
4568
4569 return err;
4570 }
4571
4572 /* Verify a gimple cond statement STMT.
4573 Returns true if anything is wrong. */
4574
4575 static bool
4576 verify_gimple_cond (gcond *stmt)
4577 {
4578 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4579 {
4580 error ("invalid comparison code in gimple cond");
4581 return true;
4582 }
4583 if (!(!gimple_cond_true_label (stmt)
4584 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4585 || !(!gimple_cond_false_label (stmt)
4586 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4587 {
4588 error ("invalid labels in gimple cond");
4589 return true;
4590 }
4591
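/* A well-formed cond is, as a sketch:
   if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;
   i.e. a comparison of two GIMPLE values used as a boolean. */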
4592 return verify_gimple_comparison (boolean_type_node,
4593 gimple_cond_lhs (stmt),
4594 gimple_cond_rhs (stmt));
4595 }
4596
4597 /* Verify the GIMPLE statement STMT. Returns true if there is an
4598 error, otherwise false. */
4599
4600 static bool
4601 verify_gimple_stmt (gimple stmt)
4602 {
4603 switch (gimple_code (stmt))
4604 {
4605 case GIMPLE_ASSIGN:
4606 return verify_gimple_assign (as_a <gassign *> (stmt));
4607
4608 case GIMPLE_LABEL:
4609 return verify_gimple_label (as_a <glabel *> (stmt));
4610
4611 case GIMPLE_CALL:
4612 return verify_gimple_call (as_a <gcall *> (stmt));
4613
4614 case GIMPLE_COND:
4615 return verify_gimple_cond (as_a <gcond *> (stmt));
4616
4617 case GIMPLE_GOTO:
4618 return verify_gimple_goto (as_a <ggoto *> (stmt));
4619
4620 case GIMPLE_SWITCH:
4621 return verify_gimple_switch (as_a <gswitch *> (stmt));
4622
4623 case GIMPLE_RETURN:
4624 return verify_gimple_return (as_a <greturn *> (stmt));
4625
4626 case GIMPLE_ASM:
4627 return false;
4628
4629 case GIMPLE_TRANSACTION:
4630 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4631
4632 /* Tuples that do not have tree operands. */
4633 case GIMPLE_NOP:
4634 case GIMPLE_PREDICT:
4635 case GIMPLE_RESX:
4636 case GIMPLE_EH_DISPATCH:
4637 case GIMPLE_EH_MUST_NOT_THROW:
4638 return false;
4639
4640 CASE_GIMPLE_OMP:
4641 /* OpenMP directives are validated by the FE and never operated
4642 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4643 non-gimple expressions when the main index variable has had
4644 its address taken. This does not affect the loop itself
4645 because the header of a GIMPLE_OMP_FOR is merely used to determine
4646 how to set up the parallel iteration. */
4647 return false;
4648
4649 case GIMPLE_DEBUG:
4650 return verify_gimple_debug (stmt);
4651
4652 default:
4653 gcc_unreachable ();
4654 }
4655 }
4656
4657 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4658 and false otherwise. */
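/* E.g., a sketch of a well-formed PHI in dump form:
   # x_3 = PHI <x_1(2), x_2(4)>
   with one argument per incoming edge, each compatible with the
   type of the result. */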
4659
4660 static bool
4661 verify_gimple_phi (gimple phi)
4662 {
4663 bool err = false;
4664 unsigned i;
4665 tree phi_result = gimple_phi_result (phi);
4666 bool virtual_p;
4667
4668 if (!phi_result)
4669 {
4670 error ("invalid PHI result");
4671 return true;
4672 }
4673
4674 virtual_p = virtual_operand_p (phi_result);
4675 if (TREE_CODE (phi_result) != SSA_NAME
4676 || (virtual_p
4677 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4678 {
4679 error ("invalid PHI result");
4680 err = true;
4681 }
4682
4683 for (i = 0; i < gimple_phi_num_args (phi); i++)
4684 {
4685 tree t = gimple_phi_arg_def (phi, i);
4686
4687 if (!t)
4688 {
4689 error ("missing PHI def");
4690 err |= true;
4691 continue;
4692 }
4693 /* Addressable variables do have SSA_NAMEs but they
4694 are not considered gimple values. */
4695 else if ((TREE_CODE (t) == SSA_NAME
4696 && virtual_p != virtual_operand_p (t))
4697 || (virtual_p
4698 && (TREE_CODE (t) != SSA_NAME
4699 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4700 || (!virtual_p
4701 && !is_gimple_val (t)))
4702 {
4703 error ("invalid PHI argument");
4704 debug_generic_expr (t);
4705 err |= true;
4706 }
4707 #ifdef ENABLE_TYPES_CHECKING
4708 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4709 {
4710 error ("incompatible types in PHI argument %u", i);
4711 debug_generic_stmt (TREE_TYPE (phi_result));
4712 debug_generic_stmt (TREE_TYPE (t));
4713 err |= true;
4714 }
4715 #endif
4716 }
4717
4718 return err;
4719 }
4720
4721 /* Verify the GIMPLE statements inside the sequence STMTS. */
4722
4723 static bool
4724 verify_gimple_in_seq_2 (gimple_seq stmts)
4725 {
4726 gimple_stmt_iterator ittr;
4727 bool err = false;
4728
4729 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4730 {
4731 gimple stmt = gsi_stmt (ittr);
4732
4733 switch (gimple_code (stmt))
4734 {
4735 case GIMPLE_BIND:
4736 err |= verify_gimple_in_seq_2 (
4737 gimple_bind_body (as_a <gbind *> (stmt)));
4738 break;
4739
4740 case GIMPLE_TRY:
4741 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4742 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4743 break;
4744
4745 case GIMPLE_EH_FILTER:
4746 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4747 break;
4748
4749 case GIMPLE_EH_ELSE:
4750 {
4751 geh_else *eh_else = as_a <geh_else *> (stmt);
4752 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4753 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4754 }
4755 break;
4756
4757 case GIMPLE_CATCH:
4758 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4759 as_a <gcatch *> (stmt)));
4760 break;
4761
4762 case GIMPLE_TRANSACTION:
4763 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4764 break;
4765
4766 default:
4767 {
4768 bool err2 = verify_gimple_stmt (stmt);
4769 if (err2)
4770 debug_gimple_stmt (stmt);
4771 err |= err2;
4772 }
4773 }
4774 }
4775
4776 return err;
4777 }
4778
4779 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4780 is a problem, otherwise false. */
4781
4782 static bool
4783 verify_gimple_transaction (gtransaction *stmt)
4784 {
4785 tree lab = gimple_transaction_label (stmt);
4786 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4787 return true;
4788 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4789 }
4790
4791
4792 /* Verify the GIMPLE statements inside the statement list STMTS. */
4793
4794 DEBUG_FUNCTION void
4795 verify_gimple_in_seq (gimple_seq stmts)
4796 {
4797 timevar_push (TV_TREE_STMT_VERIFY);
4798 if (verify_gimple_in_seq_2 (stmts))
4799 internal_error ("verify_gimple failed");
4800 timevar_pop (TV_TREE_STMT_VERIFY);
4801 }
4802
4803 /* Return true when tree node T can be shared. */
4804
4805 static bool
4806 tree_node_can_be_shared (tree t)
4807 {
4808 if (IS_TYPE_OR_DECL_P (t)
4809 || is_gimple_min_invariant (t)
4810 || TREE_CODE (t) == SSA_NAME
4811 || t == error_mark_node
4812 || TREE_CODE (t) == IDENTIFIER_NODE)
4813 return true;
4814
4815 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4816 return true;
4817
4818 if (DECL_P (t))
4819 return true;
4820
4821 return false;
4822 }
4823
4824 /* Called via walk_tree. Verify tree sharing. */
4825
4826 static tree
4827 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4828 {
4829 hash_set<void *> *visited = (hash_set<void *> *) data;
4830
4831 if (tree_node_can_be_shared (*tp))
4832 {
4833 *walk_subtrees = false;
4834 return NULL;
4835 }
4836
4837 if (visited->add (*tp))
4838 return *tp;
4839
4840 return NULL;
4841 }
4842
4843 /* Called via walk_gimple_stmt. Verify tree sharing. */
4844
4845 static tree
4846 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4847 {
4848 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4849 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4850 }
4851
4852 static bool eh_error_found;
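/* Callback for the EH throw-table traversal in verify_gimple_in_cfg
   below; complain about statements recorded in the EH table that
   were not visited in the IL. */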
4853 bool
4854 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4855 hash_set<gimple> *visited)
4856 {
4857 if (!visited->contains (stmt))
4858 {
4859 error ("dead STMT in EH table");
4860 debug_gimple_stmt (stmt);
4861 eh_error_found = true;
4862 }
4863 return true;
4864 }
4865
4866 /* Verify that the block of location LOC is in BLOCKS. */
4867
4868 static bool
4869 verify_location (hash_set<tree> *blocks, location_t loc)
4870 {
4871 tree block = LOCATION_BLOCK (loc);
4872 if (block != NULL_TREE
4873 && !blocks->contains (block))
4874 {
4875 error ("location references block not in block tree");
4876 return true;
4877 }
4878 if (block != NULL_TREE)
4879 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4880 return false;
4881 }
4882
4883 /* Called via walk_tree. Verify that expressions have no blocks. */
4884
4885 static tree
4886 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4887 {
4888 if (!EXPR_P (*tp))
4889 {
4890 *walk_subtrees = false;
4891 return NULL;
4892 }
4893
4894 location_t loc = EXPR_LOCATION (*tp);
4895 if (LOCATION_BLOCK (loc) != NULL)
4896 return *tp;
4897
4898 return NULL;
4899 }
4900
4901 /* Called via walk_tree. Verify locations of expressions. */
4902
4903 static tree
4904 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4905 {
4906 hash_set<tree> *blocks = (hash_set<tree> *) data;
4907
4908 if (TREE_CODE (*tp) == VAR_DECL
4909 && DECL_HAS_DEBUG_EXPR_P (*tp))
4910 {
4911 tree t = DECL_DEBUG_EXPR (*tp);
4912 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4913 if (addr)
4914 return addr;
4915 }
4916 if ((TREE_CODE (*tp) == VAR_DECL
4917 || TREE_CODE (*tp) == PARM_DECL
4918 || TREE_CODE (*tp) == RESULT_DECL)
4919 && DECL_HAS_VALUE_EXPR_P (*tp))
4920 {
4921 tree t = DECL_VALUE_EXPR (*tp);
4922 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4923 if (addr)
4924 return addr;
4925 }
4926
4927 if (!EXPR_P (*tp))
4928 {
4929 *walk_subtrees = false;
4930 return NULL;
4931 }
4932
4933 location_t loc = EXPR_LOCATION (*tp);
4934 if (verify_location (blocks, loc))
4935 return *tp;
4936
4937 return NULL;
4938 }
4939
4940 /* Called via walk_gimple_op. Verify locations of expressions. */
4941
4942 static tree
4943 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4944 {
4945 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4946 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4947 }
4948
4949 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4950
4951 static void
4952 collect_subblocks (hash_set<tree> *blocks, tree block)
4953 {
4954 tree t;
4955 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4956 {
4957 blocks->add (t);
4958 collect_subblocks (blocks, t);
4959 }
4960 }
4961
4962 /* Verify the GIMPLE statements in the CFG of FN. */
4963
4964 DEBUG_FUNCTION void
4965 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4966 {
4967 basic_block bb;
4968 bool err = false;
4969
4970 timevar_push (TV_TREE_STMT_VERIFY);
4971 hash_set<void *> visited;
4972 hash_set<gimple> visited_stmts;
4973
4974 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4975 hash_set<tree> blocks;
4976 if (DECL_INITIAL (fn->decl))
4977 {
4978 blocks.add (DECL_INITIAL (fn->decl));
4979 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4980 }
4981
4982 FOR_EACH_BB_FN (bb, fn)
4983 {
4984 gimple_stmt_iterator gsi;
4985
4986 for (gphi_iterator gpi = gsi_start_phis (bb);
4987 !gsi_end_p (gpi);
4988 gsi_next (&gpi))
4989 {
4990 gphi *phi = gpi.phi ();
4991 bool err2 = false;
4992 unsigned i;
4993
4994 visited_stmts.add (phi);
4995
4996 if (gimple_bb (phi) != bb)
4997 {
4998 error ("gimple_bb (phi) is set to a wrong basic block");
4999 err2 = true;
5000 }
5001
5002 err2 |= verify_gimple_phi (phi);
5003
5004 /* Only PHI arguments have locations. */
5005 if (gimple_location (phi) != UNKNOWN_LOCATION)
5006 {
5007 error ("PHI node with location");
5008 err2 = true;
5009 }
5010
5011 for (i = 0; i < gimple_phi_num_args (phi); i++)
5012 {
5013 tree arg = gimple_phi_arg_def (phi, i);
5014 tree addr = walk_tree (&arg, verify_node_sharing_1,
5015 &visited, NULL);
5016 if (addr)
5017 {
5018 error ("incorrect sharing of tree nodes");
5019 debug_generic_expr (addr);
5020 err2 |= true;
5021 }
5022 location_t loc = gimple_phi_arg_location (phi, i);
5023 if (virtual_operand_p (gimple_phi_result (phi))
5024 && loc != UNKNOWN_LOCATION)
5025 {
5026 error ("virtual PHI with argument locations");
5027 err2 = true;
5028 }
5029 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5030 if (addr)
5031 {
5032 debug_generic_expr (addr);
5033 err2 = true;
5034 }
5035 err2 |= verify_location (&blocks, loc);
5036 }
5037
5038 if (err2)
5039 debug_gimple_stmt (phi);
5040 err |= err2;
5041 }
5042
5043 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5044 {
5045 gimple stmt = gsi_stmt (gsi);
5046 bool err2 = false;
5047 struct walk_stmt_info wi;
5048 tree addr;
5049 int lp_nr;
5050
5051 visited_stmts.add (stmt);
5052
5053 if (gimple_bb (stmt) != bb)
5054 {
5055 error ("gimple_bb (stmt) is set to a wrong basic block");
5056 err2 = true;
5057 }
5058
5059 err2 |= verify_gimple_stmt (stmt);
5060 err2 |= verify_location (&blocks, gimple_location (stmt));
5061
5062 memset (&wi, 0, sizeof (wi));
5063 wi.info = (void *) &visited;
5064 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5065 if (addr)
5066 {
5067 error ("incorrect sharing of tree nodes");
5068 debug_generic_expr (addr);
5069 err2 |= true;
5070 }
5071
5072 memset (&wi, 0, sizeof (wi));
5073 wi.info = (void *) &blocks;
5074 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5075 if (addr)
5076 {
5077 debug_generic_expr (addr);
5078 err2 |= true;
5079 }
5080
5081 /* ??? Instead of not checking these stmts at all, the walker
5082 should know its context via wi. */
5083 if (!is_gimple_debug (stmt)
5084 && !is_gimple_omp (stmt))
5085 {
5086 memset (&wi, 0, sizeof (wi));
5087 addr = walk_gimple_op (stmt, verify_expr, &wi);
5088 if (addr)
5089 {
5090 debug_generic_expr (addr);
5091 inform (gimple_location (stmt), "in statement");
5092 err2 |= true;
5093 }
5094 }
5095
5096 /* If the statement is marked as part of an EH region, then it is
5097 expected that the statement could throw. Verify that when
5098 optimizations simplify a statement such that we can prove
5099 it cannot throw, we also update other data structures
5100 to match. */
5101 lp_nr = lookup_stmt_eh_lp (stmt);
5102 if (lp_nr > 0)
5103 {
5104 if (!stmt_could_throw_p (stmt))
5105 {
5106 if (verify_nothrow)
5107 {
5108 error ("statement marked for throw, but doesn%'t");
5109 err2 |= true;
5110 }
5111 }
5112 else if (!gsi_one_before_end_p (gsi))
5113 {
5114 error ("statement marked for throw in middle of block");
5115 err2 |= true;
5116 }
5117 }
5118
5119 if (err2)
5120 debug_gimple_stmt (stmt);
5121 err |= err2;
5122 }
5123 }
5124
5125 eh_error_found = false;
5126 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5127 if (eh_table)
5128 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5129 (&visited_stmts);
5130
5131 if (err || eh_error_found)
5132 internal_error ("verify_gimple failed");
5133
5134 verify_histograms ();
5135 timevar_pop (TV_TREE_STMT_VERIFY);
5136 }
5137
5138
5139 /* Verifies that the flow information is OK. */
5140
5141 static int
5142 gimple_verify_flow_info (void)
5143 {
5144 int err = 0;
5145 basic_block bb;
5146 gimple_stmt_iterator gsi;
5147 gimple stmt;
5148 edge e;
5149 edge_iterator ei;
5150
5151 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5152 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5153 {
5154 error ("ENTRY_BLOCK has IL associated with it");
5155 err = 1;
5156 }
5157
5158 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5159 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5160 {
5161 error ("EXIT_BLOCK has IL associated with it");
5162 err = 1;
5163 }
5164
5165 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5166 if (e->flags & EDGE_FALLTHRU)
5167 {
5168 error ("fallthru to exit from bb %d", e->src->index);
5169 err = 1;
5170 }
5171
5172 FOR_EACH_BB_FN (bb, cfun)
5173 {
5174 bool found_ctrl_stmt = false;
5175
5176 stmt = NULL;
5177
5178 /* Skip labels on the start of basic block. */
5179 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5180 {
5181 tree label;
5182 gimple prev_stmt = stmt;
5183
5184 stmt = gsi_stmt (gsi);
5185
5186 if (gimple_code (stmt) != GIMPLE_LABEL)
5187 break;
5188
5189 label = gimple_label_label (as_a <glabel *> (stmt));
5190 if (prev_stmt && DECL_NONLOCAL (label))
5191 {
5192 error ("nonlocal label ");
5193 print_generic_expr (stderr, label, 0);
5194 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5195 bb->index);
5196 err = 1;
5197 }
5198
5199 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5200 {
5201 error ("EH landing pad label ");
5202 print_generic_expr (stderr, label, 0);
5203 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5204 bb->index);
5205 err = 1;
5206 }
5207
5208 if (label_to_block (label) != bb)
5209 {
5210 error ("label ");
5211 print_generic_expr (stderr, label, 0);
5212 fprintf (stderr, " to block does not match in bb %d",
5213 bb->index);
5214 err = 1;
5215 }
5216
5217 if (decl_function_context (label) != current_function_decl)
5218 {
5219 error ("label ");
5220 print_generic_expr (stderr, label, 0);
5221 fprintf (stderr, " has incorrect context in bb %d",
5222 bb->index);
5223 err = 1;
5224 }
5225 }
5226
5227 /* Verify that body of basic block BB is free of control flow. */
5228 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5229 {
5230 gimple stmt = gsi_stmt (gsi);
5231
5232 if (found_ctrl_stmt)
5233 {
5234 error ("control flow in the middle of basic block %d",
5235 bb->index);
5236 err = 1;
5237 }
5238
5239 if (stmt_ends_bb_p (stmt))
5240 found_ctrl_stmt = true;
5241
5242 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5243 {
5244 error ("label ");
5245 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5246 fprintf (stderr, " in the middle of basic block %d", bb->index);
5247 err = 1;
5248 }
5249 }
5250
5251 gsi = gsi_last_bb (bb);
5252 if (gsi_end_p (gsi))
5253 continue;
5254
5255 stmt = gsi_stmt (gsi);
5256
5257 if (gimple_code (stmt) == GIMPLE_LABEL)
5258 continue;
5259
5260 err |= verify_eh_edges (stmt);
5261
5262 if (is_ctrl_stmt (stmt))
5263 {
5264 FOR_EACH_EDGE (e, ei, bb->succs)
5265 if (e->flags & EDGE_FALLTHRU)
5266 {
5267 error ("fallthru edge after a control statement in bb %d",
5268 bb->index);
5269 err = 1;
5270 }
5271 }
5272
5273 if (gimple_code (stmt) != GIMPLE_COND)
5274 {
5275 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5276 after anything other than a GIMPLE_COND. */
5277 FOR_EACH_EDGE (e, ei, bb->succs)
5278 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5279 {
5280 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5281 bb->index);
5282 err = 1;
5283 }
5284 }
5285
5286 switch (gimple_code (stmt))
5287 {
5288 case GIMPLE_COND:
5289 {
5290 edge true_edge;
5291 edge false_edge;
5292
5293 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5294
5295 if (!true_edge
5296 || !false_edge
5297 || !(true_edge->flags & EDGE_TRUE_VALUE)
5298 || !(false_edge->flags & EDGE_FALSE_VALUE)
5299 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5300 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5301 || EDGE_COUNT (bb->succs) >= 3)
5302 {
5303 error ("wrong outgoing edge flags at end of bb %d",
5304 bb->index);
5305 err = 1;
5306 }
5307 }
5308 break;
5309
5310 case GIMPLE_GOTO:
5311 if (simple_goto_p (stmt))
5312 {
5313 error ("explicit goto at end of bb %d", bb->index);
5314 err = 1;
5315 }
5316 else
5317 {
5318 /* FIXME. We should double check that the labels in the
5319 destination blocks have their address taken. */
5320 FOR_EACH_EDGE (e, ei, bb->succs)
5321 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5322 | EDGE_FALSE_VALUE))
5323 || !(e->flags & EDGE_ABNORMAL))
5324 {
5325 error ("wrong outgoing edge flags at end of bb %d",
5326 bb->index);
5327 err = 1;
5328 }
5329 }
5330 break;
5331
5332 case GIMPLE_CALL:
5333 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5334 break;
5335 /* ... fallthru ... */
5336 case GIMPLE_RETURN:
5337 if (!single_succ_p (bb)
5338 || (single_succ_edge (bb)->flags
5339 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5340 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5341 {
5342 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5343 err = 1;
5344 }
5345 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5346 {
5347 error ("return edge does not point to exit in bb %d",
5348 bb->index);
5349 err = 1;
5350 }
5351 break;
5352
5353 case GIMPLE_SWITCH:
5354 {
5355 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5356 tree prev;
5357 edge e;
5358 size_t i, n;
5359
5360 n = gimple_switch_num_labels (switch_stmt);
5361
5362 /* Mark all the destination basic blocks. */
5363 for (i = 0; i < n; ++i)
5364 {
5365 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5366 basic_block label_bb = label_to_block (lab);
5367 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5368 label_bb->aux = (void *)1;
5369 }
5370
5371 /* Verify that the case labels are sorted. */
5372 prev = gimple_switch_label (switch_stmt, 0);
5373 for (i = 1; i < n; ++i)
5374 {
5375 tree c = gimple_switch_label (switch_stmt, i);
5376 if (!CASE_LOW (c))
5377 {
5378 error ("found default case not at the start of "
5379 "case vector");
5380 err = 1;
5381 continue;
5382 }
5383 if (CASE_LOW (prev)
5384 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5385 {
5386 error ("case labels not sorted: ");
5387 print_generic_expr (stderr, prev, 0);
5388 fprintf (stderr," is greater than ");
5389 print_generic_expr (stderr, c, 0);
5390 fprintf (stderr," but comes before it.\n");
5391 err = 1;
5392 }
5393 prev = c;
5394 }
5395 /* VRP will remove the default case if it can prove it will
5396 never be executed. So do not verify there always exists
5397 a default case here. */
5398
5399 FOR_EACH_EDGE (e, ei, bb->succs)
5400 {
5401 if (!e->dest->aux)
5402 {
5403 error ("extra outgoing edge %d->%d",
5404 bb->index, e->dest->index);
5405 err = 1;
5406 }
5407
5408 e->dest->aux = (void *)2;
5409 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5410 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5411 {
5412 error ("wrong outgoing edge flags at end of bb %d",
5413 bb->index);
5414 err = 1;
5415 }
5416 }
5417
5418 /* Check that we have all of them. */
5419 for (i = 0; i < n; ++i)
5420 {
5421 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5422 basic_block label_bb = label_to_block (lab);
5423
5424 if (label_bb->aux != (void *)2)
5425 {
5426 error ("missing edge %i->%i", bb->index, label_bb->index);
5427 err = 1;
5428 }
5429 }
5430
5431 FOR_EACH_EDGE (e, ei, bb->succs)
5432 e->dest->aux = (void *)0;
5433 }
5434 break;
5435
5436 case GIMPLE_EH_DISPATCH:
5437 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5438 break;
5439
5440 default:
5441 break;
5442 }
5443 }
5444
5445 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5446 verify_dominators (CDI_DOMINATORS);
5447
5448 return err;
5449 }
5450
5451
5452 /* Updates phi nodes after creating a forwarder block joined
5453 by edge FALLTHRU. */
5454
5455 static void
5456 gimple_make_forwarder_block (edge fallthru)
5457 {
5458 edge e;
5459 edge_iterator ei;
5460 basic_block dummy, bb;
5461 tree var;
5462 gphi_iterator gsi;
5463
5464 dummy = fallthru->src;
5465 bb = fallthru->dest;
5466
5467 if (single_pred_p (bb))
5468 return;
5469
5470 /* If we redirected a branch we must create new PHI nodes at the
5471 start of BB. */
5472 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5473 {
5474 gphi *phi, *new_phi;
5475
5476 phi = gsi.phi ();
5477 var = gimple_phi_result (phi);
5478 new_phi = create_phi_node (var, bb);
5479 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5480 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5481 UNKNOWN_LOCATION);
5482 }
5483
5484 /* Add the arguments we have stored on edges. */
5485 FOR_EACH_EDGE (e, ei, bb->preds)
5486 {
5487 if (e == fallthru)
5488 continue;
5489
5490 flush_pending_stmts (e);
5491 }
5492 }
5493
5494
5495 /* Return a non-special label at the head of basic block BB.
5496 Create one if it doesn't exist. */
5497
5498 tree
5499 gimple_block_label (basic_block bb)
5500 {
5501 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5502 bool first = true;
5503 tree label;
5504 glabel *stmt;
5505
5506 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5507 {
5508 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5509 if (!stmt)
5510 break;
5511 label = gimple_label_label (stmt);
5512 if (!DECL_NONLOCAL (label))
5513 {
5514 if (!first)
5515 gsi_move_before (&i, &s);
5516 return label;
5517 }
5518 }
5519
5520 label = create_artificial_label (UNKNOWN_LOCATION);
5521 stmt = gimple_build_label (label);
5522 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5523 return label;
5524 }
5525
5526
5527 /* Attempt to perform edge redirection by replacing a possibly complex
5528 jump instruction by a goto or by removing the jump completely.
5529 This can apply only if all edges now point to the same block. The
5530 parameters and return values are equivalent to
5531 redirect_edge_and_branch. */
5532
5533 static edge
5534 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5535 {
5536 basic_block src = e->src;
5537 gimple_stmt_iterator i;
5538 gimple stmt;
5539
5540 /* We can replace or remove a complex jump only when we have exactly
5541 two edges. */
5542 if (EDGE_COUNT (src->succs) != 2
5543 /* Verify that all targets will be TARGET. Specifically, the
5544 edge that is not E must also go to TARGET. */
5545 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5546 return NULL;
5547
5548 i = gsi_last_bb (src);
5549 if (gsi_end_p (i))
5550 return NULL;
5551
5552 stmt = gsi_stmt (i);
5553
5554 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5555 {
5556 gsi_remove (&i, true);
5557 e = ssa_redirect_edge (e, target);
5558 e->flags = EDGE_FALLTHRU;
5559 return e;
5560 }
5561
5562 return NULL;
5563 }
5564
5565
5566 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5567 edge representing the redirected branch. */
5568
5569 static edge
5570 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5571 {
5572 basic_block bb = e->src;
5573 gimple_stmt_iterator gsi;
5574 edge ret;
5575 gimple stmt;
5576
5577 if (e->flags & EDGE_ABNORMAL)
5578 return NULL;
5579
5580 if (e->dest == dest)
5581 return NULL;
5582
5583 if (e->flags & EDGE_EH)
5584 return redirect_eh_edge (e, dest);
5585
5586 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5587 {
5588 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5589 if (ret)
5590 return ret;
5591 }
5592
5593 gsi = gsi_last_bb (bb);
5594 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5595
5596 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5597 {
5598 case GIMPLE_COND:
5599 /* For COND_EXPR, we only need to redirect the edge. */
5600 break;
5601
5602 case GIMPLE_GOTO:
5603 /* No non-abnormal edges should lead from a non-simple goto, and
5604 simple ones should be represented implicitly. */
5605 gcc_unreachable ();
5606
5607 case GIMPLE_SWITCH:
5608 {
5609 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5610 tree label = gimple_block_label (dest);
5611 tree cases = get_cases_for_edge (e, switch_stmt);
5612
5613 /* If we have a list of cases associated with E, then use it
5614 as it's a lot faster than walking the entire case vector. */
5615 if (cases)
5616 {
5617 edge e2 = find_edge (e->src, dest);
5618 tree last, first;
5619
5620 first = cases;
5621 while (cases)
5622 {
5623 last = cases;
5624 CASE_LABEL (cases) = label;
5625 cases = CASE_CHAIN (cases);
5626 }
5627
5628 /* If there was already an edge in the CFG, then we need
5629 to move all the cases associated with E to E2. */
5630 if (e2)
5631 {
5632 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5633
5634 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5635 CASE_CHAIN (cases2) = first;
5636 }
5637 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5638 }
5639 else
5640 {
5641 size_t i, n = gimple_switch_num_labels (switch_stmt);
5642
5643 for (i = 0; i < n; i++)
5644 {
5645 tree elt = gimple_switch_label (switch_stmt, i);
5646 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5647 CASE_LABEL (elt) = label;
5648 }
5649 }
5650 }
5651 break;
5652
5653 case GIMPLE_ASM:
5654 {
5655 gasm *asm_stmt = as_a <gasm *> (stmt);
5656 int i, n = gimple_asm_nlabels (asm_stmt);
5657 tree label = NULL;
5658
5659 for (i = 0; i < n; ++i)
5660 {
5661 tree cons = gimple_asm_label_op (asm_stmt, i);
5662 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5663 {
5664 if (!label)
5665 label = gimple_block_label (dest);
5666 TREE_VALUE (cons) = label;
5667 }
5668 }
5669
5670 /* If we didn't find any label matching the former edge in the
5671 asm labels, we must be redirecting the fallthrough
5672 edge. */
5673 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5674 }
5675 break;
5676
5677 case GIMPLE_RETURN:
5678 gsi_remove (&gsi, true);
5679 e->flags |= EDGE_FALLTHRU;
5680 break;
5681
5682 case GIMPLE_OMP_RETURN:
5683 case GIMPLE_OMP_CONTINUE:
5684 case GIMPLE_OMP_SECTIONS_SWITCH:
5685 case GIMPLE_OMP_FOR:
5686 /* The edges from OMP constructs can be simply redirected. */
5687 break;
5688
5689 case GIMPLE_EH_DISPATCH:
5690 if (!(e->flags & EDGE_FALLTHRU))
5691 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5692 break;
5693
5694 case GIMPLE_TRANSACTION:
5695 /* The ABORT edge has a stored label associated with it; otherwise
5696 the edges are simply redirectable. */
5697 if (e->flags == 0)
5698 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5699 gimple_block_label (dest));
5700 break;
5701
5702 default:
5703 /* Otherwise it must be a fallthru edge, and we don't need to
5704 do anything besides redirecting it. */
5705 gcc_assert (e->flags & EDGE_FALLTHRU);
5706 break;
5707 }
5708
5709 /* Update/insert PHI nodes as necessary. */
5710
5711 /* Now update the edges in the CFG. */
5712 e = ssa_redirect_edge (e, dest);
5713
5714 return e;
5715 }
5716
5717 /* Returns true if it is possible to remove edge E by redirecting
5718 it to the destination of the other edge from E->src. */
5719
5720 static bool
5721 gimple_can_remove_branch_p (const_edge e)
5722 {
5723 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5724 return false;
5725
5726 return true;
5727 }
5728
5729 /* Simple wrapper, as we can always redirect fallthru edges. */
5730
5731 static basic_block
5732 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5733 {
5734 e = gimple_redirect_edge_and_branch (e, dest);
5735 gcc_assert (e);
5736
5737 return NULL;
5738 }
5739
5740
5741 /* Splits basic block BB after statement STMT (but at least after the
5742 labels). If STMT is NULL, BB is split just after the labels. */
5743
5744 static basic_block
5745 gimple_split_block (basic_block bb, void *stmt)
5746 {
5747 gimple_stmt_iterator gsi;
5748 gimple_stmt_iterator gsi_tgt;
5749 gimple_seq list;
5750 basic_block new_bb;
5751 edge e;
5752 edge_iterator ei;
5753
5754 new_bb = create_empty_bb (bb);
5755
5756 /* Redirect the outgoing edges. */
5757 new_bb->succs = bb->succs;
5758 bb->succs = NULL;
5759 FOR_EACH_EDGE (e, ei, new_bb->succs)
5760 e->src = new_bb;
5761
5762 /* Get a stmt iterator pointing to the first stmt to move. */
5763 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5764 gsi = gsi_after_labels (bb);
5765 else
5766 {
5767 gsi = gsi_for_stmt ((gimple) stmt);
5768 gsi_next (&gsi);
5769 }
5770
5771 /* Move everything from GSI to the new basic block. */
5772 if (gsi_end_p (gsi))
5773 return new_bb;
5774
5775 /* Split the statement list - avoid re-creating new containers as this
5776 brings ugly quadratic memory consumption in the inliner.
5777 (We are still quadratic since we need to update stmt BB pointers,
5778 sadly.) */
5779 gsi_split_seq_before (&gsi, &list);
5780 set_bb_seq (new_bb, list);
5781 for (gsi_tgt = gsi_start (list);
5782 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5783 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5784
5785 return new_bb;
5786 }
5787
5788
5789 /* Moves basic block BB after block AFTER. */
5790
5791 static bool
5792 gimple_move_block_after (basic_block bb, basic_block after)
5793 {
5794 if (bb->prev_bb == after)
5795 return true;
5796
5797 unlink_block (bb);
5798 link_block (bb, after);
5799
5800 return true;
5801 }
5802
5803
5804 /* Return TRUE if block BB has no executable statements, otherwise return
5805 FALSE. */
5806
5807 static bool
5808 gimple_empty_block_p (basic_block bb)
5809 {
5810 /* BB must have no executable statements. */
5811 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5812 if (phi_nodes (bb))
5813 return false;
5814 if (gsi_end_p (gsi))
5815 return true;
5816 if (is_gimple_debug (gsi_stmt (gsi)))
5817 gsi_next_nondebug (&gsi);
5818 return gsi_end_p (gsi);
5819 }
5820
5821
5822 /* Split a basic block if it ends with a conditional branch and if the
5823 other part of the block is not empty. */
5824
5825 static basic_block
5826 gimple_split_block_before_cond_jump (basic_block bb)
5827 {
5828 gimple last, split_point;
5829 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5830 if (gsi_end_p (gsi))
5831 return NULL;
5832 last = gsi_stmt (gsi);
5833 if (gimple_code (last) != GIMPLE_COND
5834 && gimple_code (last) != GIMPLE_SWITCH)
5835 return NULL;
5836 gsi_prev_nondebug (&gsi);
5837 split_point = gsi_stmt (gsi);
5838 return split_block (bb, split_point)->dest;
5839 }
5840
5841
5842 /* Return true if basic_block can be duplicated. */
5843
5844 static bool
5845 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5846 {
5847 return true;
5848 }
5849
5850 /* Create a duplicate of the basic block BB. NOTE: This does not
5851 preserve SSA form. */
5852
5853 static basic_block
5854 gimple_duplicate_bb (basic_block bb)
5855 {
5856 basic_block new_bb;
5857 gimple_stmt_iterator gsi_tgt;
5858
5859 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5860
5861 /* Copy the PHI nodes. We ignore PHI node arguments here because
5862 the incoming edges have not been set up yet. */
5863 for (gphi_iterator gpi = gsi_start_phis (bb);
5864 !gsi_end_p (gpi);
5865 gsi_next (&gpi))
5866 {
5867 gphi *phi, *copy;
5868 phi = gpi.phi ();
5869 copy = create_phi_node (NULL_TREE, new_bb);
5870 create_new_def_for (gimple_phi_result (phi), copy,
5871 gimple_phi_result_ptr (copy));
5872 gimple_set_uid (copy, gimple_uid (phi));
5873 }
5874
5875 gsi_tgt = gsi_start_bb (new_bb);
5876 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5877 !gsi_end_p (gsi);
5878 gsi_next (&gsi))
5879 {
5880 def_operand_p def_p;
5881 ssa_op_iter op_iter;
5882 tree lhs;
5883 gimple stmt, copy;
5884
5885 stmt = gsi_stmt (gsi);
5886 if (gimple_code (stmt) == GIMPLE_LABEL)
5887 continue;
5888
5889 /* Don't duplicate label debug stmts. */
5890 if (gimple_debug_bind_p (stmt)
5891 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5892 == LABEL_DECL)
5893 continue;
5894
5895 /* Create a new copy of STMT and duplicate STMT's virtual
5896 operands. */
5897 copy = gimple_copy (stmt);
5898 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5899
5900 maybe_duplicate_eh_stmt (copy, stmt);
5901 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5902
5903 /* When copying around a stmt writing into a local non-user
5904 aggregate, make sure it won't share a stack slot with other
5905 vars. */
5906 lhs = gimple_get_lhs (stmt);
5907 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5908 {
5909 tree base = get_base_address (lhs);
5910 if (base
5911 && (TREE_CODE (base) == VAR_DECL
5912 || TREE_CODE (base) == RESULT_DECL)
5913 && DECL_IGNORED_P (base)
5914 && !TREE_STATIC (base)
5915 && !DECL_EXTERNAL (base)
5916 && (TREE_CODE (base) != VAR_DECL
5917 || !DECL_HAS_VALUE_EXPR_P (base)))
5918 DECL_NONSHAREABLE (base) = 1;
5919 }
5920
5921 /* Create new names for all the definitions created by COPY and
5922 add replacement mappings for each new name. */
5923 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5924 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5925 }
5926
5927 return new_bb;
5928 }
5929
5930 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5931
5932 static void
5933 add_phi_args_after_copy_edge (edge e_copy)
5934 {
5935 basic_block bb, bb_copy = e_copy->src, dest;
5936 edge e;
5937 edge_iterator ei;
5938 gphi *phi, *phi_copy;
5939 tree def;
5940 gphi_iterator psi, psi_copy;
5941
5942 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5943 return;
5944
5945 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5946
5947 if (e_copy->dest->flags & BB_DUPLICATED)
5948 dest = get_bb_original (e_copy->dest);
5949 else
5950 dest = e_copy->dest;
5951
5952 e = find_edge (bb, dest);
5953 if (!e)
5954 {
5955 /* During loop unrolling the target of the latch edge is copied.
5956 In this case we are not looking for the edge to DEST, but for
5957 the edge to the duplicated block whose original was DEST. */
5958 FOR_EACH_EDGE (e, ei, bb->succs)
5959 {
5960 if ((e->dest->flags & BB_DUPLICATED)
5961 && get_bb_original (e->dest) == dest)
5962 break;
5963 }
5964
5965 gcc_assert (e != NULL);
5966 }
5967
5968 for (psi = gsi_start_phis (e->dest),
5969 psi_copy = gsi_start_phis (e_copy->dest);
5970 !gsi_end_p (psi);
5971 gsi_next (&psi), gsi_next (&psi_copy))
5972 {
5973 phi = psi.phi ();
5974 phi_copy = psi_copy.phi ();
5975 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5976 add_phi_arg (phi_copy, def, e_copy,
5977 gimple_phi_arg_location_from_edge (phi, e));
5978 }
5979 }
5980
5981
5982 /* Basic block BB_COPY was created by code duplication. Add phi node
5983 arguments for edges going out of BB_COPY. The blocks that were
5984 duplicated have BB_DUPLICATED set. */
5985
5986 void
5987 add_phi_args_after_copy_bb (basic_block bb_copy)
5988 {
5989 edge e_copy;
5990 edge_iterator ei;
5991
5992 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5993 {
5994 add_phi_args_after_copy_edge (e_copy);
5995 }
5996 }
5997
5998 /* Blocks in REGION_COPY array of length N_REGION were created by
5999 duplication of basic blocks. Add phi node arguments for edges
6000 going from these blocks. If E_COPY is not NULL, also add
6001 phi node arguments for its destination. */
6002
6003 void
6004 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6005 edge e_copy)
6006 {
6007 unsigned i;
6008
6009 for (i = 0; i < n_region; i++)
6010 region_copy[i]->flags |= BB_DUPLICATED;
6011
6012 for (i = 0; i < n_region; i++)
6013 add_phi_args_after_copy_bb (region_copy[i]);
6014 if (e_copy)
6015 add_phi_args_after_copy_edge (e_copy);
6016
6017 for (i = 0; i < n_region; i++)
6018 region_copy[i]->flags &= ~BB_DUPLICATED;
6019 }
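/* Illustrative sketch only (not GCC code): the protocol expected from a
   caller that duplicates blocks by hand, assuming the original/copy
   tables are initialized so get_bb_original/get_bb_copy work; BB0 and
   BB1 are hypothetical blocks:

     initialize_original_copy_tables ();
     basic_block copies[2];
     copies[0] = duplicate_block (bb0, NULL, NULL);  /* records mapping */
     copies[1] = duplicate_block (bb1, NULL, NULL);
     add_phi_args_after_copy (copies, 2, NULL);
     free_original_copy_tables ();

   add_phi_args_after_copy temporarily marks the copies BB_DUPLICATED,
   exactly as done above, so edges out of the copies pick up the PHI
   arguments of the corresponding original edges.  */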
6020
6021 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6022 important exit edge EXIT. By important we mean that no SSA name defined
6023 inside region is live over the other exit edges of the region. All entry
6024 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6025 to the duplicate of the region. Dominance and loop information is
6026 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6027 UPDATE_DOMINANCE is false then we assume that the caller will update the
6028 dominance information after calling this function. The new basic
6029 blocks are stored to REGION_COPY in the same order as in REGION,
6030 provided that REGION_COPY is not NULL.
6031 The function returns false if it is unable to copy the region,
6032 true otherwise. */
6033
6034 bool
6035 gimple_duplicate_sese_region (edge entry, edge exit,
6036 basic_block *region, unsigned n_region,
6037 basic_block *region_copy,
6038 bool update_dominance)
6039 {
6040 unsigned i;
6041 bool free_region_copy = false, copying_header = false;
6042 struct loop *loop = entry->dest->loop_father;
6043 edge exit_copy;
6044 vec<basic_block> doms;
6045 edge redirected;
6046 int total_freq = 0, entry_freq = 0;
6047 gcov_type total_count = 0, entry_count = 0;
6048
6049 if (!can_copy_bbs_p (region, n_region))
6050 return false;
6051
6052 /* Some sanity checking. Note that we do not check for all possible
6053 misuses of the functions. That is, if you ask to copy something weird,
6054 it will work, but the state of structures probably will not be
6055 correct. */
6056 for (i = 0; i < n_region; i++)
6057 {
6058 /* We do not handle subloops, i.e. all the blocks must belong to the
6059 same loop. */
6060 if (region[i]->loop_father != loop)
6061 return false;
6062
6063 if (region[i] != entry->dest
6064 && region[i] == loop->header)
6065 return false;
6066 }
6067
6068 /* In case the function is used for loop header copying (which is the primary
6069 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6070 if (loop->header == entry->dest)
6071 {
6072 copying_header = true;
6073
6074 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6075 return false;
6076
6077 for (i = 0; i < n_region; i++)
6078 if (region[i] != exit->src
6079 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6080 return false;
6081 }
6082
6083 initialize_original_copy_tables ();
6084
6085 if (copying_header)
6086 set_loop_copy (loop, loop_outer (loop));
6087 else
6088 set_loop_copy (loop, loop);
6089
6090 if (!region_copy)
6091 {
6092 region_copy = XNEWVEC (basic_block, n_region);
6093 free_region_copy = true;
6094 }
6095
6096 /* Record blocks outside the region that are dominated by something
6097 inside. */
6098 if (update_dominance)
6099 {
6100 doms.create (0);
6101 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6102 }
6103
6104 if (entry->dest->count)
6105 {
6106 total_count = entry->dest->count;
6107 entry_count = entry->count;
6108 /* Fix up corner cases, to avoid division by zero or creation of negative
6109 frequencies. */
6110 if (entry_count > total_count)
6111 entry_count = total_count;
6112 }
6113 else
6114 {
6115 total_freq = entry->dest->frequency;
6116 entry_freq = EDGE_FREQUENCY (entry);
6117 /* Fix up corner cases, to avoid division by zero or creation of negative
6118 frequencies. */
6119 if (total_freq == 0)
6120 total_freq = 1;
6121 else if (entry_freq > total_freq)
6122 entry_freq = total_freq;
6123 }
6124
6125 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6126 split_edge_bb_loc (entry), update_dominance);
6127 if (total_count)
6128 {
6129 scale_bbs_frequencies_gcov_type (region, n_region,
6130 total_count - entry_count,
6131 total_count);
6132 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6133 total_count);
6134 }
6135 else
6136 {
6137 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6138 total_freq);
6139 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6140 }
6141
6142 if (copying_header)
6143 {
6144 loop->header = exit->dest;
6145 loop->latch = exit->src;
6146 }
6147
6148 /* Redirect the entry and add the phi node arguments. */
6149 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6150 gcc_assert (redirected != NULL);
6151 flush_pending_stmts (entry);
6152
6153 /* Concerning updating of dominators: We must recount dominators
6154 for the entry block and its copy. Anything that is outside of the
6155 region, but was dominated by something inside needs recounting as
6156 well. */
6157 if (update_dominance)
6158 {
6159 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6160 doms.safe_push (get_bb_original (entry->dest));
6161 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6162 doms.release ();
6163 }
6164
6165 /* Add the other PHI node arguments. */
6166 add_phi_args_after_copy (region_copy, n_region, NULL);
6167
6168 if (free_region_copy)
6169 free (region_copy);
6170
6171 free_original_copy_tables ();
6172 return true;
6173 }
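/* A hedged usage sketch (modeled on loop header copying, the primary
   client mentioned above; details simplified):

     edge entry = loop_preheader_edge (loop);
     edge exit = ...;         /* an exit edge out of the header's test */
     basic_block bbs[1] = { loop->header };

     if (gimple_duplicate_sese_region (entry, exit, bbs, 1, NULL, true))
       update_ssa (TODO_update_ssa);

   The call rewires ENTRY to the copied header; since only dominance and
   loop structures are updated here, the caller must still update the
   SSA web as shown.  */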
6174
6175 /* Checks if BB is part of the region defined by N_REGION BBS. */
6176 static bool
6177 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6178 {
6179 unsigned int n;
6180
6181 for (n = 0; n < n_region; n++)
6182 {
6183 if (bb == bbs[n])
6184 return true;
6185 }
6186 return false;
6187 }
6188
6189 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6190 are stored to REGION_COPY in the same order in which they appear
6191 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6192 the region, EXIT an exit from it. The condition guarding EXIT
6193 is moved to ENTRY. Returns true if duplication succeeds, false
6194 otherwise.
6195
6196 For example,
6197
6198 some_code;
6199 if (cond)
6200 A;
6201 else
6202 B;
6203
6204 is transformed to
6205
6206 if (cond)
6207 {
6208 some_code;
6209 A;
6210 }
6211 else
6212 {
6213 some_code;
6214 B;
6215 }
6216 */
6217
6218 bool
6219 gimple_duplicate_sese_tail (edge entry, edge exit,
6220 basic_block *region, unsigned n_region,
6221 basic_block *region_copy)
6222 {
6223 unsigned i;
6224 bool free_region_copy = false;
6225 struct loop *loop = exit->dest->loop_father;
6226 struct loop *orig_loop = entry->dest->loop_father;
6227 basic_block switch_bb, entry_bb, nentry_bb;
6228 vec<basic_block> doms;
6229 int total_freq = 0, exit_freq = 0;
6230 gcov_type total_count = 0, exit_count = 0;
6231 edge exits[2], nexits[2], e;
6232 gimple_stmt_iterator gsi;
6233 gimple cond_stmt;
6234 edge sorig, snew;
6235 basic_block exit_bb;
6236 gphi_iterator psi;
6237 gphi *phi;
6238 tree def;
6239 struct loop *target, *aloop, *cloop;
6240
6241 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6242 exits[0] = exit;
6243 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6244
6245 if (!can_copy_bbs_p (region, n_region))
6246 return false;
6247
6248 initialize_original_copy_tables ();
6249 set_loop_copy (orig_loop, loop);
6250
6251 target = loop;
6252 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6253 {
6254 if (bb_part_of_region_p (aloop->header, region, n_region))
6255 {
6256 cloop = duplicate_loop (aloop, target);
6257 duplicate_subloops (aloop, cloop);
6258 }
6259 }
6260
6261 if (!region_copy)
6262 {
6263 region_copy = XNEWVEC (basic_block, n_region);
6264 free_region_copy = true;
6265 }
6266
6267 gcc_assert (!need_ssa_update_p (cfun));
6268
6269 /* Record blocks outside the region that are dominated by something
6270 inside. */
6271 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6272
6273 if (exit->src->count)
6274 {
6275 total_count = exit->src->count;
6276 exit_count = exit->count;
6277 /* Fix up corner cases, to avoid division by zero or creation of negative
6278 frequencies. */
6279 if (exit_count > total_count)
6280 exit_count = total_count;
6281 }
6282 else
6283 {
6284 total_freq = exit->src->frequency;
6285 exit_freq = EDGE_FREQUENCY (exit);
6286 /* Fix up corner cases, to avoid division by zero or creation of negative
6287 frequencies. */
6288 if (total_freq == 0)
6289 total_freq = 1;
6290 if (exit_freq > total_freq)
6291 exit_freq = total_freq;
6292 }
6293
6294 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6295 split_edge_bb_loc (exit), true);
6296 if (total_count)
6297 {
6298 scale_bbs_frequencies_gcov_type (region, n_region,
6299 total_count - exit_count,
6300 total_count);
6301 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6302 total_count);
6303 }
6304 else
6305 {
6306 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6307 total_freq);
6308 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6309 }
6310
6311 /* Create the switch block, and put the exit condition into it. */
6312 entry_bb = entry->dest;
6313 nentry_bb = get_bb_copy (entry_bb);
6314 if (!last_stmt (entry->src)
6315 || !stmt_ends_bb_p (last_stmt (entry->src)))
6316 switch_bb = entry->src;
6317 else
6318 switch_bb = split_edge (entry);
6319 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6320
6321 gsi = gsi_last_bb (switch_bb);
6322 cond_stmt = last_stmt (exit->src);
6323 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6324 cond_stmt = gimple_copy (cond_stmt);
6325
6326 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6327
6328 sorig = single_succ_edge (switch_bb);
6329 sorig->flags = exits[1]->flags;
6330 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6331
6332 /* Register the new edge from SWITCH_BB in loop exit lists. */
6333 rescan_loop_exit (snew, true, false);
6334
6335 /* Add the PHI node arguments. */
6336 add_phi_args_after_copy (region_copy, n_region, snew);
6337
6338 /* Get rid of now superfluous conditions and associated edges (and phi node
6339 arguments). */
6340 exit_bb = exit->dest;
6341
6342 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6343 PENDING_STMT (e) = NULL;
6344
6345 /* The latch of ORIG_LOOP was copied, and so was the backedge
6346 to the original header. We redirect this backedge to EXIT_BB. */
6347 for (i = 0; i < n_region; i++)
6348 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6349 {
6350 gcc_assert (single_succ_edge (region_copy[i]));
6351 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6352 PENDING_STMT (e) = NULL;
6353 for (psi = gsi_start_phis (exit_bb);
6354 !gsi_end_p (psi);
6355 gsi_next (&psi))
6356 {
6357 phi = psi.phi ();
6358 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6359 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6360 }
6361 }
6362 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6363 PENDING_STMT (e) = NULL;
6364
6365 /* Anything that is outside of the region, but was dominated by
6366 something inside, needs its dominance info updated. */
6367 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6368 doms.release ();
6369 /* Update the SSA web. */
6370 update_ssa (TODO_update_ssa);
6371
6372 if (free_region_copy)
6373 free (region_copy);
6374
6375 free_original_copy_tables ();
6376 return true;
6377 }
6378
6379 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6380 adding blocks when the dominator traversal reaches EXIT. This
6381 function silently assumes that ENTRY strictly dominates EXIT. */
6382
6383 void
6384 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6385 vec<basic_block> *bbs_p)
6386 {
6387 basic_block son;
6388
6389 for (son = first_dom_son (CDI_DOMINATORS, entry);
6390 son;
6391 son = next_dom_son (CDI_DOMINATORS, son))
6392 {
6393 bbs_p->safe_push (son);
6394 if (son != exit)
6395 gather_blocks_in_sese_region (son, exit, bbs_p);
6396 }
6397 }
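/* Illustrative only: the intended calling idiom, as used by
   move_sese_region_to_fn below.  ENTRY itself is never pushed by the
   traversal, so callers add it by hand:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     bbs.release ();
*/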
6398
6399 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6400 The duplicates are recorded in VARS_MAP. */
6401
6402 static void
6403 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6404 tree to_context)
6405 {
6406 tree t = *tp, new_t;
6407 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6408
6409 if (DECL_CONTEXT (t) == to_context)
6410 return;
6411
6412 bool existed;
6413 tree &loc = vars_map->get_or_insert (t, &existed);
6414
6415 if (!existed)
6416 {
6417 if (SSA_VAR_P (t))
6418 {
6419 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6420 add_local_decl (f, new_t);
6421 }
6422 else
6423 {
6424 gcc_assert (TREE_CODE (t) == CONST_DECL);
6425 new_t = copy_node (t);
6426 }
6427 DECL_CONTEXT (new_t) = to_context;
6428
6429 loc = new_t;
6430 }
6431 else
6432 new_t = loc;
6433
6434 *tp = new_t;
6435 }
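/* Note (illustrative): the get_or_insert idiom above does a single hash
   lookup for both the hit and the miss path, instead of a get followed
   by a put; make_duplicate is a hypothetical helper:

     bool existed;
     tree &loc = vars_map->get_or_insert (t, &existed);
     if (!existed)
       loc = make_duplicate (t);

   Writing through the returned reference stores the new mapping.  */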
6436
6437
6438 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6439 VARS_MAP maps old ssa names and var_decls to the new ones. */
6440
6441 static tree
6442 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6443 tree to_context)
6444 {
6445 tree new_name;
6446
6447 gcc_assert (!virtual_operand_p (name));
6448
6449 tree *loc = vars_map->get (name);
6450
6451 if (!loc)
6452 {
6453 tree decl = SSA_NAME_VAR (name);
6454 if (decl)
6455 {
6456 replace_by_duplicate_decl (&decl, vars_map, to_context);
6457 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6458 decl, SSA_NAME_DEF_STMT (name));
6459 if (SSA_NAME_IS_DEFAULT_DEF (name))
6460 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6461 decl, new_name);
6462 }
6463 else
6464 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6465 name, SSA_NAME_DEF_STMT (name));
6466
6467 vars_map->put (name, new_name);
6468 }
6469 else
6470 new_name = *loc;
6471
6472 return new_name;
6473 }
6474
6475 struct move_stmt_d
6476 {
6477 tree orig_block;
6478 tree new_block;
6479 tree from_context;
6480 tree to_context;
6481 hash_map<tree, tree> *vars_map;
6482 htab_t new_label_map;
6483 hash_map<void *, void *> *eh_map;
6484 bool remap_decls_p;
6485 };
6486
6487 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6488 contained in *TP if it was ORIG_BLOCK previously, and change the
6489 DECL_CONTEXT of every local variable referenced in *TP. */
6490
6491 static tree
6492 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6493 {
6494 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6495 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6496 tree t = *tp;
6497
6498 if (EXPR_P (t))
6499 {
6500 tree block = TREE_BLOCK (t);
6501 if (block == p->orig_block
6502 || (p->orig_block == NULL_TREE
6503 && block != NULL_TREE))
6504 TREE_SET_BLOCK (t, p->new_block);
6505 #ifdef ENABLE_CHECKING
6506 else if (block != NULL_TREE)
6507 {
6508 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6509 block = BLOCK_SUPERCONTEXT (block);
6510 gcc_assert (block == p->orig_block);
6511 }
6512 #endif
6513 }
6514 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6515 {
6516 if (TREE_CODE (t) == SSA_NAME)
6517 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6518 else if (TREE_CODE (t) == LABEL_DECL)
6519 {
6520 if (p->new_label_map)
6521 {
6522 struct tree_map in, *out;
6523 in.base.from = t;
6524 out = (struct tree_map *)
6525 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6526 if (out)
6527 *tp = t = out->to;
6528 }
6529
6530 DECL_CONTEXT (t) = p->to_context;
6531 }
6532 else if (p->remap_decls_p)
6533 {
6534 /* Replace T with its duplicate. T should no longer appear in the
6535 parent function, so this looks wasteful; however, it may appear
6536 in referenced_vars, and more importantly, as virtual operands of
6537 statements, and in alias lists of other variables. It would be
6538 quite difficult to expunge it from all those places. ??? It might
6539 suffice to do this for addressable variables. */
6540 if ((TREE_CODE (t) == VAR_DECL
6541 && !is_global_var (t))
6542 || TREE_CODE (t) == CONST_DECL)
6543 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6544 }
6545 *walk_subtrees = 0;
6546 }
6547 else if (TYPE_P (t))
6548 *walk_subtrees = 0;
6549
6550 return NULL_TREE;
6551 }
6552
6553 /* Helper for move_stmt_r. Given an EH region number for the source
6554 function, map that to the duplicate EH region number in the dest. */
6555
6556 static int
6557 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6558 {
6559 eh_region old_r, new_r;
6560
6561 old_r = get_eh_region_from_number (old_nr);
6562 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6563
6564 return new_r->index;
6565 }
6566
6567 /* Similar, but operate on INTEGER_CSTs. */
6568
6569 static tree
6570 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6571 {
6572 int old_nr, new_nr;
6573
6574 old_nr = tree_to_shwi (old_t_nr);
6575 new_nr = move_stmt_eh_region_nr (old_nr, p);
6576
6577 return build_int_cst (integer_type_node, new_nr);
6578 }
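/* For example (illustrative region numbers): if source EH region 2 was
   duplicated as region 5 in the destination, then

     move_stmt_eh_region_tree_nr (build_int_cst (integer_type_node, 2), p)

   yields an INTEGER_CST of value 5; the GIMPLE_CALL case of move_stmt_r
   below stores it back as the __builtin_eh_* argument.  */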
6579
6580 /* Like move_stmt_op, but for gimple statements.
6581
6582 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6583 contained in the current statement in *GSI_P and change the
6584 DECL_CONTEXT of every local variable referenced in the current
6585 statement. */
6586
6587 static tree
6588 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6589 struct walk_stmt_info *wi)
6590 {
6591 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6592 gimple stmt = gsi_stmt (*gsi_p);
6593 tree block = gimple_block (stmt);
6594
6595 if (block == p->orig_block
6596 || (p->orig_block == NULL_TREE
6597 && block != NULL_TREE))
6598 gimple_set_block (stmt, p->new_block);
6599
6600 switch (gimple_code (stmt))
6601 {
6602 case GIMPLE_CALL:
6603 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6604 {
6605 tree r, fndecl = gimple_call_fndecl (stmt);
6606 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6607 switch (DECL_FUNCTION_CODE (fndecl))
6608 {
6609 case BUILT_IN_EH_COPY_VALUES:
6610 r = gimple_call_arg (stmt, 1);
6611 r = move_stmt_eh_region_tree_nr (r, p);
6612 gimple_call_set_arg (stmt, 1, r);
6613 /* FALLTHRU */
6614
6615 case BUILT_IN_EH_POINTER:
6616 case BUILT_IN_EH_FILTER:
6617 r = gimple_call_arg (stmt, 0);
6618 r = move_stmt_eh_region_tree_nr (r, p);
6619 gimple_call_set_arg (stmt, 0, r);
6620 break;
6621
6622 default:
6623 break;
6624 }
6625 }
6626 break;
6627
6628 case GIMPLE_RESX:
6629 {
6630 gresx *resx_stmt = as_a <gresx *> (stmt);
6631 int r = gimple_resx_region (resx_stmt);
6632 r = move_stmt_eh_region_nr (r, p);
6633 gimple_resx_set_region (resx_stmt, r);
6634 }
6635 break;
6636
6637 case GIMPLE_EH_DISPATCH:
6638 {
6639 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6640 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6641 r = move_stmt_eh_region_nr (r, p);
6642 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6643 }
6644 break;
6645
6646 case GIMPLE_OMP_RETURN:
6647 case GIMPLE_OMP_CONTINUE:
6648 break;
6649 default:
6650 if (is_gimple_omp (stmt))
6651 {
6652 /* Do not remap variables inside OMP directives. Variables
6653 referenced in clauses and directive header belong to the
6654 parent function and should not be moved into the child
6655 function. */
6656 bool save_remap_decls_p = p->remap_decls_p;
6657 p->remap_decls_p = false;
6658 *handled_ops_p = true;
6659
6660 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6661 move_stmt_op, wi);
6662
6663 p->remap_decls_p = save_remap_decls_p;
6664 }
6665 break;
6666 }
6667
6668 return NULL_TREE;
6669 }
6670
6671 /* Move basic block BB from function CFUN to function DEST_FN. The
6672 block is moved out of the original linked list and placed after
6673 block AFTER in the new list. Also, the block is removed from the
6674 original array of blocks and placed in DEST_FN's array of blocks.
6675 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6676 updated to reflect the moved edges.
6677
6678 The local variables are remapped to new instances; VARS_MAP is used
6679 to record the mapping. */
6680
6681 static void
6682 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6683 basic_block after, bool update_edge_count_p,
6684 struct move_stmt_d *d)
6685 {
6686 struct control_flow_graph *cfg;
6687 edge_iterator ei;
6688 edge e;
6689 gimple_stmt_iterator si;
6690 unsigned old_len, new_len;
6691
6692 /* Remove BB from dominance structures. */
6693 delete_from_dominance_info (CDI_DOMINATORS, bb);
6694
6695 /* Move BB from its current loop to the copy in the new function. */
6696 if (current_loops)
6697 {
6698 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6699 if (new_loop)
6700 bb->loop_father = new_loop;
6701 }
6702
6703 /* Link BB to the new linked list. */
6704 move_block_after (bb, after);
6705
6706 /* Update the edge count in the corresponding flowgraphs. */
6707 if (update_edge_count_p)
6708 FOR_EACH_EDGE (e, ei, bb->succs)
6709 {
6710 cfun->cfg->x_n_edges--;
6711 dest_cfun->cfg->x_n_edges++;
6712 }
6713
6714 /* Remove BB from the original basic block array. */
6715 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6716 cfun->cfg->x_n_basic_blocks--;
6717
6718 /* Grow DEST_CFUN's basic block array if needed. */
6719 cfg = dest_cfun->cfg;
6720 cfg->x_n_basic_blocks++;
6721 if (bb->index >= cfg->x_last_basic_block)
6722 cfg->x_last_basic_block = bb->index + 1;
6723
6724 old_len = vec_safe_length (cfg->x_basic_block_info);
6725 if ((unsigned) cfg->x_last_basic_block >= old_len)
6726 {
6727 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6728 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6729 }
6730
6731 (*cfg->x_basic_block_info)[bb->index] = bb;
6732
6733 /* Remap the variables in phi nodes. */
6734 for (gphi_iterator psi = gsi_start_phis (bb);
6735 !gsi_end_p (psi); )
6736 {
6737 gphi *phi = psi.phi ();
6738 use_operand_p use;
6739 tree op = PHI_RESULT (phi);
6740 ssa_op_iter oi;
6741 unsigned i;
6742
6743 if (virtual_operand_p (op))
6744 {
6745 /* Remove the phi nodes for virtual operands (alias analysis will be
6746 run for the new function, anyway). */
6747 remove_phi_node (&psi, true);
6748 continue;
6749 }
6750
6751 SET_PHI_RESULT (phi,
6752 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6753 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6754 {
6755 op = USE_FROM_PTR (use);
6756 if (TREE_CODE (op) == SSA_NAME)
6757 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6758 }
6759
6760 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6761 {
6762 location_t locus = gimple_phi_arg_location (phi, i);
6763 tree block = LOCATION_BLOCK (locus);
6764
6765 if (locus == UNKNOWN_LOCATION)
6766 continue;
6767 if (d->orig_block == NULL_TREE || block == d->orig_block)
6768 {
6769 if (d->new_block == NULL_TREE)
6770 locus = LOCATION_LOCUS (locus);
6771 else
6772 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6773 gimple_phi_arg_set_location (phi, i, locus);
6774 }
6775 }
6776
6777 gsi_next (&psi);
6778 }
6779
6780 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6781 {
6782 gimple stmt = gsi_stmt (si);
6783 struct walk_stmt_info wi;
6784
6785 memset (&wi, 0, sizeof (wi));
6786 wi.info = d;
6787 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6788
6789 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6790 {
6791 tree label = gimple_label_label (label_stmt);
6792 int uid = LABEL_DECL_UID (label);
6793
6794 gcc_assert (uid > -1);
6795
6796 old_len = vec_safe_length (cfg->x_label_to_block_map);
6797 if (old_len <= (unsigned) uid)
6798 {
6799 new_len = 3 * uid / 2 + 1;
6800 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6801 }
6802
6803 (*cfg->x_label_to_block_map)[uid] = bb;
6804 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6805
6806 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6807
6808 if (uid >= dest_cfun->cfg->last_label_uid)
6809 dest_cfun->cfg->last_label_uid = uid + 1;
6810 }
6811
6812 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6813 remove_stmt_from_eh_lp_fn (cfun, stmt);
6814
6815 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6816 gimple_remove_stmt_histograms (cfun, stmt);
6817
6818 /* We cannot leave any operands allocated from the operand caches of
6819 the current function. */
6820 free_stmt_operands (cfun, stmt);
6821 push_cfun (dest_cfun);
6822 update_stmt (stmt);
6823 pop_cfun ();
6824 }
6825
6826 FOR_EACH_EDGE (e, ei, bb->succs)
6827 if (e->goto_locus != UNKNOWN_LOCATION)
6828 {
6829 tree block = LOCATION_BLOCK (e->goto_locus);
6830 if (d->orig_block == NULL_TREE
6831 || block == d->orig_block)
6832 e->goto_locus = d->new_block ?
6833 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6834 LOCATION_LOCUS (e->goto_locus);
6835 }
6836 }
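/* Note on the growth policies above (illustrative arithmetic only): the
   basic block array grows by about a quarter,
     x_last_basic_block = 100  =>  new_len = 100 + (100 + 3) / 4 = 125,
   and the label map by about half,
     uid = 100  =>  new_len = 3 * 100 / 2 + 1 = 151,
   so moving many blocks amortizes to linear time overall.  */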
6837
6838 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6839 the outermost EH region. Use REGION as the incoming base EH region. */
6840
6841 static eh_region
6842 find_outermost_region_in_block (struct function *src_cfun,
6843 basic_block bb, eh_region region)
6844 {
6845 gimple_stmt_iterator si;
6846
6847 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6848 {
6849 gimple stmt = gsi_stmt (si);
6850 eh_region stmt_region;
6851 int lp_nr;
6852
6853 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6854 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6855 if (stmt_region)
6856 {
6857 if (region == NULL)
6858 region = stmt_region;
6859 else if (stmt_region != region)
6860 {
6861 region = eh_region_outermost (src_cfun, stmt_region, region);
6862 gcc_assert (region != NULL);
6863 }
6864 }
6865 }
6866
6867 return region;
6868 }
6869
6870 static tree
6871 new_label_mapper (tree decl, void *data)
6872 {
6873 htab_t hash = (htab_t) data;
6874 struct tree_map *m;
6875 void **slot;
6876
6877 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6878
6879 m = XNEW (struct tree_map);
6880 m->hash = DECL_UID (decl);
6881 m->base.from = decl;
6882 m->to = create_artificial_label (UNKNOWN_LOCATION);
6883 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6884 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6885 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6886
6887 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6888 gcc_assert (*slot == NULL);
6889
6890 *slot = m;
6891
6892 return m->to;
6893 }
6894
6895 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6896 subblocks. */
6897
6898 static void
6899 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6900 tree to_context)
6901 {
6902 tree *tp, t;
6903
6904 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6905 {
6906 t = *tp;
6907 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6908 continue;
6909 replace_by_duplicate_decl (&t, vars_map, to_context);
6910 if (t != *tp)
6911 {
6912 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6913 {
6914 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6915 DECL_HAS_VALUE_EXPR_P (t) = 1;
6916 }
6917 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6918 *tp = t;
6919 }
6920 }
6921
6922 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6923 replace_block_vars_by_duplicates (block, vars_map, to_context);
6924 }
6925
6926 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6927 from FN1 to FN2. */
6928
6929 static void
6930 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6931 struct loop *loop)
6932 {
6933 /* Discard it from the old loop array. */
6934 (*get_loops (fn1))[loop->num] = NULL;
6935
6936 /* Place it in the new loop array, assigning it a new number. */
6937 loop->num = number_of_loops (fn2);
6938 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6939
6940 /* Recurse to children. */
6941 for (loop = loop->inner; loop; loop = loop->next)
6942 fixup_loop_arrays_after_move (fn1, fn2, loop);
6943 }
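/* Illustrative example: if FN2 already holds loops numbered 0 (the
   root) and 1, a loop that was number 3 in FN1 is renumbered to
   number_of_loops (fn2) == 2, so afterwards

     (*get_loops (fn1))[3] == NULL
     (*loops_for_fn (fn2)->larray)[2] == loop  &&  loop->num == 2

   and each subloop gets the next free number the same way.  */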
6944
6945 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6946 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6947
6948 DEBUG_FUNCTION void
6949 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6950 {
6951 basic_block bb;
6952 edge_iterator ei;
6953 edge e;
6954 bitmap bbs = BITMAP_ALLOC (NULL);
6955 int i;
6956
6957 gcc_assert (entry != NULL);
6958 gcc_assert (entry != exit);
6959 gcc_assert (bbs_p != NULL);
6960
6961 gcc_assert (bbs_p->length () > 0);
6962
6963 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6964 bitmap_set_bit (bbs, bb->index);
6965
6966 gcc_assert (bitmap_bit_p (bbs, entry->index));
6967 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6968
6969 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6970 {
6971 if (bb == entry)
6972 {
6973 gcc_assert (single_pred_p (entry));
6974 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6975 }
6976 else
6977 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6978 {
6979 e = ei_edge (ei);
6980 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6981 }
6982
6983 if (bb == exit)
6984 {
6985 gcc_assert (single_succ_p (exit));
6986 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6987 }
6988 else
6989 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6990 {
6991 e = ei_edge (ei);
6992 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6993 }
6994 }
6995
6996 BITMAP_FREE (bbs);
6997 }
6998
6999
7000 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7001 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7002 single basic block in the original CFG and the new basic block is
7003 returned. DEST_CFUN must not have a CFG yet.
7004
7005 Note that the region need not be a pure SESE region. Blocks inside
7006 the region may contain calls to abort/exit. The only restriction
7007 is that ENTRY_BB should be the only entry point and it must
7008 dominate EXIT_BB.
7009
7010 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7011 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7012 to the new function.
7013
7014 All local variables referenced in the region are assumed to be in
7015 the corresponding BLOCK_VARS and unexpanded variable lists
7016 associated with DEST_CFUN. */
7017
7018 basic_block
7019 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7020 basic_block exit_bb, tree orig_block)
7021 {
7022 vec<basic_block> bbs, dom_bbs;
7023 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7024 basic_block after, bb, *entry_pred, *exit_succ, abb;
7025 struct function *saved_cfun = cfun;
7026 int *entry_flag, *exit_flag;
7027 unsigned *entry_prob, *exit_prob;
7028 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7029 edge e;
7030 edge_iterator ei;
7031 htab_t new_label_map;
7032 hash_map<void *, void *> *eh_map;
7033 struct loop *loop = entry_bb->loop_father;
7034 struct loop *loop0 = get_loop (saved_cfun, 0);
7035 struct move_stmt_d d;
7036
7037 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7038 region. */
7039 gcc_assert (entry_bb != exit_bb
7040 && (!exit_bb
7041 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7042
7043 /* Collect all the blocks in the region. Manually add ENTRY_BB
7044 because it won't be added by gather_blocks_in_sese_region. */
7045 bbs.create (0);
7046 bbs.safe_push (entry_bb);
7047 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7048 #ifdef ENABLE_CHECKING
7049 verify_sese (entry_bb, exit_bb, &bbs);
7050 #endif
7051
7052 /* The blocks that used to be dominated by something in BBS will now be
7053 dominated by the new block. */
7054 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7055 bbs.address (),
7056 bbs.length ());
7057
7058 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7059 the predecessor edges to ENTRY_BB and the successor edges to
7060 EXIT_BB so that we can re-attach them to the new basic block that
7061 will replace the region. */
7062 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7063 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7064 entry_flag = XNEWVEC (int, num_entry_edges);
7065 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7066 i = 0;
7067 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7068 {
7069 entry_prob[i] = e->probability;
7070 entry_flag[i] = e->flags;
7071 entry_pred[i++] = e->src;
7072 remove_edge (e);
7073 }
7074
7075 if (exit_bb)
7076 {
7077 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7078 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7079 exit_flag = XNEWVEC (int, num_exit_edges);
7080 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7081 i = 0;
7082 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7083 {
7084 exit_prob[i] = e->probability;
7085 exit_flag[i] = e->flags;
7086 exit_succ[i++] = e->dest;
7087 remove_edge (e);
7088 }
7089 }
7090 else
7091 {
7092 num_exit_edges = 0;
7093 exit_succ = NULL;
7094 exit_flag = NULL;
7095 exit_prob = NULL;
7096 }
7097
7098 /* Switch context to the child function to initialize DEST_FN's CFG. */
7099 gcc_assert (dest_cfun->cfg == NULL);
7100 push_cfun (dest_cfun);
7101
7102 init_empty_tree_cfg ();
7103
7104 /* Initialize EH information for the new function. */
7105 eh_map = NULL;
7106 new_label_map = NULL;
7107 if (saved_cfun->eh)
7108 {
7109 eh_region region = NULL;
7110
7111 FOR_EACH_VEC_ELT (bbs, i, bb)
7112 region = find_outermost_region_in_block (saved_cfun, bb, region);
7113
7114 init_eh_for_function ();
7115 if (region != NULL)
7116 {
7117 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7118 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7119 new_label_mapper, new_label_map);
7120 }
7121 }
7122
7123 /* Initialize an empty loop tree. */
7124 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7125 init_loops_structure (dest_cfun, loops, 1);
7126 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7127 set_loops_for_fn (dest_cfun, loops);
7128
7129 /* Move the outlined loop tree part. */
7130 num_nodes = bbs.length ();
7131 FOR_EACH_VEC_ELT (bbs, i, bb)
7132 {
7133 if (bb->loop_father->header == bb)
7134 {
7135 struct loop *this_loop = bb->loop_father;
7136 struct loop *outer = loop_outer (this_loop);
7137 if (outer == loop
7138 /* If the SESE region contains some bbs ending with
7139 a noreturn call, those are considered to belong
7140 to the outermost loop in saved_cfun, rather than
7141 the entry_bb's loop_father. */
7142 || outer == loop0)
7143 {
7144 if (outer != loop)
7145 num_nodes -= this_loop->num_nodes;
7146 flow_loop_tree_node_remove (bb->loop_father);
7147 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7148 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7149 }
7150 }
7151 else if (bb->loop_father == loop0 && loop0 != loop)
7152 num_nodes--;
7153
7154 /* Remove loop exits from the outlined region. */
7155 if (loops_for_fn (saved_cfun)->exits)
7156 FOR_EACH_EDGE (e, ei, bb->succs)
7157 {
7158 struct loops *l = loops_for_fn (saved_cfun);
7159 loop_exit **slot
7160 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7161 NO_INSERT);
7162 if (slot)
7163 l->exits->clear_slot (slot);
7164 }
7165 }
7166
7167
7168 /* Adjust the number of blocks in the tree root of the outlined part. */
7169 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7170
7171 /* Setup a mapping to be used by move_block_to_fn. */
7172 loop->aux = current_loops->tree_root;
7173 loop0->aux = current_loops->tree_root;
7174
7175 pop_cfun ();
7176
7177 /* Move blocks from BBS into DEST_CFUN. */
7178 gcc_assert (bbs.length () >= 2);
7179 after = dest_cfun->cfg->x_entry_block_ptr;
7180 hash_map<tree, tree> vars_map;
7181
7182 memset (&d, 0, sizeof (d));
7183 d.orig_block = orig_block;
7184 d.new_block = DECL_INITIAL (dest_cfun->decl);
7185 d.from_context = cfun->decl;
7186 d.to_context = dest_cfun->decl;
7187 d.vars_map = &vars_map;
7188 d.new_label_map = new_label_map;
7189 d.eh_map = eh_map;
7190 d.remap_decls_p = true;
7191
7192 FOR_EACH_VEC_ELT (bbs, i, bb)
7193 {
7194 /* No need to update edge counts on the last block. They have
7195 already been updated earlier when we detached the region from
7196 the original CFG. */
7197 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7198 after = bb;
7199 }
7200
7201 loop->aux = NULL;
7202 loop0->aux = NULL;
7203 /* Loop sizes are no longer correct, fix them up. */
7204 loop->num_nodes -= num_nodes;
7205 for (struct loop *outer = loop_outer (loop);
7206 outer; outer = loop_outer (outer))
7207 outer->num_nodes -= num_nodes;
7208 loop0->num_nodes -= bbs.length () - num_nodes;
7209
7210 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7211 {
7212 struct loop *aloop;
7213 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7214 if (aloop != NULL)
7215 {
7216 if (aloop->simduid)
7217 {
7218 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7219 d.to_context);
7220 dest_cfun->has_simduid_loops = true;
7221 }
7222 if (aloop->force_vectorize)
7223 dest_cfun->has_force_vectorize_loops = true;
7224 }
7225 }
7226
7227 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7228 if (orig_block)
7229 {
7230 tree block;
7231 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7232 == NULL_TREE);
7233 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7234 = BLOCK_SUBBLOCKS (orig_block);
7235 for (block = BLOCK_SUBBLOCKS (orig_block);
7236 block; block = BLOCK_CHAIN (block))
7237 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7238 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7239 }
7240
7241 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7242 &vars_map, dest_cfun->decl);
7243
7244 if (new_label_map)
7245 htab_delete (new_label_map);
7246 if (eh_map)
7247 delete eh_map;
7248
7249 /* Rewire the entry and exit blocks. The successor to the entry
7250 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7251 the child function. Similarly, the predecessor of DEST_FN's
7252 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7253 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7254 various CFG manipulation functions get to the right CFG.
7255
7256 FIXME, this is silly. The CFG ought to become a parameter to
7257 these helpers. */
7258 push_cfun (dest_cfun);
7259 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7260 if (exit_bb)
7261 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7262 pop_cfun ();
7263
7264 /* Back in the original function, the SESE region has disappeared;
7265 create a new basic block in its place. */
7266 bb = create_empty_bb (entry_pred[0]);
7267 if (current_loops)
7268 add_bb_to_loop (bb, loop);
7269 for (i = 0; i < num_entry_edges; i++)
7270 {
7271 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7272 e->probability = entry_prob[i];
7273 }
7274
7275 for (i = 0; i < num_exit_edges; i++)
7276 {
7277 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7278 e->probability = exit_prob[i];
7279 }
7280
7281 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7282 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7283 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7284 dom_bbs.release ();
7285
7286 if (exit_bb)
7287 {
7288 free (exit_prob);
7289 free (exit_flag);
7290 free (exit_succ);
7291 }
7292 free (entry_prob);
7293 free (entry_flag);
7294 free (entry_pred);
7295 bbs.release ();
7296
7297 return bb;
7298 }
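/* A hedged usage sketch (simplified from the OpenMP expansion code,
   which is the main client): outlining a parallel region's body
   replaces it with a single block in the parent:

     basic_block stub_bb
       = move_sese_region_to_fn (child_cfun, region_entry_bb,
                                 region_exit_bb, region_block);

   Here CHILD_CFUN must not have a CFG yet, and REGION_BLOCK is the
   BLOCK whose statements and subblocks move to the child function;
   STUB_BB then carries the re-attached entry and exit edges.  */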
7299
7300
7301 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7302 dumpfile.h). */
7303
7304 void
7305 dump_function_to_file (tree fndecl, FILE *file, int flags)
7306 {
7307 tree arg, var, old_current_fndecl = current_function_decl;
7308 struct function *dsf;
7309 bool ignore_topmost_bind = false, any_var = false;
7310 basic_block bb;
7311 tree chain;
7312 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7313 && decl_is_tm_clone (fndecl));
7314 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7315
7316 current_function_decl = fndecl;
7317 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7318
7319 arg = DECL_ARGUMENTS (fndecl);
7320 while (arg)
7321 {
7322 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7323 fprintf (file, " ");
7324 print_generic_expr (file, arg, dump_flags);
7325 if (flags & TDF_VERBOSE)
7326 print_node (file, "", arg, 4);
7327 if (DECL_CHAIN (arg))
7328 fprintf (file, ", ");
7329 arg = DECL_CHAIN (arg);
7330 }
7331 fprintf (file, ")\n");
7332
7333 if (flags & TDF_VERBOSE)
7334 print_node (file, "", fndecl, 2);
7335
7336 dsf = DECL_STRUCT_FUNCTION (fndecl);
7337 if (dsf && (flags & TDF_EH))
7338 dump_eh_tree (file, dsf);
7339
7340 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7341 {
7342 dump_node (fndecl, TDF_SLIM | flags, file);
7343 current_function_decl = old_current_fndecl;
7344 return;
7345 }
7346
7347 /* When GIMPLE is lowered, the variables are no longer available in
7348 BIND_EXPRs, so display them separately. */
7349 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7350 {
7351 unsigned ix;
7352 ignore_topmost_bind = true;
7353
7354 fprintf (file, "{\n");
7355 if (!vec_safe_is_empty (fun->local_decls))
7356 FOR_EACH_LOCAL_DECL (fun, ix, var)
7357 {
7358 print_generic_decl (file, var, flags);
7359 if (flags & TDF_VERBOSE)
7360 print_node (file, "", var, 4);
7361 fprintf (file, "\n");
7362
7363 any_var = true;
7364 }
7365 if (gimple_in_ssa_p (cfun))
7366 for (ix = 1; ix < num_ssa_names; ++ix)
7367 {
7368 tree name = ssa_name (ix);
7369 if (name && !SSA_NAME_VAR (name))
7370 {
7371 fprintf (file, " ");
7372 print_generic_expr (file, TREE_TYPE (name), flags);
7373 fprintf (file, " ");
7374 print_generic_expr (file, name, flags);
7375 fprintf (file, ";\n");
7376
7377 any_var = true;
7378 }
7379 }
7380 }
7381
7382 if (fun && fun->decl == fndecl
7383 && fun->cfg
7384 && basic_block_info_for_fn (fun))
7385 {
7386 /* If the CFG has been built, emit a CFG-based dump. */
7387 if (!ignore_topmost_bind)
7388 fprintf (file, "{\n");
7389
7390 if (any_var && n_basic_blocks_for_fn (fun))
7391 fprintf (file, "\n");
7392
7393 FOR_EACH_BB_FN (bb, fun)
7394 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7395
7396 fprintf (file, "}\n");
7397 }
7398 else if (DECL_SAVED_TREE (fndecl) == NULL)
7399 {
7400 /* The function is now in GIMPLE form but the CFG has not been
7401 built yet. Emit the single sequence of GIMPLE statements
7402 that make up its body. */
7403 gimple_seq body = gimple_body (fndecl);
7404
7405 if (gimple_seq_first_stmt (body)
7406 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7407 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7408 print_gimple_seq (file, body, 0, flags);
7409 else
7410 {
7411 if (!ignore_topmost_bind)
7412 fprintf (file, "{\n");
7413
7414 if (any_var)
7415 fprintf (file, "\n");
7416
7417 print_gimple_seq (file, body, 2, flags);
7418 fprintf (file, "}\n");
7419 }
7420 }
7421 else
7422 {
7423 int indent;
7424
7425 /* Make a tree based dump. */
7426 chain = DECL_SAVED_TREE (fndecl);
7427 if (chain && TREE_CODE (chain) == BIND_EXPR)
7428 {
7429 if (ignore_topmost_bind)
7430 {
7431 chain = BIND_EXPR_BODY (chain);
7432 indent = 2;
7433 }
7434 else
7435 indent = 0;
7436 }
7437 else
7438 {
7439 if (!ignore_topmost_bind)
7440 {
7441 fprintf (file, "{\n");
7442 /* No topmost bind, pretend it's ignored for later. */
7443 ignore_topmost_bind = true;
7444 }
7445 indent = 2;
7446 }
7447
7448 if (any_var)
7449 fprintf (file, "\n");
7450
7451 print_generic_stmt_indented (file, chain, flags, indent);
7452 if (ignore_topmost_bind)
7453 fprintf (file, "}\n");
7454 }
7455
7456 if (flags & TDF_ENUMERATE_LOCALS)
7457 dump_enumerated_decls (file, flags);
7458 fprintf (file, "\n\n");
7459
7460 current_function_decl = old_current_fndecl;
7461 }
7462
7463 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7464
7465 DEBUG_FUNCTION void
7466 debug_function (tree fn, int flags)
7467 {
7468 dump_function_to_file (fn, stderr, flags);
7469 }
7470
7471
7472 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7473
7474 static void
7475 print_pred_bbs (FILE *file, basic_block bb)
7476 {
7477 edge e;
7478 edge_iterator ei;
7479
7480 FOR_EACH_EDGE (e, ei, bb->preds)
7481 fprintf (file, "bb_%d ", e->src->index);
7482 }
7483
7484
7485 /* Print on FILE the indexes for the successors of basic_block BB. */
7486
7487 static void
7488 print_succ_bbs (FILE *file, basic_block bb)
7489 {
7490 edge e;
7491 edge_iterator ei;
7492
7493 FOR_EACH_EDGE (e, ei, bb->succs)
7494 fprintf (file, "bb_%d ", e->dest->index);
7495 }
7496
7497 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
7498
7499 void
7500 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7501 {
7502 char *s_indent = (char *) alloca ((size_t) indent + 1);
7503 memset ((void *) s_indent, ' ', (size_t) indent);
7504 s_indent[indent] = '\0';
7505
7506 /* Print basic_block's header. */
7507 if (verbosity >= 2)
7508 {
7509 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7510 print_pred_bbs (file, bb);
7511 fprintf (file, "}, succs = {");
7512 print_succ_bbs (file, bb);
7513 fprintf (file, "})\n");
7514 }
7515
7516 /* Print basic_block's body. */
7517 if (verbosity >= 3)
7518 {
7519 fprintf (file, "%s {\n", s_indent);
7520 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7521 fprintf (file, "%s }\n", s_indent);
7522 }
7523 }
7524
7525 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7526
7527 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7528 VERBOSITY level, this outputs the contents of the loop, or just its
7529 structure. */
7530
7531 static void
7532 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7533 {
7534 char *s_indent;
7535 basic_block bb;
7536
7537 if (loop == NULL)
7538 return;
7539
7540 s_indent = (char *) alloca ((size_t) indent + 1);
7541 memset ((void *) s_indent, ' ', (size_t) indent);
7542 s_indent[indent] = '\0';
7543
7544 /* Print loop's header. */
7545 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7546 if (loop->header)
7547 fprintf (file, "header = %d", loop->header->index);
7548 else
7549 {
7550 fprintf (file, "deleted)\n");
7551 return;
7552 }
7553 if (loop->latch)
7554 fprintf (file, ", latch = %d", loop->latch->index);
7555 else
7556 fprintf (file, ", multiple latches");
7557 fprintf (file, ", niter = ");
7558 print_generic_expr (file, loop->nb_iterations, 0);
7559
7560 if (loop->any_upper_bound)
7561 {
7562 fprintf (file, ", upper_bound = ");
7563 print_decu (loop->nb_iterations_upper_bound, file);
7564 }
7565
7566 if (loop->any_estimate)
7567 {
7568 fprintf (file, ", estimate = ");
7569 print_decu (loop->nb_iterations_estimate, file);
7570 }
7571 fprintf (file, ")\n");
7572
7573 /* Print loop's body. */
7574 if (verbosity >= 1)
7575 {
7576 fprintf (file, "%s{\n", s_indent);
7577 FOR_EACH_BB_FN (bb, cfun)
7578 if (bb->loop_father == loop)
7579 print_loops_bb (file, bb, indent, verbosity);
7580
7581 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7582 fprintf (file, "%s}\n", s_indent);
7583 }
7584 }
7585
7586 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7587 spaces. Depending on the VERBOSITY level, this outputs the contents
7588 of the loops, or just their structure. */
7589
7590 static void
7591 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7592 int verbosity)
7593 {
7594 if (loop == NULL)
7595 return;
7596
7597 print_loop (file, loop, indent, verbosity);
7598 print_loop_and_siblings (file, loop->next, indent, verbosity);
7599 }
7600
7601 /* Follow a CFG edge from the entry point of the program, and on entry
7602 of a loop, pretty print the loop structure on FILE. */
7603
7604 void
7605 print_loops (FILE *file, int verbosity)
7606 {
7607 basic_block bb;
7608
7609 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7610 if (bb && bb->loop_father)
7611 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7612 }
7613
7614 /* Dump a loop. */
7615
7616 DEBUG_FUNCTION void
7617 debug (struct loop &ref)
7618 {
7619 print_loop (stderr, &ref, 0, /*verbosity*/0);
7620 }
7621
7622 DEBUG_FUNCTION void
7623 debug (struct loop *ptr)
7624 {
7625 if (ptr)
7626 debug (*ptr);
7627 else
7628 fprintf (stderr, "<nil>\n");
7629 }
7630
7631 /* Dump a loop verbosely. */
7632
7633 DEBUG_FUNCTION void
7634 debug_verbose (struct loop &ref)
7635 {
7636 print_loop (stderr, &ref, 0, /*verbosity*/3);
7637 }
7638
7639 DEBUG_FUNCTION void
7640 debug_verbose (struct loop *ptr)
7641 {
7642 if (ptr)
7643 debug_verbose (*ptr);
7644 else
7645 fprintf (stderr, "<nil>\n");
7646 }
7647
7648
7649 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7650
7651 DEBUG_FUNCTION void
7652 debug_loops (int verbosity)
7653 {
7654 print_loops (stderr, verbosity);
7655 }
7656
7657 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7658
7659 DEBUG_FUNCTION void
7660 debug_loop (struct loop *loop, int verbosity)
7661 {
7662 print_loop (stderr, loop, 0, verbosity);
7663 }
7664
7665 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7666 level. */
7667
7668 DEBUG_FUNCTION void
7669 debug_loop_num (unsigned num, int verbosity)
7670 {
7671 debug_loop (get_loop (cfun, num), verbosity);
7672 }
7673
7674 /* Return true if BB ends with a call, possibly followed by some
7675 instructions that must stay with the call. Return false
7676 otherwise. */
7677
7678 static bool
7679 gimple_block_ends_with_call_p (basic_block bb)
7680 {
7681 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7682 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7683 }
7684
7685
7686 /* Return true if BB ends with a conditional branch. Return false
7687 otherwise. */
7688
7689 static bool
7690 gimple_block_ends_with_condjump_p (const_basic_block bb)
7691 {
7692 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7693 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7694 }
7695
7696
7697 /* Return true if we need to add fake edge to exit at statement T.
7698 Helper function for gimple_flow_call_edges_add. */
7699
7700 static bool
7701 need_fake_edge_p (gimple t)
7702 {
7703 tree fndecl = NULL_TREE;
7704 int call_flags = 0;
7705
7706 /* NORETURN and LONGJMP calls already have an edge to exit.
7707 CONST and PURE calls do not need one.
7708 We don't currently check for CONST and PURE here, although
7709 it would be a good idea, because those attributes are
7710 figured out from the RTL in mark_constant_function, and
7711 the counter incrementation code from -fprofile-arcs
7712 leads to different results from -fbranch-probabilities. */
7713 if (is_gimple_call (t))
7714 {
7715 fndecl = gimple_call_fndecl (t);
7716 call_flags = gimple_call_flags (t);
7717 }
7718
7719 if (is_gimple_call (t)
7720 && fndecl
7721 && DECL_BUILT_IN (fndecl)
7722 && (call_flags & ECF_NOTHROW)
7723 && !(call_flags & ECF_RETURNS_TWICE)
7724 /* fork() doesn't really return twice, but wrapping it in
7725 __gcov_fork(), which calls __gcov_flush() and clears the
7726 counters before forking, has the same effect as returning
7727 twice. Force a fake edge. */
7728 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7729 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7730 return false;
7731
7732 if (is_gimple_call (t))
7733 {
7734 edge_iterator ei;
7735 edge e;
7736 basic_block bb;
7737
7738 if (!(call_flags & ECF_NORETURN))
7739 return true;
7740
7741 bb = gimple_bb (t);
7742 FOR_EACH_EDGE (e, ei, bb->succs)
7743 if ((e->flags & EDGE_FAKE) == 0)
7744 return true;
7745 }
7746
7747 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7748 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7749 return true;
7750
7751 return false;
7752 }
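/* For example (illustrative): in a block containing

     foo ();
     x = y + 1;

   the call to foo may never return normally (it may call exit or
   longjmp), so reaching the block does not imply the assignment runs;
   unless foo matches the nothrow built-in exception tested above,
   need_fake_edge_p returns true and a fake edge to EXIT is added.  */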
7753
7754
7755 /* Add fake edges to the function exit for any non-constant and
7756 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7757 for volatile inline assembly, in the bitmap of blocks specified by
7758 BLOCKS or in the whole CFG if BLOCKS is zero. Return the number of blocks
7759 that were split.
7760
7761 The goal is to expose cases in which entering a basic block does
7762 not imply that all subsequent instructions must be executed. */
7763
7764 static int
7765 gimple_flow_call_edges_add (sbitmap blocks)
7766 {
7767 int i;
7768 int blocks_split = 0;
7769 int last_bb = last_basic_block_for_fn (cfun);
7770 bool check_last_block = false;
7771
7772 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7773 return 0;
7774
7775 if (! blocks)
7776 check_last_block = true;
7777 else
7778 check_last_block = bitmap_bit_p (blocks,
7779 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7780
7781 /* In the last basic block, before epilogue generation, there will be
7782 a fallthru edge to EXIT. Special care is required if the last stmt
7783 of the last basic block is a call because make_edge folds duplicate
7784 edges, which would result in the fallthru edge also being marked
7785 fake, which would result in the fallthru edge being removed by
7786 remove_fake_edges, which would result in an invalid CFG.
7787
7788 Moreover, we can't elide the outgoing fake edge, since the block
7789 profiler needs to take this into account in order to solve the minimal
7790 spanning tree in the case that the call doesn't return.
7791
7792 Handle this by adding a dummy instruction in a new last basic block. */
7793 if (check_last_block)
7794 {
7795 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7796 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7797 gimple t = NULL;
7798
7799 if (!gsi_end_p (gsi))
7800 t = gsi_stmt (gsi);
7801
7802 if (t && need_fake_edge_p (t))
7803 {
7804 edge e;
7805
7806 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7807 if (e)
7808 {
7809 gsi_insert_on_edge (e, gimple_build_nop ());
7810 gsi_commit_edge_inserts ();
7811 }
7812 }
7813 }
7814
7815 /* Now add fake edges to the function exit for any non-constant
7816 calls since there is no way that we can determine if they will
7817 return or not... */
7818 for (i = 0; i < last_bb; i++)
7819 {
7820 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7821 gimple_stmt_iterator gsi;
7822 gimple stmt, last_stmt;
7823
7824 if (!bb)
7825 continue;
7826
7827 if (blocks && !bitmap_bit_p (blocks, i))
7828 continue;
7829
7830 gsi = gsi_last_nondebug_bb (bb);
7831 if (!gsi_end_p (gsi))
7832 {
7833 last_stmt = gsi_stmt (gsi);
7834 do
7835 {
7836 stmt = gsi_stmt (gsi);
7837 if (need_fake_edge_p (stmt))
7838 {
7839 edge e;
7840
7841 /* The handling above of the final block before the
7842 epilogue should be enough to verify that there is
7843 no edge to the exit block in CFG already.
7844 Calling make_edge in such case would cause us to
7845 mark that edge as fake and remove it later. */
7846 #ifdef ENABLE_CHECKING
7847 if (stmt == last_stmt)
7848 {
7849 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7850 gcc_assert (e == NULL);
7851 }
7852 #endif
7853
7854 /* Note that the following may create a new basic block
7855 and renumber the existing basic blocks. */
7856 if (stmt != last_stmt)
7857 {
7858 e = split_block (bb, stmt);
7859 if (e)
7860 blocks_split++;
7861 }
7862 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7863 }
7864 gsi_prev (&gsi);
7865 }
7866 while (!gsi_end_p (gsi));
7867 }
7868 }
7869
7870 if (blocks_split)
7871 verify_flow_info ();
7872
7873 return blocks_split;
7874 }
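/* A hedged sketch of the intended use (e.g. by edge profiling):

     int nsplit = gimple_flow_call_edges_add (NULL);   /* whole CFG */

   adds EDGE_FAKE edges to EXIT after every statement for which
   need_fake_edge_p holds, splitting blocks as needed; the fake edges
   are removed again with remove_fake_edges once the spanning tree has
   been computed.  */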
7875
7876 /* Removes edge E and all the blocks dominated by it, and updates dominance
7877 information. The IL in E->src needs to be updated separately.
7878 If dominance info is not available, only the edge E is removed. */
7879
7880 void
7881 remove_edge_and_dominated_blocks (edge e)
7882 {
7883 vec<basic_block> bbs_to_remove = vNULL;
7884 vec<basic_block> bbs_to_fix_dom = vNULL;
7885 bitmap df, df_idom;
7886 edge f;
7887 edge_iterator ei;
7888 bool none_removed = false;
7889 unsigned i;
7890 basic_block bb, dbb;
7891 bitmap_iterator bi;
7892
7893 /* Removing a path inside a non-root loop may change the loop
7894 ownership of blocks or remove loops entirely. Mark loops for fixup. */
7895 if (current_loops
7896 && loop_outer (e->src->loop_father) != NULL
7897 && e->src->loop_father == e->dest->loop_father)
7898 loops_state_set (LOOPS_NEED_FIXUP);
7899
7900 if (!dom_info_available_p (CDI_DOMINATORS))
7901 {
7902 remove_edge (e);
7903 return;
7904 }
7905
7906 /* No updating is needed for edges to exit. */
7907 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7908 {
7909 if (cfgcleanup_altered_bbs)
7910 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7911 remove_edge (e);
7912 return;
7913 }
7914
7915 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7916 that is not dominated by E->dest, then this set is empty. Otherwise,
7917 all the basic blocks dominated by E->dest are removed.
7918
7919 Also, to DF_IDOM we store the immediate dominators of the blocks in
7920 the dominance frontier of E (i.e., of the successors of the
7921 removed blocks, if there are any, and of E->dest otherwise). */
7922 FOR_EACH_EDGE (f, ei, e->dest->preds)
7923 {
7924 if (f == e)
7925 continue;
7926
7927 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7928 {
7929 none_removed = true;
7930 break;
7931 }
7932 }
7933
7934 df = BITMAP_ALLOC (NULL);
7935 df_idom = BITMAP_ALLOC (NULL);
7936
7937 if (none_removed)
7938 bitmap_set_bit (df_idom,
7939 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7940 else
7941 {
7942 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7943 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7944 {
7945 FOR_EACH_EDGE (f, ei, bb->succs)
7946 {
7947 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7948 bitmap_set_bit (df, f->dest->index);
7949 }
7950 }
7951 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7952 bitmap_clear_bit (df, bb->index);
7953
7954 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7955 {
7956 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7957 bitmap_set_bit (df_idom,
7958 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7959 }
7960 }
7961
7962 if (cfgcleanup_altered_bbs)
7963 {
7964 /* Record the set of the altered basic blocks. */
7965 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7966 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7967 }
7968
7969 /* Remove E and the cancelled blocks. */
7970 if (none_removed)
7971 remove_edge (e);
7972 else
7973 {
7974 /* Walk backwards so as to get a chance to substitute all
7975 released DEFs into debug stmts. See
7976 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7977 details. */
7978 for (i = bbs_to_remove.length (); i-- > 0; )
7979 delete_basic_block (bbs_to_remove[i]);
7980 }
7981
7982 /* Update the dominance information. The immediate dominator may change only
7983 for blocks whose immediate dominator belongs to DF_IDOM:
7984
7985 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7986 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7987 Z dominates X after the removal. Before removal, there exists a path P
7988 from Y to X that avoids Z. Let F be the last edge on P that is
7989 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7990 dominates W, and because of P, Z does not dominate W), and W belongs to
7991 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7992 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7993 {
7994 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7995 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7996 dbb;
7997 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7998 bbs_to_fix_dom.safe_push (dbb);
7999 }
8000
8001 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8002
8003 BITMAP_FREE (df);
8004 BITMAP_FREE (df_idom);
8005 bbs_to_remove.release ();
8006 bbs_to_fix_dom.release ();
8007 }
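
/* Hedged usage sketch (not part of the original file, compiled out): a
   caller that drops the second successor of BB together with everything
   that edge exclusively dominates.  The helper name is hypothetical; the
   calls are the ones defined above.  */
#if 0
static void
remove_second_successor_sketch (basic_block bb)
{
  if (EDGE_COUNT (bb->succs) == 2)
    remove_edge_and_dominated_blocks (EDGE_SUCC (bb, 1));
}
#endif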
8008
8009 /* Purge dead EH edges from basic block BB. */
8010
8011 bool
8012 gimple_purge_dead_eh_edges (basic_block bb)
8013 {
8014 bool changed = false;
8015 edge e;
8016 edge_iterator ei;
8017 gimple stmt = last_stmt (bb);
8018
8019 if (stmt && stmt_can_throw_internal (stmt))
8020 return false;
8021
8022 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8023 {
8024 if (e->flags & EDGE_EH)
8025 {
8026 remove_edge_and_dominated_blocks (e);
8027 changed = true;
8028 }
8029 else
8030 ei_next (&ei);
8031 }
8032
8033 return changed;
8034 }
8035
8036 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8037
8038 bool
8039 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8040 {
8041 bool changed = false;
8042 unsigned i;
8043 bitmap_iterator bi;
8044
8045 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8046 {
8047 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8048
8049 /* Earlier gimple_purge_dead_eh_edges could have removed
8050 this basic block already. */
8051 gcc_assert (bb || changed);
8052 if (bb != NULL)
8053 changed |= gimple_purge_dead_eh_edges (bb);
8054 }
8055
8056 return changed;
8057 }
8058
8059 /* Purge dead abnormal call edges from basic block BB. */
8060
8061 bool
8062 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8063 {
8064 bool changed = false;
8065 edge e;
8066 edge_iterator ei;
8067 gimple stmt = last_stmt (bb);
8068
8069 if (!cfun->has_nonlocal_label
8070 && !cfun->calls_setjmp)
8071 return false;
8072
8073 if (stmt && stmt_can_make_abnormal_goto (stmt))
8074 return false;
8075
8076 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8077 {
8078 if (e->flags & EDGE_ABNORMAL)
8079 {
8080 if (e->flags & EDGE_FALLTHRU)
8081 e->flags &= ~EDGE_ABNORMAL;
8082 else
8083 remove_edge_and_dominated_blocks (e);
8084 changed = true;
8085 }
8086 else
8087 ei_next (&ei);
8088 }
8089
8090 return changed;
8091 }
8092
8093 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8094
8095 bool
8096 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8097 {
8098 bool changed = false;
8099 unsigned i;
8100 bitmap_iterator bi;
8101
8102 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8103 {
8104 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8105
8106 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8107 this basic block already. */
8108 gcc_assert (bb || changed);
8109 if (bb != NULL)
8110 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8111 }
8112
8113 return changed;
8114 }
8115
8116 /* This function is called whenever a new edge is created or
8117 redirected. */
8118
8119 static void
8120 gimple_execute_on_growing_pred (edge e)
8121 {
8122 basic_block bb = e->dest;
8123
8124 if (!gimple_seq_empty_p (phi_nodes (bb)))
8125 reserve_phi_args_for_new_edge (bb);
8126 }
8127
8128 /* This function is called immediately before edge E is removed from
8129 the edge vector E->dest->preds. */
8130
8131 static void
8132 gimple_execute_on_shrinking_pred (edge e)
8133 {
8134 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8135 remove_phi_args (e);
8136 }
8137
8138 /*---------------------------------------------------------------------------
8139 Helper functions for Loop versioning
8140 ---------------------------------------------------------------------------*/
8141
8142 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block
8143 is a copy of 'first'. Both of them are dominated by the 'new_head' basic
8144 block. When 'new_head' was created by splitting 'second's incoming edge,
8145 split_edge() placed phi arguments on the new edge. Later, an additional
8146 edge 'e' was created to connect 'new_head' and 'first'. Now this routine
8147 adds, on this additional edge 'e', the same phi args that the edge from
8148 new_head to second received as part of the edge splitting. */
8149
8150 static void
8151 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8152 basic_block new_head, edge e)
8153 {
8154 gphi *phi1, *phi2;
8155 gphi_iterator psi1, psi2;
8156 tree def;
8157 edge e2 = find_edge (new_head, second);
8158
8159 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8160 edge, we should always have an edge from NEW_HEAD to SECOND. */
8161 gcc_assert (e2 != NULL);
8162
8163 /* Walk all phi nodes of the 'second' basic block and add phi args to
8164 edge 'e' for the 'first' head. PHI args are always in the correct order. */
8165
8166 for (psi2 = gsi_start_phis (second),
8167 psi1 = gsi_start_phis (first);
8168 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8169 gsi_next (&psi2), gsi_next (&psi1))
8170 {
8171 phi1 = psi1.phi ();
8172 phi2 = psi2.phi ();
8173 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8174 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8175 }
8176 }
8177
8178
8179 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8180 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8181 the destination of the ELSE part. */
8182
8183 static void
8184 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8185 basic_block second_head ATTRIBUTE_UNUSED,
8186 basic_block cond_bb, void *cond_e)
8187 {
8188 gimple_stmt_iterator gsi;
8189 gimple new_cond_expr;
8190 tree cond_expr = (tree) cond_e;
8191 edge e0;
8192
8193 /* Build the new conditional expression. */
8194 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8195 NULL_TREE, NULL_TREE);
8196
8197 /* Add new cond in cond_bb. */
8198 gsi = gsi_last_bb (cond_bb);
8199 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8200
8201 /* Adjust edges appropriately to connect new head with first head
8202 as well as second head. */
8203 e0 = single_succ_edge (cond_bb);
8204 e0->flags &= ~EDGE_FALLTHRU;
8205 e0->flags |= EDGE_FALSE_VALUE;
8206 }
8207
8208
8209 /* Do book-keeping of basic block BB for the profile consistency checker.
8210 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8211 do post-pass accounting. Store the results in RECORD. */
8212 static void
8213 gimple_account_profile_record (basic_block bb, int after_pass,
8214 struct profile_record *record)
8215 {
8216 gimple_stmt_iterator i;
8217 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8218 {
8219 record->size[after_pass]
8220 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8221 if (profile_status_for_fn (cfun) == PROFILE_READ)
8222 record->time[after_pass]
8223 += estimate_num_insns (gsi_stmt (i),
8224 &eni_time_weights) * bb->count;
8225 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8226 record->time[after_pass]
8227 += estimate_num_insns (gsi_stmt (i),
8228 &eni_time_weights) * bb->frequency;
8229 }
8230 }
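
/* Hedged sketch (not part of the original file, compiled out): the intended
   before/after calling pattern.  A snapshot is recorded with AFTER_PASS == 0,
   the pass runs, then a second snapshot is recorded with AFTER_PASS == 1 so
   size[0]/time[0] can be compared against size[1]/time[1].  The driver
   function name is hypothetical.  */
#if 0
static void
check_pass_profile_sketch (basic_block bb, struct profile_record *record)
{
  gimple_account_profile_record (bb, 0, record);	/* Pre-pass.  */
  /* ... run the transformation on BB ... */
  gimple_account_profile_record (bb, 1, record);	/* Post-pass.  */
}
#endif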
8231
8232 struct cfg_hooks gimple_cfg_hooks = {
8233 "gimple",
8234 gimple_verify_flow_info,
8235 gimple_dump_bb, /* dump_bb */
8236 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8237 create_bb, /* create_basic_block */
8238 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8239 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8240 gimple_can_remove_branch_p, /* can_remove_branch_p */
8241 remove_bb, /* delete_basic_block */
8242 gimple_split_block, /* split_block */
8243 gimple_move_block_after, /* move_block_after */
8244 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8245 gimple_merge_blocks, /* merge_blocks */
8246 gimple_predict_edge, /* predict_edge */
8247 gimple_predicted_by_p, /* predicted_by_p */
8248 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8249 gimple_duplicate_bb, /* duplicate_block */
8250 gimple_split_edge, /* split_edge */
8251 gimple_make_forwarder_block, /* make_forwarder_block */
8252 NULL, /* tidy_fallthru_edge */
8253 NULL, /* force_nonfallthru */
8254 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8255 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8256 gimple_flow_call_edges_add, /* flow_call_edges_add */
8257 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8258 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8259 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8260 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8261 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8262 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8263 flush_pending_stmts, /* flush_pending_stmts */
8264 gimple_empty_block_p, /* block_empty_p */
8265 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8266 gimple_account_profile_record,
8267 };
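
/* Hedged sketch (not part of the original file, compiled out): generic CFG
   code dispatches through the active hooks table, so once
   gimple_register_cfg_hooks () has installed gimple_cfg_hooks, a call to the
   IR-independent split_edge () lands in gimple_split_edge above.  This is a
   simplification of the wrapper in cfghooks.c.  */
#if 0
basic_block
split_edge_dispatch_sketch (edge e)
{
  return cfg_hooks->split_edge (e);	/* == gimple_split_edge here.  */
}
#endif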
8268
8269
8270 /* Split all critical edges. */
8271
8272 unsigned int
8273 split_critical_edges (void)
8274 {
8275 basic_block bb;
8276 edge e;
8277 edge_iterator ei;
8278
8279 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8280 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8281 mappings around the calls to split_edge. */
8282 start_recording_case_labels ();
8283 FOR_ALL_BB_FN (bb, cfun)
8284 {
8285 FOR_EACH_EDGE (e, ei, bb->succs)
8286 {
8287 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8288 split_edge (e);
8289 /* PRE inserts statements on edges and expects that,
8290 since split_critical_edges was done beforehand, committing edge
8291 insertions will not split more edges. In addition to critical
8292 edges we must split edges out of blocks that have multiple
8293 successors and end with control flow statements, such as RESX.
8294 Go ahead and split them too. This matches the logic in
8295 gimple_find_edge_insert_loc. */
8296 else if ((!single_pred_p (e->dest)
8297 || !gimple_seq_empty_p (phi_nodes (e->dest))
8298 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8299 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8300 && !(e->flags & EDGE_ABNORMAL))
8301 {
8302 gimple_stmt_iterator gsi;
8303
8304 gsi = gsi_last_bb (e->src);
8305 if (!gsi_end_p (gsi)
8306 && stmt_ends_bb_p (gsi_stmt (gsi))
8307 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8308 && !gimple_call_builtin_p (gsi_stmt (gsi),
8309 BUILT_IN_RETURN)))
8310 split_edge (e);
8311 }
8312 }
8313 }
8314 end_recording_case_labels ();
8315 return 0;
8316 }
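
/* Hedged sketch (not part of the original file, compiled out):
   EDGE_CRITICAL_P reduces to the test below -- an edge is critical when its
   source has several successors and its destination several predecessors,
   so code can be inserted on it only after split_edge gives it a block of
   its own.  */
#if 0
static bool
edge_is_critical_sketch (edge e)
{
  return EDGE_COUNT (e->src->succs) >= 2 && EDGE_COUNT (e->dest->preds) >= 2;
}
#endif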
8317
8318 namespace {
8319
8320 const pass_data pass_data_split_crit_edges =
8321 {
8322 GIMPLE_PASS, /* type */
8323 "crited", /* name */
8324 OPTGROUP_NONE, /* optinfo_flags */
8325 TV_TREE_SPLIT_EDGES, /* tv_id */
8326 PROP_cfg, /* properties_required */
8327 PROP_no_crit_edges, /* properties_provided */
8328 0, /* properties_destroyed */
8329 0, /* todo_flags_start */
8330 0, /* todo_flags_finish */
8331 };
8332
8333 class pass_split_crit_edges : public gimple_opt_pass
8334 {
8335 public:
8336 pass_split_crit_edges (gcc::context *ctxt)
8337 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8338 {}
8339
8340 /* opt_pass methods: */
8341 virtual unsigned int execute (function *) { return split_critical_edges (); }
8342
8343 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8344 }; // class pass_split_crit_edges
8345
8346 } // anon namespace
8347
8348 gimple_opt_pass *
8349 make_pass_split_crit_edges (gcc::context *ctxt)
8350 {
8351 return new pass_split_crit_edges (ctxt);
8352 }
8353
8354
8355 /* Insert COND, a GIMPLE_COND expression, after STMT
8356 in basic block BB, splitting the block as appropriate
8357 and creating a new conditionally executed basic block.
8358 Return the created basic block. */
8359 basic_block
8360 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8361 {
8362 edge fall = split_block (bb, stmt);
8363 gimple_stmt_iterator iter = gsi_last_bb (bb);
8364 basic_block new_bb;
8365
8366 /* Insert cond statement. */
8367 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8368 if (gsi_end_p (iter))
8369 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8370 else
8371 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8372
8373 /* Create conditionally executed block. */
8374 new_bb = create_empty_bb (bb);
8375 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8376 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8377
8378 /* Fix edge for split bb. */
8379 fall->flags = EDGE_FALSE_VALUE;
8380
8381 /* Update dominance info. */
8382 if (dom_info_available_p (CDI_DOMINATORS))
8383 {
8384 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8385 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8386 }
8387
8388 /* Update loop info. */
8389 if (current_loops)
8390 add_bb_to_loop (new_bb, bb->loop_father);
8391
8392 return new_bb;
8393 }
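
/* Hedged usage sketch (not part of the original file, compiled out):
   guarding the code after STMT with a flag test via insert_cond_bb.  FLAG is
   a hypothetical boolean-typed SSA name; the returned block runs only when
   FLAG is true.  */
#if 0
static basic_block
guard_after_stmt_sketch (basic_block bb, gimple stmt, tree flag)
{
  gimple cond = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
				   NULL_TREE, NULL_TREE);
  return insert_cond_bb (bb, stmt, cond);
}
#endif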
8394
8395 /* Build a ternary operation and gimplify it. Emit code before GSI.
8396 Return the gimple_val holding the result. */
8397
8398 tree
8399 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8400 tree type, tree a, tree b, tree c)
8401 {
8402 tree ret;
8403 location_t loc = gimple_location (gsi_stmt (*gsi));
8404
8405 ret = fold_build3_loc (loc, code, type, a, b, c);
8406 STRIP_NOPS (ret);
8407
8408 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8409 GSI_SAME_STMT);
8410 }
8411
8412 /* Build a binary operation and gimplify it. Emit code before GSI.
8413 Return the gimple_val holding the result. */
8414
8415 tree
8416 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8417 tree type, tree a, tree b)
8418 {
8419 tree ret;
8420
8421 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8422 STRIP_NOPS (ret);
8423
8424 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8425 GSI_SAME_STMT);
8426 }
8427
8428 /* Build a unary operation and gimplify it. Emit code before GSI.
8429 Return the gimple_val holding the result. */
8430
8431 tree
8432 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8433 tree a)
8434 {
8435 tree ret;
8436
8437 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8438 STRIP_NOPS (ret);
8439
8440 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8441 GSI_SAME_STMT);
8442 }
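
/* Hedged usage sketch (not part of the original file, compiled out):
   composing the helpers above to emit code computing (A + B) * C before the
   statement at *GSI; each call returns a gimple_val naming an intermediate
   result.  */
#if 0
static tree
build_sum_times_c_sketch (gimple_stmt_iterator *gsi, tree type,
			  tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
}
#endif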
8443
8444
8445 \f
8446 /* Given a basic block B which ends with a conditional and has
8447 precisely two successors, determine which of the edges is taken if
8448 the conditional is true and which is taken if the conditional is
8449 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8450
8451 void
8452 extract_true_false_edges_from_block (basic_block b,
8453 edge *true_edge,
8454 edge *false_edge)
8455 {
8456 edge e = EDGE_SUCC (b, 0);
8457
8458 if (e->flags & EDGE_TRUE_VALUE)
8459 {
8460 *true_edge = e;
8461 *false_edge = EDGE_SUCC (b, 1);
8462 }
8463 else
8464 {
8465 *false_edge = e;
8466 *true_edge = EDGE_SUCC (b, 1);
8467 }
8468 }
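
/* Hedged usage sketch (not part of the original file, compiled out): a
   typical consumer that only needs the destination reached when the
   condition in COND_BB evaluates to true.  */
#if 0
static basic_block
then_destination_sketch (basic_block cond_bb)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return true_edge->dest;
}
#endif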
8469
8470 /* Emit return warnings. */
8471
8472 namespace {
8473
8474 const pass_data pass_data_warn_function_return =
8475 {
8476 GIMPLE_PASS, /* type */
8477 "*warn_function_return", /* name */
8478 OPTGROUP_NONE, /* optinfo_flags */
8479 TV_NONE, /* tv_id */
8480 PROP_cfg, /* properties_required */
8481 0, /* properties_provided */
8482 0, /* properties_destroyed */
8483 0, /* todo_flags_start */
8484 0, /* todo_flags_finish */
8485 };
8486
8487 class pass_warn_function_return : public gimple_opt_pass
8488 {
8489 public:
8490 pass_warn_function_return (gcc::context *ctxt)
8491 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8492 {}
8493
8494 /* opt_pass methods: */
8495 virtual unsigned int execute (function *);
8496
8497 }; // class pass_warn_function_return
8498
8499 unsigned int
8500 pass_warn_function_return::execute (function *fun)
8501 {
8502 source_location location;
8503 gimple last;
8504 edge e;
8505 edge_iterator ei;
8506
8507 if (!targetm.warn_func_return (fun->decl))
8508 return 0;
8509
8510 /* If we have a path to EXIT, then we do return. */
8511 if (TREE_THIS_VOLATILE (fun->decl)
8512 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8513 {
8514 location = UNKNOWN_LOCATION;
8515 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8516 {
8517 last = last_stmt (e->src);
8518 if ((gimple_code (last) == GIMPLE_RETURN
8519 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8520 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8521 break;
8522 }
8523 if (location == UNKNOWN_LOCATION)
8524 location = cfun->function_end_locus;
8525 warning_at (location, 0, "%<noreturn%> function does return");
8526 }
8527
8528 /* If we see "return;" in some basic block, then we do reach the end
8529 without returning a value. */
8530 else if (warn_return_type
8531 && !TREE_NO_WARNING (fun->decl)
8532 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8533 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8534 {
8535 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8536 {
8537 gimple last = last_stmt (e->src);
8538 greturn *return_stmt = dyn_cast <greturn *> (last);
8539 if (return_stmt
8540 && gimple_return_retval (return_stmt) == NULL
8541 && !gimple_no_warning_p (last))
8542 {
8543 location = gimple_location (last);
8544 if (location == UNKNOWN_LOCATION)
8545 location = fun->function_end_locus;
8546 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8547 TREE_NO_WARNING (fun->decl) = 1;
8548 break;
8549 }
8550 }
8551 }
8552 return 0;
8553 }
8554
8555 } // anon namespace
8556
8557 gimple_opt_pass *
8558 make_pass_warn_function_return (gcc::context *ctxt)
8559 {
8560 return new pass_warn_function_return (ctxt);
8561 }
8562
8563 /* Walk a gimplified function and warn about calls whose return value is
8564 ignored when the callee's attribute((warn_unused_result)) is set. This is
8565 done before inlining, so we don't have to worry about that. */
8566
8567 static void
8568 do_warn_unused_result (gimple_seq seq)
8569 {
8570 tree fdecl, ftype;
8571 gimple_stmt_iterator i;
8572
8573 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8574 {
8575 gimple g = gsi_stmt (i);
8576
8577 switch (gimple_code (g))
8578 {
8579 case GIMPLE_BIND:
8580 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8581 break;
8582 case GIMPLE_TRY:
8583 do_warn_unused_result (gimple_try_eval (g));
8584 do_warn_unused_result (gimple_try_cleanup (g));
8585 break;
8586 case GIMPLE_CATCH:
8587 do_warn_unused_result (gimple_catch_handler (
8588 as_a <gcatch *> (g)));
8589 break;
8590 case GIMPLE_EH_FILTER:
8591 do_warn_unused_result (gimple_eh_filter_failure (g));
8592 break;
8593
8594 case GIMPLE_CALL:
8595 if (gimple_call_lhs (g))
8596 break;
8597 if (gimple_call_internal_p (g))
8598 break;
8599
8600 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8601 LHS. All calls whose value is ignored should be
8602 represented like this. Look for the attribute. */
8603 fdecl = gimple_call_fndecl (g);
8604 ftype = gimple_call_fntype (g);
8605
8606 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8607 {
8608 location_t loc = gimple_location (g);
8609
8610 if (fdecl)
8611 warning_at (loc, OPT_Wunused_result,
8612 "ignoring return value of %qD, "
8613 "declared with attribute warn_unused_result",
8614 fdecl);
8615 else
8616 warning_at (loc, OPT_Wunused_result,
8617 "ignoring return value of function "
8618 "declared with attribute warn_unused_result");
8619 }
8620 break;
8621
8622 default:
8623 /* Not a container, not a call, or a call whose value is used. */
8624 break;
8625 }
8626 }
8627 }
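
/* Hedged illustration (not part of the original file, compiled out): user
   code the walk above diagnoses.  The first call is a GIMPLE_CALL without an
   LHS, so the warn_unused_result attribute on the callee triggers the
   warning; the second call's value is used, so it does not.  */
#if 0
__attribute__ ((warn_unused_result)) int must_check (void);

void
user_code_sketch (void)
{
  must_check ();			/* Warned: return value ignored.  */
  int ok = must_check ();		/* Not warned: value is used.  */
  (void) ok;
}
#endif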
8628
8629 namespace {
8630
8631 const pass_data pass_data_warn_unused_result =
8632 {
8633 GIMPLE_PASS, /* type */
8634 "*warn_unused_result", /* name */
8635 OPTGROUP_NONE, /* optinfo_flags */
8636 TV_NONE, /* tv_id */
8637 PROP_gimple_any, /* properties_required */
8638 0, /* properties_provided */
8639 0, /* properties_destroyed */
8640 0, /* todo_flags_start */
8641 0, /* todo_flags_finish */
8642 };
8643
8644 class pass_warn_unused_result : public gimple_opt_pass
8645 {
8646 public:
8647 pass_warn_unused_result (gcc::context *ctxt)
8648 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8649 {}
8650
8651 /* opt_pass methods: */
8652 virtual bool gate (function *) { return flag_warn_unused_result; }
8653 virtual unsigned int execute (function *)
8654 {
8655 do_warn_unused_result (gimple_body (current_function_decl));
8656 return 0;
8657 }
8658
8659 }; // class pass_warn_unused_result
8660
8661 } // anon namespace
8662
8663 gimple_opt_pass *
8664 make_pass_warn_unused_result (gcc::context *ctxt)
8665 {
8666 return new pass_warn_unused_result (ctxt);
8667 }
8668
8669 /* IPA passes, compilation of earlier functions or inlining
8670 might have changed some properties, such as marking functions nothrow,
8671 pure, const or noreturn.
8672 Remove redundant edges and basic blocks, and create new ones if necessary.
8673
8674 This pass can't be executed as a standalone pass from the pass manager,
8675 because between inlining and this fixup verify_flow_info would fail. */
8676
8677 unsigned int
8678 execute_fixup_cfg (void)
8679 {
8680 basic_block bb;
8681 gimple_stmt_iterator gsi;
8682 int todo = 0;
8683 gcov_type count_scale;
8684 edge e;
8685 edge_iterator ei;
8686
8687 count_scale
8688 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8689 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8690
8691 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8692 cgraph_node::get (current_function_decl)->count;
8693 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8694 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8695 count_scale);
8696
8697 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8698 e->count = apply_scale (e->count, count_scale);
8699
8700 FOR_EACH_BB_FN (bb, cfun)
8701 {
8702 bb->count = apply_scale (bb->count, count_scale);
8703 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8704 {
8705 gimple stmt = gsi_stmt (gsi);
8706 tree decl = is_gimple_call (stmt)
8707 ? gimple_call_fndecl (stmt)
8708 : NULL;
8709 if (decl)
8710 {
8711 int flags = gimple_call_flags (stmt);
8712 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8713 {
8714 if (gimple_purge_dead_abnormal_call_edges (bb))
8715 todo |= TODO_cleanup_cfg;
8716
8717 if (gimple_in_ssa_p (cfun))
8718 {
8719 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8720 update_stmt (stmt);
8721 }
8722 }
8723
8724 if (flags & ECF_NORETURN
8725 && fixup_noreturn_call (stmt))
8726 todo |= TODO_cleanup_cfg;
8727 }
8728
8729 /* Remove stores to variables we marked write-only.
8730 Keep the access when the store has side effects, i.e. when the
8731 source is volatile. */
8732 if (gimple_store_p (stmt)
8733 && !gimple_has_side_effects (stmt))
8734 {
8735 tree lhs = get_base_address (gimple_get_lhs (stmt));
8736
8737 if (TREE_CODE (lhs) == VAR_DECL
8738 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8739 && varpool_node::get (lhs)->writeonly)
8740 {
8741 unlink_stmt_vdef (stmt);
8742 gsi_remove (&gsi, true);
8743 release_defs (stmt);
8744 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8745 continue;
8746 }
8747 }
8748 /* For calls we can simply remove the LHS when it is known
8749 to be write-only. */
8750 if (is_gimple_call (stmt)
8751 && gimple_get_lhs (stmt))
8752 {
8753 tree lhs = get_base_address (gimple_get_lhs (stmt));
8754
8755 if (TREE_CODE (lhs) == VAR_DECL
8756 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8757 && varpool_node::get (lhs)->writeonly)
8758 {
8759 gimple_call_set_lhs (stmt, NULL);
8760 update_stmt (stmt);
8761 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8762 }
8763 }
8764
8765 if (maybe_clean_eh_stmt (stmt)
8766 && gimple_purge_dead_eh_edges (bb))
8767 todo |= TODO_cleanup_cfg;
8768 gsi_next (&gsi);
8769 }
8770
8771 FOR_EACH_EDGE (e, ei, bb->succs)
8772 e->count = apply_scale (e->count, count_scale);
8773
8774 /* If we have a basic block with no successors that does not
8775 end with a control statement or a noreturn call, end it with
8776 a call to __builtin_unreachable. This situation can occur
8777 when inlining a noreturn call that does in fact return. */
8778 if (EDGE_COUNT (bb->succs) == 0)
8779 {
8780 gimple stmt = last_stmt (bb);
8781 if (!stmt
8782 || (!is_ctrl_stmt (stmt)
8783 && (!is_gimple_call (stmt)
8784 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8785 {
8786 if (stmt && is_gimple_call (stmt))
8787 gimple_call_set_ctrl_altering (stmt, false);
8788 stmt = gimple_build_call
8789 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8790 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8791 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8792 }
8793 }
8794 }
8795 if (count_scale != REG_BR_PROB_BASE)
8796 compute_function_frequency ();
8797
8798 if (current_loops
8799 && (todo & TODO_cleanup_cfg))
8800 loops_state_set (LOOPS_NEED_FIXUP);
8801
8802 return todo;
8803 }
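
/* Hedged sketch (not part of the original file, compiled out): the profile
   scaling above is fixed-point arithmetic with REG_BR_PROB_BASE as the unit,
   so rescaling a count to a new entry count boils down to the helper below;
   the function name is hypothetical.  */
#if 0
static gcov_type
rescale_count_sketch (gcov_type count, gcov_type new_total,
		      gcov_type old_total)
{
  gcov_type scale = GCOV_COMPUTE_SCALE (new_total, old_total);
  return apply_scale (count, scale);	/* ~ count * new_total / old_total. */
}
#endif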
8804
8805 namespace {
8806
8807 const pass_data pass_data_fixup_cfg =
8808 {
8809 GIMPLE_PASS, /* type */
8810 "fixup_cfg", /* name */
8811 OPTGROUP_NONE, /* optinfo_flags */
8812 TV_NONE, /* tv_id */
8813 PROP_cfg, /* properties_required */
8814 0, /* properties_provided */
8815 0, /* properties_destroyed */
8816 0, /* todo_flags_start */
8817 0, /* todo_flags_finish */
8818 };
8819
8820 class pass_fixup_cfg : public gimple_opt_pass
8821 {
8822 public:
8823 pass_fixup_cfg (gcc::context *ctxt)
8824 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8825 {}
8826
8827 /* opt_pass methods: */
8828 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8829 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8830
8831 }; // class pass_fixup_cfg
8832
8833 } // anon namespace
8834
8835 gimple_opt_pass *
8836 make_pass_fixup_cfg (gcc::context *ctxt)
8837 {
8838 return new pass_fixup_cfg (ctxt);
8839 }
8840
8841 /* Garbage collection support for edge_def. */
8842
8843 extern void gt_ggc_mx (tree&);
8844 extern void gt_ggc_mx (gimple&);
8845 extern void gt_ggc_mx (rtx&);
8846 extern void gt_ggc_mx (basic_block&);
8847
8848 static void
8849 gt_ggc_mx (rtx_insn *& x)
8850 {
8851 if (x)
8852 gt_ggc_mx_rtx_def ((void *) x);
8853 }
8854
8855 void
8856 gt_ggc_mx (edge_def *e)
8857 {
8858 tree block = LOCATION_BLOCK (e->goto_locus);
8859 gt_ggc_mx (e->src);
8860 gt_ggc_mx (e->dest);
8861 if (current_ir_type () == IR_GIMPLE)
8862 gt_ggc_mx (e->insns.g);
8863 else
8864 gt_ggc_mx (e->insns.r);
8865 gt_ggc_mx (block);
8866 }
8867
8868 /* PCH support for edge_def. */
8869
8870 extern void gt_pch_nx (tree&);
8871 extern void gt_pch_nx (gimple&);
8872 extern void gt_pch_nx (rtx&);
8873 extern void gt_pch_nx (basic_block&);
8874
8875 static void
8876 gt_pch_nx (rtx_insn *& x)
8877 {
8878 if (x)
8879 gt_pch_nx_rtx_def ((void *) x);
8880 }
8881
8882 void
8883 gt_pch_nx (edge_def *e)
8884 {
8885 tree block = LOCATION_BLOCK (e->goto_locus);
8886 gt_pch_nx (e->src);
8887 gt_pch_nx (e->dest);
8888 if (current_ir_type () == IR_GIMPLE)
8889 gt_pch_nx (e->insns.g);
8890 else
8891 gt_pch_nx (e->insns.r);
8892 gt_pch_nx (block);
8893 }
8894
8895 void
8896 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8897 {
8898 tree block = LOCATION_BLOCK (e->goto_locus);
8899 op (&(e->src), cookie);
8900 op (&(e->dest), cookie);
8901 if (current_ir_type () == IR_GIMPLE)
8902 op (&(e->insns.g), cookie);
8903 else
8904 op (&(e->insns.r), cookie);
8905 op (&(block), cookie);
8906 }