/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

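/* For example, in a GIMPLE_SWITCH of the form

     switch (x) <default: L2, case 1: L1, case 2: L1>

   the CASE_LABEL_EXPRs for 1 and 2 share the edge to L1's block, so
   that edge maps to a chain linking both cases through their
   CASE_CHAIN fields.  (Illustrative example, not from a particular
   testcase.)  */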
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */
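/* As a concrete example, a loop preceded by '#pragma GCC ivdep' arrives
   here with an IFN_ANNOTATE call of kind annot_expr_ivdep_kind just
   before its condition, which replace_loop_annotate_in_block turns into
   loop->safelen = INT_MAX.  (Illustrative note based on the annotation
   kinds handled above.)  */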

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
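          /* For example, 'x = foo ();' where foo can return via an
             abnormal edge is rewritten below as 'tmp = foo (); x = tmp;'
             so the old value of x remains available on that edge.
             (Illustrative sketch of the transform that follows.)  */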
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gcond *cond_stmt = as_a <gcond *> (stmt);
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (cond_stmt),
                                  gimple_cond_rhs (cond_stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (cond_stmt);
          else if (onep)
            gimple_cond_make_true (cond_stmt);
        }
    }
}

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        tree abort_label
          = gimple_transaction_label (as_a <gtransaction *> (last));
        if (abort_label)
          make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
        fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
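  /* Concretely, each original 'goto *p;' is rewritten by
     handle_abnormal_edges as 'gotovar = p; goto <factored>;', and the
     single factored block ends in the computed goto 'goto *gotovar;'.
     (Illustrative summary of the factoring performed below.)  */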
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
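/* For example, in

     x = c ? f () : g ();

   the blocks for the two arms share one source line; distinct
   discriminators let a sample-based profiler attribute execution
   counts to each arm separately.  (Illustrative example, not from a
   particular testcase.)  */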

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user-defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
            tree label = gimple_transaction_label (trans_stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (trans_stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::add (base_high, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::eq_p (CASE_LOW (merge_case), bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
        return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
        break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
          || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

1825 /* Replaces all uses of NAME by VAL. */
1826
1827 void
1828 replace_uses_by (tree name, tree val)
1829 {
1830 imm_use_iterator imm_iter;
1831 use_operand_p use;
1832 gimple stmt;
1833 edge e;
1834
1835 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1836 {
1837 /* Mark the block if we change the last stmt in it. */
1838 if (cfgcleanup_altered_bbs
1839 && stmt_ends_bb_p (stmt))
1840 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1841
1842 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1843 {
1844 replace_exp (use, val);
1845
1846 if (gimple_code (stmt) == GIMPLE_PHI)
1847 {
1848 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1849 PHI_ARG_INDEX_FROM_USE (use));
1850 if (e->flags & EDGE_ABNORMAL
1851 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1852 {
1853 /* This can only occur for virtual operands, since
1854 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1855 would prevent replacement. */
1856 gcc_checking_assert (virtual_operand_p (name));
1857 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1858 }
1859 }
1860 }
1861
1862 if (gimple_code (stmt) != GIMPLE_PHI)
1863 {
1864 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1865 gimple orig_stmt = stmt;
1866 size_t i;
1867
1868 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1869 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1870 only change sth from non-invariant to invariant, and only
1871 when propagating constants. */
1872 if (is_gimple_min_invariant (val))
1873 for (i = 0; i < gimple_num_ops (stmt); i++)
1874 {
1875 tree op = gimple_op (stmt, i);
1876 /* Operands may be empty here. For example, the labels
1877 of a GIMPLE_COND are nulled out following the creation
1878 of the corresponding CFG edges. */
1879 if (op && TREE_CODE (op) == ADDR_EXPR)
1880 recompute_tree_invariant_for_addr_expr (op);
1881 }
1882
1883 if (fold_stmt (&gsi))
1884 stmt = gsi_stmt (gsi);
1885
1886 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1887 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1888
1889 update_stmt (stmt);
1890 }
1891 }
1892
1893 gcc_checking_assert (has_zero_uses (name));
1894
1895 /* Also update the trees stored in loop structures. */
1896 if (current_loops)
1897 {
1898 struct loop *loop;
1899
1900 FOR_EACH_LOOP (loop, 0)
1901 {
1902 substitute_in_loop_info (loop, name, val);
1903 }
1904 }
1905 }
1906
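/* Usage sketch (editor's illustration, not from the GCC sources): a pass
   that has proved NAME equal to a constant could fold it into every use
   via replace_uses_by. DEF_STMT and the constant 42 below are
   hypothetical.

     tree name = gimple_assign_lhs (def_stmt);
     tree val = build_int_cst (TREE_TYPE (name), 42);
     if (may_propagate_copy (name, val))
       replace_uses_by (name, val);

   Afterwards NAME has zero uses (asserted above) and its defining
   statement is dead. */
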
1907 /* Merge block B into block A. */
1908
1909 static void
1910 gimple_merge_blocks (basic_block a, basic_block b)
1911 {
1912 gimple_stmt_iterator last, gsi;
1913 gphi_iterator psi;
1914
1915 if (dump_file)
1916 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1917
1918 /* Remove all single-valued PHI nodes from block B of the form
1919 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1920 gsi = gsi_last_bb (a);
1921 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1922 {
1923 gimple phi = gsi_stmt (psi);
1924 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1925 gimple copy;
1926 bool may_replace_uses = (virtual_operand_p (def)
1927 || may_propagate_copy (def, use));
1928
1929 /* In case we maintain loop-closed SSA form, do not propagate arguments
1930 of loop exit phi nodes. */
1931 if (current_loops
1932 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1933 && !virtual_operand_p (def)
1934 && TREE_CODE (use) == SSA_NAME
1935 && a->loop_father != b->loop_father)
1936 may_replace_uses = false;
1937
1938 if (!may_replace_uses)
1939 {
1940 gcc_assert (!virtual_operand_p (def));
1941
1942 /* Note that just emitting the copies is fine -- there is no problem
1943 with ordering of phi nodes. This is because A is the single
1944 predecessor of B, therefore results of the phi nodes cannot
1945 appear as arguments of the phi nodes. */
1946 copy = gimple_build_assign (def, use);
1947 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1948 remove_phi_node (&psi, false);
1949 }
1950 else
1951 {
1952 /* If we deal with a PHI for virtual operands, we can simply
1953 propagate these without fussing with folding or updating
1954 the stmt. */
1955 if (virtual_operand_p (def))
1956 {
1957 imm_use_iterator iter;
1958 use_operand_p use_p;
1959 gimple stmt;
1960
1961 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1962 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1963 SET_USE (use_p, use);
1964
1965 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1966 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1967 }
1968 else
1969 replace_uses_by (def, use);
1970
1971 remove_phi_node (&psi, true);
1972 }
1973 }
1974
1975 /* Ensure that B follows A. */
1976 move_block_after (b, a);
1977
1978 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1979 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1980
1981 /* Remove labels from B and set gimple_bb to A for other statements. */
1982 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1983 {
1984 gimple stmt = gsi_stmt (gsi);
1985 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1986 {
1987 tree label = gimple_label_label (label_stmt);
1988 int lp_nr;
1989
1990 gsi_remove (&gsi, false);
1991
1992 /* Now that we can thread computed gotos, we might have
1993 a situation where we have a forced label in block B.
1994 However, the label at the start of block B might still be
1995 used in other ways (think about the runtime checking for
1996 Fortran assigned gotos). So we cannot just delete the
1997 label. Instead we move the label to the start of block A. */
1998 if (FORCED_LABEL (label))
1999 {
2000 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2001 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2002 }
2003 /* Other user labels are kept around in the form of a debug stmt. */
2004 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2005 {
2006 gimple dbg = gimple_build_debug_bind (label,
2007 integer_zero_node,
2008 stmt);
2009 gimple_debug_bind_reset_value (dbg);
2010 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2011 }
2012
2013 lp_nr = EH_LANDING_PAD_NR (label);
2014 if (lp_nr)
2015 {
2016 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2017 lp->post_landing_pad = NULL;
2018 }
2019 }
2020 else
2021 {
2022 gimple_set_bb (stmt, a);
2023 gsi_next (&gsi);
2024 }
2025 }
2026
2027 /* When merging two BBs, if their counts are different, the larger count
2028 is selected as the new bb count. This is to handle inconsistent
2029 profiles. */
2030 if (a->loop_father == b->loop_father)
2031 {
2032 a->count = MAX (a->count, b->count);
2033 a->frequency = MAX (a->frequency, b->frequency);
2034 }
2035
2036 /* Merge the sequences. */
2037 last = gsi_last_bb (a);
2038 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2039 set_bb_seq (b, NULL);
2040
2041 if (cfgcleanup_altered_bbs)
2042 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2043 }
2044
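/* Illustrative sketch (editor's example, not from the sources): with A
   the single predecessor of B and an unconditional fallthru A->B,

       <bb A>:                         <bb A>:
         x_1 = y_2 + 1;                  x_1 = y_2 + 1;
       <bb B>:               ==>         z_3 = x_1 + 4;
         z_3 = x_1 + 4;                  return z_3;
         return z_3;

   gimple_merge_blocks first eliminates B's single-argument PHIs and
   then appends B's statement sequence to A, as implemented above. */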
2045
2046 /* Return the one of BB's two successors that is not reachable by a
2047 complex edge, if there is one; otherwise return BB. We use
2048 this in optimizations that use post-dominators for their heuristics,
2049 to catch the cases in C++ where function calls are involved. */
2050
2051 basic_block
2052 single_noncomplex_succ (basic_block bb)
2053 {
2054 edge e0, e1;
2055 if (EDGE_COUNT (bb->succs) != 2)
2056 return bb;
2057
2058 e0 = EDGE_SUCC (bb, 0);
2059 e1 = EDGE_SUCC (bb, 1);
2060 if (e0->flags & EDGE_COMPLEX)
2061 return e1->dest;
2062 if (e1->flags & EDGE_COMPLEX)
2063 return e0->dest;
2064
2065 return bb;
2066 }
2067
2068 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags accordingly. */
2069
2070 void
2071 notice_special_calls (gcall *call)
2072 {
2073 int flags = gimple_call_flags (call);
2074
2075 if (flags & ECF_MAY_BE_ALLOCA)
2076 cfun->calls_alloca = true;
2077 if (flags & ECF_RETURNS_TWICE)
2078 cfun->calls_setjmp = true;
2079 }
2080
2081
2082 /* Clear flags set by notice_special_calls. Used by dead code removal
2083 to update the flags. */
2084
2085 void
2086 clear_special_calls (void)
2087 {
2088 cfun->calls_alloca = false;
2089 cfun->calls_setjmp = false;
2090 }
2091
2092 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2093
2094 static void
2095 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2096 {
2097 /* Since this block is no longer reachable, we can just delete all
2098 of its PHI nodes. */
2099 remove_phi_nodes (bb);
2100
2101 /* Remove edges to BB's successors. */
2102 while (EDGE_COUNT (bb->succs) > 0)
2103 remove_edge (EDGE_SUCC (bb, 0));
2104 }
2105
2106
2107 /* Remove statements of basic block BB. */
2108
2109 static void
2110 remove_bb (basic_block bb)
2111 {
2112 gimple_stmt_iterator i;
2113
2114 if (dump_file)
2115 {
2116 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2117 if (dump_flags & TDF_DETAILS)
2118 {
2119 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2120 fprintf (dump_file, "\n");
2121 }
2122 }
2123
2124 if (current_loops)
2125 {
2126 struct loop *loop = bb->loop_father;
2127
2128 /* If a loop gets removed, clean up the information associated
2129 with it. */
2130 if (loop->latch == bb
2131 || loop->header == bb)
2132 free_numbers_of_iterations_estimates_loop (loop);
2133 }
2134
2135 /* Remove all the instructions in the block. */
2136 if (bb_seq (bb) != NULL)
2137 {
2138 /* Walk backwards so as to get a chance to substitute all
2139 released DEFs into debug stmts. See
2140 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2141 details. */
2142 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2143 {
2144 gimple stmt = gsi_stmt (i);
2145 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2146 if (label_stmt
2147 && (FORCED_LABEL (gimple_label_label (label_stmt))
2148 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2149 {
2150 basic_block new_bb;
2151 gimple_stmt_iterator new_gsi;
2152
2153 /* A non-reachable non-local label may still be referenced.
2154 But it no longer needs to carry the extra semantics of
2155 non-locality. */
2156 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2157 {
2158 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2159 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2160 }
2161
2162 new_bb = bb->prev_bb;
2163 new_gsi = gsi_start_bb (new_bb);
2164 gsi_remove (&i, false);
2165 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2166 }
2167 else
2168 {
2169 /* Release SSA definitions if we are in SSA. Note that we
2170 may be called when not in SSA. For example,
2171 final_cleanup calls this function via
2172 cleanup_tree_cfg. */
2173 if (gimple_in_ssa_p (cfun))
2174 release_defs (stmt);
2175
2176 gsi_remove (&i, true);
2177 }
2178
2179 if (gsi_end_p (i))
2180 i = gsi_last_bb (bb);
2181 else
2182 gsi_prev (&i);
2183 }
2184 }
2185
2186 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2187 bb->il.gimple.seq = NULL;
2188 bb->il.gimple.phi_nodes = NULL;
2189 }
2190
2191
2192 /* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
2193 computed GOTO, and a predicate value VAL, return the edge that will be
2194 taken out of the block. If VAL does not match a unique edge, NULL is returned. */
2195
2196 edge
2197 find_taken_edge (basic_block bb, tree val)
2198 {
2199 gimple stmt;
2200
2201 stmt = last_stmt (bb);
2202
2203 gcc_assert (stmt);
2204 gcc_assert (is_ctrl_stmt (stmt));
2205
2206 if (val == NULL)
2207 return NULL;
2208
2209 if (!is_gimple_min_invariant (val))
2210 return NULL;
2211
2212 if (gimple_code (stmt) == GIMPLE_COND)
2213 return find_taken_edge_cond_expr (bb, val);
2214
2215 if (gimple_code (stmt) == GIMPLE_SWITCH)
2216 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2217
2218 if (computed_goto_p (stmt))
2219 {
2220 /* Only optimize if the argument is a label; if the argument is
2221 not a label then we cannot construct a proper CFG.
2222
2223 It may be the case that we only need to allow the LABEL_REF to
2224 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2225 appear inside a LABEL_EXPR just to be safe. */
2226 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2227 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2228 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2229 return NULL;
2230 }
2231
2232 gcc_unreachable ();
2233 }
2234
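/* Example (editor's sketch): if BB ends in
     if (x_1 > 10) goto <bb 4>; else goto <bb 5>;
   then find_taken_edge (bb, boolean_true_node) returns the edge to
   <bb 4>, while a NULL or non-invariant VAL returns NULL. */
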
2235 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2236 statement, determine which of the outgoing edges will be taken out of the
2237 block. Return NULL if any edge may be taken. */
2238
2239 static edge
2240 find_taken_edge_computed_goto (basic_block bb, tree val)
2241 {
2242 basic_block dest;
2243 edge e = NULL;
2244
2245 dest = label_to_block (val);
2246 if (dest)
2247 {
2248 e = find_edge (bb, dest);
2249 gcc_assert (e != NULL);
2250 }
2251
2252 return e;
2253 }
2254
2255 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2256 statement, determine which of the two edges will be taken out of the
2257 block. Return NULL if either edge may be taken. */
2258
2259 static edge
2260 find_taken_edge_cond_expr (basic_block bb, tree val)
2261 {
2262 edge true_edge, false_edge;
2263
2264 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2265
2266 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2267 return (integer_zerop (val) ? false_edge : true_edge);
2268 }
2269
2270 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2271 statement, determine which edge will be taken out of the block. The
2272 matching case label, or failing that the default label, always selects an edge. */
2273
2274 static edge
2275 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2276 tree val)
2277 {
2278 basic_block dest_bb;
2279 edge e;
2280 tree taken_case;
2281
2282 taken_case = find_case_label_for_value (switch_stmt, val);
2283 dest_bb = label_to_block (CASE_LABEL (taken_case));
2284
2285 e = find_edge (bb, dest_bb);
2286 gcc_assert (e);
2287 return e;
2288 }
2289
2290
2291 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2292 We can make optimal use here of the fact that the case labels are
2293 sorted: We can do a binary search for a case matching VAL. */
2294
2295 static tree
2296 find_case_label_for_value (gswitch *switch_stmt, tree val)
2297 {
2298 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2299 tree default_case = gimple_switch_default_label (switch_stmt);
2300
2301 for (low = 0, high = n; high - low > 1; )
2302 {
2303 size_t i = (high + low) / 2;
2304 tree t = gimple_switch_label (switch_stmt, i);
2305 int cmp;
2306
2307 /* Cache the result of comparing CASE_LOW and val. */
2308 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2309
2310 if (cmp > 0)
2311 high = i;
2312 else
2313 low = i;
2314
2315 if (CASE_HIGH (t) == NULL)
2316 {
2317 /* A single-valued case label. */
2318 if (cmp == 0)
2319 return t;
2320 }
2321 else
2322 {
2323 /* A case range. We can only handle integer ranges. */
2324 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2325 return t;
2326 }
2327 }
2328
2329 return default_case;
2330 }
2331
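/* Worked example (editor's illustration): for
     switch (x) { default: ...; case 1: ...; case 4 ... 6: ...; }
   the sorted label vector is [default, 1, 4...6] and n = 3. Looking up
   VAL = 5: i = (0 + 3) / 2 = 1 probes CASE_LOW = 1, cmp < 0 so low = 1;
   i = (1 + 3) / 2 = 2 probes CASE_LOW = 4, cmp < 0 and CASE_HIGH = 6 >= 5,
   so the range label is returned. VAL = 2 matches no label and falls
   back to default_case. */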
2332
2333 /* Dump a basic block on stderr. */
2334
2335 void
2336 gimple_debug_bb (basic_block bb)
2337 {
2338 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2339 }
2340
2341
2342 /* Dump basic block with index N on stderr. */
2343
2344 basic_block
2345 gimple_debug_bb_n (int n)
2346 {
2347 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2348 return BASIC_BLOCK_FOR_FN (cfun, n);
2349 }
2350
2351
2352 /* Dump the CFG on stderr.
2353
2354 FLAGS are the same as those used by the tree dumping functions
2355 (see TDF_* in dumpfile.h). */
2356
2357 void
2358 gimple_debug_cfg (int flags)
2359 {
2360 gimple_dump_cfg (stderr, flags);
2361 }
2362
2363
2364 /* Dump the program showing basic block boundaries on the given FILE.
2365
2366 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2367 dumpfile.h). */
2368
2369 void
2370 gimple_dump_cfg (FILE *file, int flags)
2371 {
2372 if (flags & TDF_DETAILS)
2373 {
2374 dump_function_header (file, current_function_decl, flags);
2375 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2376 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2377 last_basic_block_for_fn (cfun));
2378
2379 brief_dump_cfg (file, flags | TDF_COMMENT);
2380 fprintf (file, "\n");
2381 }
2382
2383 if (flags & TDF_STATS)
2384 dump_cfg_stats (file);
2385
2386 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2387 }
2388
2389
2390 /* Dump CFG statistics on FILE. */
2391
2392 void
2393 dump_cfg_stats (FILE *file)
2394 {
2395 static long max_num_merged_labels = 0;
2396 unsigned long size, total = 0;
2397 long num_edges;
2398 basic_block bb;
2399 const char * const fmt_str = "%-30s%-13s%12s\n";
2400 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2401 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2402 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2403 const char *funcname = current_function_name ();
2404
2405 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2406
2407 fprintf (file, "---------------------------------------------------------\n");
2408 fprintf (file, fmt_str, "", " Number of ", "Memory");
2409 fprintf (file, fmt_str, "", " instances ", "used ");
2410 fprintf (file, "---------------------------------------------------------\n");
2411
2412 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2413 total += size;
2414 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2415 SCALE (size), LABEL (size));
2416
2417 num_edges = 0;
2418 FOR_EACH_BB_FN (bb, cfun)
2419 num_edges += EDGE_COUNT (bb->succs);
2420 size = num_edges * sizeof (struct edge_def);
2421 total += size;
2422 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2423
2424 fprintf (file, "---------------------------------------------------------\n");
2425 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2426 LABEL (total));
2427 fprintf (file, "---------------------------------------------------------\n");
2428 fprintf (file, "\n");
2429
2430 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2431 max_num_merged_labels = cfg_stats.num_merged_labels;
2432
2433 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2434 cfg_stats.num_merged_labels, max_num_merged_labels);
2435
2436 fprintf (file, "\n");
2437 }
2438
2439
2440 /* Dump CFG statistics on stderr. Keep extern so that it's always
2441 linked in the final executable. */
2442
2443 DEBUG_FUNCTION void
2444 debug_cfg_stats (void)
2445 {
2446 dump_cfg_stats (stderr);
2447 }
2448
2449 /*---------------------------------------------------------------------------
2450 Miscellaneous helpers
2451 ---------------------------------------------------------------------------*/
2452
2453 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2454 flow. Transfers of control flow associated with EH are excluded. */
2455
2456 static bool
2457 call_can_make_abnormal_goto (gimple t)
2458 {
2459 /* If the function has no non-local labels, then a call cannot make an
2460 abnormal transfer of control. */
2461 if (!cfun->has_nonlocal_label
2462 && !cfun->calls_setjmp)
2463 return false;
2464
2465 /* Likewise if the call has no side effects. */
2466 if (!gimple_has_side_effects (t))
2467 return false;
2468
2469 /* Likewise if the called function is leaf. */
2470 if (gimple_call_flags (t) & ECF_LEAF)
2471 return false;
2472
2473 return true;
2474 }
2475
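/* Example (editor's sketch, GNU C; h is a hypothetical external
   function taking a function pointer):

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }
       h (g);
      out:;
     }

   G's "goto out" is a nonlocal goto, so the call h (g) may transfer
   control abnormally to OUT. cfun->has_nonlocal_label is set for f,
   and only the side-effect and ECF_LEAF checks above can rule the
   call out. */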
2476
2477 /* Return true if T can make an abnormal transfer of control flow.
2478 Transfers of control flow associated with EH are excluded. */
2479
2480 bool
2481 stmt_can_make_abnormal_goto (gimple t)
2482 {
2483 if (computed_goto_p (t))
2484 return true;
2485 if (is_gimple_call (t))
2486 return call_can_make_abnormal_goto (t);
2487 return false;
2488 }
2489
2490
2491 /* Return true if T represents a stmt that always transfers control. */
2492
2493 bool
2494 is_ctrl_stmt (gimple t)
2495 {
2496 switch (gimple_code (t))
2497 {
2498 case GIMPLE_COND:
2499 case GIMPLE_SWITCH:
2500 case GIMPLE_GOTO:
2501 case GIMPLE_RETURN:
2502 case GIMPLE_RESX:
2503 return true;
2504 default:
2505 return false;
2506 }
2507 }
2508
2509
2510 /* Return true if T is a statement that may alter the flow of control
2511 (e.g., a call to a non-returning function). */
2512
2513 bool
2514 is_ctrl_altering_stmt (gimple t)
2515 {
2516 gcc_assert (t);
2517
2518 switch (gimple_code (t))
2519 {
2520 case GIMPLE_CALL:
2521 /* The per-stmt call flag indicates whether the call could alter
2522 control flow. */
2523 if (gimple_call_ctrl_altering_p (t))
2524 return true;
2525 break;
2526
2527 case GIMPLE_EH_DISPATCH:
2528 /* EH_DISPATCH branches to the individual catch handlers at
2529 this level of a try or allowed-exceptions region. It can
2530 fallthru to the next statement as well. */
2531 return true;
2532
2533 case GIMPLE_ASM:
2534 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2535 return true;
2536 break;
2537
2538 CASE_GIMPLE_OMP:
2539 /* OpenMP directives alter control flow. */
2540 return true;
2541
2542 case GIMPLE_TRANSACTION:
2543 /* A transaction start alters control flow. */
2544 return true;
2545
2546 default:
2547 break;
2548 }
2549
2550 /* If a statement can throw, it alters control flow. */
2551 return stmt_can_throw_internal (t);
2552 }
2553
2554
2555 /* Return true if T is a simple local goto. */
2556
2557 bool
2558 simple_goto_p (gimple t)
2559 {
2560 return (gimple_code (t) == GIMPLE_GOTO
2561 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2562 }
2563
2564
2565 /* Return true if STMT should start a new basic block. PREV_STMT is
2566 the statement preceding STMT. It is used when STMT is a label or a
2567 case label. Labels should only start a new basic block if their
2568 previous statement wasn't a label. Otherwise, a sequence of labels
2569 would generate unnecessary basic blocks that only contain a single
2570 label. */
2571
2572 static inline bool
2573 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2574 {
2575 if (stmt == NULL)
2576 return false;
2577
2578 /* Labels start a new basic block only if the preceding statement
2579 wasn't a label of the same type. This prevents the creation of
2580 consecutive blocks that have nothing but a single label. */
2581 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2582 {
2583 /* Nonlocal and computed GOTO targets always start a new block. */
2584 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2585 || FORCED_LABEL (gimple_label_label (label_stmt)))
2586 return true;
2587
2588 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2589 {
2590 if (DECL_NONLOCAL (gimple_label_label (
2591 as_a <glabel *> (prev_stmt))))
2592 return true;
2593
2594 cfg_stats.num_merged_labels++;
2595 return false;
2596 }
2597 else
2598 return true;
2599 }
2600 else if (gimple_code (stmt) == GIMPLE_CALL
2601 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2602 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2603 start a new block. */
2604 return true;
2605
2606 return false;
2607 }
2608
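/* Example (editor's illustration): in
     a: b: c: x_1 = 1;
   only label A starts a new basic block; B and C are merged into it
   and counted in cfg_stats.num_merged_labels. A nonlocal label, a
   computed-goto target or a setjmp-like call in their place would
   each force a fresh block. */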
2609
2610 /* Return true if T should end a basic block. */
2611
2612 bool
2613 stmt_ends_bb_p (gimple t)
2614 {
2615 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2616 }
2617
2618 /* Remove block annotations and other data structures. */
2619
2620 void
2621 delete_tree_cfg_annotations (void)
2622 {
2623 vec_free (label_to_block_map_for_fn (cfun));
2624 }
2625
2626
2627 /* Return the first statement in basic block BB. */
2628
2629 gimple
2630 first_stmt (basic_block bb)
2631 {
2632 gimple_stmt_iterator i = gsi_start_bb (bb);
2633 gimple stmt = NULL;
2634
2635 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2636 {
2637 gsi_next (&i);
2638 stmt = NULL;
2639 }
2640 return stmt;
2641 }
2642
2643 /* Return the first non-label statement in basic block BB. */
2644
2645 static gimple
2646 first_non_label_stmt (basic_block bb)
2647 {
2648 gimple_stmt_iterator i = gsi_start_bb (bb);
2649 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2650 gsi_next (&i);
2651 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2652 }
2653
2654 /* Return the last statement in basic block BB. */
2655
2656 gimple
2657 last_stmt (basic_block bb)
2658 {
2659 gimple_stmt_iterator i = gsi_last_bb (bb);
2660 gimple stmt = NULL;
2661
2662 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2663 {
2664 gsi_prev (&i);
2665 stmt = NULL;
2666 }
2667 return stmt;
2668 }
2669
2670 /* Return the last statement of an otherwise empty block. Return NULL
2671 if the block is totally empty, or if it contains more than one
2672 statement. */
2673
2674 gimple
2675 last_and_only_stmt (basic_block bb)
2676 {
2677 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2678 gimple last, prev;
2679
2680 if (gsi_end_p (i))
2681 return NULL;
2682
2683 last = gsi_stmt (i);
2684 gsi_prev_nondebug (&i);
2685 if (gsi_end_p (i))
2686 return last;
2687
2688 /* Empty statements should no longer appear in the instruction stream.
2689 Everything that might have appeared before should be deleted by
2690 remove_useless_stmts, and the optimizers should just gsi_remove
2691 instead of smashing with build_empty_stmt.
2692
2693 Thus the only thing that should appear here in a block containing
2694 one executable statement is a label. */
2695 prev = gsi_stmt (i);
2696 if (gimple_code (prev) == GIMPLE_LABEL)
2697 return last;
2698 else
2699 return NULL;
2700 }
2701
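/* Example (editor's sketch): for a block containing only
     L1:
     return x_1;
   last_and_only_stmt returns the GIMPLE_RETURN; the leading label and
   any debug stmts do not count against "only". A block with two
   executable statements yields NULL. */
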
2702 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2703
2704 static void
2705 reinstall_phi_args (edge new_edge, edge old_edge)
2706 {
2707 edge_var_map *vm;
2708 int i;
2709 gphi_iterator phis;
2710
2711 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2712 if (!v)
2713 return;
2714
2715 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2716 v->iterate (i, &vm) && !gsi_end_p (phis);
2717 i++, gsi_next (&phis))
2718 {
2719 gphi *phi = phis.phi ();
2720 tree result = redirect_edge_var_map_result (vm);
2721 tree arg = redirect_edge_var_map_def (vm);
2722
2723 gcc_assert (result == gimple_phi_result (phi));
2724
2725 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2726 }
2727
2728 redirect_edge_var_map_clear (old_edge);
2729 }
2730
2731 /* Returns the basic block after which the new basic block created
2732 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2733 near its "logical" location. This is of most help to humans looking
2734 at debugging dumps. */
2735
2736 basic_block
2737 split_edge_bb_loc (edge edge_in)
2738 {
2739 basic_block dest = edge_in->dest;
2740 basic_block dest_prev = dest->prev_bb;
2741
2742 if (dest_prev)
2743 {
2744 edge e = find_edge (dest_prev, dest);
2745 if (e && !(e->flags & EDGE_COMPLEX))
2746 return edge_in->src;
2747 }
2748 return dest_prev;
2749 }
2750
2751 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2752 Abort on abnormal edges. */
2753
2754 static basic_block
2755 gimple_split_edge (edge edge_in)
2756 {
2757 basic_block new_bb, after_bb, dest;
2758 edge new_edge, e;
2759
2760 /* Abnormal edges cannot be split. */
2761 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2762
2763 dest = edge_in->dest;
2764
2765 after_bb = split_edge_bb_loc (edge_in);
2766
2767 new_bb = create_empty_bb (after_bb);
2768 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2769 new_bb->count = edge_in->count;
2770 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2771 new_edge->probability = REG_BR_PROB_BASE;
2772 new_edge->count = edge_in->count;
2773
2774 e = redirect_edge_and_branch (edge_in, new_bb);
2775 gcc_assert (e == edge_in);
2776 reinstall_phi_args (new_edge, e);
2777
2778 return new_bb;
2779 }
2780
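/* Sketch (editor's illustration): splitting the critical edge E from
   A to D,

       A ---> D    becomes    A ---> N ---> D

   where the new block N inherits E's count and frequency, N->D is an
   unconditional fallthru with probability REG_BR_PROB_BASE, and the
   PHI arguments formerly carried by E are re-installed on N->D. */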
2781
2782 /* Verify properties of the address expression T with base object BASE. */
2783
2784 static tree
2785 verify_address (tree t, tree base)
2786 {
2787 bool old_constant;
2788 bool old_side_effects;
2789 bool new_constant;
2790 bool new_side_effects;
2791
2792 old_constant = TREE_CONSTANT (t);
2793 old_side_effects = TREE_SIDE_EFFECTS (t);
2794
2795 recompute_tree_invariant_for_addr_expr (t);
2796 new_side_effects = TREE_SIDE_EFFECTS (t);
2797 new_constant = TREE_CONSTANT (t);
2798
2799 if (old_constant != new_constant)
2800 {
2801 error ("constant not recomputed when ADDR_EXPR changed");
2802 return t;
2803 }
2804 if (old_side_effects != new_side_effects)
2805 {
2806 error ("side effects not recomputed when ADDR_EXPR changed");
2807 return t;
2808 }
2809
2810 if (!(TREE_CODE (base) == VAR_DECL
2811 || TREE_CODE (base) == PARM_DECL
2812 || TREE_CODE (base) == RESULT_DECL))
2813 return NULL_TREE;
2814
2815 if (DECL_GIMPLE_REG_P (base))
2816 {
2817 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2818 return base;
2819 }
2820
2821 return NULL_TREE;
2822 }
2823
2824 /* Callback for walk_tree; check that all elements with address taken are
2825 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2826 inside a PHI node. */
2827
2828 static tree
2829 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2830 {
2831 tree t = *tp, x;
2832
2833 if (TYPE_P (t))
2834 *walk_subtrees = 0;
2835
2836 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2837 #define CHECK_OP(N, MSG) \
2838 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2839 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2840
2841 switch (TREE_CODE (t))
2842 {
2843 case SSA_NAME:
2844 if (SSA_NAME_IN_FREE_LIST (t))
2845 {
2846 error ("SSA name in freelist but still referenced");
2847 return *tp;
2848 }
2849 break;
2850
2851 case INDIRECT_REF:
2852 error ("INDIRECT_REF in gimple IL");
2853 return t;
2854
2855 case MEM_REF:
2856 x = TREE_OPERAND (t, 0);
2857 if (!POINTER_TYPE_P (TREE_TYPE (x))
2858 || !is_gimple_mem_ref_addr (x))
2859 {
2860 error ("invalid first operand of MEM_REF");
2861 return x;
2862 }
2863 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2864 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2865 {
2866 error ("invalid offset operand of MEM_REF");
2867 return TREE_OPERAND (t, 1);
2868 }
2869 if (TREE_CODE (x) == ADDR_EXPR
2870 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2871 return x;
2872 *walk_subtrees = 0;
2873 break;
2874
2875 case ASSERT_EXPR:
2876 x = fold (ASSERT_EXPR_COND (t));
2877 if (x == boolean_false_node)
2878 {
2879 error ("ASSERT_EXPR with an always-false condition");
2880 return *tp;
2881 }
2882 break;
2883
2884 case MODIFY_EXPR:
2885 error ("MODIFY_EXPR not expected while having tuples");
2886 return *tp;
2887
2888 case ADDR_EXPR:
2889 {
2890 tree tem;
2891
2892 gcc_assert (is_gimple_address (t));
2893
2894 /* Skip any references (they will be checked when we recurse down the
2895 tree) and ensure that any variable used as a prefix is marked
2896 addressable. */
2897 for (x = TREE_OPERAND (t, 0);
2898 handled_component_p (x);
2899 x = TREE_OPERAND (x, 0))
2900 ;
2901
2902 if ((tem = verify_address (t, x)))
2903 return tem;
2904
2905 if (!(TREE_CODE (x) == VAR_DECL
2906 || TREE_CODE (x) == PARM_DECL
2907 || TREE_CODE (x) == RESULT_DECL))
2908 return NULL;
2909
2910 if (!TREE_ADDRESSABLE (x))
2911 {
2912 error ("address taken, but ADDRESSABLE bit not set");
2913 return x;
2914 }
2915
2916 break;
2917 }
2918
2919 case COND_EXPR:
2920 x = COND_EXPR_COND (t);
2921 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2922 {
2923 error ("non-integral used in condition");
2924 return x;
2925 }
2926 if (!is_gimple_condexpr (x))
2927 {
2928 error ("invalid conditional operand");
2929 return x;
2930 }
2931 break;
2932
2933 case NON_LVALUE_EXPR:
2934 case TRUTH_NOT_EXPR:
2935 gcc_unreachable ();
2936
2937 CASE_CONVERT:
2938 case FIX_TRUNC_EXPR:
2939 case FLOAT_EXPR:
2940 case NEGATE_EXPR:
2941 case ABS_EXPR:
2942 case BIT_NOT_EXPR:
2943 CHECK_OP (0, "invalid operand to unary operator");
2944 break;
2945
2946 case REALPART_EXPR:
2947 case IMAGPART_EXPR:
2948 case BIT_FIELD_REF:
2949 if (!is_gimple_reg_type (TREE_TYPE (t)))
2950 {
2951 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2952 return t;
2953 }
2954
2955 if (TREE_CODE (t) == BIT_FIELD_REF)
2956 {
2957 tree t0 = TREE_OPERAND (t, 0);
2958 tree t1 = TREE_OPERAND (t, 1);
2959 tree t2 = TREE_OPERAND (t, 2);
2960 if (!tree_fits_uhwi_p (t1)
2961 || !tree_fits_uhwi_p (t2))
2962 {
2963 error ("invalid position or size operand to BIT_FIELD_REF");
2964 return t;
2965 }
2966 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2967 && (TYPE_PRECISION (TREE_TYPE (t))
2968 != tree_to_uhwi (t1)))
2969 {
2970 error ("integral result type precision does not match "
2971 "field size of BIT_FIELD_REF");
2972 return t;
2973 }
2974 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2975 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2976 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2977 != tree_to_uhwi (t1)))
2978 {
2979 error ("mode precision of non-integral result does not "
2980 "match field size of BIT_FIELD_REF");
2981 return t;
2982 }
2983 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2984 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2985 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2986 {
2987 error ("position plus size exceeds size of referenced object in "
2988 "BIT_FIELD_REF");
2989 return t;
2990 }
2991 }
2992 t = TREE_OPERAND (t, 0);
2993
2994 /* Fall-through. */
2995 case COMPONENT_REF:
2996 case ARRAY_REF:
2997 case ARRAY_RANGE_REF:
2998 case VIEW_CONVERT_EXPR:
2999 /* We have a nest of references. Verify that each of the operands
3000 that determine where to reference is either a constant or a variable,
3001 verify that the base is valid, and then show we've already checked
3002 the subtrees. */
3003 while (handled_component_p (t))
3004 {
3005 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3006 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3007 else if (TREE_CODE (t) == ARRAY_REF
3008 || TREE_CODE (t) == ARRAY_RANGE_REF)
3009 {
3010 CHECK_OP (1, "invalid array index");
3011 if (TREE_OPERAND (t, 2))
3012 CHECK_OP (2, "invalid array lower bound");
3013 if (TREE_OPERAND (t, 3))
3014 CHECK_OP (3, "invalid array stride");
3015 }
3016 else if (TREE_CODE (t) == BIT_FIELD_REF
3017 || TREE_CODE (t) == REALPART_EXPR
3018 || TREE_CODE (t) == IMAGPART_EXPR)
3019 {
3020 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3021 "REALPART_EXPR");
3022 return t;
3023 }
3024
3025 t = TREE_OPERAND (t, 0);
3026 }
3027
3028 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3029 {
3030 error ("invalid reference prefix");
3031 return t;
3032 }
3033 *walk_subtrees = 0;
3034 break;
3035 case PLUS_EXPR:
3036 case MINUS_EXPR:
3037 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3038 should be done using POINTER_PLUS_EXPR. */
3039 if (POINTER_TYPE_P (TREE_TYPE (t)))
3040 {
3041 error ("invalid operand to plus/minus, type is a pointer");
3042 return t;
3043 }
3044 CHECK_OP (0, "invalid operand to binary operator");
3045 CHECK_OP (1, "invalid operand to binary operator");
3046 break;
3047
3048 case POINTER_PLUS_EXPR:
3049 /* Check to make sure the first operand is a pointer or reference type. */
3050 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3051 {
3052 error ("invalid operand to pointer plus, first operand is not a pointer");
3053 return t;
3054 }
3055 /* Check to make sure the second operand is a ptrofftype. */
3056 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3057 {
3058 error ("invalid operand to pointer plus, second operand is not an "
3059 "integer type of appropriate width");
3060 return t;
3061 }
3062 /* FALLTHROUGH */
3063 case LT_EXPR:
3064 case LE_EXPR:
3065 case GT_EXPR:
3066 case GE_EXPR:
3067 case EQ_EXPR:
3068 case NE_EXPR:
3069 case UNORDERED_EXPR:
3070 case ORDERED_EXPR:
3071 case UNLT_EXPR:
3072 case UNLE_EXPR:
3073 case UNGT_EXPR:
3074 case UNGE_EXPR:
3075 case UNEQ_EXPR:
3076 case LTGT_EXPR:
3077 case MULT_EXPR:
3078 case TRUNC_DIV_EXPR:
3079 case CEIL_DIV_EXPR:
3080 case FLOOR_DIV_EXPR:
3081 case ROUND_DIV_EXPR:
3082 case TRUNC_MOD_EXPR:
3083 case CEIL_MOD_EXPR:
3084 case FLOOR_MOD_EXPR:
3085 case ROUND_MOD_EXPR:
3086 case RDIV_EXPR:
3087 case EXACT_DIV_EXPR:
3088 case MIN_EXPR:
3089 case MAX_EXPR:
3090 case LSHIFT_EXPR:
3091 case RSHIFT_EXPR:
3092 case LROTATE_EXPR:
3093 case RROTATE_EXPR:
3094 case BIT_IOR_EXPR:
3095 case BIT_XOR_EXPR:
3096 case BIT_AND_EXPR:
3097 CHECK_OP (0, "invalid operand to binary operator");
3098 CHECK_OP (1, "invalid operand to binary operator");
3099 break;
3100
3101 case CONSTRUCTOR:
3102 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3103 *walk_subtrees = 0;
3104 break;
3105
3106 case CASE_LABEL_EXPR:
3107 if (CASE_CHAIN (t))
3108 {
3109 error ("invalid CASE_CHAIN");
3110 return t;
3111 }
3112 break;
3113
3114 default:
3115 break;
3116 }
3117 return NULL;
3118
3119 #undef CHECK_OP
3120 }
3121
3122
3123 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3124 Returns true if there is an error, otherwise false. */
3125
3126 static bool
3127 verify_types_in_gimple_min_lval (tree expr)
3128 {
3129 tree op;
3130
3131 if (is_gimple_id (expr))
3132 return false;
3133
3134 if (TREE_CODE (expr) != TARGET_MEM_REF
3135 && TREE_CODE (expr) != MEM_REF)
3136 {
3137 error ("invalid expression for min lvalue");
3138 return true;
3139 }
3140
3141 /* TARGET_MEM_REFs are strange beasts. */
3142 if (TREE_CODE (expr) == TARGET_MEM_REF)
3143 return false;
3144
3145 op = TREE_OPERAND (expr, 0);
3146 if (!is_gimple_val (op))
3147 {
3148 error ("invalid operand in indirect reference");
3149 debug_generic_stmt (op);
3150 return true;
3151 }
3152 /* Memory references now generally can involve a value conversion. */
3153
3154 return false;
3155 }
3156
3157 /* Verify if EXPR is a valid GIMPLE reference expression. If
3158 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3159 if there is an error, otherwise false. */
3160
3161 static bool
3162 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3163 {
3164 while (handled_component_p (expr))
3165 {
3166 tree op = TREE_OPERAND (expr, 0);
3167
3168 if (TREE_CODE (expr) == ARRAY_REF
3169 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3170 {
3171 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3172 || (TREE_OPERAND (expr, 2)
3173 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3174 || (TREE_OPERAND (expr, 3)
3175 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3176 {
3177 error ("invalid operands to array reference");
3178 debug_generic_stmt (expr);
3179 return true;
3180 }
3181 }
3182
3183 /* Verify if the reference array element types are compatible. */
3184 if (TREE_CODE (expr) == ARRAY_REF
3185 && !useless_type_conversion_p (TREE_TYPE (expr),
3186 TREE_TYPE (TREE_TYPE (op))))
3187 {
3188 error ("type mismatch in array reference");
3189 debug_generic_stmt (TREE_TYPE (expr));
3190 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3191 return true;
3192 }
3193 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3194 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3195 TREE_TYPE (TREE_TYPE (op))))
3196 {
3197 error ("type mismatch in array range reference");
3198 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3199 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3200 return true;
3201 }
3202
3203 if ((TREE_CODE (expr) == REALPART_EXPR
3204 || TREE_CODE (expr) == IMAGPART_EXPR)
3205 && !useless_type_conversion_p (TREE_TYPE (expr),
3206 TREE_TYPE (TREE_TYPE (op))))
3207 {
3208 error ("type mismatch in real/imagpart reference");
3209 debug_generic_stmt (TREE_TYPE (expr));
3210 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3211 return true;
3212 }
3213
3214 if (TREE_CODE (expr) == COMPONENT_REF
3215 && !useless_type_conversion_p (TREE_TYPE (expr),
3216 TREE_TYPE (TREE_OPERAND (expr, 1))))
3217 {
3218 error ("type mismatch in component reference");
3219 debug_generic_stmt (TREE_TYPE (expr));
3220 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3221 return true;
3222 }
3223
3224 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3225 {
3226 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3227 that their operand is not an SSA name or an invariant when
3228 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3229 bug). Otherwise there is nothing to verify, gross mismatches at
3230 most invoke undefined behavior. */
3231 if (require_lvalue
3232 && (TREE_CODE (op) == SSA_NAME
3233 || is_gimple_min_invariant (op)))
3234 {
3235 error ("conversion of an SSA_NAME on the left hand side");
3236 debug_generic_stmt (expr);
3237 return true;
3238 }
3239 else if (TREE_CODE (op) == SSA_NAME
3240 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3241 {
3242 error ("conversion of register to a different size");
3243 debug_generic_stmt (expr);
3244 return true;
3245 }
3246 else if (!handled_component_p (op))
3247 return false;
3248 }
3249
3250 expr = op;
3251 }
3252
3253 if (TREE_CODE (expr) == MEM_REF)
3254 {
3255 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3256 {
3257 error ("invalid address operand in MEM_REF");
3258 debug_generic_stmt (expr);
3259 return true;
3260 }
3261 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3262 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3263 {
3264 error ("invalid offset operand in MEM_REF");
3265 debug_generic_stmt (expr);
3266 return true;
3267 }
3268 }
3269 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3270 {
3271 if (!TMR_BASE (expr)
3272 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3273 {
3274 error ("invalid address operand in TARGET_MEM_REF");
3275 return true;
3276 }
3277 if (!TMR_OFFSET (expr)
3278 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3279 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3280 {
3281 error ("invalid offset operand in TARGET_MEM_REF");
3282 debug_generic_stmt (expr);
3283 return true;
3284 }
3285 }
3286
3287 return ((require_lvalue || !is_gimple_min_invariant (expr))
3288 && verify_types_in_gimple_min_lval (expr));
3289 }
3290
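/* Example (editor's sketch): with F a field of array-of-_Complex-float
   type, the reference __imag a.f[i_1] is the nest
     IMAGPART_EXPR <ARRAY_REF <COMPONENT_REF <a, f>, i_1>>
   which the loop above peels from the outside in, checking at each
   level that the types agree (e.g. that the ARRAY_REF type matches
   the array's element type) until the base object A is reached. */
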
3291 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3292 list of pointer-to types that is trivially convertible to DEST. */
3293
3294 static bool
3295 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3296 {
3297 tree src;
3298
3299 if (!TYPE_POINTER_TO (src_obj))
3300 return true;
3301
3302 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3303 if (useless_type_conversion_p (dest, src))
3304 return true;
3305
3306 return false;
3307 }
3308
3309 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3310 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3311
3312 static bool
3313 valid_fixed_convert_types_p (tree type1, tree type2)
3314 {
3315 return (FIXED_POINT_TYPE_P (type1)
3316 && (INTEGRAL_TYPE_P (type2)
3317 || SCALAR_FLOAT_TYPE_P (type2)
3318 || FIXED_POINT_TYPE_P (type2)));
3319 }
3320
3321 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3322 is a problem, otherwise false. */
3323
3324 static bool
3325 verify_gimple_call (gcall *stmt)
3326 {
3327 tree fn = gimple_call_fn (stmt);
3328 tree fntype, fndecl;
3329 unsigned i;
3330
3331 if (gimple_call_internal_p (stmt))
3332 {
3333 if (fn)
3334 {
3335 error ("gimple call has two targets");
3336 debug_generic_stmt (fn);
3337 return true;
3338 }
3339 }
3340 else
3341 {
3342 if (!fn)
3343 {
3344 error ("gimple call has no target");
3345 return true;
3346 }
3347 }
3348
3349 if (fn && !is_gimple_call_addr (fn))
3350 {
3351 error ("invalid function in gimple call");
3352 debug_generic_stmt (fn);
3353 return true;
3354 }
3355
3356 if (fn
3357 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3358 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3359 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3360 {
3361 error ("non-function in gimple call");
3362 return true;
3363 }
3364
3365 fndecl = gimple_call_fndecl (stmt);
3366 if (fndecl
3367 && TREE_CODE (fndecl) == FUNCTION_DECL
3368 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3369 && !DECL_PURE_P (fndecl)
3370 && !TREE_READONLY (fndecl))
3371 {
3372 error ("invalid pure const state for function");
3373 return true;
3374 }
3375
3376 tree lhs = gimple_call_lhs (stmt);
3377 if (lhs
3378 && (!is_gimple_lvalue (lhs)
3379 || verify_types_in_gimple_reference (lhs, true)))
3380 {
3381 error ("invalid LHS in gimple call");
3382 return true;
3383 }
3384
3385 if (lhs
3386 && gimple_call_ctrl_altering_p (stmt)
3387 && gimple_call_noreturn_p (stmt)
3388 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3389 {
3390 error ("LHS in noreturn call");
3391 return true;
3392 }
3393
3394 fntype = gimple_call_fntype (stmt);
3395 if (fntype
3396 && lhs
3397 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3398 /* ??? At least C++ misses conversions at assignments from
3399 void * call results.
3400 ??? Java is completely off. Especially with functions
3401 returning java.lang.Object.
3402 For now simply allow arbitrary pointer type conversions. */
3403 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3404 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3405 {
3406 error ("invalid conversion in gimple call");
3407 debug_generic_stmt (TREE_TYPE (lhs));
3408 debug_generic_stmt (TREE_TYPE (fntype));
3409 return true;
3410 }
3411
3412 if (gimple_call_chain (stmt)
3413 && !is_gimple_val (gimple_call_chain (stmt)))
3414 {
3415 error ("invalid static chain in gimple call");
3416 debug_generic_stmt (gimple_call_chain (stmt));
3417 return true;
3418 }
3419
3420 /* If there is a static chain argument, the call should either be
3421 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3422 if (gimple_call_chain (stmt)
3423 && fndecl
3424 && !DECL_STATIC_CHAIN (fndecl))
3425 {
3426 error ("static chain with function that doesn%'t use one");
3427 return true;
3428 }
3429
3430 /* ??? The C frontend passes unpromoted arguments in case it
3431 didn't see a function declaration before the call. So for now
3432 leave the call arguments mostly unverified. Once we gimplify
3433 unit-at-a-time we have a chance to fix this. */
3434
3435 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3436 {
3437 tree arg = gimple_call_arg (stmt, i);
3438 if ((is_gimple_reg_type (TREE_TYPE (arg))
3439 && !is_gimple_val (arg))
3440 || (!is_gimple_reg_type (TREE_TYPE (arg))
3441 && !is_gimple_lvalue (arg)))
3442 {
3443 error ("invalid argument to gimple call");
3444 debug_generic_expr (arg);
3445 return true;
3446 }
3447 }
3448
3449 return false;
3450 }
3451
3452 /* Verifies the gimple comparison with the result type TYPE and
3453 the operands OP0 and OP1. */
3454
3455 static bool
3456 verify_gimple_comparison (tree type, tree op0, tree op1)
3457 {
3458 tree op0_type = TREE_TYPE (op0);
3459 tree op1_type = TREE_TYPE (op1);
3460
3461 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3462 {
3463 error ("invalid operands in gimple comparison");
3464 return true;
3465 }
3466
3467 /* For comparisons we do not have the operation's type as the
3468 effective type the comparison is carried out in. Instead
3469 we require that either the first operand is trivially
3470 convertible into the second, or the other way around.
3471 Because we special-case pointers to void we allow
3472 comparisons of pointers with the same mode as well. */
3473 if (!useless_type_conversion_p (op0_type, op1_type)
3474 && !useless_type_conversion_p (op1_type, op0_type)
3475 && (!POINTER_TYPE_P (op0_type)
3476 || !POINTER_TYPE_P (op1_type)
3477 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3478 {
3479 error ("mismatching comparison operand types");
3480 debug_generic_expr (op0_type);
3481 debug_generic_expr (op1_type);
3482 return true;
3483 }
3484
3485 /* The resulting type of a comparison may be an effective boolean type. */
3486 if (INTEGRAL_TYPE_P (type)
3487 && (TREE_CODE (type) == BOOLEAN_TYPE
3488 || TYPE_PRECISION (type) == 1))
3489 {
3490 if (TREE_CODE (op0_type) == VECTOR_TYPE
3491 || TREE_CODE (op1_type) == VECTOR_TYPE)
3492 {
3493 error ("vector comparison returning a boolean");
3494 debug_generic_expr (op0_type);
3495 debug_generic_expr (op1_type);
3496 return true;
3497 }
3498 }
3499 /* Or an integer vector type with the same size and element count
3500 as the comparison operand types. */
3501 else if (TREE_CODE (type) == VECTOR_TYPE
3502 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3503 {
3504 if (TREE_CODE (op0_type) != VECTOR_TYPE
3505 || TREE_CODE (op1_type) != VECTOR_TYPE)
3506 {
3507 error ("non-vector operands in vector comparison");
3508 debug_generic_expr (op0_type);
3509 debug_generic_expr (op1_type);
3510 return true;
3511 }
3512
3513 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3514 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3515 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3516 /* The result of a vector comparison is of signed
3517 integral type. */
3518 || TYPE_UNSIGNED (TREE_TYPE (type)))
3519 {
3520 error ("invalid vector comparison resulting type");
3521 debug_generic_expr (type);
3522 return true;
3523 }
3524 }
3525 else
3526 {
3527 error ("bogus comparison result type");
3528 debug_generic_expr (type);
3529 return true;
3530 }
3531
3532 return false;
3533 }
3534
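/* Examples (editor's illustration): the comparison
     _1 = a_2(D) < b_3(D);
   verifies when a and b have mutually convertible scalar types and _1
   has boolean (or single-bit integral) type. A comparison of two V4SI
   vectors must instead produce a signed integer vector with four
   elements of matching element size; a boolean result there is
   rejected above. */
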
3535 /* Verify a gimple assignment statement STMT with a unary rhs.
3536 Returns true if anything is wrong. */
3537
3538 static bool
3539 verify_gimple_assign_unary (gassign *stmt)
3540 {
3541 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3542 tree lhs = gimple_assign_lhs (stmt);
3543 tree lhs_type = TREE_TYPE (lhs);
3544 tree rhs1 = gimple_assign_rhs1 (stmt);
3545 tree rhs1_type = TREE_TYPE (rhs1);
3546
3547 if (!is_gimple_reg (lhs))
3548 {
3549 error ("non-register as LHS of unary operation");
3550 return true;
3551 }
3552
3553 if (!is_gimple_val (rhs1))
3554 {
3555 error ("invalid operand in unary operation");
3556 return true;
3557 }
3558
3559 /* First handle conversions. */
3560 switch (rhs_code)
3561 {
3562 CASE_CONVERT:
3563 {
3564 /* Allow conversions from pointer type to integral type only if
3565 there is no sign or zero extension involved.
3566 For targets where the precision of ptrofftype doesn't match that
3567 of pointers we need to allow arbitrary conversions to ptrofftype. */
3568 if ((POINTER_TYPE_P (lhs_type)
3569 && INTEGRAL_TYPE_P (rhs1_type))
3570 || (POINTER_TYPE_P (rhs1_type)
3571 && INTEGRAL_TYPE_P (lhs_type)
3572 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3573 || ptrofftype_p (sizetype))))
3574 return false;
3575
3576 /* Allow conversion from integral to offset type and vice versa. */
3577 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3578 && INTEGRAL_TYPE_P (rhs1_type))
3579 || (INTEGRAL_TYPE_P (lhs_type)
3580 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3581 return false;
3582
3583 /* Otherwise assert we are converting between types of the
3584 same kind. */
3585 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3586 {
3587 error ("invalid types in nop conversion");
3588 debug_generic_expr (lhs_type);
3589 debug_generic_expr (rhs1_type);
3590 return true;
3591 }
3592
3593 return false;
3594 }
3595
3596 case ADDR_SPACE_CONVERT_EXPR:
3597 {
3598 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3599 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3600 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3601 {
3602 error ("invalid types in address space conversion");
3603 debug_generic_expr (lhs_type);
3604 debug_generic_expr (rhs1_type);
3605 return true;
3606 }
3607
3608 return false;
3609 }
3610
3611 case FIXED_CONVERT_EXPR:
3612 {
3613 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3614 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3615 {
3616 error ("invalid types in fixed-point conversion");
3617 debug_generic_expr (lhs_type);
3618 debug_generic_expr (rhs1_type);
3619 return true;
3620 }
3621
3622 return false;
3623 }
3624
3625 case FLOAT_EXPR:
3626 {
3627 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3628 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3629 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3630 {
3631 error ("invalid types in conversion to floating point");
3632 debug_generic_expr (lhs_type);
3633 debug_generic_expr (rhs1_type);
3634 return true;
3635 }
3636
3637 return false;
3638 }
3639
3640 case FIX_TRUNC_EXPR:
3641 {
3642 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3643 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3644 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3645 {
3646 error ("invalid types in conversion to integer");
3647 debug_generic_expr (lhs_type);
3648 debug_generic_expr (rhs1_type);
3649 return true;
3650 }
3651
3652 return false;
3653 }
3654 case REDUC_MAX_EXPR:
3655 case REDUC_MIN_EXPR:
3656 case REDUC_PLUS_EXPR:
3657 if (!VECTOR_TYPE_P (rhs1_type)
3658 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3659 {
3660 error ("reduction should convert from vector to element type");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs1_type);
3663 return true;
3664 }
3665 return false;
3666
3667 case VEC_UNPACK_HI_EXPR:
3668 case VEC_UNPACK_LO_EXPR:
3669 case VEC_UNPACK_FLOAT_HI_EXPR:
3670 case VEC_UNPACK_FLOAT_LO_EXPR:
3671 /* FIXME. */
3672 return false;
3673
3674 case NEGATE_EXPR:
3675 case ABS_EXPR:
3676 case BIT_NOT_EXPR:
3677 case PAREN_EXPR:
3678 case CONJ_EXPR:
3679 break;
3680
3681 default:
3682 gcc_unreachable ();
3683 }
3684
3685 /* For the remaining codes assert there is no conversion involved. */
3686 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3687 {
3688 error ("non-trivial conversion in unary operation");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 return true;
3692 }
3693
3694 return false;
3695 }
3696
3697 /* Verify a gimple assignment statement STMT with a binary rhs.
3698 Returns true if anything is wrong. */
3699
3700 static bool
3701 verify_gimple_assign_binary (gassign *stmt)
3702 {
3703 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3704 tree lhs = gimple_assign_lhs (stmt);
3705 tree lhs_type = TREE_TYPE (lhs);
3706 tree rhs1 = gimple_assign_rhs1 (stmt);
3707 tree rhs1_type = TREE_TYPE (rhs1);
3708 tree rhs2 = gimple_assign_rhs2 (stmt);
3709 tree rhs2_type = TREE_TYPE (rhs2);
3710
3711 if (!is_gimple_reg (lhs))
3712 {
3713 error ("non-register as LHS of binary operation");
3714 return true;
3715 }
3716
3717 if (!is_gimple_val (rhs1)
3718 || !is_gimple_val (rhs2))
3719 {
3720 error ("invalid operands in binary operation");
3721 return true;
3722 }
3723
3724 /* First handle operations that involve different types. */
3725 switch (rhs_code)
3726 {
3727 case COMPLEX_EXPR:
3728 {
3729 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3730 || !(INTEGRAL_TYPE_P (rhs1_type)
3731 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3732 || !(INTEGRAL_TYPE_P (rhs2_type)
3733 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3734 {
3735 error ("type mismatch in complex expression");
3736 debug_generic_expr (lhs_type);
3737 debug_generic_expr (rhs1_type);
3738 debug_generic_expr (rhs2_type);
3739 return true;
3740 }
3741
3742 return false;
3743 }
3744
3745 case LSHIFT_EXPR:
3746 case RSHIFT_EXPR:
3747 case LROTATE_EXPR:
3748 case RROTATE_EXPR:
3749 {
3750 /* Shifts and rotates are ok on integral types, fixed point
3751 types and integer vector types. */
3752 if ((!INTEGRAL_TYPE_P (rhs1_type)
3753 && !FIXED_POINT_TYPE_P (rhs1_type)
3754 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3755 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3756 || (!INTEGRAL_TYPE_P (rhs2_type)
3757 /* Vector shifts of vectors are also ok. */
3758 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3759 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3760 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3761 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3762 || !useless_type_conversion_p (lhs_type, rhs1_type))
3763 {
3764 error ("type mismatch in shift expression");
3765 debug_generic_expr (lhs_type);
3766 debug_generic_expr (rhs1_type);
3767 debug_generic_expr (rhs2_type);
3768 return true;
3769 }
3770
3771 return false;
3772 }
3773
3774 case WIDEN_LSHIFT_EXPR:
3775 {
3776 if (!INTEGRAL_TYPE_P (lhs_type)
3777 || !INTEGRAL_TYPE_P (rhs1_type)
3778 || TREE_CODE (rhs2) != INTEGER_CST
3779 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3780 {
3781 error ("type mismatch in widening vector shift expression");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 debug_generic_expr (rhs2_type);
3785 return true;
3786 }
3787
3788 return false;
3789 }
3790
3791 case VEC_WIDEN_LSHIFT_HI_EXPR:
3792 case VEC_WIDEN_LSHIFT_LO_EXPR:
3793 {
3794 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3795 || TREE_CODE (lhs_type) != VECTOR_TYPE
3796 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3797 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3798 || TREE_CODE (rhs2) != INTEGER_CST
3799 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3800 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3801 {
3802 error ("type mismatch in widening vector shift expression");
3803 debug_generic_expr (lhs_type);
3804 debug_generic_expr (rhs1_type);
3805 debug_generic_expr (rhs2_type);
3806 return true;
3807 }
3808
3809 return false;
3810 }
3811
3812 case PLUS_EXPR:
3813 case MINUS_EXPR:
3814 {
3815 tree lhs_etype = lhs_type;
3816 tree rhs1_etype = rhs1_type;
3817 tree rhs2_etype = rhs2_type;
3818 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3819 {
3820 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3821 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3822 {
3823 error ("invalid non-vector operands to vector valued plus");
3824 return true;
3825 }
3826 lhs_etype = TREE_TYPE (lhs_type);
3827 rhs1_etype = TREE_TYPE (rhs1_type);
3828 rhs2_etype = TREE_TYPE (rhs2_type);
3829 }
3830 if (POINTER_TYPE_P (lhs_etype)
3831 || POINTER_TYPE_P (rhs1_etype)
3832 || POINTER_TYPE_P (rhs2_etype))
3833 {
3834 error ("invalid (pointer) operands to plus/minus");
3835 return true;
3836 }
3837
3838 /* Continue with generic binary expression handling. */
3839 break;
3840 }
3841
3842 case POINTER_PLUS_EXPR:
3843 {
3844 if (!POINTER_TYPE_P (rhs1_type)
3845 || !useless_type_conversion_p (lhs_type, rhs1_type)
3846 || !ptrofftype_p (rhs2_type))
3847 {
3848 error ("type mismatch in pointer plus expression");
3849 debug_generic_stmt (lhs_type);
3850 debug_generic_stmt (rhs1_type);
3851 debug_generic_stmt (rhs2_type);
3852 return true;
3853 }
3854
3855 return false;
3856 }
3857
3858 case TRUTH_ANDIF_EXPR:
3859 case TRUTH_ORIF_EXPR:
3860 case TRUTH_AND_EXPR:
3861 case TRUTH_OR_EXPR:
3862 case TRUTH_XOR_EXPR:
3863
3864 gcc_unreachable ();
3865
3866 case LT_EXPR:
3867 case LE_EXPR:
3868 case GT_EXPR:
3869 case GE_EXPR:
3870 case EQ_EXPR:
3871 case NE_EXPR:
3872 case UNORDERED_EXPR:
3873 case ORDERED_EXPR:
3874 case UNLT_EXPR:
3875 case UNLE_EXPR:
3876 case UNGT_EXPR:
3877 case UNGE_EXPR:
3878 case UNEQ_EXPR:
3879 case LTGT_EXPR:
3880 /* Comparisons are also binary, but the result type is not
3881 connected to the operand types. */
3882 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3883
3884 case WIDEN_MULT_EXPR:
3885 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3886 return true;
3887 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3888 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3889
3890 case WIDEN_SUM_EXPR:
3891 case VEC_WIDEN_MULT_HI_EXPR:
3892 case VEC_WIDEN_MULT_LO_EXPR:
3893 case VEC_WIDEN_MULT_EVEN_EXPR:
3894 case VEC_WIDEN_MULT_ODD_EXPR:
3895 case VEC_PACK_TRUNC_EXPR:
3896 case VEC_PACK_SAT_EXPR:
3897 case VEC_PACK_FIX_TRUNC_EXPR:
3898 /* FIXME. */
3899 return false;
3900
3901 case MULT_EXPR:
3902 case MULT_HIGHPART_EXPR:
3903 case TRUNC_DIV_EXPR:
3904 case CEIL_DIV_EXPR:
3905 case FLOOR_DIV_EXPR:
3906 case ROUND_DIV_EXPR:
3907 case TRUNC_MOD_EXPR:
3908 case CEIL_MOD_EXPR:
3909 case FLOOR_MOD_EXPR:
3910 case ROUND_MOD_EXPR:
3911 case RDIV_EXPR:
3912 case EXACT_DIV_EXPR:
3913 case MIN_EXPR:
3914 case MAX_EXPR:
3915 case BIT_IOR_EXPR:
3916 case BIT_XOR_EXPR:
3917 case BIT_AND_EXPR:
3918 /* Continue with generic binary expression handling. */
3919 break;
3920
3921 default:
3922 gcc_unreachable ();
3923 }
3924
3925 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3926 || !useless_type_conversion_p (lhs_type, rhs2_type))
3927 {
3928 error ("type mismatch in binary expression");
3929 debug_generic_stmt (lhs_type);
3930 debug_generic_stmt (rhs1_type);
3931 debug_generic_stmt (rhs2_type);
3932 return true;
3933 }
3934
3935 return false;
3936 }
3937
3938 /* Verify a gimple assignment statement STMT with a ternary rhs.
3939 Returns true if anything is wrong. */
3940
3941 static bool
3942 verify_gimple_assign_ternary (gassign *stmt)
3943 {
3944 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3945 tree lhs = gimple_assign_lhs (stmt);
3946 tree lhs_type = TREE_TYPE (lhs);
3947 tree rhs1 = gimple_assign_rhs1 (stmt);
3948 tree rhs1_type = TREE_TYPE (rhs1);
3949 tree rhs2 = gimple_assign_rhs2 (stmt);
3950 tree rhs2_type = TREE_TYPE (rhs2);
3951 tree rhs3 = gimple_assign_rhs3 (stmt);
3952 tree rhs3_type = TREE_TYPE (rhs3);
3953
3954 if (!is_gimple_reg (lhs))
3955 {
3956 error ("non-register as LHS of ternary operation");
3957 return true;
3958 }
3959
3960 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3961 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3962 || !is_gimple_val (rhs2)
3963 || !is_gimple_val (rhs3))
3964 {
3965 error ("invalid operands in ternary operation");
3966 return true;
3967 }
3968
3969 /* First handle operations that involve different types. */
3970 switch (rhs_code)
3971 {
3972 case WIDEN_MULT_PLUS_EXPR:
3973 case WIDEN_MULT_MINUS_EXPR:
3974 if ((!INTEGRAL_TYPE_P (rhs1_type)
3975 && !FIXED_POINT_TYPE_P (rhs1_type))
3976 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3977 || !useless_type_conversion_p (lhs_type, rhs3_type)
3978 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3979 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3980 {
3981 error ("type mismatch in widening multiply-accumulate expression");
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 debug_generic_expr (rhs2_type);
3985 debug_generic_expr (rhs3_type);
3986 return true;
3987 }
3988 break;
3989
3990 case FMA_EXPR:
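/* E.g. x_1 = FMA <a_2, b_3, c_4> computes a_2 * b_3 + c_4 in a
   single rounding step; all four operands must share one type.
   (Illustrative sketch.)  */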
3991 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3992 || !useless_type_conversion_p (lhs_type, rhs2_type)
3993 || !useless_type_conversion_p (lhs_type, rhs3_type))
3994 {
3995 error ("type mismatch in fused multiply-add expression");
3996 debug_generic_expr (lhs_type);
3997 debug_generic_expr (rhs1_type);
3998 debug_generic_expr (rhs2_type);
3999 debug_generic_expr (rhs3_type);
4000 return true;
4001 }
4002 break;
4003
4004 case COND_EXPR:
4005 case VEC_COND_EXPR:
4006 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4007 || !useless_type_conversion_p (lhs_type, rhs3_type))
4008 {
4009 error ("type mismatch in conditional expression");
4010 debug_generic_expr (lhs_type);
4011 debug_generic_expr (rhs2_type);
4012 debug_generic_expr (rhs3_type);
4013 return true;
4014 }
4015 break;
4016
4017 case VEC_PERM_EXPR:
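/* E.g. with v4si operands, v_1 = VEC_PERM_EXPR <a_2, b_3,
   { 0, 4, 1, 5 }> interleaves the low halves of a_2 and b_3:
   selector element i picks element sel[i] from the concatenation of
   the two input vectors.  (Illustrative sketch with a hypothetical
   v4si type.)  */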
4018 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4019 || !useless_type_conversion_p (lhs_type, rhs2_type))
4020 {
4021 error ("type mismatch in vector permute expression");
4022 debug_generic_expr (lhs_type);
4023 debug_generic_expr (rhs1_type);
4024 debug_generic_expr (rhs2_type);
4025 debug_generic_expr (rhs3_type);
4026 return true;
4027 }
4028
4029 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4030 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4031 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4032 {
4033 error ("vector types expected in vector permute expression");
4034 debug_generic_expr (lhs_type);
4035 debug_generic_expr (rhs1_type);
4036 debug_generic_expr (rhs2_type);
4037 debug_generic_expr (rhs3_type);
4038 return true;
4039 }
4040
4041 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4042 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4043 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4044 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4045 != TYPE_VECTOR_SUBPARTS (lhs_type))
4046 {
4047 error ("vectors with different element number found "
4048 "in vector permute expression");
4049 debug_generic_expr (lhs_type);
4050 debug_generic_expr (rhs1_type);
4051 debug_generic_expr (rhs2_type);
4052 debug_generic_expr (rhs3_type);
4053 return true;
4054 }
4055
4056 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4057 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4058 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4059 {
4060 error ("invalid mask type in vector permute expression");
4061 debug_generic_expr (lhs_type);
4062 debug_generic_expr (rhs1_type);
4063 debug_generic_expr (rhs2_type);
4064 debug_generic_expr (rhs3_type);
4065 return true;
4066 }
4067
4068 return false;
4069
4070 case SAD_EXPR:
4071 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4072 || !useless_type_conversion_p (lhs_type, rhs3_type)
4073 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4074 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4075 > GET_MODE_BITSIZE (GET_MODE_INNER
4076 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4077 {
4078 error ("type mismatch in sad expression");
4079 debug_generic_expr (lhs_type);
4080 debug_generic_expr (rhs1_type);
4081 debug_generic_expr (rhs2_type);
4082 debug_generic_expr (rhs3_type);
4083 return true;
4084 }
4085
4086 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4087 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4088 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4089 {
4090 error ("vector types expected in sad expression");
4091 debug_generic_expr (lhs_type);
4092 debug_generic_expr (rhs1_type);
4093 debug_generic_expr (rhs2_type);
4094 debug_generic_expr (rhs3_type);
4095 return true;
4096 }
4097
4098 return false;
4099
4100 case DOT_PROD_EXPR:
4101 case REALIGN_LOAD_EXPR:
4102 /* FIXME. */
4103 return false;
4104
4105 default:
4106 gcc_unreachable ();
4107 }
4108 return false;
4109 }
4110
4111 /* Verify a gimple assignment statement STMT with a single rhs.
4112 Returns true if anything is wrong. */
4113
4114 static bool
4115 verify_gimple_assign_single (gassign *stmt)
4116 {
4117 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4118 tree lhs = gimple_assign_lhs (stmt);
4119 tree lhs_type = TREE_TYPE (lhs);
4120 tree rhs1 = gimple_assign_rhs1 (stmt);
4121 tree rhs1_type = TREE_TYPE (rhs1);
4122 bool res = false;
4123
4124 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4125 {
4126 error ("non-trivial conversion at assignment");
4127 debug_generic_expr (lhs_type);
4128 debug_generic_expr (rhs1_type);
4129 return true;
4130 }
4131
4132 if (gimple_clobber_p (stmt)
4133 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4134 {
4135 error ("non-decl/MEM_REF LHS in clobber statement");
4136 debug_generic_expr (lhs);
4137 return true;
4138 }
4139
4140 if (handled_component_p (lhs)
4141 || TREE_CODE (lhs) == MEM_REF
4142 || TREE_CODE (lhs) == TARGET_MEM_REF)
4143 res |= verify_types_in_gimple_reference (lhs, true);
4144
4145 /* Special codes we cannot handle via their class. */
4146 switch (rhs_code)
4147 {
4148 case ADDR_EXPR:
4149 {
4150 tree op = TREE_OPERAND (rhs1, 0);
4151 if (!is_gimple_addressable (op))
4152 {
4153 error ("invalid operand in unary expression");
4154 return true;
4155 }
4156
4157 /* Technically there is no longer a need for matching types, but
4158 gimple hygiene asks for this check. In LTO we can combine
4159 incompatible units and thus end up with addresses of globals
4160 whose type changes to a common one. */
4161 if (!in_lto_p
4162 && !types_compatible_p (TREE_TYPE (op),
4163 TREE_TYPE (TREE_TYPE (rhs1)))
4164 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4165 TREE_TYPE (op)))
4166 {
4167 error ("type mismatch in address expression");
4168 debug_generic_stmt (TREE_TYPE (rhs1));
4169 debug_generic_stmt (TREE_TYPE (op));
4170 return true;
4171 }
4172
4173 return verify_types_in_gimple_reference (op, true);
4174 }
4175
4176 /* tcc_reference */
4177 case INDIRECT_REF:
4178 error ("INDIRECT_REF in gimple IL");
4179 return true;
4180
4181 case COMPONENT_REF:
4182 case BIT_FIELD_REF:
4183 case ARRAY_REF:
4184 case ARRAY_RANGE_REF:
4185 case VIEW_CONVERT_EXPR:
4186 case REALPART_EXPR:
4187 case IMAGPART_EXPR:
4188 case TARGET_MEM_REF:
4189 case MEM_REF:
4190 if (!is_gimple_reg (lhs)
4191 && is_gimple_reg_type (TREE_TYPE (lhs)))
4192 {
4193 error ("invalid rhs for gimple memory store");
4194 debug_generic_stmt (lhs);
4195 debug_generic_stmt (rhs1);
4196 return true;
4197 }
4198 return res || verify_types_in_gimple_reference (rhs1, false);
4199
4200 /* tcc_constant */
4201 case SSA_NAME:
4202 case INTEGER_CST:
4203 case REAL_CST:
4204 case FIXED_CST:
4205 case COMPLEX_CST:
4206 case VECTOR_CST:
4207 case STRING_CST:
4208 return res;
4209
4210 /* tcc_declaration */
4211 case CONST_DECL:
4212 return res;
4213 case VAR_DECL:
4214 case PARM_DECL:
4215 if (!is_gimple_reg (lhs)
4216 && !is_gimple_reg (rhs1)
4217 && is_gimple_reg_type (TREE_TYPE (lhs)))
4218 {
4219 error ("invalid rhs for gimple memory store");
4220 debug_generic_stmt (lhs);
4221 debug_generic_stmt (rhs1);
4222 return true;
4223 }
4224 return res;
4225
4226 case CONSTRUCTOR:
4227 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4228 {
4229 unsigned int i;
4230 tree elt_i, elt_v, elt_t = NULL_TREE;
4231
4232 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4233 return res;
4234 /* For vector CONSTRUCTORs we require that either it is an empty
4235 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4236 (then the element count must be correct to cover the whole
4237 outer vector and the index must be NULL on all elements), or it
4238 is a CONSTRUCTOR of scalar elements, where as an exception we
4239 allow a smaller number of elements (assuming zero filling) and
4240 consecutive indexes as an alternative to NULL indexes (such
4241 CONSTRUCTORs can appear in the IL from FEs). */
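/* A sketch of the accepted forms, using hypothetical v4si and v2si
   types:
     { a_1, b_2, c_3, d_4 }   scalar elements, NULL indexes
     { lo_5, hi_6 }           two v2si halves of a v4si
     { a_1, b_2 }             scalars, remainder zero filled
   while e.g. { [3]: a_1, [0]: b_2 } is rejected by the index check
   below.  */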
4242 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4243 {
4244 if (elt_t == NULL_TREE)
4245 {
4246 elt_t = TREE_TYPE (elt_v);
4247 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4248 {
4249 tree elt_t = TREE_TYPE (elt_v);
4250 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4251 TREE_TYPE (elt_t)))
4252 {
4253 error ("incorrect type of vector CONSTRUCTOR"
4254 " elements");
4255 debug_generic_stmt (rhs1);
4256 return true;
4257 }
4258 else if (CONSTRUCTOR_NELTS (rhs1)
4259 * TYPE_VECTOR_SUBPARTS (elt_t)
4260 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4261 {
4262 error ("incorrect number of vector CONSTRUCTOR"
4263 " elements");
4264 debug_generic_stmt (rhs1);
4265 return true;
4266 }
4267 }
4268 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4269 elt_t))
4270 {
4271 error ("incorrect type of vector CONSTRUCTOR elements");
4272 debug_generic_stmt (rhs1);
4273 return true;
4274 }
4275 else if (CONSTRUCTOR_NELTS (rhs1)
4276 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4277 {
4278 error ("incorrect number of vector CONSTRUCTOR elements");
4279 debug_generic_stmt (rhs1);
4280 return true;
4281 }
4282 }
4283 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4284 {
4285 error ("incorrect type of vector CONSTRUCTOR elements");
4286 debug_generic_stmt (rhs1);
4287 return true;
4288 }
4289 if (elt_i != NULL_TREE
4290 && (TREE_CODE (elt_t) == VECTOR_TYPE
4291 || TREE_CODE (elt_i) != INTEGER_CST
4292 || compare_tree_int (elt_i, i) != 0))
4293 {
4294 error ("vector CONSTRUCTOR with non-NULL element index");
4295 debug_generic_stmt (rhs1);
4296 return true;
4297 }
4298 if (!is_gimple_val (elt_v))
4299 {
4300 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4301 debug_generic_stmt (rhs1);
4302 return true;
4303 }
4304 }
4305 }
4306 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4307 {
4308 error ("non-vector CONSTRUCTOR with elements");
4309 debug_generic_stmt (rhs1);
4310 return true;
4311 }
4312 return res;
4313 case OBJ_TYPE_REF:
4314 case ASSERT_EXPR:
4315 case WITH_SIZE_EXPR:
4316 /* FIXME. */
4317 return res;
4318
4319 default:;
4320 }
4321
4322 return res;
4323 }
4324
4325 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4326 is a problem, otherwise false. */
4327
4328 static bool
4329 verify_gimple_assign (gassign *stmt)
4330 {
4331 switch (gimple_assign_rhs_class (stmt))
4332 {
4333 case GIMPLE_SINGLE_RHS:
4334 return verify_gimple_assign_single (stmt);
4335
4336 case GIMPLE_UNARY_RHS:
4337 return verify_gimple_assign_unary (stmt);
4338
4339 case GIMPLE_BINARY_RHS:
4340 return verify_gimple_assign_binary (stmt);
4341
4342 case GIMPLE_TERNARY_RHS:
4343 return verify_gimple_assign_ternary (stmt);
4344
4345 default:
4346 gcc_unreachable ();
4347 }
4348 }
4349
4350 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4351 is a problem, otherwise false. */
4352
4353 static bool
4354 verify_gimple_return (greturn *stmt)
4355 {
4356 tree op = gimple_return_retval (stmt);
4357 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4358
4359 /* We cannot check that a return value is present, as we do not fix up
4360 missing return values from the original source. */
4361 if (op == NULL)
4362 return false;
4363
4364 if (!is_gimple_val (op)
4365 && TREE_CODE (op) != RESULT_DECL)
4366 {
4367 error ("invalid operand in return statement");
4368 debug_generic_stmt (op);
4369 return true;
4370 }
4371
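/* A result passed by reference is represented by a RESULT_DECL (or an
   SSA name based on one) of pointer type; strip one level here so the
   check below compares the declared return type against the
   pointed-to type.  */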
4372 if ((TREE_CODE (op) == RESULT_DECL
4373 && DECL_BY_REFERENCE (op))
4374 || (TREE_CODE (op) == SSA_NAME
4375 && SSA_NAME_VAR (op)
4376 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4377 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4378 op = TREE_TYPE (op);
4379
4380 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4381 {
4382 error ("invalid conversion in return statement");
4383 debug_generic_stmt (restype);
4384 debug_generic_stmt (TREE_TYPE (op));
4385 return true;
4386 }
4387
4388 return false;
4389 }
4390
4391
4392 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4393 is a problem, otherwise false. */
4394
4395 static bool
4396 verify_gimple_goto (ggoto *stmt)
4397 {
4398 tree dest = gimple_goto_dest (stmt);
4399
4400 /* ??? We have two canonical forms of direct goto destinations, a
4401 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4402 if (TREE_CODE (dest) != LABEL_DECL
4403 && (!is_gimple_val (dest)
4404 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4405 {
4406 error ("goto destination is neither a label nor a pointer");
4407 return true;
4408 }
4409
4410 return false;
4411 }
4412
4413 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4414 is a problem, otherwise false. */
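/* For instance, the checks below accept a switch of the shape
     switch (i_1) <default: L3, case 1: L0, case 2 ... 5: L1, case 9: L2>
   i.e. the default label sits at index 0 with NULL CASE_LOW and
   CASE_HIGH, every other label has a CASE_LOW, ranges satisfy
   CASE_LOW < CASE_HIGH, all labels share one type no more precise
   than the index type, and the labels are sorted in increasing order
   without overlap.  (Illustrative sketch.)  */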
4415
4416 static bool
4417 verify_gimple_switch (gswitch *stmt)
4418 {
4419 unsigned int i, n;
4420 tree elt, prev_upper_bound = NULL_TREE;
4421 tree index_type, elt_type = NULL_TREE;
4422
4423 if (!is_gimple_val (gimple_switch_index (stmt)))
4424 {
4425 error ("invalid operand to switch statement");
4426 debug_generic_stmt (gimple_switch_index (stmt));
4427 return true;
4428 }
4429
4430 index_type = TREE_TYPE (gimple_switch_index (stmt));
4431 if (! INTEGRAL_TYPE_P (index_type))
4432 {
4433 error ("non-integral type switch statement");
4434 debug_generic_expr (index_type);
4435 return true;
4436 }
4437
4438 elt = gimple_switch_label (stmt, 0);
4439 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4440 {
4441 error ("invalid default case label in switch statement");
4442 debug_generic_expr (elt);
4443 return true;
4444 }
4445
4446 n = gimple_switch_num_labels (stmt);
4447 for (i = 1; i < n; i++)
4448 {
4449 elt = gimple_switch_label (stmt, i);
4450
4451 if (! CASE_LOW (elt))
4452 {
4453 error ("invalid case label in switch statement");
4454 debug_generic_expr (elt);
4455 return true;
4456 }
4457 if (CASE_HIGH (elt)
4458 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4459 {
4460 error ("invalid case range in switch statement");
4461 debug_generic_expr (elt);
4462 return true;
4463 }
4464
4465 if (elt_type)
4466 {
4467 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4468 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4469 {
4470 error ("type mismatch for case label in switch statement");
4471 debug_generic_expr (elt);
4472 return true;
4473 }
4474 }
4475 else
4476 {
4477 elt_type = TREE_TYPE (CASE_LOW (elt));
4478 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4479 {
4480 error ("type precision mismatch in switch statement");
4481 return true;
4482 }
4483 }
4484
4485 if (prev_upper_bound)
4486 {
4487 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4488 {
4489 error ("case labels not sorted in switch statement");
4490 return true;
4491 }
4492 }
4493
4494 prev_upper_bound = CASE_HIGH (elt);
4495 if (! prev_upper_bound)
4496 prev_upper_bound = CASE_LOW (elt);
4497 }
4498
4499 return false;
4500 }
4501
4502 /* Verify a gimple debug statement STMT.
4503 Returns true if anything is wrong. */
4504
4505 static bool
4506 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4507 {
4508 /* There isn't much that could be wrong in a gimple debug stmt. A
4509 gimple debug bind stmt, for example, maps a tree that is usually
4510 a VAR_DECL or a PARM_DECL (but could also be some scalarized
4511 component or member of an aggregate type) to another tree that
4512 can be an arbitrary expression. These stmts expand into debug
4513 insns, and are converted to debug notes by var-tracking.c. */
4514 return false;
4515 }
4516
4517 /* Verify a gimple label statement STMT.
4518 Returns true if anything is wrong. */
4519
4520 static bool
4521 verify_gimple_label (glabel *stmt)
4522 {
4523 tree decl = gimple_label_label (stmt);
4524 int uid;
4525 bool err = false;
4526
4527 if (TREE_CODE (decl) != LABEL_DECL)
4528 return true;
4529 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4530 && DECL_CONTEXT (decl) != current_function_decl)
4531 {
4532 error ("label's context is not the current function decl");
4533 err |= true;
4534 }
4535
4536 uid = LABEL_DECL_UID (decl);
4537 if (cfun->cfg
4538 && (uid == -1
4539 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4540 {
4541 error ("incorrect entry in label_to_block_map");
4542 err |= true;
4543 }
4544
4545 uid = EH_LANDING_PAD_NR (decl);
4546 if (uid)
4547 {
4548 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4549 if (decl != lp->post_landing_pad)
4550 {
4551 error ("incorrect setting of landing pad number");
4552 err |= true;
4553 }
4554 }
4555
4556 return err;
4557 }
4558
4559 /* Verify a gimple cond statement STMT.
4560 Returns true if anything is wrong. */
4561
4562 static bool
4563 verify_gimple_cond (gcond *stmt)
4564 {
4565 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4566 {
4567 error ("invalid comparison code in gimple cond");
4568 return true;
4569 }
4570 if (!(!gimple_cond_true_label (stmt)
4571 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4572 || !(!gimple_cond_false_label (stmt)
4573 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4574 {
4575 error ("invalid labels in gimple cond");
4576 return true;
4577 }
4578
4579 return verify_gimple_comparison (boolean_type_node,
4580 gimple_cond_lhs (stmt),
4581 gimple_cond_rhs (stmt));
4582 }
4583
4584 /* Verify the GIMPLE statement STMT. Returns true if there is an
4585 error, otherwise false. */
4586
4587 static bool
4588 verify_gimple_stmt (gimple stmt)
4589 {
4590 switch (gimple_code (stmt))
4591 {
4592 case GIMPLE_ASSIGN:
4593 return verify_gimple_assign (as_a <gassign *> (stmt));
4594
4595 case GIMPLE_LABEL:
4596 return verify_gimple_label (as_a <glabel *> (stmt));
4597
4598 case GIMPLE_CALL:
4599 return verify_gimple_call (as_a <gcall *> (stmt));
4600
4601 case GIMPLE_COND:
4602 return verify_gimple_cond (as_a <gcond *> (stmt));
4603
4604 case GIMPLE_GOTO:
4605 return verify_gimple_goto (as_a <ggoto *> (stmt));
4606
4607 case GIMPLE_SWITCH:
4608 return verify_gimple_switch (as_a <gswitch *> (stmt));
4609
4610 case GIMPLE_RETURN:
4611 return verify_gimple_return (as_a <greturn *> (stmt));
4612
4613 case GIMPLE_ASM:
4614 return false;
4615
4616 case GIMPLE_TRANSACTION:
4617 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4618
4619 /* Tuples that do not have tree operands. */
4620 case GIMPLE_NOP:
4621 case GIMPLE_PREDICT:
4622 case GIMPLE_RESX:
4623 case GIMPLE_EH_DISPATCH:
4624 case GIMPLE_EH_MUST_NOT_THROW:
4625 return false;
4626
4627 CASE_GIMPLE_OMP:
4628 /* OpenMP directives are validated by the FE and never operated
4629 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4630 non-gimple expressions when the main index variable has had
4631 its address taken. This does not affect the loop itself
4632 because the header of a GIMPLE_OMP_FOR is merely used to determine
4633 how to set up the parallel iteration. */
4634 return false;
4635
4636 case GIMPLE_DEBUG:
4637 return verify_gimple_debug (stmt);
4638
4639 default:
4640 gcc_unreachable ();
4641 }
4642 }
4643
4644 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4645 and false otherwise. */
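/* E.g. x_4 = PHI <x_2(2), 7(4)> is well formed: the result is an SSA
   name and each argument is a GIMPLE value whose virtual or
   non-virtual kind matches the result.  (Illustrative sketch; the
   parenthesized numbers are predecessor basic block indexes.)  */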
4646
4647 static bool
4648 verify_gimple_phi (gimple phi)
4649 {
4650 bool err = false;
4651 unsigned i;
4652 tree phi_result = gimple_phi_result (phi);
4653 bool virtual_p;
4654
4655 if (!phi_result)
4656 {
4657 error ("invalid PHI result");
4658 return true;
4659 }
4660
4661 virtual_p = virtual_operand_p (phi_result);
4662 if (TREE_CODE (phi_result) != SSA_NAME
4663 || (virtual_p
4664 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4665 {
4666 error ("invalid PHI result");
4667 err = true;
4668 }
4669
4670 for (i = 0; i < gimple_phi_num_args (phi); i++)
4671 {
4672 tree t = gimple_phi_arg_def (phi, i);
4673
4674 if (!t)
4675 {
4676 error ("missing PHI def");
4677 err |= true;
4678 continue;
4679 }
4680 /* Addressable variables do have SSA_NAMEs but they
4681 are not considered gimple values. */
4682 else if ((TREE_CODE (t) == SSA_NAME
4683 && virtual_p != virtual_operand_p (t))
4684 || (virtual_p
4685 && (TREE_CODE (t) != SSA_NAME
4686 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4687 || (!virtual_p
4688 && !is_gimple_val (t)))
4689 {
4690 error ("invalid PHI argument");
4691 debug_generic_expr (t);
4692 err |= true;
4693 }
4694 #ifdef ENABLE_TYPES_CHECKING
4695 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4696 {
4697 error ("incompatible types in PHI argument %u", i);
4698 debug_generic_stmt (TREE_TYPE (phi_result));
4699 debug_generic_stmt (TREE_TYPE (t));
4700 err |= true;
4701 }
4702 #endif
4703 }
4704
4705 return err;
4706 }
4707
4708 /* Verify the GIMPLE statements inside the sequence STMTS. */
4709
4710 static bool
4711 verify_gimple_in_seq_2 (gimple_seq stmts)
4712 {
4713 gimple_stmt_iterator ittr;
4714 bool err = false;
4715
4716 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4717 {
4718 gimple stmt = gsi_stmt (ittr);
4719
4720 switch (gimple_code (stmt))
4721 {
4722 case GIMPLE_BIND:
4723 err |= verify_gimple_in_seq_2 (
4724 gimple_bind_body (as_a <gbind *> (stmt)));
4725 break;
4726
4727 case GIMPLE_TRY:
4728 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4729 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4730 break;
4731
4732 case GIMPLE_EH_FILTER:
4733 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4734 break;
4735
4736 case GIMPLE_EH_ELSE:
4737 {
4738 geh_else *eh_else = as_a <geh_else *> (stmt);
4739 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4740 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4741 }
4742 break;
4743
4744 case GIMPLE_CATCH:
4745 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4746 as_a <gcatch *> (stmt)));
4747 break;
4748
4749 case GIMPLE_TRANSACTION:
4750 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4751 break;
4752
4753 default:
4754 {
4755 bool err2 = verify_gimple_stmt (stmt);
4756 if (err2)
4757 debug_gimple_stmt (stmt);
4758 err |= err2;
4759 }
4760 }
4761 }
4762
4763 return err;
4764 }
4765
4766 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4767 is a problem, otherwise false. */
4768
4769 static bool
4770 verify_gimple_transaction (gtransaction *stmt)
4771 {
4772 tree lab = gimple_transaction_label (stmt);
4773 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4774 return true;
4775 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4776 }
4777
4778
4779 /* Verify the GIMPLE statements inside the statement list STMTS. */
4780
4781 DEBUG_FUNCTION void
4782 verify_gimple_in_seq (gimple_seq stmts)
4783 {
4784 timevar_push (TV_TREE_STMT_VERIFY);
4785 if (verify_gimple_in_seq_2 (stmts))
4786 internal_error ("verify_gimple failed");
4787 timevar_pop (TV_TREE_STMT_VERIFY);
4788 }
4789
4790 /* Return true when T can be shared. */
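/* E.g. an INTEGER_CST, a type, a decl or an SSA_NAME may legitimately
   appear many times in the IL, whereas expression nodes such as an
   ARRAY_REF must be unshared; the sharing verifier below relies on
   this predicate.  */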
4791
4792 static bool
4793 tree_node_can_be_shared (tree t)
4794 {
4795 if (IS_TYPE_OR_DECL_P (t)
4796 || is_gimple_min_invariant (t)
4797 || TREE_CODE (t) == SSA_NAME
4798 || t == error_mark_node
4799 || TREE_CODE (t) == IDENTIFIER_NODE)
4800 return true;
4801
4802 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4803 return true;
4804
4805 if (DECL_P (t))
4806 return true;
4807
4808 return false;
4809 }
4810
4811 /* Called via walk_tree. Verify tree sharing. */
4812
4813 static tree
4814 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4815 {
4816 hash_set<void *> *visited = (hash_set<void *> *) data;
4817
4818 if (tree_node_can_be_shared (*tp))
4819 {
4820 *walk_subtrees = false;
4821 return NULL;
4822 }
4823
4824 if (visited->add (*tp))
4825 return *tp;
4826
4827 return NULL;
4828 }
4829
4830 /* Called via walk_gimple_stmt. Verify tree sharing. */
4831
4832 static tree
4833 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4834 {
4835 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4836 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4837 }
4838
4839 static bool eh_error_found;
4840 bool
4841 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4842 hash_set<gimple> *visited)
4843 {
4844 if (!visited->contains (stmt))
4845 {
4846 error ("dead STMT in EH table");
4847 debug_gimple_stmt (stmt);
4848 eh_error_found = true;
4849 }
4850 return true;
4851 }
4852
4853 /* Verify that the block of location LOC is in BLOCKS. */
4854
4855 static bool
4856 verify_location (hash_set<tree> *blocks, location_t loc)
4857 {
4858 tree block = LOCATION_BLOCK (loc);
4859 if (block != NULL_TREE
4860 && !blocks->contains (block))
4861 {
4862 error ("location references block not in block tree");
4863 return true;
4864 }
4865 if (block != NULL_TREE)
4866 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4867 return false;
4868 }
4869
4870 /* Called via walk_tree. Verify that expressions have no blocks. */
4871
4872 static tree
4873 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4874 {
4875 if (!EXPR_P (*tp))
4876 {
4877 *walk_subtrees = false;
4878 return NULL;
4879 }
4880
4881 location_t loc = EXPR_LOCATION (*tp);
4882 if (LOCATION_BLOCK (loc) != NULL)
4883 return *tp;
4884
4885 return NULL;
4886 }
4887
4888 /* Called via walk_tree. Verify locations of expressions. */
4889
4890 static tree
4891 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4892 {
4893 hash_set<tree> *blocks = (hash_set<tree> *) data;
4894
4895 if (TREE_CODE (*tp) == VAR_DECL
4896 && DECL_HAS_DEBUG_EXPR_P (*tp))
4897 {
4898 tree t = DECL_DEBUG_EXPR (*tp);
4899 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4900 if (addr)
4901 return addr;
4902 }
4903 if ((TREE_CODE (*tp) == VAR_DECL
4904 || TREE_CODE (*tp) == PARM_DECL
4905 || TREE_CODE (*tp) == RESULT_DECL)
4906 && DECL_HAS_VALUE_EXPR_P (*tp))
4907 {
4908 tree t = DECL_VALUE_EXPR (*tp);
4909 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4910 if (addr)
4911 return addr;
4912 }
4913
4914 if (!EXPR_P (*tp))
4915 {
4916 *walk_subtrees = false;
4917 return NULL;
4918 }
4919
4920 location_t loc = EXPR_LOCATION (*tp);
4921 if (verify_location (blocks, loc))
4922 return *tp;
4923
4924 return NULL;
4925 }
4926
4927 /* Called via walk_gimple_op. Verify locations of expressions. */
4928
4929 static tree
4930 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4931 {
4932 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4933 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4934 }
4935
4936 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4937
4938 static void
4939 collect_subblocks (hash_set<tree> *blocks, tree block)
4940 {
4941 tree t;
4942 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4943 {
4944 blocks->add (t);
4945 collect_subblocks (blocks, t);
4946 }
4947 }
4948
4949 /* Verify the GIMPLE statements in the CFG of FN. */
4950
4951 DEBUG_FUNCTION void
4952 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4953 {
4954 basic_block bb;
4955 bool err = false;
4956
4957 timevar_push (TV_TREE_STMT_VERIFY);
4958 hash_set<void *> visited;
4959 hash_set<gimple> visited_stmts;
4960
4961 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4962 hash_set<tree> blocks;
4963 if (DECL_INITIAL (fn->decl))
4964 {
4965 blocks.add (DECL_INITIAL (fn->decl));
4966 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4967 }
4968
4969 FOR_EACH_BB_FN (bb, fn)
4970 {
4971 gimple_stmt_iterator gsi;
4972
4973 for (gphi_iterator gpi = gsi_start_phis (bb);
4974 !gsi_end_p (gpi);
4975 gsi_next (&gpi))
4976 {
4977 gphi *phi = gpi.phi ();
4978 bool err2 = false;
4979 unsigned i;
4980
4981 visited_stmts.add (phi);
4982
4983 if (gimple_bb (phi) != bb)
4984 {
4985 error ("gimple_bb (phi) is set to a wrong basic block");
4986 err2 = true;
4987 }
4988
4989 err2 |= verify_gimple_phi (phi);
4990
4991 /* Only PHI arguments have locations. */
4992 if (gimple_location (phi) != UNKNOWN_LOCATION)
4993 {
4994 error ("PHI node with location");
4995 err2 = true;
4996 }
4997
4998 for (i = 0; i < gimple_phi_num_args (phi); i++)
4999 {
5000 tree arg = gimple_phi_arg_def (phi, i);
5001 tree addr = walk_tree (&arg, verify_node_sharing_1,
5002 &visited, NULL);
5003 if (addr)
5004 {
5005 error ("incorrect sharing of tree nodes");
5006 debug_generic_expr (addr);
5007 err2 |= true;
5008 }
5009 location_t loc = gimple_phi_arg_location (phi, i);
5010 if (virtual_operand_p (gimple_phi_result (phi))
5011 && loc != UNKNOWN_LOCATION)
5012 {
5013 error ("virtual PHI with argument locations");
5014 err2 = true;
5015 }
5016 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5017 if (addr)
5018 {
5019 debug_generic_expr (addr);
5020 err2 = true;
5021 }
5022 err2 |= verify_location (&blocks, loc);
5023 }
5024
5025 if (err2)
5026 debug_gimple_stmt (phi);
5027 err |= err2;
5028 }
5029
5030 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5031 {
5032 gimple stmt = gsi_stmt (gsi);
5033 bool err2 = false;
5034 struct walk_stmt_info wi;
5035 tree addr;
5036 int lp_nr;
5037
5038 visited_stmts.add (stmt);
5039
5040 if (gimple_bb (stmt) != bb)
5041 {
5042 error ("gimple_bb (stmt) is set to a wrong basic block");
5043 err2 = true;
5044 }
5045
5046 err2 |= verify_gimple_stmt (stmt);
5047 err2 |= verify_location (&blocks, gimple_location (stmt));
5048
5049 memset (&wi, 0, sizeof (wi));
5050 wi.info = (void *) &visited;
5051 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5052 if (addr)
5053 {
5054 error ("incorrect sharing of tree nodes");
5055 debug_generic_expr (addr);
5056 err2 |= true;
5057 }
5058
5059 memset (&wi, 0, sizeof (wi));
5060 wi.info = (void *) &blocks;
5061 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5062 if (addr)
5063 {
5064 debug_generic_expr (addr);
5065 err2 |= true;
5066 }
5067
5068 /* ??? Instead of not checking these stmts at all, the walker
5069 should know its context via wi. */
5070 if (!is_gimple_debug (stmt)
5071 && !is_gimple_omp (stmt))
5072 {
5073 memset (&wi, 0, sizeof (wi));
5074 addr = walk_gimple_op (stmt, verify_expr, &wi);
5075 if (addr)
5076 {
5077 debug_generic_expr (addr);
5078 inform (gimple_location (stmt), "in statement");
5079 err2 |= true;
5080 }
5081 }
5082
5083 /* If the statement is marked as part of an EH region, then it is
5084 expected that the statement could throw. Verify that when we
5085 have optimizations that simplify statements such that we prove
5086 that they cannot throw, that we update other data structures
5087 to match. */
5088 lp_nr = lookup_stmt_eh_lp (stmt);
5089 if (lp_nr > 0)
5090 {
5091 if (!stmt_could_throw_p (stmt))
5092 {
5093 if (verify_nothrow)
5094 {
5095 error ("statement marked for throw, but doesn%'t");
5096 err2 |= true;
5097 }
5098 }
5099 else if (!gsi_one_before_end_p (gsi))
5100 {
5101 error ("statement marked for throw in middle of block");
5102 err2 |= true;
5103 }
5104 }
5105
5106 if (err2)
5107 debug_gimple_stmt (stmt);
5108 err |= err2;
5109 }
5110 }
5111
5112 eh_error_found = false;
5113 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5114 if (eh_table)
5115 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5116 (&visited_stmts);
5117
5118 if (err || eh_error_found)
5119 internal_error ("verify_gimple failed");
5120
5121 verify_histograms ();
5122 timevar_pop (TV_TREE_STMT_VERIFY);
5123 }
5124
5125
5126 /* Verify that the flow information is OK. Return non-zero if anything is wrong. */
5127
5128 static int
5129 gimple_verify_flow_info (void)
5130 {
5131 int err = 0;
5132 basic_block bb;
5133 gimple_stmt_iterator gsi;
5134 gimple stmt;
5135 edge e;
5136 edge_iterator ei;
5137
5138 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5139 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5140 {
5141 error ("ENTRY_BLOCK has IL associated with it");
5142 err = 1;
5143 }
5144
5145 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5146 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5147 {
5148 error ("EXIT_BLOCK has IL associated with it");
5149 err = 1;
5150 }
5151
5152 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5153 if (e->flags & EDGE_FALLTHRU)
5154 {
5155 error ("fallthru to exit from bb %d", e->src->index);
5156 err = 1;
5157 }
5158
5159 FOR_EACH_BB_FN (bb, cfun)
5160 {
5161 bool found_ctrl_stmt = false;
5162
5163 stmt = NULL;
5164
5165 /* Skip labels at the start of the basic block. */
5166 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5167 {
5168 tree label;
5169 gimple prev_stmt = stmt;
5170
5171 stmt = gsi_stmt (gsi);
5172
5173 if (gimple_code (stmt) != GIMPLE_LABEL)
5174 break;
5175
5176 label = gimple_label_label (as_a <glabel *> (stmt));
5177 if (prev_stmt && DECL_NONLOCAL (label))
5178 {
5179 error ("nonlocal label ");
5180 print_generic_expr (stderr, label, 0);
5181 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5182 bb->index);
5183 err = 1;
5184 }
5185
5186 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5187 {
5188 error ("EH landing pad label ");
5189 print_generic_expr (stderr, label, 0);
5190 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5191 bb->index);
5192 err = 1;
5193 }
5194
5195 if (label_to_block (label) != bb)
5196 {
5197 error ("label ");
5198 print_generic_expr (stderr, label, 0);
5199 fprintf (stderr, " to block does not match in bb %d",
5200 bb->index);
5201 err = 1;
5202 }
5203
5204 if (decl_function_context (label) != current_function_decl)
5205 {
5206 error ("label ");
5207 print_generic_expr (stderr, label, 0);
5208 fprintf (stderr, " has incorrect context in bb %d",
5209 bb->index);
5210 err = 1;
5211 }
5212 }
5213
5214 /* Verify that the body of basic block BB is free of control flow. */
5215 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5216 {
5217 gimple stmt = gsi_stmt (gsi);
5218
5219 if (found_ctrl_stmt)
5220 {
5221 error ("control flow in the middle of basic block %d",
5222 bb->index);
5223 err = 1;
5224 }
5225
5226 if (stmt_ends_bb_p (stmt))
5227 found_ctrl_stmt = true;
5228
5229 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5230 {
5231 error ("label ");
5232 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5233 fprintf (stderr, " in the middle of basic block %d", bb->index);
5234 err = 1;
5235 }
5236 }
5237
5238 gsi = gsi_last_bb (bb);
5239 if (gsi_end_p (gsi))
5240 continue;
5241
5242 stmt = gsi_stmt (gsi);
5243
5244 if (gimple_code (stmt) == GIMPLE_LABEL)
5245 continue;
5246
5247 err |= verify_eh_edges (stmt);
5248
5249 if (is_ctrl_stmt (stmt))
5250 {
5251 FOR_EACH_EDGE (e, ei, bb->succs)
5252 if (e->flags & EDGE_FALLTHRU)
5253 {
5254 error ("fallthru edge after a control statement in bb %d",
5255 bb->index);
5256 err = 1;
5257 }
5258 }
5259
5260 if (gimple_code (stmt) != GIMPLE_COND)
5261 {
5262 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5263 after anything other than a GIMPLE_COND statement. */
5264 FOR_EACH_EDGE (e, ei, bb->succs)
5265 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5266 {
5267 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5268 bb->index);
5269 err = 1;
5270 }
5271 }
5272
5273 switch (gimple_code (stmt))
5274 {
5275 case GIMPLE_COND:
5276 {
5277 edge true_edge;
5278 edge false_edge;
5279
5280 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5281
5282 if (!true_edge
5283 || !false_edge
5284 || !(true_edge->flags & EDGE_TRUE_VALUE)
5285 || !(false_edge->flags & EDGE_FALSE_VALUE)
5286 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5287 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5288 || EDGE_COUNT (bb->succs) >= 3)
5289 {
5290 error ("wrong outgoing edge flags at end of bb %d",
5291 bb->index);
5292 err = 1;
5293 }
5294 }
5295 break;
5296
5297 case GIMPLE_GOTO:
5298 if (simple_goto_p (stmt))
5299 {
5300 error ("explicit goto at end of bb %d", bb->index);
5301 err = 1;
5302 }
5303 else
5304 {
5305 /* FIXME. We should double check that the labels in the
5306 destination blocks have their address taken. */
5307 FOR_EACH_EDGE (e, ei, bb->succs)
5308 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5309 | EDGE_FALSE_VALUE))
5310 || !(e->flags & EDGE_ABNORMAL))
5311 {
5312 error ("wrong outgoing edge flags at end of bb %d",
5313 bb->index);
5314 err = 1;
5315 }
5316 }
5317 break;
5318
5319 case GIMPLE_CALL:
5320 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5321 break;
5322 /* ... fallthru ... */
5323 case GIMPLE_RETURN:
5324 if (!single_succ_p (bb)
5325 || (single_succ_edge (bb)->flags
5326 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5327 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5328 {
5329 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5330 err = 1;
5331 }
5332 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5333 {
5334 error ("return edge does not point to exit in bb %d",
5335 bb->index);
5336 err = 1;
5337 }
5338 break;
5339
5340 case GIMPLE_SWITCH:
5341 {
5342 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5343 tree prev;
5344 edge e;
5345 size_t i, n;
5346
5347 n = gimple_switch_num_labels (switch_stmt);
5348
5349 /* Mark all the destination basic blocks. */
5350 for (i = 0; i < n; ++i)
5351 {
5352 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5353 basic_block label_bb = label_to_block (lab);
5354 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5355 label_bb->aux = (void *)1;
5356 }
5357
5358 /* Verify that the case labels are sorted. */
5359 prev = gimple_switch_label (switch_stmt, 0);
5360 for (i = 1; i < n; ++i)
5361 {
5362 tree c = gimple_switch_label (switch_stmt, i);
5363 if (!CASE_LOW (c))
5364 {
5365 error ("found default case not at the start of "
5366 "case vector");
5367 err = 1;
5368 continue;
5369 }
5370 if (CASE_LOW (prev)
5371 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5372 {
5373 error ("case labels not sorted: ");
5374 print_generic_expr (stderr, prev, 0);
5375 fprintf (stderr, " is greater than ");
5376 print_generic_expr (stderr, c, 0);
5377 fprintf (stderr, " but comes before it.\n");
5378 err = 1;
5379 }
5380 prev = c;
5381 }
5382 /* VRP will remove the default case if it can prove it will
5383 never be executed. So do not verify there always exists
5384 a default case here. */
5385
5386 FOR_EACH_EDGE (e, ei, bb->succs)
5387 {
5388 if (!e->dest->aux)
5389 {
5390 error ("extra outgoing edge %d->%d",
5391 bb->index, e->dest->index);
5392 err = 1;
5393 }
5394
5395 e->dest->aux = (void *)2;
5396 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5397 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5398 {
5399 error ("wrong outgoing edge flags at end of bb %d",
5400 bb->index);
5401 err = 1;
5402 }
5403 }
5404
5405 /* Check that we have all of them. */
5406 for (i = 0; i < n; ++i)
5407 {
5408 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5409 basic_block label_bb = label_to_block (lab);
5410
5411 if (label_bb->aux != (void *)2)
5412 {
5413 error ("missing edge %i->%i", bb->index, label_bb->index);
5414 err = 1;
5415 }
5416 }
5417
5418 FOR_EACH_EDGE (e, ei, bb->succs)
5419 e->dest->aux = (void *)0;
5420 }
5421 break;
5422
5423 case GIMPLE_EH_DISPATCH:
5424 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5425 break;
5426
5427 default:
5428 break;
5429 }
5430 }
5431
5432 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5433 verify_dominators (CDI_DOMINATORS);
5434
5435 return err;
5436 }
5437
5438
5439 /* Updates phi nodes after creating a forwarder block joined
5440 by edge FALLTHRU. */
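/* E.g. if the forwarder DUMMY falls through to BB and DUMMY previously
   held x_1 = PHI <a_2(e1), b_3(e2)>, then afterwards DUMMY computes
   x_4 = PHI <a_2(e1), b_3(e2)> and BB starts with
   x_1 = PHI <x_4(FALLTHRU), ...>, the remaining arguments being
   flushed from the other predecessor edges.  (Illustrative sketch.)  */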
5441
5442 static void
5443 gimple_make_forwarder_block (edge fallthru)
5444 {
5445 edge e;
5446 edge_iterator ei;
5447 basic_block dummy, bb;
5448 tree var;
5449 gphi_iterator gsi;
5450
5451 dummy = fallthru->src;
5452 bb = fallthru->dest;
5453
5454 if (single_pred_p (bb))
5455 return;
5456
5457 /* If we redirected a branch we must create new PHI nodes at the
5458 start of BB. */
5459 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5460 {
5461 gphi *phi, *new_phi;
5462
5463 phi = gsi.phi ();
5464 var = gimple_phi_result (phi);
5465 new_phi = create_phi_node (var, bb);
5466 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5467 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5468 UNKNOWN_LOCATION);
5469 }
5470
5471 /* Add the arguments we have stored on edges. */
5472 FOR_EACH_EDGE (e, ei, bb->preds)
5473 {
5474 if (e == fallthru)
5475 continue;
5476
5477 flush_pending_stmts (e);
5478 }
5479 }
5480
5481
5482 /* Return a non-special label at the head of basic block BB.
5483 Create one if it doesn't exist. */
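/* A typical use is retargeting label-based jumps; e.g. the switch
   redirection code below does
     CASE_LABEL (elt) = gimple_block_label (dest);
   to point a case at its new destination block.  */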
5484
5485 tree
5486 gimple_block_label (basic_block bb)
5487 {
5488 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5489 bool first = true;
5490 tree label;
5491 glabel *stmt;
5492
5493 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5494 {
5495 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5496 if (!stmt)
5497 break;
5498 label = gimple_label_label (stmt);
5499 if (!DECL_NONLOCAL (label))
5500 {
5501 if (!first)
5502 gsi_move_before (&i, &s);
5503 return label;
5504 }
5505 }
5506
5507 label = create_artificial_label (UNKNOWN_LOCATION);
5508 stmt = gimple_build_label (label);
5509 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5510 return label;
5511 }
5512
5513
5514 /* Attempt to perform edge redirection by replacing a possibly complex
5515 jump instruction by a goto or by removing the jump completely.
5516 This can apply only if all edges now point to the same block. The
5517 parameters and return values are equivalent to
5518 redirect_edge_and_branch. */
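/* E.g. once both arms of a GIMPLE_COND have been redirected to the
   same block, the condition is useless: the statement is removed and
   SRC simply falls through to TARGET.  (Sketch of the situation
   handled below.)  */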
5519
5520 static edge
5521 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5522 {
5523 basic_block src = e->src;
5524 gimple_stmt_iterator i;
5525 gimple stmt;
5526
5527 /* We can replace or remove a complex jump only when we have exactly
5528 two edges. */
5529 if (EDGE_COUNT (src->succs) != 2
5530 /* Verify that all targets will be TARGET. Specifically, the
5531 edge that is not E must also go to TARGET. */
5532 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5533 return NULL;
5534
5535 i = gsi_last_bb (src);
5536 if (gsi_end_p (i))
5537 return NULL;
5538
5539 stmt = gsi_stmt (i);
5540
5541 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5542 {
5543 gsi_remove (&i, true);
5544 e = ssa_redirect_edge (e, target);
5545 e->flags = EDGE_FALLTHRU;
5546 return e;
5547 }
5548
5549 return NULL;
5550 }
5551
5552
5553 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5554 edge representing the redirected branch. */
5555
5556 static edge
5557 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5558 {
5559 basic_block bb = e->src;
5560 gimple_stmt_iterator gsi;
5561 edge ret;
5562 gimple stmt;
5563
5564 if (e->flags & EDGE_ABNORMAL)
5565 return NULL;
5566
5567 if (e->dest == dest)
5568 return NULL;
5569
5570 if (e->flags & EDGE_EH)
5571 return redirect_eh_edge (e, dest);
5572
5573 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5574 {
5575 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5576 if (ret)
5577 return ret;
5578 }
5579
5580 gsi = gsi_last_bb (bb);
5581 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5582
5583 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5584 {
5585 case GIMPLE_COND:
5586 /* For COND_EXPR, we only need to redirect the edge. */
5587 break;
5588
5589 case GIMPLE_GOTO:
5590 /* No non-abnormal edges should lead from a non-simple goto, and
5591 simple ones should be represented implicitly. */
5592 gcc_unreachable ();
5593
5594 case GIMPLE_SWITCH:
5595 {
5596 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5597 tree label = gimple_block_label (dest);
5598 tree cases = get_cases_for_edge (e, switch_stmt);
5599
5600 /* If we have a list of cases associated with E, then use it
5601 as it's a lot faster than walking the entire case vector. */
5602 if (cases)
5603 {
5604 edge e2 = find_edge (e->src, dest);
5605 tree last, first;
5606
5607 first = cases;
5608 while (cases)
5609 {
5610 last = cases;
5611 CASE_LABEL (cases) = label;
5612 cases = CASE_CHAIN (cases);
5613 }
5614
5615 /* If there was already an edge in the CFG, then we need
5616 to move all the cases associated with E to E2. */
5617 if (e2)
5618 {
5619 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5620
5621 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5622 CASE_CHAIN (cases2) = first;
5623 }
5624 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5625 }
5626 else
5627 {
5628 size_t i, n = gimple_switch_num_labels (switch_stmt);
5629
5630 for (i = 0; i < n; i++)
5631 {
5632 tree elt = gimple_switch_label (switch_stmt, i);
5633 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5634 CASE_LABEL (elt) = label;
5635 }
5636 }
5637 }
5638 break;
5639
5640 case GIMPLE_ASM:
5641 {
5642 gasm *asm_stmt = as_a <gasm *> (stmt);
5643 int i, n = gimple_asm_nlabels (asm_stmt);
5644 tree label = NULL;
5645
5646 for (i = 0; i < n; ++i)
5647 {
5648 tree cons = gimple_asm_label_op (asm_stmt, i);
5649 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5650 {
5651 if (!label)
5652 label = gimple_block_label (dest);
5653 TREE_VALUE (cons) = label;
5654 }
5655 }
5656
5657 /* If we didn't find any label matching the former edge in the
5658 asm labels, we must be redirecting the fallthrough
5659 edge. */
5660 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5661 }
5662 break;
5663
5664 case GIMPLE_RETURN:
5665 gsi_remove (&gsi, true);
5666 e->flags |= EDGE_FALLTHRU;
5667 break;
5668
5669 case GIMPLE_OMP_RETURN:
5670 case GIMPLE_OMP_CONTINUE:
5671 case GIMPLE_OMP_SECTIONS_SWITCH:
5672 case GIMPLE_OMP_FOR:
5673 /* The edges from OMP constructs can be simply redirected. */
5674 break;
5675
5676 case GIMPLE_EH_DISPATCH:
5677 if (!(e->flags & EDGE_FALLTHRU))
5678 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5679 break;
5680
5681 case GIMPLE_TRANSACTION:
5682 /* The ABORT edge has a stored label associated with it, otherwise
5683 the edges are simply redirectable. */
5684 if (e->flags == 0)
5685 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5686 gimple_block_label (dest));
5687 break;
5688
5689 default:
5690 /* Otherwise it must be a fallthru edge, and we don't need to
5691 do anything besides redirecting it. */
5692 gcc_assert (e->flags & EDGE_FALLTHRU);
5693 break;
5694 }
5695
5696 /* Update/insert PHI nodes as necessary. */
5697
5698 /* Now update the edges in the CFG. */
5699 e = ssa_redirect_edge (e, dest);
5700
5701 return e;
5702 }
5703
5704 /* Returns true if it is possible to remove edge E by redirecting
5705 it to the destination of the other edge from E->src. */
5706
5707 static bool
5708 gimple_can_remove_branch_p (const_edge e)
5709 {
5710 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5711 return false;
5712
5713 return true;
5714 }
5715
5716 /* Simple wrapper, as we can always redirect fallthru edges. */
5717
5718 static basic_block
5719 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5720 {
5721 e = gimple_redirect_edge_and_branch (e, dest);
5722 gcc_assert (e);
5723
5724 return NULL;
5725 }
5726
5727
5728 /* Splits basic block BB after statement STMT (but at least after the
5729 labels). If STMT is NULL, BB is split just after the labels. */
5730
5731 static basic_block
5732 gimple_split_block (basic_block bb, void *stmt)
5733 {
5734 gimple_stmt_iterator gsi;
5735 gimple_stmt_iterator gsi_tgt;
5736 gimple_seq list;
5737 basic_block new_bb;
5738 edge e;
5739 edge_iterator ei;
5740
5741 new_bb = create_empty_bb (bb);
5742
5743 /* Redirect the outgoing edges. */
5744 new_bb->succs = bb->succs;
5745 bb->succs = NULL;
5746 FOR_EACH_EDGE (e, ei, new_bb->succs)
5747 e->src = new_bb;
5748
5749 /* Get a stmt iterator pointing to the first stmt to move. */
5750 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5751 gsi = gsi_after_labels (bb);
5752 else
5753 {
5754 gsi = gsi_for_stmt ((gimple) stmt);
5755 gsi_next (&gsi);
5756 }
5757
5758 /* Move everything from GSI to the new basic block. */
5759 if (gsi_end_p (gsi))
5760 return new_bb;
5761
5762 /* Split the statement list - avoid creating new containers as this
5763 brings ugly quadratic memory consumption in the inliner.
5764 (We are still quadratic since we need to update stmt BB pointers,
5765 sadly.) */
5766 gsi_split_seq_before (&gsi, &list);
5767 set_bb_seq (new_bb, list);
5768 for (gsi_tgt = gsi_start (list);
5769 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5770 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5771
5772 return new_bb;
5773 }
5774
5775
5776 /* Moves basic block BB after block AFTER. */
5777
5778 static bool
5779 gimple_move_block_after (basic_block bb, basic_block after)
5780 {
5781 if (bb->prev_bb == after)
5782 return true;
5783
5784 unlink_block (bb);
5785 link_block (bb, after);
5786
5787 return true;
5788 }
5789
5790
5791 /* Return TRUE if block BB has no executable statements, otherwise return
5792 FALSE. */
5793
5794 static bool
5795 gimple_empty_block_p (basic_block bb)
5796 {
5797 /* BB must have no executable statements. */
5798 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5799 if (phi_nodes (bb))
5800 return false;
5801 if (gsi_end_p (gsi))
5802 return true;
5803 if (is_gimple_debug (gsi_stmt (gsi)))
5804 gsi_next_nondebug (&gsi);
5805 return gsi_end_p (gsi);
5806 }
5807
5808
5809 /* Split a basic block if it ends with a conditional branch and if the
5810 other part of the block is not empty. */
5811
5812 static basic_block
5813 gimple_split_block_before_cond_jump (basic_block bb)
5814 {
5815 gimple last, split_point;
5816 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5817 if (gsi_end_p (gsi))
5818 return NULL;
5819 last = gsi_stmt (gsi);
5820 if (gimple_code (last) != GIMPLE_COND
5821 && gimple_code (last) != GIMPLE_SWITCH)
5822 return NULL;
5823 gsi_prev_nondebug (&gsi);
5824 split_point = gsi_stmt (gsi);
5825 return split_block (bb, split_point)->dest;
5826 }
5827
5828
5829 /* Return true if basic block BB can be duplicated. */
5830
5831 static bool
5832 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5833 {
5834 return true;
5835 }
5836
5837 /* Create a duplicate of the basic block BB. NOTE: This does not
5838 preserve SSA form. */
5839
5840 static basic_block
5841 gimple_duplicate_bb (basic_block bb)
5842 {
5843 basic_block new_bb;
5844 gimple_stmt_iterator gsi_tgt;
5845
5846 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5847
5848 /* Copy the PHI nodes. We ignore PHI node arguments here because
5849 the incoming edges have not been setup yet. */
5850 for (gphi_iterator gpi = gsi_start_phis (bb);
5851 !gsi_end_p (gpi);
5852 gsi_next (&gpi))
5853 {
5854 gphi *phi, *copy;
5855 phi = gpi.phi ();
5856 copy = create_phi_node (NULL_TREE, new_bb);
5857 create_new_def_for (gimple_phi_result (phi), copy,
5858 gimple_phi_result_ptr (copy));
5859 gimple_set_uid (copy, gimple_uid (phi));
5860 }
5861
5862 gsi_tgt = gsi_start_bb (new_bb);
5863 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5864 !gsi_end_p (gsi);
5865 gsi_next (&gsi))
5866 {
5867 def_operand_p def_p;
5868 ssa_op_iter op_iter;
5869 tree lhs;
5870 gimple stmt, copy;
5871
5872 stmt = gsi_stmt (gsi);
5873 if (gimple_code (stmt) == GIMPLE_LABEL)
5874 continue;
5875
5876 /* Don't duplicate label debug stmts. */
5877 if (gimple_debug_bind_p (stmt)
5878 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5879 == LABEL_DECL)
5880 continue;
5881
5882 /* Create a new copy of STMT and duplicate STMT's virtual
5883 operands. */
5884 copy = gimple_copy (stmt);
5885 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5886
5887 maybe_duplicate_eh_stmt (copy, stmt);
5888 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5889
5890 /* When copying around a stmt writing into a local non-user
5891 aggregate, make sure it won't share a stack slot with other
5892 vars. */
5893 lhs = gimple_get_lhs (stmt);
5894 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5895 {
5896 tree base = get_base_address (lhs);
5897 if (base
5898 && (TREE_CODE (base) == VAR_DECL
5899 || TREE_CODE (base) == RESULT_DECL)
5900 && DECL_IGNORED_P (base)
5901 && !TREE_STATIC (base)
5902 && !DECL_EXTERNAL (base)
5903 && (TREE_CODE (base) != VAR_DECL
5904 || !DECL_HAS_VALUE_EXPR_P (base)))
5905 DECL_NONSHAREABLE (base) = 1;
5906 }
5907
5908 /* Create new names for all the definitions created by COPY and
5909 add replacement mappings for each new name. */
5910 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5911 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5912 }
5913
5914 return new_bb;
5915 }
5916
5917 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5918
5919 static void
5920 add_phi_args_after_copy_edge (edge e_copy)
5921 {
5922 basic_block bb, bb_copy = e_copy->src, dest;
5923 edge e;
5924 edge_iterator ei;
5925 gphi *phi, *phi_copy;
5926 tree def;
5927 gphi_iterator psi, psi_copy;
5928
5929 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5930 return;
5931
5932 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5933
5934 if (e_copy->dest->flags & BB_DUPLICATED)
5935 dest = get_bb_original (e_copy->dest);
5936 else
5937 dest = e_copy->dest;
5938
5939 e = find_edge (bb, dest);
5940 if (!e)
5941 {
5942 /* During loop unrolling the target of the latch edge is copied.
5943 In this case we are not looking for the edge to DEST, but for
5944 the edge to the duplicated block whose original was DEST. */
5945 FOR_EACH_EDGE (e, ei, bb->succs)
5946 {
5947 if ((e->dest->flags & BB_DUPLICATED)
5948 && get_bb_original (e->dest) == dest)
5949 break;
5950 }
5951
5952 gcc_assert (e != NULL);
5953 }
5954
5955 for (psi = gsi_start_phis (e->dest),
5956 psi_copy = gsi_start_phis (e_copy->dest);
5957 !gsi_end_p (psi);
5958 gsi_next (&psi), gsi_next (&psi_copy))
5959 {
5960 phi = psi.phi ();
5961 phi_copy = psi_copy.phi ();
5962 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5963 add_phi_arg (phi_copy, def, e_copy,
5964 gimple_phi_arg_location_from_edge (phi, e));
5965 }
5966 }
5967
5968
5969 /* Basic block BB_COPY was created by code duplication. Add phi node
5970 arguments for edges going out of BB_COPY. The blocks that were
5971 duplicated have BB_DUPLICATED set. */
5972
5973 void
5974 add_phi_args_after_copy_bb (basic_block bb_copy)
5975 {
5976 edge e_copy;
5977 edge_iterator ei;
5978
5979 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5980 {
5981 add_phi_args_after_copy_edge (e_copy);
5982 }
5983 }
5984
5985 /* Blocks in the REGION_COPY array of length N_REGION were created by
5986 duplication of basic blocks. Add phi node arguments for edges
5987 going out of these blocks. If E_COPY is not NULL, also add
5988 phi node arguments for its destination. */
5989
5990 void
5991 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5992 edge e_copy)
5993 {
5994 unsigned i;
5995
5996 for (i = 0; i < n_region; i++)
5997 region_copy[i]->flags |= BB_DUPLICATED;
5998
5999 for (i = 0; i < n_region; i++)
6000 add_phi_args_after_copy_bb (region_copy[i]);
6001 if (e_copy)
6002 add_phi_args_after_copy_edge (e_copy);
6003
6004 for (i = 0; i < n_region; i++)
6005 region_copy[i]->flags &= ~BB_DUPLICATED;
6006 }
6007
6008 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6009 important exit edge EXIT. By important we mean that no SSA name defined
6010 inside the region is live over the other exit edges of the region. All entry
6011 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6012 to the duplicate of the region. Dominance and loop information is
6013 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6014 UPDATE_DOMINANCE is false then we assume that the caller will update the
6015 dominance information after calling this function. The new basic
6016 blocks are stored to REGION_COPY in the same order as they appear in REGION,
6017 provided that REGION_COPY is not NULL.
6018 The function returns false if it is unable to copy the region,
6019 true otherwise. */
6020
6021 bool
6022 gimple_duplicate_sese_region (edge entry, edge exit,
6023 basic_block *region, unsigned n_region,
6024 basic_block *region_copy,
6025 bool update_dominance)
6026 {
6027 unsigned i;
6028 bool free_region_copy = false, copying_header = false;
6029 struct loop *loop = entry->dest->loop_father;
6030 edge exit_copy;
6031 vec<basic_block> doms;
6032 edge redirected;
6033 int total_freq = 0, entry_freq = 0;
6034 gcov_type total_count = 0, entry_count = 0;
6035
6036 if (!can_copy_bbs_p (region, n_region))
6037 return false;
6038
6039 /* Some sanity checking. Note that we do not check for all possible
6040 misuses of the functions. That is, if you ask to copy something weird,
6041 it will work, but the state of structures probably will not be
6042 correct. */
6043 for (i = 0; i < n_region; i++)
6044 {
6045 /* We do not handle subloops, i.e. all the blocks must belong to the
6046 same loop. */
6047 if (region[i]->loop_father != loop)
6048 return false;
6049
6050 if (region[i] != entry->dest
6051 && region[i] == loop->header)
6052 return false;
6053 }
6054
6055 /* In case the function is used for loop header copying (which is the primary
6056 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6057 if (loop->header == entry->dest)
6058 {
6059 copying_header = true;
6060
6061 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6062 return false;
6063
6064 for (i = 0; i < n_region; i++)
6065 if (region[i] != exit->src
6066 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6067 return false;
6068 }
6069
6070 initialize_original_copy_tables ();
6071
6072 if (copying_header)
6073 set_loop_copy (loop, loop_outer (loop));
6074 else
6075 set_loop_copy (loop, loop);
6076
6077 if (!region_copy)
6078 {
6079 region_copy = XNEWVEC (basic_block, n_region);
6080 free_region_copy = true;
6081 }
6082
6083 /* Record blocks outside the region that are dominated by something
6084 inside. */
6085 if (update_dominance)
6086 {
6087 doms.create (0);
6088 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6089 }
6090
6091 if (entry->dest->count)
6092 {
6093 total_count = entry->dest->count;
6094 entry_count = entry->count;
6095 /* Fix up corner cases, to avoid division by zero or creation of negative
6096 frequencies. */
6097 if (entry_count > total_count)
6098 entry_count = total_count;
6099 }
6100 else
6101 {
6102 total_freq = entry->dest->frequency;
6103 entry_freq = EDGE_FREQUENCY (entry);
6104 /* Fix up corner cases, to avoid division by zero or creation of negative
6105 frequencies. */
6106 if (total_freq == 0)
6107 total_freq = 1;
6108 else if (entry_freq > total_freq)
6109 entry_freq = total_freq;
6110 }
6111
6112 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6113 split_edge_bb_loc (entry), update_dominance);
6114 if (total_count)
6115 {
6116 scale_bbs_frequencies_gcov_type (region, n_region,
6117 total_count - entry_count,
6118 total_count);
6119 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6120 total_count);
6121 }
6122 else
6123 {
6124 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6125 total_freq);
6126 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6127 }
6128
6129 if (copying_header)
6130 {
6131 loop->header = exit->dest;
6132 loop->latch = exit->src;
6133 }
6134
6135 /* Redirect the entry and add the phi node arguments. */
6136 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6137 gcc_assert (redirected != NULL);
6138 flush_pending_stmts (entry);
6139
6140 /* Concerning updating of dominators: We must recount dominators
6141 for entry block and its copy. Anything that is outside of the
6142 region, but was dominated by something inside needs recounting as
6143 well. */
6144 if (update_dominance)
6145 {
6146 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6147 doms.safe_push (get_bb_original (entry->dest));
6148 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6149 doms.release ();
6150 }
6151
6152 /* Add the other PHI node arguments. */
6153 add_phi_args_after_copy (region_copy, n_region, NULL);
6154
6155 if (free_region_copy)
6156 free (region_copy);
6157
6158 free_original_copy_tables ();
6159 return true;
6160 }
6161
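/* Editor's sketch (illustrative, not verbatim GCC code): the primary client
   of gimple_duplicate_sese_region is loop-header copying (see
   tree-ssa-loop-ch.c).  Peeling one copy of LOOP's header in front of the
   loop looks roughly like this, where EXIT must be the successor edge of the
   header that stays inside the loop, so that after copying it becomes the
   new latch edge as described above:

     basic_block header = loop->header;
     basic_block bbs[1] = { header };
     basic_block copies[1];
     edge entry = loop_preheader_edge (loop);
     edge e0 = EDGE_SUCC (header, 0), e1 = EDGE_SUCC (header, 1);
     edge exit = flow_bb_inside_loop_p (loop, e0->dest) ? e0 : e1;

     if (gimple_duplicate_sese_region (entry, exit, bbs, 1, copies, true))
       update_ssa (TODO_update_ssa);

   The trailing update_ssa call is needed because, as documented above, the
   SSA web is not updated by this function.  */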
6162 /* Checks if BB is part of the region defined by N_REGION BBS. */
6163 static bool
6164 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6165 {
6166 unsigned int n;
6167
6168 for (n = 0; n < n_region; n++)
6169 {
6170 if (bb == bbs[n])
6171 return true;
6172 }
6173 return false;
6174 }
6175
6176 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6177 are stored to REGION_COPY in the same order as they appear
6178 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6179 the region, EXIT an exit from it. The condition guarding EXIT
6180 is moved to ENTRY. Returns true if duplication succeeds, false
6181 otherwise.
6182
6183 For example,
6184
6185 some_code;
6186 if (cond)
6187 A;
6188 else
6189 B;
6190
6191 is transformed to
6192
6193 if (cond)
6194 {
6195 some_code;
6196 A;
6197 }
6198 else
6199 {
6200 some_code;
6201 B;
6202 }
6203 */
6204
6205 bool
6206 gimple_duplicate_sese_tail (edge entry, edge exit,
6207 basic_block *region, unsigned n_region,
6208 basic_block *region_copy)
6209 {
6210 unsigned i;
6211 bool free_region_copy = false;
6212 struct loop *loop = exit->dest->loop_father;
6213 struct loop *orig_loop = entry->dest->loop_father;
6214 basic_block switch_bb, entry_bb, nentry_bb;
6215 vec<basic_block> doms;
6216 int total_freq = 0, exit_freq = 0;
6217 gcov_type total_count = 0, exit_count = 0;
6218 edge exits[2], nexits[2], e;
6219 gimple_stmt_iterator gsi;
6220 gimple cond_stmt;
6221 edge sorig, snew;
6222 basic_block exit_bb;
6223 gphi_iterator psi;
6224 gphi *phi;
6225 tree def;
6226 struct loop *target, *aloop, *cloop;
6227
6228 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6229 exits[0] = exit;
6230 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6231
6232 if (!can_copy_bbs_p (region, n_region))
6233 return false;
6234
6235 initialize_original_copy_tables ();
6236 set_loop_copy (orig_loop, loop);
6237
6238 target = loop;
6239 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6240 {
6241 if (bb_part_of_region_p (aloop->header, region, n_region))
6242 {
6243 cloop = duplicate_loop (aloop, target);
6244 duplicate_subloops (aloop, cloop);
6245 }
6246 }
6247
6248 if (!region_copy)
6249 {
6250 region_copy = XNEWVEC (basic_block, n_region);
6251 free_region_copy = true;
6252 }
6253
6254 gcc_assert (!need_ssa_update_p (cfun));
6255
6256 /* Record blocks outside the region that are dominated by something
6257 inside. */
6258 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6259
6260 if (exit->src->count)
6261 {
6262 total_count = exit->src->count;
6263 exit_count = exit->count;
6264 /* Fix up corner cases, to avoid division by zero or creation of negative
6265 frequencies. */
6266 if (exit_count > total_count)
6267 exit_count = total_count;
6268 }
6269 else
6270 {
6271 total_freq = exit->src->frequency;
6272 exit_freq = EDGE_FREQUENCY (exit);
6273 /* Fix up corner cases, to avoid division by zero or creation of negative
6274 frequencies. */
6275 if (total_freq == 0)
6276 total_freq = 1;
6277 if (exit_freq > total_freq)
6278 exit_freq = total_freq;
6279 }
6280
6281 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6282 split_edge_bb_loc (exit), true);
6283 if (total_count)
6284 {
6285 scale_bbs_frequencies_gcov_type (region, n_region,
6286 total_count - exit_count,
6287 total_count);
6288 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6289 total_count);
6290 }
6291 else
6292 {
6293 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6294 total_freq);
6295 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6296 }
6297
6298 /* Create the switch block, and put the exit condition into it. */
6299 entry_bb = entry->dest;
6300 nentry_bb = get_bb_copy (entry_bb);
6301 if (!last_stmt (entry->src)
6302 || !stmt_ends_bb_p (last_stmt (entry->src)))
6303 switch_bb = entry->src;
6304 else
6305 switch_bb = split_edge (entry);
6306 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6307
6308 gsi = gsi_last_bb (switch_bb);
6309 cond_stmt = last_stmt (exit->src);
6310 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6311 cond_stmt = gimple_copy (cond_stmt);
6312
6313 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6314
6315 sorig = single_succ_edge (switch_bb);
6316 sorig->flags = exits[1]->flags;
6317 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6318
6319 /* Register the new edge from SWITCH_BB in loop exit lists. */
6320 rescan_loop_exit (snew, true, false);
6321
6322 /* Add the PHI node arguments. */
6323 add_phi_args_after_copy (region_copy, n_region, snew);
6324
6325 /* Get rid of now superfluous conditions and associated edges (and phi node
6326 arguments). */
6327 exit_bb = exit->dest;
6328
6329 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6330 PENDING_STMT (e) = NULL;
6331
6332 /* The latch of ORIG_LOOP was copied, and so was the backedge
6333 to the original header. We redirect this backedge to EXIT_BB. */
6334 for (i = 0; i < n_region; i++)
6335 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6336 {
6337 gcc_assert (single_succ_edge (region_copy[i]));
6338 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6339 PENDING_STMT (e) = NULL;
6340 for (psi = gsi_start_phis (exit_bb);
6341 !gsi_end_p (psi);
6342 gsi_next (&psi))
6343 {
6344 phi = psi.phi ();
6345 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6346 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6347 }
6348 }
6349 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6350 PENDING_STMT (e) = NULL;
6351
6352 /* Anything that is outside of the region, but was dominated by something
6353 inside needs to update dominance info. */
6354 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6355 doms.release ();
6356 /* Update the SSA web. */
6357 update_ssa (TODO_update_ssa);
6358
6359 if (free_region_copy)
6360 free (region_copy);
6361
6362 free_original_copy_tables ();
6363 return true;
6364 }
6365
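/* Editor's note (best-effort attribution): the main in-tree user of
   gimple_duplicate_sese_tail is believed to be the loop
   auto-parallelization code (transform_to_exit_first_loop in
   tree-parloops.c), which uses it to bring a loop into the exit-first shape
   sketched in the comment above.  */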
6366 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6367 adding blocks when the dominator traversal reaches EXIT. This
6368 function silently assumes that ENTRY strictly dominates EXIT. */
6369
6370 void
6371 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6372 vec<basic_block> *bbs_p)
6373 {
6374 basic_block son;
6375
6376 for (son = first_dom_son (CDI_DOMINATORS, entry);
6377 son;
6378 son = next_dom_son (CDI_DOMINATORS, son))
6379 {
6380 bbs_p->safe_push (son);
6381 if (son != exit)
6382 gather_blocks_in_sese_region (son, exit, bbs_p);
6383 }
6384 }
6385
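/* Editor's sketch (this mirrors the use in move_sese_region_to_fn below):
   the entry block must be pushed by hand, because the dominator walk only
   visits its children:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     bbs.release ();
*/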
6386 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6387 The duplicates are recorded in VARS_MAP. */
6388
6389 static void
6390 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6391 tree to_context)
6392 {
6393 tree t = *tp, new_t;
6394 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6395
6396 if (DECL_CONTEXT (t) == to_context)
6397 return;
6398
6399 bool existed;
6400 tree &loc = vars_map->get_or_insert (t, &existed);
6401
6402 if (!existed)
6403 {
6404 if (SSA_VAR_P (t))
6405 {
6406 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6407 add_local_decl (f, new_t);
6408 }
6409 else
6410 {
6411 gcc_assert (TREE_CODE (t) == CONST_DECL);
6412 new_t = copy_node (t);
6413 }
6414 DECL_CONTEXT (new_t) = to_context;
6415
6416 loc = new_t;
6417 }
6418 else
6419 new_t = loc;
6420
6421 *tp = new_t;
6422 }
6423
6424
6425 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6426 VARS_MAP maps old ssa names and var_decls to the new ones. */
6427
6428 static tree
6429 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6430 tree to_context)
6431 {
6432 tree new_name;
6433
6434 gcc_assert (!virtual_operand_p (name));
6435
6436 tree *loc = vars_map->get (name);
6437
6438 if (!loc)
6439 {
6440 tree decl = SSA_NAME_VAR (name);
6441 if (decl)
6442 {
6443 replace_by_duplicate_decl (&decl, vars_map, to_context);
6444 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6445 decl, SSA_NAME_DEF_STMT (name));
6446 if (SSA_NAME_IS_DEFAULT_DEF (name))
6447 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6448 decl, new_name);
6449 }
6450 else
6451 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6452 name, SSA_NAME_DEF_STMT (name));
6453
6454 vars_map->put (name, new_name);
6455 }
6456 else
6457 new_name = *loc;
6458
6459 return new_name;
6460 }
6461
6462 struct move_stmt_d
6463 {
6464 tree orig_block;
6465 tree new_block;
6466 tree from_context;
6467 tree to_context;
6468 hash_map<tree, tree> *vars_map;
6469 htab_t new_label_map;
6470 hash_map<void *, void *> *eh_map;
6471 bool remap_decls_p;
6472 };
6473
6474 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6475 contained in *TP if it was previously set to ORIG_BLOCK, and change the
6476 DECL_CONTEXT of every local variable referenced in *TP. */
6477
6478 static tree
6479 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6480 {
6481 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6482 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6483 tree t = *tp;
6484
6485 if (EXPR_P (t))
6486 {
6487 tree block = TREE_BLOCK (t);
6488 if (block == p->orig_block
6489 || (p->orig_block == NULL_TREE
6490 && block != NULL_TREE))
6491 TREE_SET_BLOCK (t, p->new_block);
6492 #ifdef ENABLE_CHECKING
6493 else if (block != NULL_TREE)
6494 {
6495 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6496 block = BLOCK_SUPERCONTEXT (block);
6497 gcc_assert (block == p->orig_block);
6498 }
6499 #endif
6500 }
6501 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6502 {
6503 if (TREE_CODE (t) == SSA_NAME)
6504 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6505 else if (TREE_CODE (t) == LABEL_DECL)
6506 {
6507 if (p->new_label_map)
6508 {
6509 struct tree_map in, *out;
6510 in.base.from = t;
6511 out = (struct tree_map *)
6512 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6513 if (out)
6514 *tp = t = out->to;
6515 }
6516
6517 DECL_CONTEXT (t) = p->to_context;
6518 }
6519 else if (p->remap_decls_p)
6520 {
6521 /* Replace T with its duplicate. T should no longer appear in the
6522 parent function, so this looks wasteful; however, it may appear
6523 in referenced_vars, and more importantly, as virtual operands of
6524 statements, and in alias lists of other variables. It would be
6525 quite difficult to expunge it from all those places. ??? It might
6526 suffice to do this for addressable variables. */
6527 if ((TREE_CODE (t) == VAR_DECL
6528 && !is_global_var (t))
6529 || TREE_CODE (t) == CONST_DECL)
6530 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6531 }
6532 *walk_subtrees = 0;
6533 }
6534 else if (TYPE_P (t))
6535 *walk_subtrees = 0;
6536
6537 return NULL_TREE;
6538 }
6539
6540 /* Helper for move_stmt_r. Given an EH region number for the source
6541 function, map that to the duplicate EH region number in the dest. */
6542
6543 static int
6544 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6545 {
6546 eh_region old_r, new_r;
6547
6548 old_r = get_eh_region_from_number (old_nr);
6549 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6550
6551 return new_r->index;
6552 }
6553
6554 /* Similar, but operate on INTEGER_CSTs. */
6555
6556 static tree
6557 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6558 {
6559 int old_nr, new_nr;
6560
6561 old_nr = tree_to_shwi (old_t_nr);
6562 new_nr = move_stmt_eh_region_nr (old_nr, p);
6563
6564 return build_int_cst (integer_type_node, new_nr);
6565 }
6566
6567 /* Like move_stmt_op, but for gimple statements.
6568
6569 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6570 contained in the current statement in *GSI_P and change the
6571 DECL_CONTEXT of every local variable referenced in the current
6572 statement. */
6573
6574 static tree
6575 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6576 struct walk_stmt_info *wi)
6577 {
6578 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6579 gimple stmt = gsi_stmt (*gsi_p);
6580 tree block = gimple_block (stmt);
6581
6582 if (block == p->orig_block
6583 || (p->orig_block == NULL_TREE
6584 && block != NULL_TREE))
6585 gimple_set_block (stmt, p->new_block);
6586
6587 switch (gimple_code (stmt))
6588 {
6589 case GIMPLE_CALL:
6590 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6591 {
6592 tree r, fndecl = gimple_call_fndecl (stmt);
6593 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6594 switch (DECL_FUNCTION_CODE (fndecl))
6595 {
6596 case BUILT_IN_EH_COPY_VALUES:
6597 r = gimple_call_arg (stmt, 1);
6598 r = move_stmt_eh_region_tree_nr (r, p);
6599 gimple_call_set_arg (stmt, 1, r);
6600 /* FALLTHRU */
6601
6602 case BUILT_IN_EH_POINTER:
6603 case BUILT_IN_EH_FILTER:
6604 r = gimple_call_arg (stmt, 0);
6605 r = move_stmt_eh_region_tree_nr (r, p);
6606 gimple_call_set_arg (stmt, 0, r);
6607 break;
6608
6609 default:
6610 break;
6611 }
6612 }
6613 break;
6614
6615 case GIMPLE_RESX:
6616 {
6617 gresx *resx_stmt = as_a <gresx *> (stmt);
6618 int r = gimple_resx_region (resx_stmt);
6619 r = move_stmt_eh_region_nr (r, p);
6620 gimple_resx_set_region (resx_stmt, r);
6621 }
6622 break;
6623
6624 case GIMPLE_EH_DISPATCH:
6625 {
6626 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6627 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6628 r = move_stmt_eh_region_nr (r, p);
6629 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6630 }
6631 break;
6632
6633 case GIMPLE_OMP_RETURN:
6634 case GIMPLE_OMP_CONTINUE:
6635 break;
6636 default:
6637 if (is_gimple_omp (stmt))
6638 {
6639 /* Do not remap variables inside OMP directives. Variables
6640 referenced in clauses and directive header belong to the
6641 parent function and should not be moved into the child
6642 function. */
6643 bool save_remap_decls_p = p->remap_decls_p;
6644 p->remap_decls_p = false;
6645 *handled_ops_p = true;
6646
6647 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6648 move_stmt_op, wi);
6649
6650 p->remap_decls_p = save_remap_decls_p;
6651 }
6652 break;
6653 }
6654
6655 return NULL_TREE;
6656 }
6657
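/* Editor's sketch: move_stmt_r and move_stmt_op are driven through the
   generic GIMPLE walker.  move_block_to_fn below does, for each statement
   iterator SI and move_stmt_d descriptor D:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = d;
     walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
*/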
6658 /* Move basic block BB from function CFUN to function DEST_FN. The
6659 block is moved out of the original linked list and placed after
6660 block AFTER in the new list. Also, the block is removed from the
6661 original array of blocks and placed in DEST_FN's array of blocks.
6662 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6663 updated to reflect the moved edges.
6664
6665 The local variables are remapped to new instances, VARS_MAP is used
6666 to record the mapping. */
6667
6668 static void
6669 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6670 basic_block after, bool update_edge_count_p,
6671 struct move_stmt_d *d)
6672 {
6673 struct control_flow_graph *cfg;
6674 edge_iterator ei;
6675 edge e;
6676 gimple_stmt_iterator si;
6677 unsigned old_len, new_len;
6678
6679 /* Remove BB from dominance structures. */
6680 delete_from_dominance_info (CDI_DOMINATORS, bb);
6681
6682 /* Move BB from its current loop to the copy in the new function. */
6683 if (current_loops)
6684 {
6685 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6686 if (new_loop)
6687 bb->loop_father = new_loop;
6688 }
6689
6690 /* Link BB to the new linked list. */
6691 move_block_after (bb, after);
6692
6693 /* Update the edge count in the corresponding flowgraphs. */
6694 if (update_edge_count_p)
6695 FOR_EACH_EDGE (e, ei, bb->succs)
6696 {
6697 cfun->cfg->x_n_edges--;
6698 dest_cfun->cfg->x_n_edges++;
6699 }
6700
6701 /* Remove BB from the original basic block array. */
6702 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6703 cfun->cfg->x_n_basic_blocks--;
6704
6705 /* Grow DEST_CFUN's basic block array if needed. */
6706 cfg = dest_cfun->cfg;
6707 cfg->x_n_basic_blocks++;
6708 if (bb->index >= cfg->x_last_basic_block)
6709 cfg->x_last_basic_block = bb->index + 1;
6710
6711 old_len = vec_safe_length (cfg->x_basic_block_info);
6712 if ((unsigned) cfg->x_last_basic_block >= old_len)
6713 {
6714 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6715 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6716 }
6717
6718 (*cfg->x_basic_block_info)[bb->index] = bb;
6719
6720 /* Remap the variables in phi nodes. */
6721 for (gphi_iterator psi = gsi_start_phis (bb);
6722 !gsi_end_p (psi); )
6723 {
6724 gphi *phi = psi.phi ();
6725 use_operand_p use;
6726 tree op = PHI_RESULT (phi);
6727 ssa_op_iter oi;
6728 unsigned i;
6729
6730 if (virtual_operand_p (op))
6731 {
6732 /* Remove the phi nodes for virtual operands (alias analysis will be
6733 run for the new function, anyway). */
6734 remove_phi_node (&psi, true);
6735 continue;
6736 }
6737
6738 SET_PHI_RESULT (phi,
6739 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6740 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6741 {
6742 op = USE_FROM_PTR (use);
6743 if (TREE_CODE (op) == SSA_NAME)
6744 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6745 }
6746
6747 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6748 {
6749 location_t locus = gimple_phi_arg_location (phi, i);
6750 tree block = LOCATION_BLOCK (locus);
6751
6752 if (locus == UNKNOWN_LOCATION)
6753 continue;
6754 if (d->orig_block == NULL_TREE || block == d->orig_block)
6755 {
6756 if (d->new_block == NULL_TREE)
6757 locus = LOCATION_LOCUS (locus);
6758 else
6759 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6760 gimple_phi_arg_set_location (phi, i, locus);
6761 }
6762 }
6763
6764 gsi_next (&psi);
6765 }
6766
6767 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6768 {
6769 gimple stmt = gsi_stmt (si);
6770 struct walk_stmt_info wi;
6771
6772 memset (&wi, 0, sizeof (wi));
6773 wi.info = d;
6774 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6775
6776 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6777 {
6778 tree label = gimple_label_label (label_stmt);
6779 int uid = LABEL_DECL_UID (label);
6780
6781 gcc_assert (uid > -1);
6782
6783 old_len = vec_safe_length (cfg->x_label_to_block_map);
6784 if (old_len <= (unsigned) uid)
6785 {
6786 new_len = 3 * uid / 2 + 1;
6787 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6788 }
6789
6790 (*cfg->x_label_to_block_map)[uid] = bb;
6791 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6792
6793 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6794
6795 if (uid >= dest_cfun->cfg->last_label_uid)
6796 dest_cfun->cfg->last_label_uid = uid + 1;
6797 }
6798
6799 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6800 remove_stmt_from_eh_lp_fn (cfun, stmt);
6801
6802 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6803 gimple_remove_stmt_histograms (cfun, stmt);
6804
6805 /* We cannot leave any operands allocated from the operand caches of
6806 the current function. */
6807 free_stmt_operands (cfun, stmt);
6808 push_cfun (dest_cfun);
6809 update_stmt (stmt);
6810 pop_cfun ();
6811 }
6812
6813 FOR_EACH_EDGE (e, ei, bb->succs)
6814 if (e->goto_locus != UNKNOWN_LOCATION)
6815 {
6816 tree block = LOCATION_BLOCK (e->goto_locus);
6817 if (d->orig_block == NULL_TREE
6818 || block == d->orig_block)
6819 e->goto_locus = d->new_block ?
6820 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6821 LOCATION_LOCUS (e->goto_locus);
6822 }
6823 }
6824
6825 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6826 the outermost EH region. Use REGION as the incoming base EH region. */
6827
6828 static eh_region
6829 find_outermost_region_in_block (struct function *src_cfun,
6830 basic_block bb, eh_region region)
6831 {
6832 gimple_stmt_iterator si;
6833
6834 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6835 {
6836 gimple stmt = gsi_stmt (si);
6837 eh_region stmt_region;
6838 int lp_nr;
6839
6840 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6841 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6842 if (stmt_region)
6843 {
6844 if (region == NULL)
6845 region = stmt_region;
6846 else if (stmt_region != region)
6847 {
6848 region = eh_region_outermost (src_cfun, stmt_region, region);
6849 gcc_assert (region != NULL);
6850 }
6851 }
6852 }
6853
6854 return region;
6855 }
6856
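/* Map LABEL_DECL DECL to a fresh artificial label in the destination
   function; DATA is the hash table recording the old-to-new mapping.
   (Editor's note: this doc comment is an editorial addition; the mapper is
   handed to duplicate_eh_regions by move_sese_region_to_fn below.)  */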
6857 static tree
6858 new_label_mapper (tree decl, void *data)
6859 {
6860 htab_t hash = (htab_t) data;
6861 struct tree_map *m;
6862 void **slot;
6863
6864 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6865
6866 m = XNEW (struct tree_map);
6867 m->hash = DECL_UID (decl);
6868 m->base.from = decl;
6869 m->to = create_artificial_label (UNKNOWN_LOCATION);
6870 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6871 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6872 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6873
6874 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6875 gcc_assert (*slot == NULL);
6876
6877 *slot = m;
6878
6879 return m->to;
6880 }
6881
6882 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6883 subblocks. */
6884
6885 static void
6886 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6887 tree to_context)
6888 {
6889 tree *tp, t;
6890
6891 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6892 {
6893 t = *tp;
6894 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6895 continue;
6896 replace_by_duplicate_decl (&t, vars_map, to_context);
6897 if (t != *tp)
6898 {
6899 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6900 {
6901 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6902 DECL_HAS_VALUE_EXPR_P (t) = 1;
6903 }
6904 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6905 *tp = t;
6906 }
6907 }
6908
6909 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6910 replace_block_vars_by_duplicates (block, vars_map, to_context);
6911 }
6912
6913 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6914 from FN1 to FN2. */
6915
6916 static void
6917 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6918 struct loop *loop)
6919 {
6920 /* Discard it from the old loop array. */
6921 (*get_loops (fn1))[loop->num] = NULL;
6922
6923 /* Place it in the new loop array, assigning it a new number. */
6924 loop->num = number_of_loops (fn2);
6925 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6926
6927 /* Recurse to children. */
6928 for (loop = loop->inner; loop; loop = loop->next)
6929 fixup_loop_arrays_after_move (fn1, fn2, loop);
6930 }
6931
6932 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6933 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6934
6935 DEBUG_FUNCTION void
6936 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6937 {
6938 basic_block bb;
6939 edge_iterator ei;
6940 edge e;
6941 bitmap bbs = BITMAP_ALLOC (NULL);
6942 int i;
6943
6944 gcc_assert (entry != NULL);
6945 gcc_assert (entry != exit);
6946 gcc_assert (bbs_p != NULL);
6947
6948 gcc_assert (bbs_p->length () > 0);
6949
6950 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6951 bitmap_set_bit (bbs, bb->index);
6952
6953 gcc_assert (bitmap_bit_p (bbs, entry->index));
6954 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6955
6956 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6957 {
6958 if (bb == entry)
6959 {
6960 gcc_assert (single_pred_p (entry));
6961 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6962 }
6963 else
6964 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6965 {
6966 e = ei_edge (ei);
6967 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6968 }
6969
6970 if (bb == exit)
6971 {
6972 gcc_assert (single_succ_p (exit));
6973 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6974 }
6975 else
6976 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6977 {
6978 e = ei_edge (ei);
6979 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6980 }
6981 }
6982
6983 BITMAP_FREE (bbs);
6984 }
6985
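/* Editor's note: move_sese_region_to_fn below invokes this checker under
   ENABLE_CHECKING, right after gathering the region's blocks:

     verify_sese (entry_bb, exit_bb, &bbs);
*/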
6986
6987 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6988 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6989 single basic block in the original CFG and the new basic block is
6990 returned. DEST_CFUN must not have a CFG yet.
6991
6992 Note that the region need not be a pure SESE region. Blocks inside
6993 the region may contain calls to abort/exit. The only restriction
6994 is that ENTRY_BB should be the only entry point and it must
6995 dominate EXIT_BB.
6996
6997 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6998 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6999 to the new function.
7000
7001 All local variables referenced in the region are assumed to be in
7002 the corresponding BLOCK_VARS and unexpanded variable lists
7003 associated with DEST_CFUN. */
7004
7005 basic_block
7006 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7007 basic_block exit_bb, tree orig_block)
7008 {
7009 vec<basic_block> bbs, dom_bbs;
7010 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7011 basic_block after, bb, *entry_pred, *exit_succ, abb;
7012 struct function *saved_cfun = cfun;
7013 int *entry_flag, *exit_flag;
7014 unsigned *entry_prob, *exit_prob;
7015 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7016 edge e;
7017 edge_iterator ei;
7018 htab_t new_label_map;
7019 hash_map<void *, void *> *eh_map;
7020 struct loop *loop = entry_bb->loop_father;
7021 struct loop *loop0 = get_loop (saved_cfun, 0);
7022 struct move_stmt_d d;
7023
7024 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7025 region. */
7026 gcc_assert (entry_bb != exit_bb
7027 && (!exit_bb
7028 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7029
7030 /* Collect all the blocks in the region. Manually add ENTRY_BB
7031 because it won't be added by dfs_enumerate_from. */
7032 bbs.create (0);
7033 bbs.safe_push (entry_bb);
7034 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7035 #ifdef ENABLE_CHECKING
7036 verify_sese (entry_bb, exit_bb, &bbs);
7037 #endif
7038
7039 /* The blocks that used to be dominated by something in BBS will now be
7040 dominated by the new block. */
7041 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7042 bbs.address (),
7043 bbs.length ());
7044
7045 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7046 the predecessor edges to ENTRY_BB and the successor edges to
7047 EXIT_BB so that we can re-attach them to the new basic block that
7048 will replace the region. */
7049 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7050 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7051 entry_flag = XNEWVEC (int, num_entry_edges);
7052 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7053 i = 0;
7054 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7055 {
7056 entry_prob[i] = e->probability;
7057 entry_flag[i] = e->flags;
7058 entry_pred[i++] = e->src;
7059 remove_edge (e);
7060 }
7061
7062 if (exit_bb)
7063 {
7064 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7065 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7066 exit_flag = XNEWVEC (int, num_exit_edges);
7067 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7068 i = 0;
7069 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7070 {
7071 exit_prob[i] = e->probability;
7072 exit_flag[i] = e->flags;
7073 exit_succ[i++] = e->dest;
7074 remove_edge (e);
7075 }
7076 }
7077 else
7078 {
7079 num_exit_edges = 0;
7080 exit_succ = NULL;
7081 exit_flag = NULL;
7082 exit_prob = NULL;
7083 }
7084
7085 /* Switch context to the child function to initialize DEST_FN's CFG. */
7086 gcc_assert (dest_cfun->cfg == NULL);
7087 push_cfun (dest_cfun);
7088
7089 init_empty_tree_cfg ();
7090
7091 /* Initialize EH information for the new function. */
7092 eh_map = NULL;
7093 new_label_map = NULL;
7094 if (saved_cfun->eh)
7095 {
7096 eh_region region = NULL;
7097
7098 FOR_EACH_VEC_ELT (bbs, i, bb)
7099 region = find_outermost_region_in_block (saved_cfun, bb, region);
7100
7101 init_eh_for_function ();
7102 if (region != NULL)
7103 {
7104 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7105 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7106 new_label_mapper, new_label_map);
7107 }
7108 }
7109
7110 /* Initialize an empty loop tree. */
7111 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7112 init_loops_structure (dest_cfun, loops, 1);
7113 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7114 set_loops_for_fn (dest_cfun, loops);
7115
7116 /* Move the outlined loop tree part. */
7117 num_nodes = bbs.length ();
7118 FOR_EACH_VEC_ELT (bbs, i, bb)
7119 {
7120 if (bb->loop_father->header == bb)
7121 {
7122 struct loop *this_loop = bb->loop_father;
7123 struct loop *outer = loop_outer (this_loop);
7124 if (outer == loop
7125 /* If the SESE region contains some bbs ending with
7126 a noreturn call, those are considered to belong
7127 to the outermost loop in saved_cfun, rather than
7128 the entry_bb's loop_father. */
7129 || outer == loop0)
7130 {
7131 if (outer != loop)
7132 num_nodes -= this_loop->num_nodes;
7133 flow_loop_tree_node_remove (bb->loop_father);
7134 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7135 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7136 }
7137 }
7138 else if (bb->loop_father == loop0 && loop0 != loop)
7139 num_nodes--;
7140
7141 /* Remove loop exits from the outlined region. */
7142 if (loops_for_fn (saved_cfun)->exits)
7143 FOR_EACH_EDGE (e, ei, bb->succs)
7144 {
7145 struct loops *l = loops_for_fn (saved_cfun);
7146 loop_exit **slot
7147 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7148 NO_INSERT);
7149 if (slot)
7150 l->exits->clear_slot (slot);
7151 }
7152 }
7153
7154
7155 /* Adjust the number of blocks in the tree root of the outlined part. */
7156 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7157
7158 /* Setup a mapping to be used by move_block_to_fn. */
7159 loop->aux = current_loops->tree_root;
7160 loop0->aux = current_loops->tree_root;
7161
7162 pop_cfun ();
7163
7164 /* Move blocks from BBS into DEST_CFUN. */
7165 gcc_assert (bbs.length () >= 2);
7166 after = dest_cfun->cfg->x_entry_block_ptr;
7167 hash_map<tree, tree> vars_map;
7168
7169 memset (&d, 0, sizeof (d));
7170 d.orig_block = orig_block;
7171 d.new_block = DECL_INITIAL (dest_cfun->decl);
7172 d.from_context = cfun->decl;
7173 d.to_context = dest_cfun->decl;
7174 d.vars_map = &vars_map;
7175 d.new_label_map = new_label_map;
7176 d.eh_map = eh_map;
7177 d.remap_decls_p = true;
7178
7179 FOR_EACH_VEC_ELT (bbs, i, bb)
7180 {
7181 /* No need to update edge counts on the last block. It has
7182 already been updated earlier when we detached the region from
7183 the original CFG. */
7184 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7185 after = bb;
7186 }
7187
7188 loop->aux = NULL;
7189 loop0->aux = NULL;
7190 /* Loop sizes are no longer correct, fix them up. */
7191 loop->num_nodes -= num_nodes;
7192 for (struct loop *outer = loop_outer (loop);
7193 outer; outer = loop_outer (outer))
7194 outer->num_nodes -= num_nodes;
7195 loop0->num_nodes -= bbs.length () - num_nodes;
7196
7197 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7198 {
7199 struct loop *aloop;
7200 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7201 if (aloop != NULL)
7202 {
7203 if (aloop->simduid)
7204 {
7205 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7206 d.to_context);
7207 dest_cfun->has_simduid_loops = true;
7208 }
7209 if (aloop->force_vectorize)
7210 dest_cfun->has_force_vectorize_loops = true;
7211 }
7212 }
7213
7214 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7215 if (orig_block)
7216 {
7217 tree block;
7218 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7219 == NULL_TREE);
7220 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7221 = BLOCK_SUBBLOCKS (orig_block);
7222 for (block = BLOCK_SUBBLOCKS (orig_block);
7223 block; block = BLOCK_CHAIN (block))
7224 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7225 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7226 }
7227
7228 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7229 &vars_map, dest_cfun->decl);
7230
7231 if (new_label_map)
7232 htab_delete (new_label_map);
7233 if (eh_map)
7234 delete eh_map;
7235
7236 /* Rewire the entry and exit blocks. The successor to the entry
7237 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7238 the child function. Similarly, the predecessor of DEST_FN's
7239 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7240 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7241 various CFG manipulation functions get to the right CFG.
7242
7243 FIXME, this is silly. The CFG ought to become a parameter to
7244 these helpers. */
7245 push_cfun (dest_cfun);
7246 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7247 if (exit_bb)
7248 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7249 pop_cfun ();
7250
7251 /* Back in the original function, the SESE region has disappeared,
7252 create a new basic block in its place. */
7253 bb = create_empty_bb (entry_pred[0]);
7254 if (current_loops)
7255 add_bb_to_loop (bb, loop);
7256 for (i = 0; i < num_entry_edges; i++)
7257 {
7258 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7259 e->probability = entry_prob[i];
7260 }
7261
7262 for (i = 0; i < num_exit_edges; i++)
7263 {
7264 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7265 e->probability = exit_prob[i];
7266 }
7267
7268 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7269 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7270 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7271 dom_bbs.release ();
7272
7273 if (exit_bb)
7274 {
7275 free (exit_prob);
7276 free (exit_flag);
7277 free (exit_succ);
7278 }
7279 free (entry_prob);
7280 free (entry_flag);
7281 free (entry_pred);
7282 bbs.release ();
7283
7284 return bb;
7285 }
7286
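/* Editor's note: the main client of move_sese_region_to_fn is OMP
   expansion (expand_omp_taskreg in omp-low.c), which outlines the body of a
   parallel/task region into its child function, roughly:

     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, NEW_BB is the single block that replaces the region in the
   parent CFG; the caller then materializes the call to the child function
   there.  */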
7287
7288 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7289 dumpfile.h). */
7290
7291 void
7292 dump_function_to_file (tree fndecl, FILE *file, int flags)
7293 {
7294 tree arg, var, old_current_fndecl = current_function_decl;
7295 struct function *dsf;
7296 bool ignore_topmost_bind = false, any_var = false;
7297 basic_block bb;
7298 tree chain;
7299 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7300 && decl_is_tm_clone (fndecl));
7301 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7302
7303 current_function_decl = fndecl;
7304 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7305
7306 arg = DECL_ARGUMENTS (fndecl);
7307 while (arg)
7308 {
7309 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7310 fprintf (file, " ");
7311 print_generic_expr (file, arg, dump_flags);
7312 if (flags & TDF_VERBOSE)
7313 print_node (file, "", arg, 4);
7314 if (DECL_CHAIN (arg))
7315 fprintf (file, ", ");
7316 arg = DECL_CHAIN (arg);
7317 }
7318 fprintf (file, ")\n");
7319
7320 if (flags & TDF_VERBOSE)
7321 print_node (file, "", fndecl, 2);
7322
7323 dsf = DECL_STRUCT_FUNCTION (fndecl);
7324 if (dsf && (flags & TDF_EH))
7325 dump_eh_tree (file, dsf);
7326
7327 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7328 {
7329 dump_node (fndecl, TDF_SLIM | flags, file);
7330 current_function_decl = old_current_fndecl;
7331 return;
7332 }
7333
7334 /* When GIMPLE is lowered, the variables are no longer available in
7335 BIND_EXPRs, so display them separately. */
7336 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7337 {
7338 unsigned ix;
7339 ignore_topmost_bind = true;
7340
7341 fprintf (file, "{\n");
7342 if (!vec_safe_is_empty (fun->local_decls))
7343 FOR_EACH_LOCAL_DECL (fun, ix, var)
7344 {
7345 print_generic_decl (file, var, flags);
7346 if (flags & TDF_VERBOSE)
7347 print_node (file, "", var, 4);
7348 fprintf (file, "\n");
7349
7350 any_var = true;
7351 }
7352 if (gimple_in_ssa_p (cfun))
7353 for (ix = 1; ix < num_ssa_names; ++ix)
7354 {
7355 tree name = ssa_name (ix);
7356 if (name && !SSA_NAME_VAR (name))
7357 {
7358 fprintf (file, " ");
7359 print_generic_expr (file, TREE_TYPE (name), flags);
7360 fprintf (file, " ");
7361 print_generic_expr (file, name, flags);
7362 fprintf (file, ";\n");
7363
7364 any_var = true;
7365 }
7366 }
7367 }
7368
7369 if (fun && fun->decl == fndecl
7370 && fun->cfg
7371 && basic_block_info_for_fn (fun))
7372 {
7373 /* If the CFG has been built, emit a CFG-based dump. */
7374 if (!ignore_topmost_bind)
7375 fprintf (file, "{\n");
7376
7377 if (any_var && n_basic_blocks_for_fn (fun))
7378 fprintf (file, "\n");
7379
7380 FOR_EACH_BB_FN (bb, fun)
7381 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7382
7383 fprintf (file, "}\n");
7384 }
7385 else if (DECL_SAVED_TREE (fndecl) == NULL)
7386 {
7387 /* The function is now in GIMPLE form but the CFG has not been
7388 built yet. Emit the single sequence of GIMPLE statements
7389 that make up its body. */
7390 gimple_seq body = gimple_body (fndecl);
7391
7392 if (gimple_seq_first_stmt (body)
7393 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7394 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7395 print_gimple_seq (file, body, 0, flags);
7396 else
7397 {
7398 if (!ignore_topmost_bind)
7399 fprintf (file, "{\n");
7400
7401 if (any_var)
7402 fprintf (file, "\n");
7403
7404 print_gimple_seq (file, body, 2, flags);
7405 fprintf (file, "}\n");
7406 }
7407 }
7408 else
7409 {
7410 int indent;
7411
7412 /* Make a tree based dump. */
7413 chain = DECL_SAVED_TREE (fndecl);
7414 if (chain && TREE_CODE (chain) == BIND_EXPR)
7415 {
7416 if (ignore_topmost_bind)
7417 {
7418 chain = BIND_EXPR_BODY (chain);
7419 indent = 2;
7420 }
7421 else
7422 indent = 0;
7423 }
7424 else
7425 {
7426 if (!ignore_topmost_bind)
7427 {
7428 fprintf (file, "{\n");
7429 /* No topmost bind, pretend it's ignored for later. */
7430 ignore_topmost_bind = true;
7431 }
7432 indent = 2;
7433 }
7434
7435 if (any_var)
7436 fprintf (file, "\n");
7437
7438 print_generic_stmt_indented (file, chain, flags, indent);
7439 if (ignore_topmost_bind)
7440 fprintf (file, "}\n");
7441 }
7442
7443 if (flags & TDF_ENUMERATE_LOCALS)
7444 dump_enumerated_decls (file, flags);
7445 fprintf (file, "\n\n");
7446
7447 current_function_decl = old_current_fndecl;
7448 }
7449
7450 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7451
7452 DEBUG_FUNCTION void
7453 debug_function (tree fn, int flags)
7454 {
7455 dump_function_to_file (fn, stderr, flags);
7456 }
7457
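/* Editor's note: this is the usual entry point from a debugger, e.g.

     (gdb) call debug_function (current_function_decl, TDF_VOPS | TDF_LINENO)

   with the TDF_* dump flags defined in dumpfile.h.  */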
7458
7459 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7460
7461 static void
7462 print_pred_bbs (FILE *file, basic_block bb)
7463 {
7464 edge e;
7465 edge_iterator ei;
7466
7467 FOR_EACH_EDGE (e, ei, bb->preds)
7468 fprintf (file, "bb_%d ", e->src->index);
7469 }
7470
7471
7472 /* Print on FILE the indexes for the successors of basic_block BB. */
7473
7474 static void
7475 print_succ_bbs (FILE *file, basic_block bb)
7476 {
7477 edge e;
7478 edge_iterator ei;
7479
7480 FOR_EACH_EDGE (e, ei, bb->succs)
7481 fprintf (file, "bb_%d ", e->dest->index);
7482 }
7483
7484 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7485
7486 void
7487 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7488 {
7489 char *s_indent = (char *) alloca ((size_t) indent + 1);
7490 memset ((void *) s_indent, ' ', (size_t) indent);
7491 s_indent[indent] = '\0';
7492
7493 /* Print basic_block's header. */
7494 if (verbosity >= 2)
7495 {
7496 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7497 print_pred_bbs (file, bb);
7498 fprintf (file, "}, succs = {");
7499 print_succ_bbs (file, bb);
7500 fprintf (file, "})\n");
7501 }
7502
7503 /* Print basic_block's body. */
7504 if (verbosity >= 3)
7505 {
7506 fprintf (file, "%s {\n", s_indent);
7507 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7508 fprintf (file, "%s }\n", s_indent);
7509 }
7510 }
7511
7512 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7513
7514 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7515 VERBOSITY level, this outputs the contents of the loop, or just its
7516 structure. */
7517
7518 static void
7519 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7520 {
7521 char *s_indent;
7522 basic_block bb;
7523
7524 if (loop == NULL)
7525 return;
7526
7527 s_indent = (char *) alloca ((size_t) indent + 1);
7528 memset ((void *) s_indent, ' ', (size_t) indent);
7529 s_indent[indent] = '\0';
7530
7531 /* Print loop's header. */
7532 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7533 if (loop->header)
7534 fprintf (file, "header = %d", loop->header->index);
7535 else
7536 {
7537 fprintf (file, "deleted)\n");
7538 return;
7539 }
7540 if (loop->latch)
7541 fprintf (file, ", latch = %d", loop->latch->index);
7542 else
7543 fprintf (file, ", multiple latches");
7544 fprintf (file, ", niter = ");
7545 print_generic_expr (file, loop->nb_iterations, 0);
7546
7547 if (loop->any_upper_bound)
7548 {
7549 fprintf (file, ", upper_bound = ");
7550 print_decu (loop->nb_iterations_upper_bound, file);
7551 }
7552
7553 if (loop->any_estimate)
7554 {
7555 fprintf (file, ", estimate = ");
7556 print_decu (loop->nb_iterations_estimate, file);
7557 }
7558 fprintf (file, ")\n");
7559
7560 /* Print loop's body. */
7561 if (verbosity >= 1)
7562 {
7563 fprintf (file, "%s{\n", s_indent);
7564 FOR_EACH_BB_FN (bb, cfun)
7565 if (bb->loop_father == loop)
7566 print_loops_bb (file, bb, indent, verbosity);
7567
7568 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7569 fprintf (file, "%s}\n", s_indent);
7570 }
7571 }
7572
7573 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7574 spaces. Depending on the VERBOSITY level, this outputs the contents of the
7575 loop, or just its structure. */
7576
7577 static void
7578 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7579 int verbosity)
7580 {
7581 if (loop == NULL)
7582 return;
7583
7584 print_loop (file, loop, indent, verbosity);
7585 print_loop_and_siblings (file, loop->next, indent, verbosity);
7586 }
7587
7588 /* Follow a CFG edge from the entry point of the program, and on entry
7589 of a loop, pretty print the loop structure on FILE. */
7590
7591 void
7592 print_loops (FILE *file, int verbosity)
7593 {
7594 basic_block bb;
7595
7596 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7597 if (bb && bb->loop_father)
7598 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7599 }
7600
7601 /* Dump a loop. */
7602
7603 DEBUG_FUNCTION void
7604 debug (struct loop &ref)
7605 {
7606 print_loop (stderr, &ref, 0, /*verbosity*/0);
7607 }
7608
7609 DEBUG_FUNCTION void
7610 debug (struct loop *ptr)
7611 {
7612 if (ptr)
7613 debug (*ptr);
7614 else
7615 fprintf (stderr, "<nil>\n");
7616 }
7617
7618 /* Dump a loop verbosely. */
7619
7620 DEBUG_FUNCTION void
7621 debug_verbose (struct loop &ref)
7622 {
7623 print_loop (stderr, &ref, 0, /*verbosity*/3);
7624 }
7625
7626 DEBUG_FUNCTION void
7627 debug_verbose (struct loop *ptr)
7628 {
7629 if (ptr)
7630 debug_verbose (*ptr);
7631 else
7632 fprintf (stderr, "<nil>\n");
7633 }
7634
7635
7636 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7637
7638 DEBUG_FUNCTION void
7639 debug_loops (int verbosity)
7640 {
7641 print_loops (stderr, verbosity);
7642 }
7643
7644 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7645
7646 DEBUG_FUNCTION void
7647 debug_loop (struct loop *loop, int verbosity)
7648 {
7649 print_loop (stderr, loop, 0, verbosity);
7650 }
7651
7652 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7653 level. */
7654
7655 DEBUG_FUNCTION void
7656 debug_loop_num (unsigned num, int verbosity)
7657 {
7658 debug_loop (get_loop (cfun, num), verbosity);
7659 }
7660
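/* Editor's note: from a debugger, `call debug_loops (3)' dumps the whole
   loop tree including basic-block bodies, while verbosity 0 prints only the
   structural header/latch/niter lines produced by print_loop above.  */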
7661 /* Return true if BB ends with a call, possibly followed by some
7662 instructions that must stay with the call. Return false
7663 otherwise. */
7664
7665 static bool
7666 gimple_block_ends_with_call_p (basic_block bb)
7667 {
7668 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7669 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7670 }
7671
7672
7673 /* Return true if BB ends with a conditional branch. Return false
7674 otherwise. */
7675
7676 static bool
7677 gimple_block_ends_with_condjump_p (const_basic_block bb)
7678 {
7679 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7680 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7681 }
7682
7683
7684 /* Return true if we need to add fake edge to exit at statement T.
7685 Helper function for gimple_flow_call_edges_add. */
7686
7687 static bool
7688 need_fake_edge_p (gimple t)
7689 {
7690 tree fndecl = NULL_TREE;
7691 int call_flags = 0;
7692
7693 /* NORETURN and LONGJMP calls already have an edge to exit.
7694 CONST and PURE calls do not need one.
7695 We don't currently check for CONST and PURE here, although
7696 it would be a good idea, because those attributes are
7697 figured out from the RTL in mark_constant_function, and
7698 the counter incrementation code from -fprofile-arcs
7699 leads to different results from -fbranch-probabilities. */
7700 if (is_gimple_call (t))
7701 {
7702 fndecl = gimple_call_fndecl (t);
7703 call_flags = gimple_call_flags (t);
7704 }
7705
7706 if (is_gimple_call (t)
7707 && fndecl
7708 && DECL_BUILT_IN (fndecl)
7709 && (call_flags & ECF_NOTHROW)
7710 && !(call_flags & ECF_RETURNS_TWICE)
7711 /* fork() doesn't really return twice, but wrapping it
7712 in __gcov_fork(), which calls __gcov_flush() and clears
7713 the counters before forking, has the same effect as
7714 returning twice. Force a fake edge. */
7715 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7716 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7717 return false;
7718
7719 if (is_gimple_call (t))
7720 {
7721 edge_iterator ei;
7722 edge e;
7723 basic_block bb;
7724
7725 if (!(call_flags & ECF_NORETURN))
7726 return true;
7727
7728 bb = gimple_bb (t);
7729 FOR_EACH_EDGE (e, ei, bb->succs)
7730 if ((e->flags & EDGE_FAKE) == 0)
7731 return true;
7732 }
7733
7734 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7735 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7736 return true;
7737
7738 return false;
7739 }
7740
7741
7742 /* Add fake edges to the function exit for any non-constant and
7743 non-noreturn calls (or noreturn calls with EH/abnormal edges),
7744 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7745 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7746 that were split.
7747
7748 The goal is to expose cases in which entering a basic block does
7749 not imply that all subsequent instructions must be executed. */
7750
7751 static int
7752 gimple_flow_call_edges_add (sbitmap blocks)
7753 {
7754 int i;
7755 int blocks_split = 0;
7756 int last_bb = last_basic_block_for_fn (cfun);
7757 bool check_last_block = false;
7758
7759 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7760 return 0;
7761
7762 if (! blocks)
7763 check_last_block = true;
7764 else
7765 check_last_block = bitmap_bit_p (blocks,
7766 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7767
7768 /* In the last basic block, before epilogue generation, there will be
7769 a fallthru edge to EXIT. Special care is required if the last insn
7770 of the last basic block is a call because make_edge folds duplicate
7771 edges, which would result in the fallthru edge also being marked
7772 fake, which would result in the fallthru edge being removed by
7773 remove_fake_edges, which would result in an invalid CFG.
7774
7775 Moreover, we can't elide the outgoing fake edge, since the block
7776 profiler needs to take this into account in order to solve the minimal
7777 spanning tree in the case that the call doesn't return.
7778
7779 Handle this by adding a dummy instruction in a new last basic block. */
7780 if (check_last_block)
7781 {
7782 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7783 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7784 gimple t = NULL;
7785
7786 if (!gsi_end_p (gsi))
7787 t = gsi_stmt (gsi);
7788
7789 if (t && need_fake_edge_p (t))
7790 {
7791 edge e;
7792
7793 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7794 if (e)
7795 {
7796 gsi_insert_on_edge (e, gimple_build_nop ());
7797 gsi_commit_edge_inserts ();
7798 }
7799 }
7800 }
7801
7802 /* Now add fake edges to the function exit for any non-constant
7803 calls, since there is no way we can determine whether they will
7804 return or not. */
7805 for (i = 0; i < last_bb; i++)
7806 {
7807 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7808 gimple_stmt_iterator gsi;
7809 gimple stmt, last_stmt;
7810
7811 if (!bb)
7812 continue;
7813
7814 if (blocks && !bitmap_bit_p (blocks, i))
7815 continue;
7816
7817 gsi = gsi_last_nondebug_bb (bb);
7818 if (!gsi_end_p (gsi))
7819 {
7820 last_stmt = gsi_stmt (gsi);
7821 do
7822 {
7823 stmt = gsi_stmt (gsi);
7824 if (need_fake_edge_p (stmt))
7825 {
7826 edge e;
7827
7828 /* The handling above of the final block before the
7829 epilogue should be enough to verify that there is
7830 no edge to the exit block in CFG already.
7831 Calling make_edge in such case would cause us to
7832 mark that edge as fake and remove it later. */
7833 #ifdef ENABLE_CHECKING
7834 if (stmt == last_stmt)
7835 {
7836 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7837 gcc_assert (e == NULL);
7838 }
7839 #endif
7840
7841 /* Note that the following may create a new basic block
7842 and renumber the existing basic blocks. */
7843 if (stmt != last_stmt)
7844 {
7845 e = split_block (bb, stmt);
7846 if (e)
7847 blocks_split++;
7848 }
7849 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7850 }
7851 gsi_prev (&gsi);
7852 }
7853 while (!gsi_end_p (gsi));
7854 }
7855 }
7856
7857 if (blocks_split)
7858 verify_flow_info ();
7859
7860 return blocks_split;
7861 }
7862
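/* Editor's note: gimple_flow_call_edges_add is exposed through the
   flow_call_edges_add member of the gimple_cfg_hooks table at the end of
   this file.  As the comment above explains, profiling instrumentation
   depends on these fake edges to keep the minimal spanning tree of counted
   edges solvable when a call never returns.  */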
7863 /* Removes edge E and all the blocks dominated by it, and updates dominance
7864 information. The IL in E->src needs to be updated separately.
7865 If dominance info is not available, only the edge E is removed. */
7866
7867 void
7868 remove_edge_and_dominated_blocks (edge e)
7869 {
7870 vec<basic_block> bbs_to_remove = vNULL;
7871 vec<basic_block> bbs_to_fix_dom = vNULL;
7872 bitmap df, df_idom;
7873 edge f;
7874 edge_iterator ei;
7875 bool none_removed = false;
7876 unsigned i;
7877 basic_block bb, dbb;
7878 bitmap_iterator bi;
7879
7880 /* If we are removing a path inside a non-root loop, that may change
7881 loop ownership of blocks or remove loops entirely; mark loops for fixup. */
7882 if (current_loops
7883 && loop_outer (e->src->loop_father) != NULL
7884 && e->src->loop_father == e->dest->loop_father)
7885 loops_state_set (LOOPS_NEED_FIXUP);
7886
7887 if (!dom_info_available_p (CDI_DOMINATORS))
7888 {
7889 remove_edge (e);
7890 return;
7891 }
7892
7893 /* No updating is needed for edges to exit. */
7894 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7895 {
7896 if (cfgcleanup_altered_bbs)
7897 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7898 remove_edge (e);
7899 return;
7900 }
7901
7902 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7903 that is not dominated by E->dest, then this set is empty. Otherwise,
7904 all the basic blocks dominated by E->dest are removed.
7905
7906 Also, to DF_IDOM we store the immediate dominators of the blocks in
7907 the dominance frontier of E (i.e., of the successors of the
7908 removed blocks, if there are any, and of E->dest otherwise). */
7909 FOR_EACH_EDGE (f, ei, e->dest->preds)
7910 {
7911 if (f == e)
7912 continue;
7913
7914 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7915 {
7916 none_removed = true;
7917 break;
7918 }
7919 }
7920
7921 df = BITMAP_ALLOC (NULL);
7922 df_idom = BITMAP_ALLOC (NULL);
7923
7924 if (none_removed)
7925 bitmap_set_bit (df_idom,
7926 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7927 else
7928 {
7929 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7930 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7931 {
7932 FOR_EACH_EDGE (f, ei, bb->succs)
7933 {
7934 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7935 bitmap_set_bit (df, f->dest->index);
7936 }
7937 }
7938 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7939 bitmap_clear_bit (df, bb->index);
7940
7941 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7942 {
7943 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7944 bitmap_set_bit (df_idom,
7945 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7946 }
7947 }
7948
7949 if (cfgcleanup_altered_bbs)
7950 {
7951 /* Record the set of the altered basic blocks. */
7952 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7953 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7954 }
7955
7956 /* Remove E and the cancelled blocks. */
7957 if (none_removed)
7958 remove_edge (e);
7959 else
7960 {
7961 /* Walk backwards so as to get a chance to substitute all
7962 released DEFs into debug stmts. See
7963 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7964 details. */
7965 for (i = bbs_to_remove.length (); i-- > 0; )
7966 delete_basic_block (bbs_to_remove[i]);
7967 }
7968
7969 /* Update the dominance information. The immediate dominator may change only
7970 for blocks whose immediate dominator belongs to DF_IDOM:
7971
7972 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7973 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7974 Z dominates X after the removal. Before removal, there exists a path P
7975 from Y to X that avoids Z. Let F be the last edge on P that is
7976 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7977 dominates W, and because of P, Z does not dominate W), and W belongs to
7978 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7979 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7980 {
7981 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7982 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7983 dbb;
7984 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7985 bbs_to_fix_dom.safe_push (dbb);
7986 }
7987
7988 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7989
7990 BITMAP_FREE (df);
7991 BITMAP_FREE (df_idom);
7992 bbs_to_remove.release ();
7993 bbs_to_fix_dom.release ();
7994 }
7995
7996 /* Purge dead EH edges from basic block BB. */
7997
7998 bool
7999 gimple_purge_dead_eh_edges (basic_block bb)
8000 {
8001 bool changed = false;
8002 edge e;
8003 edge_iterator ei;
8004 gimple stmt = last_stmt (bb);
8005
8006 if (stmt && stmt_can_throw_internal (stmt))
8007 return false;
8008
8009 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8010 {
8011 if (e->flags & EDGE_EH)
8012 {
8013 remove_edge_and_dominated_blocks (e);
8014 changed = true;
8015 }
8016 else
8017 ei_next (&ei);
8018 }
8019
8020 return changed;
8021 }
8022
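/* A typical caller pattern, sketched here for illustration (compare
   execute_fixup_cfg further below); the surrounding pass state (GSI,
   TODO) is hypothetical:

     if (fold_stmt (&gsi)
	 && maybe_clean_eh_stmt (gsi_stmt (gsi))
	 && gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi))))
       todo |= TODO_cleanup_cfg;

   i.e. once a statement is folded into a form that can no longer throw,
   its now-dead EH edges are purged and a CFG cleanup is requested.  */
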
8023 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8024
8025 bool
8026 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8027 {
8028 bool changed = false;
8029 unsigned i;
8030 bitmap_iterator bi;
8031
8032 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8033 {
8034 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8035
8036 /* An earlier call to gimple_purge_dead_eh_edges could have
8037 removed this basic block already. */
8038 gcc_assert (bb || changed);
8039 if (bb != NULL)
8040 changed |= gimple_purge_dead_eh_edges (bb);
8041 }
8042
8043 return changed;
8044 }
8045
8046 /* Purge dead abnormal call edges from basic block BB. */
8047
8048 bool
8049 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8050 {
8051 bool changed = false;
8052 edge e;
8053 edge_iterator ei;
8054 gimple stmt = last_stmt (bb);
8055
8056 if (!cfun->has_nonlocal_label
8057 && !cfun->calls_setjmp)
8058 return false;
8059
8060 if (stmt && stmt_can_make_abnormal_goto (stmt))
8061 return false;
8062
8063 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8064 {
8065 if (e->flags & EDGE_ABNORMAL)
8066 {
8067 if (e->flags & EDGE_FALLTHRU)
8068 e->flags &= ~EDGE_ABNORMAL;
8069 else
8070 remove_edge_and_dominated_blocks (e);
8071 changed = true;
8072 }
8073 else
8074 ei_next (&ei);
8075 }
8076
8077 return changed;
8078 }
8079
8080 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8081
8082 bool
8083 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8084 {
8085 bool changed = false;
8086 unsigned i;
8087 bitmap_iterator bi;
8088
8089 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8090 {
8091 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8092
8093 /* An earlier call to gimple_purge_dead_abnormal_call_edges could
8094 have removed this basic block already. */
8095 gcc_assert (bb || changed);
8096 if (bb != NULL)
8097 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8098 }
8099
8100 return changed;
8101 }
8102
8103 /* This function is called whenever a new edge is created or
8104 redirected. */
8105
8106 static void
8107 gimple_execute_on_growing_pred (edge e)
8108 {
8109 basic_block bb = e->dest;
8110
8111 if (!gimple_seq_empty_p (phi_nodes (bb)))
8112 reserve_phi_args_for_new_edge (bb);
8113 }
8114
8115 /* This function is called immediately before edge E is removed from
8116 the edge vector E->dest->preds. */
8117
8118 static void
8119 gimple_execute_on_shrinking_pred (edge e)
8120 {
8121 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8122 remove_phi_args (e);
8123 }
8124
8125 /*---------------------------------------------------------------------------
8126 Helper functions for Loop versioning
8127 ---------------------------------------------------------------------------*/
8128
8129 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8130 of 'first'. Both of them are dominated by 'new_head' basic block. When
8131 'new_head' was created by splitting 'second's incoming edge, it received
8132 phi arguments on that edge from split_edge(). Later, an additional edge
8133 'e' was created to connect 'new_head' and 'first'. This routine now adds,
8134 on this additional edge 'e', the phi args that the 'new_head'-to-'second'
8135 edge received as part of the edge splitting. */
8136
8137 static void
8138 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8139 basic_block new_head, edge e)
8140 {
8141 gphi *phi1, *phi2;
8142 gphi_iterator psi1, psi2;
8143 tree def;
8144 edge e2 = find_edge (new_head, second);
8145
8146 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8147 edge, we should always have an edge from NEW_HEAD to SECOND. */
8148 gcc_assert (e2 != NULL);
8149
8150 /* Walk all phi nodes of the 'second' basic block and add phi args to
8151 edge 'e' for the 'first' head. PHI args are always in correct order. */
8152
8153 for (psi2 = gsi_start_phis (second),
8154 psi1 = gsi_start_phis (first);
8155 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8156 gsi_next (&psi2), gsi_next (&psi1))
8157 {
8158 phi1 = psi1.phi ();
8159 phi2 = psi2.phi ();
8160 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8161 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8162 }
8163 }
8164
8165
8166 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8167 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8168 the destination of the ELSE part. */
8169
8170 static void
8171 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8172 basic_block second_head ATTRIBUTE_UNUSED,
8173 basic_block cond_bb, void *cond_e)
8174 {
8175 gimple_stmt_iterator gsi;
8176 gimple new_cond_expr;
8177 tree cond_expr = (tree) cond_e;
8178 edge e0;
8179
8180 /* Build the new conditional expression.  */
8181 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8182 NULL_TREE, NULL_TREE);
8183
8184 /* Add new cond in cond_bb. */
8185 gsi = gsi_last_bb (cond_bb);
8186 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8187
8188 /* Adjust edges appropriately to connect new head with first head
8189 as well as second head. */
8190 e0 = single_succ_edge (cond_bb);
8191 e0->flags &= ~EDGE_FALLTHRU;
8192 e0->flags |= EDGE_FALSE_VALUE;
8193 }
8194
8195
8196 /* Do book-keeping of basic block BB for the profile consistency checker.
8197 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8198 do post-pass accounting. Store the counts in RECORD. */
8199 static void
8200 gimple_account_profile_record (basic_block bb, int after_pass,
8201 struct profile_record *record)
8202 {
8203 gimple_stmt_iterator i;
8204 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8205 {
8206 record->size[after_pass]
8207 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8208 if (profile_status_for_fn (cfun) == PROFILE_READ)
8209 record->time[after_pass]
8210 += estimate_num_insns (gsi_stmt (i),
8211 &eni_time_weights) * bb->count;
8212 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8213 record->time[after_pass]
8214 += estimate_num_insns (gsi_stmt (i),
8215 &eni_time_weights) * bb->frequency;
8216 }
8217 }
8218
8219 struct cfg_hooks gimple_cfg_hooks = {
8220 "gimple",
8221 gimple_verify_flow_info,
8222 gimple_dump_bb, /* dump_bb */
8223 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8224 create_bb, /* create_basic_block */
8225 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8226 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8227 gimple_can_remove_branch_p, /* can_remove_branch_p */
8228 remove_bb, /* delete_basic_block */
8229 gimple_split_block, /* split_block */
8230 gimple_move_block_after, /* move_block_after */
8231 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8232 gimple_merge_blocks, /* merge_blocks */
8233 gimple_predict_edge, /* predict_edge */
8234 gimple_predicted_by_p, /* predicted_by_p */
8235 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8236 gimple_duplicate_bb, /* duplicate_block */
8237 gimple_split_edge, /* split_edge */
8238 gimple_make_forwarder_block, /* make_forwarder_block */
8239 NULL, /* tidy_fallthru_edge */
8240 NULL, /* force_nonfallthru */
8241 gimple_block_ends_with_call_p, /* block_ends_with_call_p */
8242 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8243 gimple_flow_call_edges_add, /* flow_call_edges_add */
8244 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8245 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8246 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8247 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8248 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8249 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8250 flush_pending_stmts, /* flush_pending_stmts */
8251 gimple_empty_block_p, /* block_empty_p */
8252 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8253 gimple_account_profile_record,
8254 };
8255
8256
8257 /* Split all critical edges. */
8258
8259 unsigned int
8260 split_critical_edges (void)
8261 {
8262 basic_block bb;
8263 edge e;
8264 edge_iterator ei;
8265
8266 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8267 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8268 mappings around the calls to split_edge. */
8269 start_recording_case_labels ();
8270 FOR_ALL_BB_FN (bb, cfun)
8271 {
8272 FOR_EACH_EDGE (e, ei, bb->succs)
8273 {
8274 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8275 split_edge (e);
8276 /* PRE inserts statements on edges and expects that,
8277 since split_critical_edges was done beforehand, committing edge
8278 insertions will not split more edges. In addition to critical
8279 edges we must split edges whose source block has multiple
8280 successors and ends with a control flow statement, such as RESX.
8281 Go ahead and split them too. This matches the logic in
8282 gimple_find_edge_insert_loc. */
8283 else if ((!single_pred_p (e->dest)
8284 || !gimple_seq_empty_p (phi_nodes (e->dest))
8285 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8286 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8287 && !(e->flags & EDGE_ABNORMAL))
8288 {
8289 gimple_stmt_iterator gsi;
8290
8291 gsi = gsi_last_bb (e->src);
8292 if (!gsi_end_p (gsi)
8293 && stmt_ends_bb_p (gsi_stmt (gsi))
8294 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8295 && !gimple_call_builtin_p (gsi_stmt (gsi),
8296 BUILT_IN_RETURN)))
8297 split_edge (e);
8298 }
8299 }
8300 }
8301 end_recording_case_labels ();
8302 return 0;
8303 }
8304
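/* For reference, an edge is critical when its source block has more
   than one successor and its destination block has more than one
   predecessor; a sketch (not a CFG from this file):

	A   B
       / \ /
      D   C

   The edge A->C is critical: statements cannot be committed on it
   without also affecting the A->D or B->C paths, which is why
   split_edge interposes a new empty block.  */
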
8305 namespace {
8306
8307 const pass_data pass_data_split_crit_edges =
8308 {
8309 GIMPLE_PASS, /* type */
8310 "crited", /* name */
8311 OPTGROUP_NONE, /* optinfo_flags */
8312 TV_TREE_SPLIT_EDGES, /* tv_id */
8313 PROP_cfg, /* properties_required */
8314 PROP_no_crit_edges, /* properties_provided */
8315 0, /* properties_destroyed */
8316 0, /* todo_flags_start */
8317 0, /* todo_flags_finish */
8318 };
8319
8320 class pass_split_crit_edges : public gimple_opt_pass
8321 {
8322 public:
8323 pass_split_crit_edges (gcc::context *ctxt)
8324 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8325 {}
8326
8327 /* opt_pass methods: */
8328 virtual unsigned int execute (function *) { return split_critical_edges (); }
8329
8330 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8331 }; // class pass_split_crit_edges
8332
8333 } // anon namespace
8334
8335 gimple_opt_pass *
8336 make_pass_split_crit_edges (gcc::context *ctxt)
8337 {
8338 return new pass_split_crit_edges (ctxt);
8339 }
8340
8341
8342 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8343 in basic block BB, splitting the block as appropriate and
8344 creating a new conditionally executed basic block.
8345 Return the created basic block. */
8346 basic_block
8347 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8348 {
8349 edge fall = split_block (bb, stmt);
8350 gimple_stmt_iterator iter = gsi_last_bb (bb);
8351 basic_block new_bb;
8352
8353 /* Insert cond statement. */
8354 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8355 if (gsi_end_p (iter))
8356 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8357 else
8358 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8359
8360 /* Create conditionally executed block. */
8361 new_bb = create_empty_bb (bb);
8362 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8363 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8364
8365 /* Fix edge for split bb. */
8366 fall->flags = EDGE_FALSE_VALUE;
8367
8368 /* Update dominance info. */
8369 if (dom_info_available_p (CDI_DOMINATORS))
8370 {
8371 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8372 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8373 }
8374
8375 /* Update loop info. */
8376 if (current_loops)
8377 add_bb_to_loop (new_bb, bb->loop_father);
8378
8379 return new_bb;
8380 }
8381
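/* A hypothetical usage sketch (BB, STMT and FLAG are placeholders):
   guard code behind "if (flag != 0)" by inserting a GIMPLE_COND after
   STMT and filling the new conditionally executed block:

     gcond *cond = gimple_build_cond (NE_EXPR, flag,
				      build_zero_cst (TREE_TYPE (flag)),
				      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   Statements added to THEN_BB execute only when FLAG is nonzero; the
   original fallthrough path takes the EDGE_FALSE_VALUE edge.  */
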
8382 /* Build a ternary operation and gimplify it. Emit code before GSI.
8383 Return the gimple_val holding the result. */
8384
8385 tree
8386 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8387 tree type, tree a, tree b, tree c)
8388 {
8389 tree ret;
8390 location_t loc = gimple_location (gsi_stmt (*gsi));
8391
8392 ret = fold_build3_loc (loc, code, type, a, b, c);
8393 STRIP_NOPS (ret);
8394
8395 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8396 GSI_SAME_STMT);
8397 }
8398
8399 /* Build a binary operation and gimplify it. Emit code before GSI.
8400 Return the gimple_val holding the result. */
8401
8402 tree
8403 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8404 tree type, tree a, tree b)
8405 {
8406 tree ret;
8407
8408 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8409 STRIP_NOPS (ret);
8410
8411 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8412 GSI_SAME_STMT);
8413 }
8414
8415 /* Build a unary operation and gimplify it. Emit code before GSI.
8416 Return the gimple_val holding the result. */
8417
8418 tree
8419 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8420 tree a)
8421 {
8422 tree ret;
8423
8424 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8425 STRIP_NOPS (ret);
8426
8427 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8428 GSI_SAME_STMT);
8429 }
8430
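/* As an illustrative sketch (TYPE, A, B and C are placeholders), a pass
   can chain these helpers to emit "a * b + c" before GSI:

     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, a, b);
     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, prod, c);

   Each call folds the expression, gimplifies it and returns a
   gimple_val holding the result.  */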
8431
8432 \f
8433 /* Given a basic block B which ends with a conditional and has
8434 precisely two successors, determine which of the edges is taken if
8435 the conditional is true and which is taken if the conditional is
8436 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8437
8438 void
8439 extract_true_false_edges_from_block (basic_block b,
8440 edge *true_edge,
8441 edge *false_edge)
8442 {
8443 edge e = EDGE_SUCC (b, 0);
8444
8445 if (e->flags & EDGE_TRUE_VALUE)
8446 {
8447 *true_edge = e;
8448 *false_edge = EDGE_SUCC (b, 1);
8449 }
8450 else
8451 {
8452 *false_edge = e;
8453 *true_edge = EDGE_SUCC (b, 1);
8454 }
8455 }
8456
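/* For example (a sketch; COND_BB is assumed to end in a GIMPLE_COND):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   after which true_edge->dest is the block reached when the condition
   evaluates to true.  */
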
8457 /* Emit return warnings. */
8458
8459 namespace {
8460
8461 const pass_data pass_data_warn_function_return =
8462 {
8463 GIMPLE_PASS, /* type */
8464 "*warn_function_return", /* name */
8465 OPTGROUP_NONE, /* optinfo_flags */
8466 TV_NONE, /* tv_id */
8467 PROP_cfg, /* properties_required */
8468 0, /* properties_provided */
8469 0, /* properties_destroyed */
8470 0, /* todo_flags_start */
8471 0, /* todo_flags_finish */
8472 };
8473
8474 class pass_warn_function_return : public gimple_opt_pass
8475 {
8476 public:
8477 pass_warn_function_return (gcc::context *ctxt)
8478 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8479 {}
8480
8481 /* opt_pass methods: */
8482 virtual unsigned int execute (function *);
8483
8484 }; // class pass_warn_function_return
8485
8486 unsigned int
8487 pass_warn_function_return::execute (function *fun)
8488 {
8489 source_location location;
8490 gimple last;
8491 edge e;
8492 edge_iterator ei;
8493
8494 if (!targetm.warn_func_return (fun->decl))
8495 return 0;
8496
8497 /* If we have a path to EXIT, then we do return. */
8498 if (TREE_THIS_VOLATILE (fun->decl)
8499 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8500 {
8501 location = UNKNOWN_LOCATION;
8502 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8503 {
8504 last = last_stmt (e->src);
8505 if ((gimple_code (last) == GIMPLE_RETURN
8506 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8507 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8508 break;
8509 }
8510 if (location == UNKNOWN_LOCATION)
8511 location = cfun->function_end_locus;
8512 warning_at (location, 0, "%<noreturn%> function does return");
8513 }
8514
8515 /* If we see "return;" in some basic block, then we do reach the end
8516 without returning a value. */
8517 else if (warn_return_type
8518 && !TREE_NO_WARNING (fun->decl)
8519 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8520 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8521 {
8522 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8523 {
8524 gimple last = last_stmt (e->src);
8525 greturn *return_stmt = dyn_cast <greturn *> (last);
8526 if (return_stmt
8527 && gimple_return_retval (return_stmt) == NULL
8528 && !gimple_no_warning_p (last))
8529 {
8530 location = gimple_location (last);
8531 if (location == UNKNOWN_LOCATION)
8532 location = fun->function_end_locus;
8533 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8534 TREE_NO_WARNING (fun->decl) = 1;
8535 break;
8536 }
8537 }
8538 }
8539 return 0;
8540 }
8541
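/* For illustration, the two diagnostics above correspond to user code
   such as these sketches:

     __attribute__ ((noreturn)) void f (int x)
     { if (x) abort (); }

   warns "'noreturn' function does return", while

     int g (int x)
     { if (x) return 1; }

   warns (under -Wreturn-type) "control reaches end of non-void
   function".  */
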
8542 } // anon namespace
8543
8544 gimple_opt_pass *
8545 make_pass_warn_function_return (gcc::context *ctxt)
8546 {
8547 return new pass_warn_function_return (ctxt);
8548 }
8549
8550 /* Walk a gimplified function and warn about calls whose return value is
8551 ignored and whose callee has attribute((warn_unused_result)) set.
8552 This is done before inlining, so we don't have to worry about that. */
8553
8554 static void
8555 do_warn_unused_result (gimple_seq seq)
8556 {
8557 tree fdecl, ftype;
8558 gimple_stmt_iterator i;
8559
8560 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8561 {
8562 gimple g = gsi_stmt (i);
8563
8564 switch (gimple_code (g))
8565 {
8566 case GIMPLE_BIND:
8567 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8568 break;
8569 case GIMPLE_TRY:
8570 do_warn_unused_result (gimple_try_eval (g));
8571 do_warn_unused_result (gimple_try_cleanup (g));
8572 break;
8573 case GIMPLE_CATCH:
8574 do_warn_unused_result (gimple_catch_handler (
8575 as_a <gcatch *> (g)));
8576 break;
8577 case GIMPLE_EH_FILTER:
8578 do_warn_unused_result (gimple_eh_filter_failure (g));
8579 break;
8580
8581 case GIMPLE_CALL:
8582 if (gimple_call_lhs (g))
8583 break;
8584 if (gimple_call_internal_p (g))
8585 break;
8586
8587 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8588 LHS. All calls whose value is ignored should be
8589 represented like this. Look for the attribute. */
8590 fdecl = gimple_call_fndecl (g);
8591 ftype = gimple_call_fntype (g);
8592
8593 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8594 {
8595 location_t loc = gimple_location (g);
8596
8597 if (fdecl)
8598 warning_at (loc, OPT_Wunused_result,
8599 "ignoring return value of %qD, "
8600 "declared with attribute warn_unused_result",
8601 fdecl);
8602 else
8603 warning_at (loc, OPT_Wunused_result,
8604 "ignoring return value of function "
8605 "declared with attribute warn_unused_result");
8606 }
8607 break;
8608
8609 default:
8610 /* Not a container, not a call, or a call whose value is used. */
8611 break;
8612 }
8613 }
8614 }
8615
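/* For example, user code along these lines (a sketch) triggers the
   warning, because the call's value is dropped:

     __attribute__ ((warn_unused_result)) int try_lock (void);

     void f (void)
     {
       try_lock ();
     }

   Giving the call an LHS, e.g. "int ok = try_lock ();", suppresses
   this particular warning.  */
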
8616 namespace {
8617
8618 const pass_data pass_data_warn_unused_result =
8619 {
8620 GIMPLE_PASS, /* type */
8621 "*warn_unused_result", /* name */
8622 OPTGROUP_NONE, /* optinfo_flags */
8623 TV_NONE, /* tv_id */
8624 PROP_gimple_any, /* properties_required */
8625 0, /* properties_provided */
8626 0, /* properties_destroyed */
8627 0, /* todo_flags_start */
8628 0, /* todo_flags_finish */
8629 };
8630
8631 class pass_warn_unused_result : public gimple_opt_pass
8632 {
8633 public:
8634 pass_warn_unused_result (gcc::context *ctxt)
8635 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8636 {}
8637
8638 /* opt_pass methods: */
8639 virtual bool gate (function *) { return flag_warn_unused_result; }
8640 virtual unsigned int execute (function *)
8641 {
8642 do_warn_unused_result (gimple_body (current_function_decl));
8643 return 0;
8644 }
8645
8646 }; // class pass_warn_unused_result
8647
8648 } // anon namespace
8649
8650 gimple_opt_pass *
8651 make_pass_warn_unused_result (gcc::context *ctxt)
8652 {
8653 return new pass_warn_unused_result (ctxt);
8654 }
8655
8656 /* IPA passes, compilation of earlier functions or inlining
8657 might have changed some properties, such as marking functions
8658 nothrow, pure, const or noreturn.
8659 Remove redundant edges and basic blocks, and create new ones if necessary.
8660
8661 This pass can't be executed as a standalone pass from the pass manager,
8662 because in between inlining and this fixup verify_flow_info would fail. */
8663
8664 unsigned int
8665 execute_fixup_cfg (void)
8666 {
8667 basic_block bb;
8668 gimple_stmt_iterator gsi;
8669 int todo = 0;
8670 gcov_type count_scale;
8671 edge e;
8672 edge_iterator ei;
8673
8674 count_scale
8675 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8676 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8677
8678 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8679 cgraph_node::get (current_function_decl)->count;
8680 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8681 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8682 count_scale);
8683
8684 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8685 e->count = apply_scale (e->count, count_scale);
8686
8687 FOR_EACH_BB_FN (bb, cfun)
8688 {
8689 bb->count = apply_scale (bb->count, count_scale);
8690 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8691 {
8692 gimple stmt = gsi_stmt (gsi);
8693 tree decl = is_gimple_call (stmt)
8694 ? gimple_call_fndecl (stmt)
8695 : NULL;
8696 if (decl)
8697 {
8698 int flags = gimple_call_flags (stmt);
8699 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8700 {
8701 if (gimple_purge_dead_abnormal_call_edges (bb))
8702 todo |= TODO_cleanup_cfg;
8703
8704 if (gimple_in_ssa_p (cfun))
8705 {
8706 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8707 update_stmt (stmt);
8708 }
8709 }
8710
8711 if (flags & ECF_NORETURN
8712 && fixup_noreturn_call (stmt))
8713 todo |= TODO_cleanup_cfg;
8714 }
8715
8716 /* Remove stores to variables we marked write-only.
8717 Keep the access if the store has a side effect, i.e. when the
8718 source is volatile. */
8719 if (gimple_store_p (stmt)
8720 && !gimple_has_side_effects (stmt))
8721 {
8722 tree lhs = get_base_address (gimple_get_lhs (stmt));
8723
8724 if (TREE_CODE (lhs) == VAR_DECL
8725 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8726 && varpool_node::get (lhs)->writeonly)
8727 {
8728 unlink_stmt_vdef (stmt);
8729 gsi_remove (&gsi, true);
8730 release_defs (stmt);
8731 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8732 continue;
8733 }
8734 }
8735 /* For calls we can simply remove the LHS when it is known
8736 to be write-only. */
8737 if (is_gimple_call (stmt)
8738 && gimple_get_lhs (stmt))
8739 {
8740 tree lhs = get_base_address (gimple_get_lhs (stmt));
8741
8742 if (TREE_CODE (lhs) == VAR_DECL
8743 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8744 && varpool_node::get (lhs)->writeonly)
8745 {
8746 gimple_call_set_lhs (stmt, NULL);
8747 update_stmt (stmt);
8748 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8749 }
8750 }
8751
8752 if (maybe_clean_eh_stmt (stmt)
8753 && gimple_purge_dead_eh_edges (bb))
8754 todo |= TODO_cleanup_cfg;
8755 gsi_next (&gsi);
8756 }
8757
8758 FOR_EACH_EDGE (e, ei, bb->succs)
8759 e->count = apply_scale (e->count, count_scale);
8760
8761 /* If we have a basic block with no successors that does not
8762 end with a control statement or a noreturn call, end it with
8763 a call to __builtin_unreachable. This situation can occur
8764 when inlining a noreturn call that does in fact return. */
8765 if (EDGE_COUNT (bb->succs) == 0)
8766 {
8767 gimple stmt = last_stmt (bb);
8768 if (!stmt
8769 || (!is_ctrl_stmt (stmt)
8770 && (!is_gimple_call (stmt)
8771 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8772 {
8773 if (stmt && is_gimple_call (stmt))
8774 gimple_call_set_ctrl_altering (stmt, false);
8775 stmt = gimple_build_call
8776 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8777 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8778 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8779 }
8780 }
8781 }
8782 if (count_scale != REG_BR_PROB_BASE)
8783 compute_function_frequency ();
8784
8785 if (current_loops
8786 && (todo & TODO_cleanup_cfg))
8787 loops_state_set (LOOPS_NEED_FIXUP);
8788
8789 return todo;
8790 }
8791
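/* As a concrete sketch of why this fixup is needed: if IPA analysis
   marks g () noreturn only after f was lowered, a block in f shaped
   like

     g ();
     return x;

   still has a fallthrough successor even though the return is now
   unreachable. fixup_noreturn_call and the __builtin_unreachable
   insertion above repair this kind of mismatch.  */
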
8792 namespace {
8793
8794 const pass_data pass_data_fixup_cfg =
8795 {
8796 GIMPLE_PASS, /* type */
8797 "fixup_cfg", /* name */
8798 OPTGROUP_NONE, /* optinfo_flags */
8799 TV_NONE, /* tv_id */
8800 PROP_cfg, /* properties_required */
8801 0, /* properties_provided */
8802 0, /* properties_destroyed */
8803 0, /* todo_flags_start */
8804 0, /* todo_flags_finish */
8805 };
8806
8807 class pass_fixup_cfg : public gimple_opt_pass
8808 {
8809 public:
8810 pass_fixup_cfg (gcc::context *ctxt)
8811 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8812 {}
8813
8814 /* opt_pass methods: */
8815 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8816 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8817
8818 }; // class pass_fixup_cfg
8819
8820 } // anon namespace
8821
8822 gimple_opt_pass *
8823 make_pass_fixup_cfg (gcc::context *ctxt)
8824 {
8825 return new pass_fixup_cfg (ctxt);
8826 }
8827
8828 /* Garbage collection support for edge_def. */
8829
8830 extern void gt_ggc_mx (tree&);
8831 extern void gt_ggc_mx (gimple&);
8832 extern void gt_ggc_mx (rtx&);
8833 extern void gt_ggc_mx (basic_block&);
8834
8835 static void
8836 gt_ggc_mx (rtx_insn *& x)
8837 {
8838 if (x)
8839 gt_ggc_mx_rtx_def ((void *) x);
8840 }
8841
8842 void
8843 gt_ggc_mx (edge_def *e)
8844 {
8845 tree block = LOCATION_BLOCK (e->goto_locus);
8846 gt_ggc_mx (e->src);
8847 gt_ggc_mx (e->dest);
8848 if (current_ir_type () == IR_GIMPLE)
8849 gt_ggc_mx (e->insns.g);
8850 else
8851 gt_ggc_mx (e->insns.r);
8852 gt_ggc_mx (block);
8853 }
8854
8855 /* PCH support for edge_def. */
8856
8857 extern void gt_pch_nx (tree&);
8858 extern void gt_pch_nx (gimple&);
8859 extern void gt_pch_nx (rtx&);
8860 extern void gt_pch_nx (basic_block&);
8861
8862 static void
8863 gt_pch_nx (rtx_insn *& x)
8864 {
8865 if (x)
8866 gt_pch_nx_rtx_def ((void *) x);
8867 }
8868
8869 void
8870 gt_pch_nx (edge_def *e)
8871 {
8872 tree block = LOCATION_BLOCK (e->goto_locus);
8873 gt_pch_nx (e->src);
8874 gt_pch_nx (e->dest);
8875 if (current_ir_type () == IR_GIMPLE)
8876 gt_pch_nx (e->insns.g);
8877 else
8878 gt_pch_nx (e->insns.r);
8879 gt_pch_nx (block);
8880 }
8881
8882 void
8883 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8884 {
8885 tree block = LOCATION_BLOCK (e->goto_locus);
8886 op (&(e->src), cookie);
8887 op (&(e->dest), cookie);
8888 if (current_ir_type () == IR_GIMPLE)
8889 op (&(e->insns.g), cookie);
8890 else
8891 op (&(e->insns.r), cookie);
8892 op (&(block), cookie);
8893 }