/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "flags.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store the last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
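
/* For illustration (a sketch added for exposition, not part of the
   upstream sources): right after initialization the function holds
   only the NUM_FIXED_BLOCKS fixed blocks, linked as

     ENTRY_BLOCK  ->  EXIT_BLOCK

   through the next_bb/prev_bb pointers set above, with no edges yet;
   make_blocks and make_edges later insert the real blocks and edges
   between the two.  */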

/*---------------------------------------------------------------------------
			     Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
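
/* For illustration (an editor's sketch, not from the upstream sources;
   exact dump syntax varies between GCC versions): given the C code

     void *p = &&lab;
     goto *p;
     lab:;

   the jump becomes a GIMPLE_GOTO whose destination is the pointer
   value p rather than a LABEL_DECL, which is exactly the case the
   predicate above detects.  */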

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
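
/* Some representative examples (an illustrative, non-exhaustive list
   added for exposition) of calls the function above marks as control
   altering:

     - a call that can make an abnormal goto, e.g. setjmp in a function
       whose CFG needs abnormal edges;
     - a noreturn call such as abort ();
     - __builtin_return, which acts like a return statement.  */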


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
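
/* A small illustrative sketch (added for exposition) of the splitting
   done above: a sequence such as

     a = b + c;
     if (a > 0) goto <L1>; else goto <L2>;
     <L1>:
     x = 1;

   yields one block terminated by the GIMPLE_COND (stmt_ends_bb_p) and
   a new block started by the label <L1> (stmt_starts_bb_p).  */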

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
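
/* A sketch of the factoring done above for computed gotos (block and
   variable names are illustrative, not taken from a real dump):

     goto *p_1;     becomes     gotovar = p_1;
                                goto <factored>;
     goto *p_2;     becomes     gotovar = p_2;
                                goto <factored>;

     <factored>:
     goto *gotovar;

   so only the single factored goto needs edges to every potential
   destination, instead of every original goto.  */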

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	tree abort_label
	  = gimple_transaction_label (as_a <gtransaction *> (last));
	if (abort_label)
	  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
	fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
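
/* For instance (an illustrative example added for exposition), when
   both arms of

     if (p) x = 1; else x = 2;

   are written on a single source line, the destination blocks share
   the condition's locus; the code above then hands out fresh
   discriminators so that sample-based profiles can still tell the two
   paths apart.  */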

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
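
/* E.g. (a sketch added for exposition) for a block ending in

     if (x_1 > 0) goto <L1>; else goto <L2>;

   this creates an EDGE_TRUE_VALUE edge to the block of <L1> and an
   EDGE_FALSE_VALUE edge to the block of <L2>; once the edges exist,
   the labels stored inside the GIMPLE_COND itself are redundant and
   are cleared.  */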


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the
     dataflow and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
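
/* In other words (a sketch added for exposition): "goto lab;" becomes
   a plain EDGE_FALLTHRU to the block holding "lab" and the goto
   statement itself is deleted, while "goto *p;" is kept in place and
   wired up later through the abnormal dispatcher machinery in
   make_edges.  */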

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user-defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
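
/* A small worked example (illustrative only, not from the upstream
   sources): if a block starts with

     <artificial L1>: <user-defined L2>: x = 1;

   then L2 is picked as the block's main label, every reference to L1
   in control statements is redirected to L2, and the now-unreferenced
   artificial L1 is deleted by the final loop above.  */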

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
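
/* As a small example (illustrative), given

     case 1: goto <L>;  case 2: goto <L>;  case 3: goto <L>;

   the three entries are combined into the single range label
   "case 1 ... 3: goto <L>;", and any case whose destination block is
   the same as the default label's block is simply dropped.  */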

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends with a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user-forced labels or, for -O0, any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change something from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1861
1862 /* Merge block B into block A. */
1863
1864 static void
1865 gimple_merge_blocks (basic_block a, basic_block b)
1866 {
1867 gimple_stmt_iterator last, gsi;
1868 gphi_iterator psi;
1869
1870 if (dump_file)
1871 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1872
1873 /* Remove all single-valued PHI nodes from block B of the form
1874 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1875 gsi = gsi_last_bb (a);
1876 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1877 {
1878 gimple *phi = gsi_stmt (psi);
1879 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1880 gimple *copy;
1881 bool may_replace_uses = (virtual_operand_p (def)
1882 || may_propagate_copy (def, use));
1883
1884 /* In case we maintain loop closed ssa form, do not propagate arguments
1885 of loop exit phi nodes. */
1886 if (current_loops
1887 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1888 && !virtual_operand_p (def)
1889 && TREE_CODE (use) == SSA_NAME
1890 && a->loop_father != b->loop_father)
1891 may_replace_uses = false;
1892
1893 if (!may_replace_uses)
1894 {
1895 gcc_assert (!virtual_operand_p (def));
1896
1897 /* Note that just emitting the copies is fine -- there is no problem
1898 with ordering of phi nodes. This is because A is the single
1899 predecessor of B, therefore results of the phi nodes cannot
1900 appear as arguments of the phi nodes. */
1901 copy = gimple_build_assign (def, use);
1902 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1903 remove_phi_node (&psi, false);
1904 }
1905 else
1906 {
1907 /* If we deal with a PHI for virtual operands, we can simply
1908 propagate these without fussing with folding or updating
1909 the stmt. */
1910 if (virtual_operand_p (def))
1911 {
1912 imm_use_iterator iter;
1913 use_operand_p use_p;
1914 gimple *stmt;
1915
1916 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1917 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1918 SET_USE (use_p, use);
1919
1920 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1921 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1922 }
1923 else
1924 replace_uses_by (def, use);
1925
1926 remove_phi_node (&psi, true);
1927 }
1928 }
1929
1930 /* Ensure that B follows A. */
1931 move_block_after (b, a);
1932
1933 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1934 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1935
1936 /* Remove labels from B and set gimple_bb to A for other statements. */
1937 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1938 {
1939 gimple *stmt = gsi_stmt (gsi);
1940 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1941 {
1942 tree label = gimple_label_label (label_stmt);
1943 int lp_nr;
1944
1945 gsi_remove (&gsi, false);
1946
1947 /* Now that we can thread computed gotos, we might have
1948 a situation where we have a forced label in block B.
1949 However, the label at the start of block B might still be
1950 used in other ways (think about the runtime checking for
1951 Fortran assigned gotos). So we cannot just delete the
1952 label. Instead we move the label to the start of block A. */
1953 if (FORCED_LABEL (label))
1954 {
1955 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1956 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1957 }
1958 /* Other user labels are kept around in the form of a debug stmt. */
1959 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1960 {
1961 gimple *dbg = gimple_build_debug_bind (label,
1962 integer_zero_node,
1963 stmt);
1964 gimple_debug_bind_reset_value (dbg);
1965 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1966 }
1967
1968 lp_nr = EH_LANDING_PAD_NR (label);
1969 if (lp_nr)
1970 {
1971 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1972 lp->post_landing_pad = NULL;
1973 }
1974 }
1975 else
1976 {
1977 gimple_set_bb (stmt, a);
1978 gsi_next (&gsi);
1979 }
1980 }
1981
1982 /* When merging two BBs, if their counts are different, the larger count
1983 is selected as the new bb count. This is to handle inconsistent
1984 profiles. */
1985 if (a->loop_father == b->loop_father)
1986 {
1987 a->count = MAX (a->count, b->count);
1988 a->frequency = MAX (a->frequency, b->frequency);
1989 }
1990
1991 /* Merge the sequences. */
1992 last = gsi_last_bb (a);
1993 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1994 set_bb_seq (b, NULL);
1995
1996 if (cfgcleanup_altered_bbs)
1997 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1998 }
1999
2000
2001 /* Return the one of BB's two successors that is not reachable via a
2002 complex edge, if there is one. Otherwise, return BB. We use
2003 this in optimizations that use post-dominators for their heuristics,
2004 to catch the cases in C++ where function calls are involved. */
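/* For instance, after a call that may throw, BB has a normal fall-through
successor and an EH successor; the EH edge is complex, so the normal
successor is returned. */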
2005
2006 basic_block
2007 single_noncomplex_succ (basic_block bb)
2008 {
2009 edge e0, e1;
2010 if (EDGE_COUNT (bb->succs) != 2)
2011 return bb;
2012
2013 e0 = EDGE_SUCC (bb, 0);
2014 e1 = EDGE_SUCC (bb, 1);
2015 if (e0->flags & EDGE_COMPLEX)
2016 return e1->dest;
2017 if (e1->flags & EDGE_COMPLEX)
2018 return e0->dest;
2019
2020 return bb;
2021 }
2022
2023 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2024
2025 void
2026 notice_special_calls (gcall *call)
2027 {
2028 int flags = gimple_call_flags (call);
2029
2030 if (flags & ECF_MAY_BE_ALLOCA)
2031 cfun->calls_alloca = true;
2032 if (flags & ECF_RETURNS_TWICE)
2033 cfun->calls_setjmp = true;
2034 }
2035
2036
2037 /* Clear flags set by notice_special_calls. Used by dead code removal
2038 to update the flags. */
2039
2040 void
2041 clear_special_calls (void)
2042 {
2043 cfun->calls_alloca = false;
2044 cfun->calls_setjmp = false;
2045 }
2046
2047 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2048
2049 static void
2050 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2051 {
2052 /* Since this block is no longer reachable, we can just delete all
2053 of its PHI nodes. */
2054 remove_phi_nodes (bb);
2055
2056 /* Remove edges to BB's successors. */
2057 while (EDGE_COUNT (bb->succs) > 0)
2058 remove_edge (EDGE_SUCC (bb, 0));
2059 }
2060
2061
2062 /* Remove statements of basic block BB. */
2063
2064 static void
2065 remove_bb (basic_block bb)
2066 {
2067 gimple_stmt_iterator i;
2068
2069 if (dump_file)
2070 {
2071 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2072 if (dump_flags & TDF_DETAILS)
2073 {
2074 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2075 fprintf (dump_file, "\n");
2076 }
2077 }
2078
2079 if (current_loops)
2080 {
2081 struct loop *loop = bb->loop_father;
2082
2083 /* If a loop gets removed, clean up the information associated
2084 with it. */
2085 if (loop->latch == bb
2086 || loop->header == bb)
2087 free_numbers_of_iterations_estimates_loop (loop);
2088 }
2089
2090 /* Remove all the instructions in the block. */
2091 if (bb_seq (bb) != NULL)
2092 {
2093 /* Walk backwards so as to get a chance to substitute all
2094 released DEFs into debug stmts. See
2095 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2096 details. */
2097 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2098 {
2099 gimple *stmt = gsi_stmt (i);
2100 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2101 if (label_stmt
2102 && (FORCED_LABEL (gimple_label_label (label_stmt))
2103 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2104 {
2105 basic_block new_bb;
2106 gimple_stmt_iterator new_gsi;
2107
2108 /* A non-reachable non-local label may still be referenced.
2109 But it no longer needs to carry the extra semantics of
2110 non-locality. */
2111 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2112 {
2113 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2114 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2115 }
2116
2117 new_bb = bb->prev_bb;
2118 new_gsi = gsi_start_bb (new_bb);
2119 gsi_remove (&i, false);
2120 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2121 }
2122 else
2123 {
2124 /* Release SSA definitions if we are in SSA. Note that we
2125 may be called when not in SSA. For example,
2126 final_cleanup calls this function via
2127 cleanup_tree_cfg. */
2128 if (gimple_in_ssa_p (cfun))
2129 release_defs (stmt);
2130
2131 gsi_remove (&i, true);
2132 }
2133
2134 if (gsi_end_p (i))
2135 i = gsi_last_bb (bb);
2136 else
2137 gsi_prev (&i);
2138 }
2139 }
2140
2141 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2142 bb->il.gimple.seq = NULL;
2143 bb->il.gimple.phi_nodes = NULL;
2144 }
2145
2146
2147 /* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
2148 computed goto, and a predicate VAL, return the edge that will be taken
2149 out of the block. If VAL does not match a unique edge, NULL is returned. */
2150
2151 edge
2152 find_taken_edge (basic_block bb, tree val)
2153 {
2154 gimple *stmt;
2155
2156 stmt = last_stmt (bb);
2157
2158 gcc_assert (stmt);
2159 gcc_assert (is_ctrl_stmt (stmt));
2160
2161 if (val == NULL)
2162 return NULL;
2163
2164 if (!is_gimple_min_invariant (val))
2165 return NULL;
2166
2167 if (gimple_code (stmt) == GIMPLE_COND)
2168 return find_taken_edge_cond_expr (bb, val);
2169
2170 if (gimple_code (stmt) == GIMPLE_SWITCH)
2171 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2172
2173 if (computed_goto_p (stmt))
2174 {
2175 /* Only optimize if the argument is a label; if the argument is
2176 not a label then we cannot construct a proper CFG.
2177
2178 It may be the case that we only need to allow the LABEL_REF to
2179 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2180 appear inside a LABEL_EXPR just to be safe. */
2181 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2182 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2183 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2184 return NULL;
2185 }
2186
2187 gcc_unreachable ();
2188 }
2189
2190 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2191 statement, determine which of the outgoing edges will be taken out of the
2192 block. Return NULL if any edge may be taken. */
2193
2194 static edge
2195 find_taken_edge_computed_goto (basic_block bb, tree val)
2196 {
2197 basic_block dest;
2198 edge e = NULL;
2199
2200 dest = label_to_block (val);
2201 if (dest)
2202 {
2203 e = find_edge (bb, dest);
2204 gcc_assert (e != NULL);
2205 }
2206
2207 return e;
2208 }
2209
2210 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2211 statement, determine which of the two edges will be taken out of the
2212 block. Return NULL if either edge may be taken. */
2213
2214 static edge
2215 find_taken_edge_cond_expr (basic_block bb, tree val)
2216 {
2217 edge true_edge, false_edge;
2218
2219 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2220
2221 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2222 return (integer_zerop (val) ? false_edge : true_edge);
2223 }
2224
2225 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2226 statement, determine which edge will be taken out of the block. Return
2227 NULL if any edge may be taken. */
2228
2229 static edge
2230 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2231 tree val)
2232 {
2233 basic_block dest_bb;
2234 edge e;
2235 tree taken_case;
2236
2237 taken_case = find_case_label_for_value (switch_stmt, val);
2238 dest_bb = label_to_block (CASE_LABEL (taken_case));
2239
2240 e = find_edge (bb, dest_bb);
2241 gcc_assert (e);
2242 return e;
2243 }
2244
2245
2246 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2247 We can make optimal use here of the fact that the case labels are
2248 sorted: We can do a binary search for a case matching VAL. */
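/* For example, given case labels for 1, 3 ... 7 and 42 (plus the default),
looking up VAL == 5 narrows the search to the 3 ... 7 range label and
returns it, while VAL == 9 matches no label and the default is returned. */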
2249
2250 static tree
2251 find_case_label_for_value (gswitch *switch_stmt, tree val)
2252 {
2253 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2254 tree default_case = gimple_switch_default_label (switch_stmt);
2255
2256 for (low = 0, high = n; high - low > 1; )
2257 {
2258 size_t i = (high + low) / 2;
2259 tree t = gimple_switch_label (switch_stmt, i);
2260 int cmp;
2261
2262 /* Cache the result of comparing CASE_LOW and val. */
2263 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2264
2265 if (cmp > 0)
2266 high = i;
2267 else
2268 low = i;
2269
2270 if (CASE_HIGH (t) == NULL)
2271 {
2272 /* A single-valued case label. */
2273 if (cmp == 0)
2274 return t;
2275 }
2276 else
2277 {
2278 /* A case range. We can only handle integer ranges. */
2279 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2280 return t;
2281 }
2282 }
2283
2284 return default_case;
2285 }
2286
2287
2288 /* Dump a basic block on stderr. */
2289
2290 void
2291 gimple_debug_bb (basic_block bb)
2292 {
2293 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2294 }
2295
2296
2297 /* Dump basic block with index N on stderr. */
2298
2299 basic_block
2300 gimple_debug_bb_n (int n)
2301 {
2302 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2303 return BASIC_BLOCK_FOR_FN (cfun, n);
2304 }
2305
2306
2307 /* Dump the CFG on stderr.
2308
2309 FLAGS are the same as those used by the tree dumping functions
2310 (see TDF_* in dumpfile.h). */
2311
2312 void
2313 gimple_debug_cfg (int flags)
2314 {
2315 gimple_dump_cfg (stderr, flags);
2316 }
2317
2318
2319 /* Dump the program showing basic block boundaries on the given FILE.
2320
2321 FLAGS are the same as those used by the tree dumping functions (see
2322 TDF_* in dumpfile.h). */
2323
2324 void
2325 gimple_dump_cfg (FILE *file, int flags)
2326 {
2327 if (flags & TDF_DETAILS)
2328 {
2329 dump_function_header (file, current_function_decl, flags);
2330 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2331 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2332 last_basic_block_for_fn (cfun));
2333
2334 brief_dump_cfg (file, flags | TDF_COMMENT);
2335 fprintf (file, "\n");
2336 }
2337
2338 if (flags & TDF_STATS)
2339 dump_cfg_stats (file);
2340
2341 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2342 }
2343
2344
2345 /* Dump CFG statistics on FILE. */
2346
2347 void
2348 dump_cfg_stats (FILE *file)
2349 {
2350 static long max_num_merged_labels = 0;
2351 unsigned long size, total = 0;
2352 long num_edges;
2353 basic_block bb;
2354 const char * const fmt_str = "%-30s%-13s%12s\n";
2355 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2356 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2357 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2358 const char *funcname = current_function_name ();
2359
2360 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2361
2362 fprintf (file, "---------------------------------------------------------\n");
2363 fprintf (file, fmt_str, "", " Number of ", "Memory");
2364 fprintf (file, fmt_str, "", " instances ", "used ");
2365 fprintf (file, "---------------------------------------------------------\n");
2366
2367 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2368 total += size;
2369 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2370 SCALE (size), LABEL (size));
2371
2372 num_edges = 0;
2373 FOR_EACH_BB_FN (bb, cfun)
2374 num_edges += EDGE_COUNT (bb->succs);
2375 size = num_edges * sizeof (struct edge_def);
2376 total += size;
2377 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2378
2379 fprintf (file, "---------------------------------------------------------\n");
2380 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2381 LABEL (total));
2382 fprintf (file, "---------------------------------------------------------\n");
2383 fprintf (file, "\n");
2384
2385 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2386 max_num_merged_labels = cfg_stats.num_merged_labels;
2387
2388 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2389 cfg_stats.num_merged_labels, max_num_merged_labels);
2390
2391 fprintf (file, "\n");
2392 }
2393
2394
2395 /* Dump CFG statistics on stderr. Keep extern so that it's always
2396 linked in the final executable. */
2397
2398 DEBUG_FUNCTION void
2399 debug_cfg_stats (void)
2400 {
2401 dump_cfg_stats (stderr);
2402 }
2403
2404 /*---------------------------------------------------------------------------
2405 Miscellaneous helpers
2406 ---------------------------------------------------------------------------*/
2407
2408 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2409 flow. Transfers of control flow associated with EH are excluded. */
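/* A call can transfer control abnormally e.g. when it may longjmp back to
a setjmp performed in this function, or when it may perform a nonlocal
goto to one of this function's labels. */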
2410
2411 static bool
2412 call_can_make_abnormal_goto (gimple *t)
2413 {
2414 /* If the function has no non-local labels and does not call setjmp,
2415 then a call cannot make an abnormal transfer of control. */
2416 if (!cfun->has_nonlocal_label
2417 && !cfun->calls_setjmp)
2418 return false;
2419
2420 /* Likewise if the call has no side effects. */
2421 if (!gimple_has_side_effects (t))
2422 return false;
2423
2424 /* Likewise if the called function is leaf. */
2425 if (gimple_call_flags (t) & ECF_LEAF)
2426 return false;
2427
2428 return true;
2429 }
2430
2431
2432 /* Return true if T can make an abnormal transfer of control flow.
2433 Transfers of control flow associated with EH are excluded. */
2434
2435 bool
2436 stmt_can_make_abnormal_goto (gimple *t)
2437 {
2438 if (computed_goto_p (t))
2439 return true;
2440 if (is_gimple_call (t))
2441 return call_can_make_abnormal_goto (t);
2442 return false;
2443 }
2444
2445
2446 /* Return true if T represents a stmt that always transfers control. */
2447
2448 bool
2449 is_ctrl_stmt (gimple *t)
2450 {
2451 switch (gimple_code (t))
2452 {
2453 case GIMPLE_COND:
2454 case GIMPLE_SWITCH:
2455 case GIMPLE_GOTO:
2456 case GIMPLE_RETURN:
2457 case GIMPLE_RESX:
2458 return true;
2459 default:
2460 return false;
2461 }
2462 }
2463
2464
2465 /* Return true if T is a statement that may alter the flow of control
2466 (e.g., a call to a non-returning function). */
2467
2468 bool
2469 is_ctrl_altering_stmt (gimple *t)
2470 {
2471 gcc_assert (t);
2472
2473 switch (gimple_code (t))
2474 {
2475 case GIMPLE_CALL:
2476 /* The per-stmt call flag indicates whether the call could alter
2477 control flow. */
2478 if (gimple_call_ctrl_altering_p (t))
2479 return true;
2480 break;
2481
2482 case GIMPLE_EH_DISPATCH:
2483 /* EH_DISPATCH branches to the individual catch handlers at
2484 this level of a try or allowed-exceptions region. It can
2485 fallthru to the next statement as well. */
2486 return true;
2487
2488 case GIMPLE_ASM:
2489 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2490 return true;
2491 break;
2492
2493 CASE_GIMPLE_OMP:
2494 /* OpenMP directives alter control flow. */
2495 return true;
2496
2497 case GIMPLE_TRANSACTION:
2498 /* A transaction start alters control flow. */
2499 return true;
2500
2501 default:
2502 break;
2503 }
2504
2505 /* If a statement can throw, it alters control flow. */
2506 return stmt_can_throw_internal (t);
2507 }
2508
2509
2510 /* Return true if T is a simple local goto. */
2511
2512 bool
2513 simple_goto_p (gimple *t)
2514 {
2515 return (gimple_code (t) == GIMPLE_GOTO
2516 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2517 }
2518
2519
2520 /* Return true if STMT should start a new basic block. PREV_STMT is
2521 the statement preceding STMT. It is used when STMT is a label or a
2522 case label. Labels should only start a new basic block if their
2523 previous statement wasn't a label. Otherwise, a sequence of labels
2524 would generate unnecessary basic blocks that only contain a single
2525 label. */
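/* For instance, in a sequence L1: L2: x = 1; only L1 opens a new basic
block; L2 is merged into it and counted in cfg_stats.num_merged_labels. */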
2526
2527 static inline bool
2528 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2529 {
2530 if (stmt == NULL)
2531 return false;
2532
2533 /* Labels start a new basic block only if the preceding statement
2534 wasn't a label of the same type. This prevents the creation of
2535 consecutive blocks that have nothing but a single label. */
2536 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2537 {
2538 /* Nonlocal and computed GOTO targets always start a new block. */
2539 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2540 || FORCED_LABEL (gimple_label_label (label_stmt)))
2541 return true;
2542
2543 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2544 {
2545 if (DECL_NONLOCAL (gimple_label_label (
2546 as_a <glabel *> (prev_stmt))))
2547 return true;
2548
2549 cfg_stats.num_merged_labels++;
2550 return false;
2551 }
2552 else
2553 return true;
2554 }
2555 else if (gimple_code (stmt) == GIMPLE_CALL
2556 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2557 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2558 start a new block. */
2559 return true;
2560
2561 return false;
2562 }
2563
2564
2565 /* Return true if T should end a basic block. */
2566
2567 bool
2568 stmt_ends_bb_p (gimple *t)
2569 {
2570 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2571 }
2572
2573 /* Remove block annotations and other data structures. */
2574
2575 void
2576 delete_tree_cfg_annotations (struct function *fn)
2577 {
2578 vec_free (label_to_block_map_for_fn (fn));
2579 }
2580
2581 /* Return the virtual phi in BB. */
2582
2583 gphi *
2584 get_virtual_phi (basic_block bb)
2585 {
2586 for (gphi_iterator gsi = gsi_start_phis (bb);
2587 !gsi_end_p (gsi);
2588 gsi_next (&gsi))
2589 {
2590 gphi *phi = gsi.phi ();
2591
2592 if (virtual_operand_p (PHI_RESULT (phi)))
2593 return phi;
2594 }
2595
2596 return NULL;
2597 }
2598
2599 /* Return the first statement in basic block BB. */
2600
2601 gimple *
2602 first_stmt (basic_block bb)
2603 {
2604 gimple_stmt_iterator i = gsi_start_bb (bb);
2605 gimple *stmt = NULL;
2606
2607 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2608 {
2609 gsi_next (&i);
2610 stmt = NULL;
2611 }
2612 return stmt;
2613 }
2614
2615 /* Return the first non-label statement in basic block BB. */
2616
2617 static gimple *
2618 first_non_label_stmt (basic_block bb)
2619 {
2620 gimple_stmt_iterator i = gsi_start_bb (bb);
2621 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2622 gsi_next (&i);
2623 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2624 }
2625
2626 /* Return the last statement in basic block BB. */
2627
2628 gimple *
2629 last_stmt (basic_block bb)
2630 {
2631 gimple_stmt_iterator i = gsi_last_bb (bb);
2632 gimple *stmt = NULL;
2633
2634 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2635 {
2636 gsi_prev (&i);
2637 stmt = NULL;
2638 }
2639 return stmt;
2640 }
2641
2642 /* Return the last statement of an otherwise empty block. Return NULL
2643 if the block is totally empty, or if it contains more than one
2644 statement. */
2645
2646 gimple *
2647 last_and_only_stmt (basic_block bb)
2648 {
2649 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2650 gimple *last, *prev;
2651
2652 if (gsi_end_p (i))
2653 return NULL;
2654
2655 last = gsi_stmt (i);
2656 gsi_prev_nondebug (&i);
2657 if (gsi_end_p (i))
2658 return last;
2659
2660 /* Empty statements should no longer appear in the instruction stream.
2661 Everything that might have appeared before should be deleted by
2662 remove_useless_stmts, and the optimizers should just gsi_remove
2663 instead of smashing with build_empty_stmt.
2664
2665 Thus the only thing that should appear here in a block containing
2666 one executable statement is a label. */
2667 prev = gsi_stmt (i);
2668 if (gimple_code (prev) == GIMPLE_LABEL)
2669 return last;
2670 else
2671 return NULL;
2672 }
2673
2674 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2675
2676 static void
2677 reinstall_phi_args (edge new_edge, edge old_edge)
2678 {
2679 edge_var_map *vm;
2680 int i;
2681 gphi_iterator phis;
2682
2683 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2684 if (!v)
2685 return;
2686
2687 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2688 v->iterate (i, &vm) && !gsi_end_p (phis);
2689 i++, gsi_next (&phis))
2690 {
2691 gphi *phi = phis.phi ();
2692 tree result = redirect_edge_var_map_result (vm);
2693 tree arg = redirect_edge_var_map_def (vm);
2694
2695 gcc_assert (result == gimple_phi_result (phi));
2696
2697 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2698 }
2699
2700 redirect_edge_var_map_clear (old_edge);
2701 }
2702
2703 /* Returns the basic block after which the new basic block created
2704 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2705 near its "logical" location. This is of most help to humans looking
2706 at debugging dumps. */
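/* Concretely: if the block just before DEST already reaches DEST via a
non-complex edge, the new block goes after EDGE_IN's source; otherwise it
goes right before DEST. */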
2707
2708 basic_block
2709 split_edge_bb_loc (edge edge_in)
2710 {
2711 basic_block dest = edge_in->dest;
2712 basic_block dest_prev = dest->prev_bb;
2713
2714 if (dest_prev)
2715 {
2716 edge e = find_edge (dest_prev, dest);
2717 if (e && !(e->flags & EDGE_COMPLEX))
2718 return edge_in->src;
2719 }
2720 return dest_prev;
2721 }
2722
2723 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2724 Abort on abnormal edges. */
2725
2726 static basic_block
2727 gimple_split_edge (edge edge_in)
2728 {
2729 basic_block new_bb, after_bb, dest;
2730 edge new_edge, e;
2731
2732 /* Abnormal edges cannot be split. */
2733 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2734
2735 dest = edge_in->dest;
2736
2737 after_bb = split_edge_bb_loc (edge_in);
2738
2739 new_bb = create_empty_bb (after_bb);
2740 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2741 new_bb->count = edge_in->count;
2742 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2743 new_edge->probability = REG_BR_PROB_BASE;
2744 new_edge->count = edge_in->count;
2745
2746 e = redirect_edge_and_branch (edge_in, new_bb);
2747 gcc_assert (e == edge_in);
2748 reinstall_phi_args (new_edge, e);
2749
2750 return new_bb;
2751 }
2752
2753
2754 /* Verify properties of the address expression T with base object BASE. */
2755
2756 static tree
2757 verify_address (tree t, tree base)
2758 {
2759 bool old_constant;
2760 bool old_side_effects;
2761 bool new_constant;
2762 bool new_side_effects;
2763
2764 old_constant = TREE_CONSTANT (t);
2765 old_side_effects = TREE_SIDE_EFFECTS (t);
2766
2767 recompute_tree_invariant_for_addr_expr (t);
2768 new_side_effects = TREE_SIDE_EFFECTS (t);
2769 new_constant = TREE_CONSTANT (t);
2770
2771 if (old_constant != new_constant)
2772 {
2773 error ("constant not recomputed when ADDR_EXPR changed");
2774 return t;
2775 }
2776 if (old_side_effects != new_side_effects)
2777 {
2778 error ("side effects not recomputed when ADDR_EXPR changed");
2779 return t;
2780 }
2781
2782 if (!(TREE_CODE (base) == VAR_DECL
2783 || TREE_CODE (base) == PARM_DECL
2784 || TREE_CODE (base) == RESULT_DECL))
2785 return NULL_TREE;
2786
2787 if (DECL_GIMPLE_REG_P (base))
2788 {
2789 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2790 return base;
2791 }
2792
2793 return NULL_TREE;
2794 }
2795
2796 /* Callback for walk_tree, check that all elements with address taken are
2797 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2798 inside a PHI node. */
2799
2800 static tree
2801 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2802 {
2803 tree t = *tp, x;
2804
2805 if (TYPE_P (t))
2806 *walk_subtrees = 0;
2807
2808 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2809 #define CHECK_OP(N, MSG) \
2810 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2811 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2812
2813 switch (TREE_CODE (t))
2814 {
2815 case SSA_NAME:
2816 if (SSA_NAME_IN_FREE_LIST (t))
2817 {
2818 error ("SSA name in freelist but still referenced");
2819 return *tp;
2820 }
2821 break;
2822
2823 case INDIRECT_REF:
2824 error ("INDIRECT_REF in gimple IL");
2825 return t;
2826
2827 case MEM_REF:
2828 x = TREE_OPERAND (t, 0);
2829 if (!POINTER_TYPE_P (TREE_TYPE (x))
2830 || !is_gimple_mem_ref_addr (x))
2831 {
2832 error ("invalid first operand of MEM_REF");
2833 return x;
2834 }
2835 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2836 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2837 {
2838 error ("invalid offset operand of MEM_REF");
2839 return TREE_OPERAND (t, 1);
2840 }
2841 if (TREE_CODE (x) == ADDR_EXPR
2842 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2843 return x;
2844 *walk_subtrees = 0;
2845 break;
2846
2847 case ASSERT_EXPR:
2848 x = fold (ASSERT_EXPR_COND (t));
2849 if (x == boolean_false_node)
2850 {
2851 error ("ASSERT_EXPR with an always-false condition");
2852 return *tp;
2853 }
2854 break;
2855
2856 case MODIFY_EXPR:
2857 error ("MODIFY_EXPR not expected while having tuples");
2858 return *tp;
2859
2860 case ADDR_EXPR:
2861 {
2862 tree tem;
2863
2864 gcc_assert (is_gimple_address (t));
2865
2866 /* Skip any references (they will be checked when we recurse down the
2867 tree) and ensure that any variable used as a prefix is marked
2868 addressable. */
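/* E.g. for &a.b[i].c the prefix variable is a, which must have
TREE_ADDRESSABLE set. */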
2869 for (x = TREE_OPERAND (t, 0);
2870 handled_component_p (x);
2871 x = TREE_OPERAND (x, 0))
2872 ;
2873
2874 if ((tem = verify_address (t, x)))
2875 return tem;
2876
2877 if (!(TREE_CODE (x) == VAR_DECL
2878 || TREE_CODE (x) == PARM_DECL
2879 || TREE_CODE (x) == RESULT_DECL))
2880 return NULL;
2881
2882 if (!TREE_ADDRESSABLE (x))
2883 {
2884 error ("address taken, but ADDRESSABLE bit not set");
2885 return x;
2886 }
2887
2888 break;
2889 }
2890
2891 case COND_EXPR:
2892 x = COND_EXPR_COND (t);
2893 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2894 {
2895 error ("non-integral used in condition");
2896 return x;
2897 }
2898 if (!is_gimple_condexpr (x))
2899 {
2900 error ("invalid conditional operand");
2901 return x;
2902 }
2903 break;
2904
2905 case NON_LVALUE_EXPR:
2906 case TRUTH_NOT_EXPR:
2907 gcc_unreachable ();
2908
2909 CASE_CONVERT:
2910 case FIX_TRUNC_EXPR:
2911 case FLOAT_EXPR:
2912 case NEGATE_EXPR:
2913 case ABS_EXPR:
2914 case BIT_NOT_EXPR:
2915 CHECK_OP (0, "invalid operand to unary operator");
2916 break;
2917
2918 case REALPART_EXPR:
2919 case IMAGPART_EXPR:
2920 case BIT_FIELD_REF:
2921 if (!is_gimple_reg_type (TREE_TYPE (t)))
2922 {
2923 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2924 return t;
2925 }
2926
2927 if (TREE_CODE (t) == BIT_FIELD_REF)
2928 {
2929 tree t0 = TREE_OPERAND (t, 0);
2930 tree t1 = TREE_OPERAND (t, 1);
2931 tree t2 = TREE_OPERAND (t, 2);
2932 if (!tree_fits_uhwi_p (t1)
2933 || !tree_fits_uhwi_p (t2))
2934 {
2935 error ("invalid position or size operand to BIT_FIELD_REF");
2936 return t;
2937 }
2938 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2939 && (TYPE_PRECISION (TREE_TYPE (t))
2940 != tree_to_uhwi (t1)))
2941 {
2942 error ("integral result type precision does not match "
2943 "field size of BIT_FIELD_REF");
2944 return t;
2945 }
2946 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2947 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2948 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2949 != tree_to_uhwi (t1)))
2950 {
2951 error ("mode precision of non-integral result does not "
2952 "match field size of BIT_FIELD_REF");
2953 return t;
2954 }
2955 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2956 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2957 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2958 {
2959 error ("position plus size exceeds size of referenced object in "
2960 "BIT_FIELD_REF");
2961 return t;
2962 }
2963 }
2964 t = TREE_OPERAND (t, 0);
2965
2966 /* Fall-through. */
2967 case COMPONENT_REF:
2968 case ARRAY_REF:
2969 case ARRAY_RANGE_REF:
2970 case VIEW_CONVERT_EXPR:
2971 /* We have a nest of references. Verify that each of the operands
2972 that determine where to reference is either a constant or a variable,
2973 verify that the base is valid, and then show we've already checked
2974 the subtrees. */
2975 while (handled_component_p (t))
2976 {
2977 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2978 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2979 else if (TREE_CODE (t) == ARRAY_REF
2980 || TREE_CODE (t) == ARRAY_RANGE_REF)
2981 {
2982 CHECK_OP (1, "invalid array index");
2983 if (TREE_OPERAND (t, 2))
2984 CHECK_OP (2, "invalid array lower bound");
2985 if (TREE_OPERAND (t, 3))
2986 CHECK_OP (3, "invalid array stride");
2987 }
2988 else if (TREE_CODE (t) == BIT_FIELD_REF
2989 || TREE_CODE (t) == REALPART_EXPR
2990 || TREE_CODE (t) == IMAGPART_EXPR)
2991 {
2992 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2993 "REALPART_EXPR");
2994 return t;
2995 }
2996
2997 t = TREE_OPERAND (t, 0);
2998 }
2999
3000 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3001 {
3002 error ("invalid reference prefix");
3003 return t;
3004 }
3005 *walk_subtrees = 0;
3006 break;
3007 case PLUS_EXPR:
3008 case MINUS_EXPR:
3009 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3010 should be done using POINTER_PLUS_EXPR. */
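/* E.g. p + 4 for a pointer p must be represented as
POINTER_PLUS_EXPR <p, 4>, with the offset in sizetype. */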
3011 if (POINTER_TYPE_P (TREE_TYPE (t)))
3012 {
3013 error ("invalid operand to plus/minus, type is a pointer");
3014 return t;
3015 }
3016 CHECK_OP (0, "invalid operand to binary operator");
3017 CHECK_OP (1, "invalid operand to binary operator");
3018 break;
3019
3020 case POINTER_PLUS_EXPR:
3021 /* Check to make sure the first operand is a pointer or reference type. */
3022 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3023 {
3024 error ("invalid operand to pointer plus, first operand is not a pointer");
3025 return t;
3026 }
3027 /* Check to make sure the second operand is a ptrofftype. */
3028 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3029 {
3030 error ("invalid operand to pointer plus, second operand is not an "
3031 "integer type of appropriate width");
3032 return t;
3033 }
3034 /* FALLTHROUGH */
3035 case LT_EXPR:
3036 case LE_EXPR:
3037 case GT_EXPR:
3038 case GE_EXPR:
3039 case EQ_EXPR:
3040 case NE_EXPR:
3041 case UNORDERED_EXPR:
3042 case ORDERED_EXPR:
3043 case UNLT_EXPR:
3044 case UNLE_EXPR:
3045 case UNGT_EXPR:
3046 case UNGE_EXPR:
3047 case UNEQ_EXPR:
3048 case LTGT_EXPR:
3049 case MULT_EXPR:
3050 case TRUNC_DIV_EXPR:
3051 case CEIL_DIV_EXPR:
3052 case FLOOR_DIV_EXPR:
3053 case ROUND_DIV_EXPR:
3054 case TRUNC_MOD_EXPR:
3055 case CEIL_MOD_EXPR:
3056 case FLOOR_MOD_EXPR:
3057 case ROUND_MOD_EXPR:
3058 case RDIV_EXPR:
3059 case EXACT_DIV_EXPR:
3060 case MIN_EXPR:
3061 case MAX_EXPR:
3062 case LSHIFT_EXPR:
3063 case RSHIFT_EXPR:
3064 case LROTATE_EXPR:
3065 case RROTATE_EXPR:
3066 case BIT_IOR_EXPR:
3067 case BIT_XOR_EXPR:
3068 case BIT_AND_EXPR:
3069 CHECK_OP (0, "invalid operand to binary operator");
3070 CHECK_OP (1, "invalid operand to binary operator");
3071 break;
3072
3073 case CONSTRUCTOR:
3074 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3075 *walk_subtrees = 0;
3076 break;
3077
3078 case CASE_LABEL_EXPR:
3079 if (CASE_CHAIN (t))
3080 {
3081 error ("invalid CASE_CHAIN");
3082 return t;
3083 }
3084 break;
3085
3086 default:
3087 break;
3088 }
3089 return NULL;
3090
3091 #undef CHECK_OP
3092 }
3093
3094
3095 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3096 Returns true if there is an error, otherwise false. */
3097
3098 static bool
3099 verify_types_in_gimple_min_lval (tree expr)
3100 {
3101 tree op;
3102
3103 if (is_gimple_id (expr))
3104 return false;
3105
3106 if (TREE_CODE (expr) != TARGET_MEM_REF
3107 && TREE_CODE (expr) != MEM_REF)
3108 {
3109 error ("invalid expression for min lvalue");
3110 return true;
3111 }
3112
3113 /* TARGET_MEM_REFs are strange beasts. */
3114 if (TREE_CODE (expr) == TARGET_MEM_REF)
3115 return false;
3116
3117 op = TREE_OPERAND (expr, 0);
3118 if (!is_gimple_val (op))
3119 {
3120 error ("invalid operand in indirect reference");
3121 debug_generic_stmt (op);
3122 return true;
3123 }
3124 /* Memory references can now generally involve a value conversion. */
3125
3126 return false;
3127 }
3128
3129 /* Verify if EXPR is a valid GIMPLE reference expression. If
3130 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3131 if there is an error, otherwise false. */
3132
3133 static bool
3134 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3135 {
3136 while (handled_component_p (expr))
3137 {
3138 tree op = TREE_OPERAND (expr, 0);
3139
3140 if (TREE_CODE (expr) == ARRAY_REF
3141 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3142 {
3143 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3144 || (TREE_OPERAND (expr, 2)
3145 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3146 || (TREE_OPERAND (expr, 3)
3147 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3148 {
3149 error ("invalid operands to array reference");
3150 debug_generic_stmt (expr);
3151 return true;
3152 }
3153 }
3154
3155 /* Verify if the reference array element types are compatible. */
3156 if (TREE_CODE (expr) == ARRAY_REF
3157 && !useless_type_conversion_p (TREE_TYPE (expr),
3158 TREE_TYPE (TREE_TYPE (op))))
3159 {
3160 error ("type mismatch in array reference");
3161 debug_generic_stmt (TREE_TYPE (expr));
3162 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3163 return true;
3164 }
3165 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3166 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3167 TREE_TYPE (TREE_TYPE (op))))
3168 {
3169 error ("type mismatch in array range reference");
3170 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3171 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3172 return true;
3173 }
3174
3175 if ((TREE_CODE (expr) == REALPART_EXPR
3176 || TREE_CODE (expr) == IMAGPART_EXPR)
3177 && !useless_type_conversion_p (TREE_TYPE (expr),
3178 TREE_TYPE (TREE_TYPE (op))))
3179 {
3180 error ("type mismatch in real/imagpart reference");
3181 debug_generic_stmt (TREE_TYPE (expr));
3182 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3183 return true;
3184 }
3185
3186 if (TREE_CODE (expr) == COMPONENT_REF
3187 && !useless_type_conversion_p (TREE_TYPE (expr),
3188 TREE_TYPE (TREE_OPERAND (expr, 1))))
3189 {
3190 error ("type mismatch in component reference");
3191 debug_generic_stmt (TREE_TYPE (expr));
3192 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3193 return true;
3194 }
3195
3196 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3197 {
3198 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3199 that their operand is not an SSA name or an invariant when
3200 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3201 bug). Otherwise there is nothing to verify, gross mismatches at
3202 most invoke undefined behavior. */
3203 if (require_lvalue
3204 && (TREE_CODE (op) == SSA_NAME
3205 || is_gimple_min_invariant (op)))
3206 {
3207 error ("conversion of an SSA_NAME on the left hand side");
3208 debug_generic_stmt (expr);
3209 return true;
3210 }
3211 else if (TREE_CODE (op) == SSA_NAME
3212 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3213 {
3214 error ("conversion of register to a different size");
3215 debug_generic_stmt (expr);
3216 return true;
3217 }
3218 else if (!handled_component_p (op))
3219 return false;
3220 }
3221
3222 expr = op;
3223 }
3224
3225 if (TREE_CODE (expr) == MEM_REF)
3226 {
3227 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3228 {
3229 error ("invalid address operand in MEM_REF");
3230 debug_generic_stmt (expr);
3231 return true;
3232 }
3233 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3234 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3235 {
3236 error ("invalid offset operand in MEM_REF");
3237 debug_generic_stmt (expr);
3238 return true;
3239 }
3240 }
3241 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3242 {
3243 if (!TMR_BASE (expr)
3244 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3245 {
3246 error ("invalid address operand in TARGET_MEM_REF");
3247 return true;
3248 }
3249 if (!TMR_OFFSET (expr)
3250 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3251 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3252 {
3253 error ("invalid offset operand in TARGET_MEM_REF");
3254 debug_generic_stmt (expr);
3255 return true;
3256 }
3257 }
3258
3259 return ((require_lvalue || !is_gimple_min_invariant (expr))
3260 && verify_types_in_gimple_min_lval (expr));
3261 }
3262
3263 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3264 list of pointer-to types that is trivially convertible to DEST. */
3265
3266 static bool
3267 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3268 {
3269 tree src;
3270
3271 if (!TYPE_POINTER_TO (src_obj))
3272 return true;
3273
3274 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3275 if (useless_type_conversion_p (dest, src))
3276 return true;
3277
3278 return false;
3279 }
3280
3281 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3282 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3283
3284 static bool
3285 valid_fixed_convert_types_p (tree type1, tree type2)
3286 {
3287 return (FIXED_POINT_TYPE_P (type1)
3288 && (INTEGRAL_TYPE_P (type2)
3289 || SCALAR_FLOAT_TYPE_P (type2)
3290 || FIXED_POINT_TYPE_P (type2)));
3291 }
3292
3293 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3294 is a problem, otherwise false. */
3295
3296 static bool
3297 verify_gimple_call (gcall *stmt)
3298 {
3299 tree fn = gimple_call_fn (stmt);
3300 tree fntype, fndecl;
3301 unsigned i;
3302
3303 if (gimple_call_internal_p (stmt))
3304 {
3305 if (fn)
3306 {
3307 error ("gimple call has two targets");
3308 debug_generic_stmt (fn);
3309 return true;
3310 }
3311 }
3312 else
3313 {
3314 if (!fn)
3315 {
3316 error ("gimple call has no target");
3317 return true;
3318 }
3319 }
3320
3321 if (fn && !is_gimple_call_addr (fn))
3322 {
3323 error ("invalid function in gimple call");
3324 debug_generic_stmt (fn);
3325 return true;
3326 }
3327
3328 if (fn
3329 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3330 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3331 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3332 {
3333 error ("non-function in gimple call");
3334 return true;
3335 }
3336
3337 fndecl = gimple_call_fndecl (stmt);
3338 if (fndecl
3339 && TREE_CODE (fndecl) == FUNCTION_DECL
3340 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3341 && !DECL_PURE_P (fndecl)
3342 && !TREE_READONLY (fndecl))
3343 {
3344 error ("invalid pure const state for function");
3345 return true;
3346 }
3347
3348 tree lhs = gimple_call_lhs (stmt);
3349 if (lhs
3350 && (!is_gimple_lvalue (lhs)
3351 || verify_types_in_gimple_reference (lhs, true)))
3352 {
3353 error ("invalid LHS in gimple call");
3354 return true;
3355 }
3356
3357 if (lhs
3358 && gimple_call_ctrl_altering_p (stmt)
3359 && gimple_call_noreturn_p (stmt)
3360 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3361 {
3362 error ("LHS in noreturn call");
3363 return true;
3364 }
3365
3366 fntype = gimple_call_fntype (stmt);
3367 if (fntype
3368 && lhs
3369 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3370 /* ??? At least C++ misses conversions at assignments from
3371 void * call results.
3372 ??? Java is completely off. Especially with functions
3373 returning java.lang.Object.
3374 For now simply allow arbitrary pointer type conversions. */
3375 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3376 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3377 {
3378 error ("invalid conversion in gimple call");
3379 debug_generic_stmt (TREE_TYPE (lhs));
3380 debug_generic_stmt (TREE_TYPE (fntype));
3381 return true;
3382 }
3383
3384 if (gimple_call_chain (stmt)
3385 && !is_gimple_val (gimple_call_chain (stmt)))
3386 {
3387 error ("invalid static chain in gimple call");
3388 debug_generic_stmt (gimple_call_chain (stmt));
3389 return true;
3390 }
3391
3392 /* If there is a static chain argument, the call should either be
3393 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3394 if (gimple_call_chain (stmt)
3395 && fndecl
3396 && !DECL_STATIC_CHAIN (fndecl))
3397 {
3398 error ("static chain with function that doesn%'t use one");
3399 return true;
3400 }
3401
3402 /* ??? The C frontend passes unpromoted arguments in case it
3403 didn't see a function declaration before the call. So for now
3404 leave the call arguments mostly unverified. Once we gimplify
3405 unit-at-a-time we have a chance to fix this. */
3406
3407 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3408 {
3409 tree arg = gimple_call_arg (stmt, i);
3410 if ((is_gimple_reg_type (TREE_TYPE (arg))
3411 && !is_gimple_val (arg))
3412 || (!is_gimple_reg_type (TREE_TYPE (arg))
3413 && !is_gimple_lvalue (arg)))
3414 {
3415 error ("invalid argument to gimple call");
3416 debug_generic_expr (arg);
3417 return true;
3418 }
3419 }
3420
3421 return false;
3422 }
3423
3424 /* Verifies the gimple comparison with the result type TYPE and
3425 the operands OP0 and OP1. */
3426
3427 static bool
3428 verify_gimple_comparison (tree type, tree op0, tree op1)
3429 {
3430 tree op0_type = TREE_TYPE (op0);
3431 tree op1_type = TREE_TYPE (op1);
3432
3433 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3434 {
3435 error ("invalid operands in gimple comparison");
3436 return true;
3437 }
3438
3439 /* For comparisons we do not record a separate operation type giving
3440 the effective type the comparison is carried out in. Instead
3441 we require that either the first operand is trivially
3442 convertible into the second, or the other way around.
3443 Because we special-case pointers to void we allow
3444 comparisons of pointers with the same mode as well. */
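/* E.g. comparing an int * with a void * of the same mode is accepted
even though neither pointer type trivially converts to the other. */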
3445 if (!useless_type_conversion_p (op0_type, op1_type)
3446 && !useless_type_conversion_p (op1_type, op0_type)
3447 && (!POINTER_TYPE_P (op0_type)
3448 || !POINTER_TYPE_P (op1_type)
3449 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3450 {
3451 error ("mismatching comparison operand types");
3452 debug_generic_expr (op0_type);
3453 debug_generic_expr (op1_type);
3454 return true;
3455 }
3456
3457 /* The resulting type of a comparison may be an effective boolean type. */
3458 if (INTEGRAL_TYPE_P (type)
3459 && (TREE_CODE (type) == BOOLEAN_TYPE
3460 || TYPE_PRECISION (type) == 1))
3461 {
3462 if (TREE_CODE (op0_type) == VECTOR_TYPE
3463 || TREE_CODE (op1_type) == VECTOR_TYPE)
3464 {
3465 error ("vector comparison returning a boolean");
3466 debug_generic_expr (op0_type);
3467 debug_generic_expr (op1_type);
3468 return true;
3469 }
3470 }
3471 /* Or a boolean vector type with the same element count
3472 as the comparison operand types. */
3473 else if (TREE_CODE (type) == VECTOR_TYPE
3474 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3475 {
3476 if (TREE_CODE (op0_type) != VECTOR_TYPE
3477 || TREE_CODE (op1_type) != VECTOR_TYPE)
3478 {
3479 error ("non-vector operands in vector comparison");
3480 debug_generic_expr (op0_type);
3481 debug_generic_expr (op1_type);
3482 return true;
3483 }
3484
3485 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3486 {
3487 error ("invalid vector comparison resulting type");
3488 debug_generic_expr (type);
3489 return true;
3490 }
3491 }
3492 else
3493 {
3494 error ("bogus comparison result type");
3495 debug_generic_expr (type);
3496 return true;
3497 }
3498
3499 return false;
3500 }
3501
3502 /* Verify a gimple assignment statement STMT with a unary rhs.
3503 Returns true if anything is wrong. */
3504
3505 static bool
3506 verify_gimple_assign_unary (gassign *stmt)
3507 {
3508 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3509 tree lhs = gimple_assign_lhs (stmt);
3510 tree lhs_type = TREE_TYPE (lhs);
3511 tree rhs1 = gimple_assign_rhs1 (stmt);
3512 tree rhs1_type = TREE_TYPE (rhs1);
3513
3514 if (!is_gimple_reg (lhs))
3515 {
3516 error ("non-register as LHS of unary operation");
3517 return true;
3518 }
3519
3520 if (!is_gimple_val (rhs1))
3521 {
3522 error ("invalid operand in unary operation");
3523 return true;
3524 }
3525
3526 /* First handle conversions. */
3527 switch (rhs_code)
3528 {
3529 CASE_CONVERT:
3530 {
3531 /* Allow conversions from pointer type to integral type only if
3532 there is no sign or zero extension involved.
3533 For targets where the precision of ptrofftype doesn't match that
3534 of pointers we need to allow arbitrary conversions to ptrofftype. */
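/* E.g. converting a 64-bit pointer to a 32-bit integer only truncates and
is accepted below, while widening it to a wider integer would involve an
extension and is normally not allowed. */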
3535 if ((POINTER_TYPE_P (lhs_type)
3536 && INTEGRAL_TYPE_P (rhs1_type))
3537 || (POINTER_TYPE_P (rhs1_type)
3538 && INTEGRAL_TYPE_P (lhs_type)
3539 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3540 || ptrofftype_p (sizetype))))
3541 return false;
3542
3543 /* Allow conversion from integral to offset type and vice versa. */
3544 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3545 && INTEGRAL_TYPE_P (rhs1_type))
3546 || (INTEGRAL_TYPE_P (lhs_type)
3547 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3548 return false;
3549
3550 /* Otherwise assert we are converting between types of the
3551 same kind. */
3552 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3553 {
3554 error ("invalid types in nop conversion");
3555 debug_generic_expr (lhs_type);
3556 debug_generic_expr (rhs1_type);
3557 return true;
3558 }
3559
3560 return false;
3561 }
3562
3563 case ADDR_SPACE_CONVERT_EXPR:
3564 {
3565 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3566 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3567 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3568 {
3569 error ("invalid types in address space conversion");
3570 debug_generic_expr (lhs_type);
3571 debug_generic_expr (rhs1_type);
3572 return true;
3573 }
3574
3575 return false;
3576 }
3577
3578 case FIXED_CONVERT_EXPR:
3579 {
3580 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3581 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3582 {
3583 error ("invalid types in fixed-point conversion");
3584 debug_generic_expr (lhs_type);
3585 debug_generic_expr (rhs1_type);
3586 return true;
3587 }
3588
3589 return false;
3590 }
3591
3592 case FLOAT_EXPR:
3593 {
3594 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3595 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3596 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3597 {
3598 error ("invalid types in conversion to floating point");
3599 debug_generic_expr (lhs_type);
3600 debug_generic_expr (rhs1_type);
3601 return true;
3602 }
3603
3604 return false;
3605 }
3606
3607 case FIX_TRUNC_EXPR:
3608 {
3609 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3610 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3611 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3612 {
3613 error ("invalid types in conversion to integer");
3614 debug_generic_expr (lhs_type);
3615 debug_generic_expr (rhs1_type);
3616 return true;
3617 }
3618
3619 return false;
3620 }
3621 case REDUC_MAX_EXPR:
3622 case REDUC_MIN_EXPR:
3623 case REDUC_PLUS_EXPR:
3624 if (!VECTOR_TYPE_P (rhs1_type)
3625 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3626 {
3627 error ("reduction should convert from vector to element type");
3628 debug_generic_expr (lhs_type);
3629 debug_generic_expr (rhs1_type);
3630 return true;
3631 }
3632 return false;
3633
3634 case VEC_UNPACK_HI_EXPR:
3635 case VEC_UNPACK_LO_EXPR:
3636 case VEC_UNPACK_FLOAT_HI_EXPR:
3637 case VEC_UNPACK_FLOAT_LO_EXPR:
3638 /* FIXME. */
3639 return false;
3640
3641 case NEGATE_EXPR:
3642 case ABS_EXPR:
3643 case BIT_NOT_EXPR:
3644 case PAREN_EXPR:
3645 case CONJ_EXPR:
3646 break;
3647
3648 default:
3649 gcc_unreachable ();
3650 }
3651
3652 /* For the remaining codes assert there is no conversion involved. */
3653 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3654 {
3655 error ("non-trivial conversion in unary operation");
3656 debug_generic_expr (lhs_type);
3657 debug_generic_expr (rhs1_type);
3658 return true;
3659 }
3660
3661 return false;
3662 }
3663
3664 /* Verify a gimple assignment statement STMT with a binary rhs.
3665 Returns true if anything is wrong. */
3666
3667 static bool
3668 verify_gimple_assign_binary (gassign *stmt)
3669 {
3670 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3671 tree lhs = gimple_assign_lhs (stmt);
3672 tree lhs_type = TREE_TYPE (lhs);
3673 tree rhs1 = gimple_assign_rhs1 (stmt);
3674 tree rhs1_type = TREE_TYPE (rhs1);
3675 tree rhs2 = gimple_assign_rhs2 (stmt);
3676 tree rhs2_type = TREE_TYPE (rhs2);
3677
3678 if (!is_gimple_reg (lhs))
3679 {
3680 error ("non-register as LHS of binary operation");
3681 return true;
3682 }
3683
3684 if (!is_gimple_val (rhs1)
3685 || !is_gimple_val (rhs2))
3686 {
3687 error ("invalid operands in binary operation");
3688 return true;
3689 }
3690
3691 /* First handle operations that involve different types. */
3692 switch (rhs_code)
3693 {
3694 case COMPLEX_EXPR:
3695 {
3696 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3697 || !(INTEGRAL_TYPE_P (rhs1_type)
3698 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3699 || !(INTEGRAL_TYPE_P (rhs2_type)
3700 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3701 {
3702 error ("type mismatch in complex expression");
3703 debug_generic_expr (lhs_type);
3704 debug_generic_expr (rhs1_type);
3705 debug_generic_expr (rhs2_type);
3706 return true;
3707 }
3708
3709 return false;
3710 }
3711
3712 case LSHIFT_EXPR:
3713 case RSHIFT_EXPR:
3714 case LROTATE_EXPR:
3715 case RROTATE_EXPR:
3716 {
3717 /* Shifts and rotates are ok on integral types, fixed point
3718 types and integer vector types. */
3719 if ((!INTEGRAL_TYPE_P (rhs1_type)
3720 && !FIXED_POINT_TYPE_P (rhs1_type)
3721 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3722 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3723 || (!INTEGRAL_TYPE_P (rhs2_type)
3724 /* Vector shifts of vectors are also ok. */
3725 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3726 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3727 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3728 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3729 || !useless_type_conversion_p (lhs_type, rhs1_type))
3730 {
3731 error ("type mismatch in shift expression");
3732 debug_generic_expr (lhs_type);
3733 debug_generic_expr (rhs1_type);
3734 debug_generic_expr (rhs2_type);
3735 return true;
3736 }
3737
3738 return false;
3739 }
3740
3741 case WIDEN_LSHIFT_EXPR:
3742 {
3743 if (!INTEGRAL_TYPE_P (lhs_type)
3744 || !INTEGRAL_TYPE_P (rhs1_type)
3745 || TREE_CODE (rhs2) != INTEGER_CST
3746 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3747 {
3748 error ("type mismatch in widening vector shift expression");
3749 debug_generic_expr (lhs_type);
3750 debug_generic_expr (rhs1_type);
3751 debug_generic_expr (rhs2_type);
3752 return true;
3753 }
3754
3755 return false;
3756 }
3757
3758 case VEC_WIDEN_LSHIFT_HI_EXPR:
3759 case VEC_WIDEN_LSHIFT_LO_EXPR:
3760 {
3761 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3762 || TREE_CODE (lhs_type) != VECTOR_TYPE
3763 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3764 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3765 || TREE_CODE (rhs2) != INTEGER_CST
3766 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3767 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3768 {
3769 error ("type mismatch in widening vector shift expression");
3770 debug_generic_expr (lhs_type);
3771 debug_generic_expr (rhs1_type);
3772 debug_generic_expr (rhs2_type);
3773 return true;
3774 }
3775
3776 return false;
3777 }
3778
3779 case PLUS_EXPR:
3780 case MINUS_EXPR:
3781 {
3782 tree lhs_etype = lhs_type;
3783 tree rhs1_etype = rhs1_type;
3784 tree rhs2_etype = rhs2_type;
3785 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3786 {
3787 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3788 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3789 {
3790 error ("invalid non-vector operands to vector valued plus");
3791 return true;
3792 }
3793 lhs_etype = TREE_TYPE (lhs_type);
3794 rhs1_etype = TREE_TYPE (rhs1_type);
3795 rhs2_etype = TREE_TYPE (rhs2_type);
3796 }
3797 if (POINTER_TYPE_P (lhs_etype)
3798 || POINTER_TYPE_P (rhs1_etype)
3799 || POINTER_TYPE_P (rhs2_etype))
3800 {
3801 error ("invalid (pointer) operands to plus/minus");
3802 return true;
3803 }
3804
3805 /* Continue with generic binary expression handling. */
3806 break;
3807 }
3808
3809 case POINTER_PLUS_EXPR:
3810 {
3811 if (!POINTER_TYPE_P (rhs1_type)
3812 || !useless_type_conversion_p (lhs_type, rhs1_type)
3813 || !ptrofftype_p (rhs2_type))
3814 {
3815 error ("type mismatch in pointer plus expression");
3816 debug_generic_stmt (lhs_type);
3817 debug_generic_stmt (rhs1_type);
3818 debug_generic_stmt (rhs2_type);
3819 return true;
3820 }
3821
3822 return false;
3823 }
3824
3825 case TRUTH_ANDIF_EXPR:
3826 case TRUTH_ORIF_EXPR:
3827 case TRUTH_AND_EXPR:
3828 case TRUTH_OR_EXPR:
3829 case TRUTH_XOR_EXPR:
3830
3831 gcc_unreachable ();
3832
3833 case LT_EXPR:
3834 case LE_EXPR:
3835 case GT_EXPR:
3836 case GE_EXPR:
3837 case EQ_EXPR:
3838 case NE_EXPR:
3839 case UNORDERED_EXPR:
3840 case ORDERED_EXPR:
3841 case UNLT_EXPR:
3842 case UNLE_EXPR:
3843 case UNGT_EXPR:
3844 case UNGE_EXPR:
3845 case UNEQ_EXPR:
3846 case LTGT_EXPR:
3847 /* Comparisons are also binary, but the result type is not
3848 connected to the operand types. */
3849 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3850
3851 case WIDEN_MULT_EXPR:
3852 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3853 return true;
3854 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3855 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3856
3857 case WIDEN_SUM_EXPR:
3858 case VEC_WIDEN_MULT_HI_EXPR:
3859 case VEC_WIDEN_MULT_LO_EXPR:
3860 case VEC_WIDEN_MULT_EVEN_EXPR:
3861 case VEC_WIDEN_MULT_ODD_EXPR:
3862 case VEC_PACK_TRUNC_EXPR:
3863 case VEC_PACK_SAT_EXPR:
3864 case VEC_PACK_FIX_TRUNC_EXPR:
3865 /* FIXME. */
3866 return false;
3867
3868 case MULT_EXPR:
3869 case MULT_HIGHPART_EXPR:
3870 case TRUNC_DIV_EXPR:
3871 case CEIL_DIV_EXPR:
3872 case FLOOR_DIV_EXPR:
3873 case ROUND_DIV_EXPR:
3874 case TRUNC_MOD_EXPR:
3875 case CEIL_MOD_EXPR:
3876 case FLOOR_MOD_EXPR:
3877 case ROUND_MOD_EXPR:
3878 case RDIV_EXPR:
3879 case EXACT_DIV_EXPR:
3880 case MIN_EXPR:
3881 case MAX_EXPR:
3882 case BIT_IOR_EXPR:
3883 case BIT_XOR_EXPR:
3884 case BIT_AND_EXPR:
3885 /* Continue with generic binary expression handling. */
3886 break;
3887
3888 default:
3889 gcc_unreachable ();
3890 }
3891
3892 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3893 || !useless_type_conversion_p (lhs_type, rhs2_type))
3894 {
3895 error ("type mismatch in binary expression");
3896 debug_generic_stmt (lhs_type);
3897 debug_generic_stmt (rhs1_type);
3898 debug_generic_stmt (rhs2_type);
3899 return true;
3900 }
3901
3902 return false;
3903 }
3904
3905 /* Verify a gimple assignment statement STMT with a ternary rhs.
3906 Returns true if anything is wrong. */
3907
3908 static bool
3909 verify_gimple_assign_ternary (gassign *stmt)
3910 {
3911 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3912 tree lhs = gimple_assign_lhs (stmt);
3913 tree lhs_type = TREE_TYPE (lhs);
3914 tree rhs1 = gimple_assign_rhs1 (stmt);
3915 tree rhs1_type = TREE_TYPE (rhs1);
3916 tree rhs2 = gimple_assign_rhs2 (stmt);
3917 tree rhs2_type = TREE_TYPE (rhs2);
3918 tree rhs3 = gimple_assign_rhs3 (stmt);
3919 tree rhs3_type = TREE_TYPE (rhs3);
3920
3921 if (!is_gimple_reg (lhs))
3922 {
3923 error ("non-register as LHS of ternary operation");
3924 return true;
3925 }
3926
3927 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3928 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3929 || !is_gimple_val (rhs2)
3930 || !is_gimple_val (rhs3))
3931 {
3932 error ("invalid operands in ternary operation");
3933 return true;
3934 }
3935
3936 /* First handle operations that involve different types. */
3937 switch (rhs_code)
3938 {
3939 case WIDEN_MULT_PLUS_EXPR:
3940 case WIDEN_MULT_MINUS_EXPR:
3941 if ((!INTEGRAL_TYPE_P (rhs1_type)
3942 && !FIXED_POINT_TYPE_P (rhs1_type))
3943 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3944 || !useless_type_conversion_p (lhs_type, rhs3_type)
3945 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3946 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3947 {
3948 error ("type mismatch in widening multiply-accumulate expression");
3949 debug_generic_expr (lhs_type);
3950 debug_generic_expr (rhs1_type);
3951 debug_generic_expr (rhs2_type);
3952 debug_generic_expr (rhs3_type);
3953 return true;
3954 }
3955 break;
3956
3957 case FMA_EXPR:
3958 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3959 || !useless_type_conversion_p (lhs_type, rhs2_type)
3960 || !useless_type_conversion_p (lhs_type, rhs3_type))
3961 {
3962 error ("type mismatch in fused multiply-add expression");
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs1_type);
3965 debug_generic_expr (rhs2_type);
3966 debug_generic_expr (rhs3_type);
3967 return true;
3968 }
3969 break;
3970
3971 case VEC_COND_EXPR:
3972 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3973 || TYPE_VECTOR_SUBPARTS (rhs1_type)
3974 != TYPE_VECTOR_SUBPARTS (lhs_type))
3975 {
3976 error ("the first argument of a VEC_COND_EXPR must be of a "
3977 "boolean vector type of the same number of elements "
3978 "as the result");
3979 debug_generic_expr (lhs_type);
3980 debug_generic_expr (rhs1_type);
3981 return true;
3982 }
3983 /* Fallthrough. */
3984 case COND_EXPR:
3985 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3986 || !useless_type_conversion_p (lhs_type, rhs3_type))
3987 {
3988 error ("type mismatch in conditional expression");
3989 debug_generic_expr (lhs_type);
3990 debug_generic_expr (rhs2_type);
3991 debug_generic_expr (rhs3_type);
3992 return true;
3993 }
3994 break;
3995
3996 case VEC_PERM_EXPR:
3997 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3998 || !useless_type_conversion_p (lhs_type, rhs2_type))
3999 {
4000 error ("type mismatch in vector permute expression");
4001 debug_generic_expr (lhs_type);
4002 debug_generic_expr (rhs1_type);
4003 debug_generic_expr (rhs2_type);
4004 debug_generic_expr (rhs3_type);
4005 return true;
4006 }
4007
4008 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4009 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4010 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4011 {
4012 error ("vector types expected in vector permute expression");
4013 debug_generic_expr (lhs_type);
4014 debug_generic_expr (rhs1_type);
4015 debug_generic_expr (rhs2_type);
4016 debug_generic_expr (rhs3_type);
4017 return true;
4018 }
4019
4020 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4021 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4022 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4023 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4024 != TYPE_VECTOR_SUBPARTS (lhs_type))
4025 {
4026 error ("vectors with different element number found "
4027 "in vector permute expression");
4028 debug_generic_expr (lhs_type);
4029 debug_generic_expr (rhs1_type);
4030 debug_generic_expr (rhs2_type);
4031 debug_generic_expr (rhs3_type);
4032 return true;
4033 }
4034
4035 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4036 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4037 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4038 {
4039 error ("invalid mask type in vector permute expression");
4040 debug_generic_expr (lhs_type);
4041 debug_generic_expr (rhs1_type);
4042 debug_generic_expr (rhs2_type);
4043 debug_generic_expr (rhs3_type);
4044 return true;
4045 }
4046
4047 return false;
4048
4049 case SAD_EXPR:
4050 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4051 || !useless_type_conversion_p (lhs_type, rhs3_type)
4052 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4053 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4054 {
4055 error ("type mismatch in sad expression");
4056 debug_generic_expr (lhs_type);
4057 debug_generic_expr (rhs1_type);
4058 debug_generic_expr (rhs2_type);
4059 debug_generic_expr (rhs3_type);
4060 return true;
4061 }
4062
4063 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4064 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4065 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4066 {
4067 error ("vector types expected in sad expression");
4068 debug_generic_expr (lhs_type);
4069 debug_generic_expr (rhs1_type);
4070 debug_generic_expr (rhs2_type);
4071 debug_generic_expr (rhs3_type);
4072 return true;
4073 }
4074
4075 return false;
4076
4077 case DOT_PROD_EXPR:
4078 case REALIGN_LOAD_EXPR:
4079 /* FIXME. */
4080 return false;
4081
4082 default:
4083 gcc_unreachable ();
4084 }
4085 return false;
4086 }
4087
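/* An illustrative sketch in GIMPLE dump syntax (hypothetical names):

	x_5 = VEC_COND_EXPR <mask_1, a_2, b_3>;

   is accepted only if mask_1 has a boolean vector type with as many
   elements as x_5 and a_2/b_3 convert trivially to the type of x_5;
   a VEC_PERM_EXPR additionally requires an integral selector vector
   whose element size matches that of the data vectors.  */
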
4088 /* Verify a gimple assignment statement STMT with a single rhs.
4089 Returns true if anything is wrong. */
4090
4091 static bool
4092 verify_gimple_assign_single (gassign *stmt)
4093 {
4094 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4095 tree lhs = gimple_assign_lhs (stmt);
4096 tree lhs_type = TREE_TYPE (lhs);
4097 tree rhs1 = gimple_assign_rhs1 (stmt);
4098 tree rhs1_type = TREE_TYPE (rhs1);
4099 bool res = false;
4100
4101 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4102 {
4103 error ("non-trivial conversion at assignment");
4104 debug_generic_expr (lhs_type);
4105 debug_generic_expr (rhs1_type);
4106 return true;
4107 }
4108
4109 if (gimple_clobber_p (stmt)
4110 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4111 {
4112 error ("non-decl/MEM_REF LHS in clobber statement");
4113 debug_generic_expr (lhs);
4114 return true;
4115 }
4116
4117 if (handled_component_p (lhs)
4118 || TREE_CODE (lhs) == MEM_REF
4119 || TREE_CODE (lhs) == TARGET_MEM_REF)
4120 res |= verify_types_in_gimple_reference (lhs, true);
4121
4122 /* Special codes we cannot handle via their class. */
4123 switch (rhs_code)
4124 {
4125 case ADDR_EXPR:
4126 {
4127 tree op = TREE_OPERAND (rhs1, 0);
4128 if (!is_gimple_addressable (op))
4129 {
4130 error ("invalid operand in unary expression");
4131 return true;
4132 }
4133
4134 /* Technically there is no longer a need for matching types, but
4135 gimple hygiene asks for this check. In LTO we can combine
4136 incompatible units and thus end up with addresses of globals
4137 that change their type to a common one. */
4138 if (!in_lto_p
4139 && !types_compatible_p (TREE_TYPE (op),
4140 TREE_TYPE (TREE_TYPE (rhs1)))
4141 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4142 TREE_TYPE (op)))
4143 {
4144 error ("type mismatch in address expression");
4145 debug_generic_stmt (TREE_TYPE (rhs1));
4146 debug_generic_stmt (TREE_TYPE (op));
4147 return true;
4148 }
4149
4150 return verify_types_in_gimple_reference (op, true);
4151 }
4152
4153 /* tcc_reference */
4154 case INDIRECT_REF:
4155 error ("INDIRECT_REF in gimple IL");
4156 return true;
4157
4158 case COMPONENT_REF:
4159 case BIT_FIELD_REF:
4160 case ARRAY_REF:
4161 case ARRAY_RANGE_REF:
4162 case VIEW_CONVERT_EXPR:
4163 case REALPART_EXPR:
4164 case IMAGPART_EXPR:
4165 case TARGET_MEM_REF:
4166 case MEM_REF:
4167 if (!is_gimple_reg (lhs)
4168 && is_gimple_reg_type (TREE_TYPE (lhs)))
4169 {
4170 error ("invalid rhs for gimple memory store");
4171 debug_generic_stmt (lhs);
4172 debug_generic_stmt (rhs1);
4173 return true;
4174 }
4175 return res || verify_types_in_gimple_reference (rhs1, false);
4176
4177 /* tcc_constant */
4178 case SSA_NAME:
4179 case INTEGER_CST:
4180 case REAL_CST:
4181 case FIXED_CST:
4182 case COMPLEX_CST:
4183 case VECTOR_CST:
4184 case STRING_CST:
4185 return res;
4186
4187 /* tcc_declaration */
4188 case CONST_DECL:
4189 return res;
4190 case VAR_DECL:
4191 case PARM_DECL:
4192 if (!is_gimple_reg (lhs)
4193 && !is_gimple_reg (rhs1)
4194 && is_gimple_reg_type (TREE_TYPE (lhs)))
4195 {
4196 error ("invalid rhs for gimple memory store");
4197 debug_generic_stmt (lhs);
4198 debug_generic_stmt (rhs1);
4199 return true;
4200 }
4201 return res;
4202
4203 case CONSTRUCTOR:
4204 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4205 {
4206 unsigned int i;
4207 tree elt_i, elt_v, elt_t = NULL_TREE;
4208
4209 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4210 return res;
4211 /* For vector CONSTRUCTORs we require that either it is an empty
4212 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4213 (then the element count must be correct to cover the whole
4214 outer vector and the index must be NULL on all elements), or it
4215 is a CONSTRUCTOR of scalar elements, where as an exception we
4216 allow a smaller number of elements (assuming zero filling) and
4217 consecutive indexes instead of NULL indexes (such
4218 CONSTRUCTORs can appear in the IL from FEs). */
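/* An illustrative sketch for a V4SI destination (hypothetical
   names):

	v_2 = { _1, _3, _5, _7 };   four SI elements, exact cover
	v_2 = { _1, _3 };           two SI elements, upper part zeroed
	v_2 = { w_1, w_3 };         two V2SI elements, exact cover

   A CONSTRUCTOR of vector elements must cover the destination
   exactly; only a CONSTRUCTOR of scalar elements may be shorter.  */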
4219 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4220 {
4221 if (elt_t == NULL_TREE)
4222 {
4223 elt_t = TREE_TYPE (elt_v);
4224 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4225 {
4226 tree elt_t = TREE_TYPE (elt_v);
4227 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4228 TREE_TYPE (elt_t)))
4229 {
4230 error ("incorrect type of vector CONSTRUCTOR"
4231 " elements");
4232 debug_generic_stmt (rhs1);
4233 return true;
4234 }
4235 else if (CONSTRUCTOR_NELTS (rhs1)
4236 * TYPE_VECTOR_SUBPARTS (elt_t)
4237 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4238 {
4239 error ("incorrect number of vector CONSTRUCTOR"
4240 " elements");
4241 debug_generic_stmt (rhs1);
4242 return true;
4243 }
4244 }
4245 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4246 elt_t))
4247 {
4248 error ("incorrect type of vector CONSTRUCTOR elements");
4249 debug_generic_stmt (rhs1);
4250 return true;
4251 }
4252 else if (CONSTRUCTOR_NELTS (rhs1)
4253 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4254 {
4255 error ("incorrect number of vector CONSTRUCTOR elements");
4256 debug_generic_stmt (rhs1);
4257 return true;
4258 }
4259 }
4260 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4261 {
4262 error ("incorrect type of vector CONSTRUCTOR elements");
4263 debug_generic_stmt (rhs1);
4264 return true;
4265 }
4266 if (elt_i != NULL_TREE
4267 && (TREE_CODE (elt_t) == VECTOR_TYPE
4268 || TREE_CODE (elt_i) != INTEGER_CST
4269 || compare_tree_int (elt_i, i) != 0))
4270 {
4271 error ("vector CONSTRUCTOR with non-NULL element index");
4272 debug_generic_stmt (rhs1);
4273 return true;
4274 }
4275 if (!is_gimple_val (elt_v))
4276 {
4277 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4278 debug_generic_stmt (rhs1);
4279 return true;
4280 }
4281 }
4282 }
4283 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4284 {
4285 error ("non-vector CONSTRUCTOR with elements");
4286 debug_generic_stmt (rhs1);
4287 return true;
4288 }
4289 return res;
4290 case OBJ_TYPE_REF:
4291 case ASSERT_EXPR:
4292 case WITH_SIZE_EXPR:
4293 /* FIXME. */
4294 return res;
4295
4296 default:;
4297 }
4298
4299 return res;
4300 }
4301
4302 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4303 is a problem, otherwise false. */
4304
4305 static bool
4306 verify_gimple_assign (gassign *stmt)
4307 {
4308 switch (gimple_assign_rhs_class (stmt))
4309 {
4310 case GIMPLE_SINGLE_RHS:
4311 return verify_gimple_assign_single (stmt);
4312
4313 case GIMPLE_UNARY_RHS:
4314 return verify_gimple_assign_unary (stmt);
4315
4316 case GIMPLE_BINARY_RHS:
4317 return verify_gimple_assign_binary (stmt);
4318
4319 case GIMPLE_TERNARY_RHS:
4320 return verify_gimple_assign_ternary (stmt);
4321
4322 default:
4323 gcc_unreachable ();
4324 }
4325 }
4326
4327 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4328 is a problem, otherwise false. */
4329
4330 static bool
4331 verify_gimple_return (greturn *stmt)
4332 {
4333 tree op = gimple_return_retval (stmt);
4334 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4335
4336 /* We cannot test for present return values as we do not fix up missing
4337 return values from the original source. */
4338 if (op == NULL)
4339 return false;
4340
4341 if (!is_gimple_val (op)
4342 && TREE_CODE (op) != RESULT_DECL)
4343 {
4344 error ("invalid operand in return statement");
4345 debug_generic_stmt (op);
4346 return true;
4347 }
4348
4349 if ((TREE_CODE (op) == RESULT_DECL
4350 && DECL_BY_REFERENCE (op))
4351 || (TREE_CODE (op) == SSA_NAME
4352 && SSA_NAME_VAR (op)
4353 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4354 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4355 op = TREE_TYPE (op);
4356
4357 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4358 {
4359 error ("invalid conversion in return statement");
4360 debug_generic_stmt (restype);
4361 debug_generic_stmt (TREE_TYPE (op));
4362 return true;
4363 }
4364
4365 return false;
4366 }
4367
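/* Example (a sketch): for a function returning a large aggregate by
   invisible reference, the RESULT_DECL has pointer type and
   DECL_BY_REFERENCE set, so the code above replaces OP by its type;
   the final TREE_TYPE (op) then yields the pointed-to type, which is
   what must match the declared return type.  */
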
4368
4369 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4370 is a problem, otherwise false. */
4371
4372 static bool
4373 verify_gimple_goto (ggoto *stmt)
4374 {
4375 tree dest = gimple_goto_dest (stmt);
4376
4377 /* ??? We have two canonical forms of direct goto destinations, a
4378 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4379 if (TREE_CODE (dest) != LABEL_DECL
4380 && (!is_gimple_val (dest)
4381 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4382 {
4383 error ("goto destination is neither a label nor a pointer");
4384 return true;
4385 }
4386
4387 return false;
4388 }
4389
4390 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4391 is a problem, otherwise false. */
4392
4393 static bool
4394 verify_gimple_switch (gswitch *stmt)
4395 {
4396 unsigned int i, n;
4397 tree elt, prev_upper_bound = NULL_TREE;
4398 tree index_type, elt_type = NULL_TREE;
4399
4400 if (!is_gimple_val (gimple_switch_index (stmt)))
4401 {
4402 error ("invalid operand to switch statement");
4403 debug_generic_stmt (gimple_switch_index (stmt));
4404 return true;
4405 }
4406
4407 index_type = TREE_TYPE (gimple_switch_index (stmt));
4408 if (! INTEGRAL_TYPE_P (index_type))
4409 {
4410 error ("non-integral type switch statement");
4411 debug_generic_expr (index_type);
4412 return true;
4413 }
4414
4415 elt = gimple_switch_label (stmt, 0);
4416 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4417 {
4418 error ("invalid default case label in switch statement");
4419 debug_generic_expr (elt);
4420 return true;
4421 }
4422
4423 n = gimple_switch_num_labels (stmt);
4424 for (i = 1; i < n; i++)
4425 {
4426 elt = gimple_switch_label (stmt, i);
4427
4428 if (! CASE_LOW (elt))
4429 {
4430 error ("invalid case label in switch statement");
4431 debug_generic_expr (elt);
4432 return true;
4433 }
4434 if (CASE_HIGH (elt)
4435 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4436 {
4437 error ("invalid case range in switch statement");
4438 debug_generic_expr (elt);
4439 return true;
4440 }
4441
4442 if (elt_type)
4443 {
4444 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4445 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4446 {
4447 error ("type mismatch for case label in switch statement");
4448 debug_generic_expr (elt);
4449 return true;
4450 }
4451 }
4452 else
4453 {
4454 elt_type = TREE_TYPE (CASE_LOW (elt));
4455 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4456 {
4457 error ("type precision mismatch in switch statement");
4458 return true;
4459 }
4460 }
4461
4462 if (prev_upper_bound)
4463 {
4464 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4465 {
4466 error ("case labels not sorted in switch statement");
4467 return true;
4468 }
4469 }
4470
4471 prev_upper_bound = CASE_HIGH (elt);
4472 if (! prev_upper_bound)
4473 prev_upper_bound = CASE_LOW (elt);
4474 }
4475
4476 return false;
4477 }
4478
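/* A sketch of a well-formed case vector as checked above (GIMPLE
   dump syntax, hypothetical labels):

	switch (i_1) <default: <L0>, case 1: <L1>, case 5 ... 7: <L2>>

   The default label comes first with NULL CASE_LOW and CASE_HIGH,
   every other label has CASE_LOW set, ranges satisfy
   CASE_LOW < CASE_HIGH, all labels share one type whose precision
   does not exceed that of the index, and the labels are sorted in
   increasing order.  */
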
4479 /* Verify a gimple debug statement STMT.
4480 Returns true if anything is wrong. */
4481
4482 static bool
4483 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4484 {
4485 /* There isn't much that could be wrong in a gimple debug stmt. A
4486 gimple debug bind stmt, for example, maps a tree (usually a
4487 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4488 or member of an aggregate type) to another tree that can be an
4489 arbitrary expression. These stmts expand into debug insns, and
4490 are converted to debug notes by var-tracking.c. */
4491 return false;
4492 }
4493
4494 /* Verify a gimple label statement STMT.
4495 Returns true if anything is wrong. */
4496
4497 static bool
4498 verify_gimple_label (glabel *stmt)
4499 {
4500 tree decl = gimple_label_label (stmt);
4501 int uid;
4502 bool err = false;
4503
4504 if (TREE_CODE (decl) != LABEL_DECL)
4505 return true;
4506 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4507 && DECL_CONTEXT (decl) != current_function_decl)
4508 {
4509 error ("label's context is not the current function decl");
4510 err |= true;
4511 }
4512
4513 uid = LABEL_DECL_UID (decl);
4514 if (cfun->cfg
4515 && (uid == -1
4516 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4517 {
4518 error ("incorrect entry in label_to_block_map");
4519 err |= true;
4520 }
4521
4522 uid = EH_LANDING_PAD_NR (decl);
4523 if (uid)
4524 {
4525 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4526 if (decl != lp->post_landing_pad)
4527 {
4528 error ("incorrect setting of landing pad number");
4529 err |= true;
4530 }
4531 }
4532
4533 return err;
4534 }
4535
4536 /* Verify a gimple cond statement STMT.
4537 Returns true if anything is wrong. */
4538
4539 static bool
4540 verify_gimple_cond (gcond *stmt)
4541 {
4542 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4543 {
4544 error ("invalid comparison code in gimple cond");
4545 return true;
4546 }
4547 if (!(!gimple_cond_true_label (stmt)
4548 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4549 || !(!gimple_cond_false_label (stmt)
4550 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4551 {
4552 error ("invalid labels in gimple cond");
4553 return true;
4554 }
4555
4556 return verify_gimple_comparison (boolean_type_node,
4557 gimple_cond_lhs (stmt),
4558 gimple_cond_rhs (stmt));
4559 }
4560
4561 /* Verify the GIMPLE statement STMT. Returns true if there is an
4562 error, otherwise false. */
4563
4564 static bool
4565 verify_gimple_stmt (gimple *stmt)
4566 {
4567 switch (gimple_code (stmt))
4568 {
4569 case GIMPLE_ASSIGN:
4570 return verify_gimple_assign (as_a <gassign *> (stmt));
4571
4572 case GIMPLE_LABEL:
4573 return verify_gimple_label (as_a <glabel *> (stmt));
4574
4575 case GIMPLE_CALL:
4576 return verify_gimple_call (as_a <gcall *> (stmt));
4577
4578 case GIMPLE_COND:
4579 return verify_gimple_cond (as_a <gcond *> (stmt));
4580
4581 case GIMPLE_GOTO:
4582 return verify_gimple_goto (as_a <ggoto *> (stmt));
4583
4584 case GIMPLE_SWITCH:
4585 return verify_gimple_switch (as_a <gswitch *> (stmt));
4586
4587 case GIMPLE_RETURN:
4588 return verify_gimple_return (as_a <greturn *> (stmt));
4589
4590 case GIMPLE_ASM:
4591 return false;
4592
4593 case GIMPLE_TRANSACTION:
4594 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4595
4596 /* Tuples that do not have tree operands. */
4597 case GIMPLE_NOP:
4598 case GIMPLE_PREDICT:
4599 case GIMPLE_RESX:
4600 case GIMPLE_EH_DISPATCH:
4601 case GIMPLE_EH_MUST_NOT_THROW:
4602 return false;
4603
4604 CASE_GIMPLE_OMP:
4605 /* OpenMP directives are validated by the FE and never operated
4606 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4607 non-gimple expressions when the main index variable has had
4608 its address taken. This does not affect the loop itself
4609 because the header of a GIMPLE_OMP_FOR is merely used to determine
4610 how to set up the parallel iteration. */
4611 return false;
4612
4613 case GIMPLE_DEBUG:
4614 return verify_gimple_debug (stmt);
4615
4616 default:
4617 gcc_unreachable ();
4618 }
4619 }
4620
4621 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4622 and false otherwise. */
4623
4624 static bool
4625 verify_gimple_phi (gimple *phi)
4626 {
4627 bool err = false;
4628 unsigned i;
4629 tree phi_result = gimple_phi_result (phi);
4630 bool virtual_p;
4631
4632 if (!phi_result)
4633 {
4634 error ("invalid PHI result");
4635 return true;
4636 }
4637
4638 virtual_p = virtual_operand_p (phi_result);
4639 if (TREE_CODE (phi_result) != SSA_NAME
4640 || (virtual_p
4641 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4642 {
4643 error ("invalid PHI result");
4644 err = true;
4645 }
4646
4647 for (i = 0; i < gimple_phi_num_args (phi); i++)
4648 {
4649 tree t = gimple_phi_arg_def (phi, i);
4650
4651 if (!t)
4652 {
4653 error ("missing PHI def");
4654 err |= true;
4655 continue;
4656 }
4657 /* Addressable variables do have SSA_NAMEs but they
4658 are not considered gimple values. */
4659 else if ((TREE_CODE (t) == SSA_NAME
4660 && virtual_p != virtual_operand_p (t))
4661 || (virtual_p
4662 && (TREE_CODE (t) != SSA_NAME
4663 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4664 || (!virtual_p
4665 && !is_gimple_val (t)))
4666 {
4667 error ("invalid PHI argument");
4668 debug_generic_expr (t);
4669 err |= true;
4670 }
4671 #ifdef ENABLE_TYPES_CHECKING
4672 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4673 {
4674 error ("incompatible types in PHI argument %u", i);
4675 debug_generic_stmt (TREE_TYPE (phi_result));
4676 debug_generic_stmt (TREE_TYPE (t));
4677 err |= true;
4678 }
4679 #endif
4680 }
4681
4682 return err;
4683 }
4684
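/* For example (a sketch in GIMPLE dump syntax), a well-formed PHI

	x_3 = PHI <x_1(2), x_2(4)>

   has an SSA_NAME result and one GIMPLE value argument per incoming
   edge; for a virtual PHI both the result and all arguments must
   instead be versions of the single virtual operand returned by
   gimple_vop.  */
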
4685 /* Verify the GIMPLE statements inside the sequence STMTS. */
4686
4687 static bool
4688 verify_gimple_in_seq_2 (gimple_seq stmts)
4689 {
4690 gimple_stmt_iterator ittr;
4691 bool err = false;
4692
4693 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4694 {
4695 gimple *stmt = gsi_stmt (ittr);
4696
4697 switch (gimple_code (stmt))
4698 {
4699 case GIMPLE_BIND:
4700 err |= verify_gimple_in_seq_2 (
4701 gimple_bind_body (as_a <gbind *> (stmt)));
4702 break;
4703
4704 case GIMPLE_TRY:
4705 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4706 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4707 break;
4708
4709 case GIMPLE_EH_FILTER:
4710 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4711 break;
4712
4713 case GIMPLE_EH_ELSE:
4714 {
4715 geh_else *eh_else = as_a <geh_else *> (stmt);
4716 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4717 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4718 }
4719 break;
4720
4721 case GIMPLE_CATCH:
4722 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4723 as_a <gcatch *> (stmt)));
4724 break;
4725
4726 case GIMPLE_TRANSACTION:
4727 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4728 break;
4729
4730 default:
4731 {
4732 bool err2 = verify_gimple_stmt (stmt);
4733 if (err2)
4734 debug_gimple_stmt (stmt);
4735 err |= err2;
4736 }
4737 }
4738 }
4739
4740 return err;
4741 }
4742
4743 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4744 is a problem, otherwise false. */
4745
4746 static bool
4747 verify_gimple_transaction (gtransaction *stmt)
4748 {
4749 tree lab = gimple_transaction_label (stmt);
4750 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4751 return true;
4752 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4753 }
4754
4755
4756 /* Verify the GIMPLE statements inside the statement list STMTS. */
4757
4758 DEBUG_FUNCTION void
4759 verify_gimple_in_seq (gimple_seq stmts)
4760 {
4761 timevar_push (TV_TREE_STMT_VERIFY);
4762 if (verify_gimple_in_seq_2 (stmts))
4763 internal_error ("verify_gimple failed");
4764 timevar_pop (TV_TREE_STMT_VERIFY);
4765 }
4766
4767 /* Return true when T can be shared. */
4768
4769 static bool
4770 tree_node_can_be_shared (tree t)
4771 {
4772 if (IS_TYPE_OR_DECL_P (t)
4773 || is_gimple_min_invariant (t)
4774 || TREE_CODE (t) == SSA_NAME
4775 || t == error_mark_node
4776 || TREE_CODE (t) == IDENTIFIER_NODE)
4777 return true;
4778
4779 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4780 return true;
4781
4782 if (DECL_P (t))
4783 return true;
4784
4785 return false;
4786 }
4787
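/* For instance (a sketch): any number of statements may refer to the
   same SSA_NAME a_1 or to a shared INTEGER_CST, but each statement
   needs its own MEM_REF or COMPONENT_REF node; verify_node_sharing_1
   below reports the second sighting of such an unshareable node.  */
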
4788 /* Called via walk_tree. Verify tree sharing. */
4789
4790 static tree
4791 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4792 {
4793 hash_set<void *> *visited = (hash_set<void *> *) data;
4794
4795 if (tree_node_can_be_shared (*tp))
4796 {
4797 *walk_subtrees = false;
4798 return NULL;
4799 }
4800
4801 if (visited->add (*tp))
4802 return *tp;
4803
4804 return NULL;
4805 }
4806
4807 /* Called via walk_gimple_stmt. Verify tree sharing. */
4808
4809 static tree
4810 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4811 {
4812 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4813 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4814 }
4815
4816 static bool eh_error_found;
4817 bool
4818 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4819 hash_set<gimple *> *visited)
4820 {
4821 if (!visited->contains (stmt))
4822 {
4823 error ("dead STMT in EH table");
4824 debug_gimple_stmt (stmt);
4825 eh_error_found = true;
4826 }
4827 return true;
4828 }
4829
4830 /* Verify that the block of location LOC is in BLOCKS. */
4831
4832 static bool
4833 verify_location (hash_set<tree> *blocks, location_t loc)
4834 {
4835 tree block = LOCATION_BLOCK (loc);
4836 if (block != NULL_TREE
4837 && !blocks->contains (block))
4838 {
4839 error ("location references block not in block tree");
4840 return true;
4841 }
4842 if (block != NULL_TREE)
4843 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4844 return false;
4845 }
4846
4847 /* Called via walk_tree. Verify that expressions have no blocks. */
4848
4849 static tree
4850 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4851 {
4852 if (!EXPR_P (*tp))
4853 {
4854 *walk_subtrees = false;
4855 return NULL;
4856 }
4857
4858 location_t loc = EXPR_LOCATION (*tp);
4859 if (LOCATION_BLOCK (loc) != NULL)
4860 return *tp;
4861
4862 return NULL;
4863 }
4864
4865 /* Called via walk_tree. Verify locations of expressions. */
4866
4867 static tree
4868 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4869 {
4870 hash_set<tree> *blocks = (hash_set<tree> *) data;
4871
4872 if (TREE_CODE (*tp) == VAR_DECL
4873 && DECL_HAS_DEBUG_EXPR_P (*tp))
4874 {
4875 tree t = DECL_DEBUG_EXPR (*tp);
4876 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4877 if (addr)
4878 return addr;
4879 }
4880 if ((TREE_CODE (*tp) == VAR_DECL
4881 || TREE_CODE (*tp) == PARM_DECL
4882 || TREE_CODE (*tp) == RESULT_DECL)
4883 && DECL_HAS_VALUE_EXPR_P (*tp))
4884 {
4885 tree t = DECL_VALUE_EXPR (*tp);
4886 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4887 if (addr)
4888 return addr;
4889 }
4890
4891 if (!EXPR_P (*tp))
4892 {
4893 *walk_subtrees = false;
4894 return NULL;
4895 }
4896
4897 location_t loc = EXPR_LOCATION (*tp);
4898 if (verify_location (blocks, loc))
4899 return *tp;
4900
4901 return NULL;
4902 }
4903
4904 /* Called via walk_gimple_op. Verify locations of expressions. */
4905
4906 static tree
4907 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4908 {
4909 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4910 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4911 }
4912
4913 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4914
4915 static void
4916 collect_subblocks (hash_set<tree> *blocks, tree block)
4917 {
4918 tree t;
4919 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4920 {
4921 blocks->add (t);
4922 collect_subblocks (blocks, t);
4923 }
4924 }
4925
4926 /* Verify the GIMPLE statements in the CFG of FN. */
4927
4928 DEBUG_FUNCTION void
4929 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4930 {
4931 basic_block bb;
4932 bool err = false;
4933
4934 timevar_push (TV_TREE_STMT_VERIFY);
4935 hash_set<void *> visited;
4936 hash_set<gimple *> visited_stmts;
4937
4938 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4939 hash_set<tree> blocks;
4940 if (DECL_INITIAL (fn->decl))
4941 {
4942 blocks.add (DECL_INITIAL (fn->decl));
4943 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4944 }
4945
4946 FOR_EACH_BB_FN (bb, fn)
4947 {
4948 gimple_stmt_iterator gsi;
4949
4950 for (gphi_iterator gpi = gsi_start_phis (bb);
4951 !gsi_end_p (gpi);
4952 gsi_next (&gpi))
4953 {
4954 gphi *phi = gpi.phi ();
4955 bool err2 = false;
4956 unsigned i;
4957
4958 visited_stmts.add (phi);
4959
4960 if (gimple_bb (phi) != bb)
4961 {
4962 error ("gimple_bb (phi) is set to a wrong basic block");
4963 err2 = true;
4964 }
4965
4966 err2 |= verify_gimple_phi (phi);
4967
4968 /* Only PHI arguments have locations. */
4969 if (gimple_location (phi) != UNKNOWN_LOCATION)
4970 {
4971 error ("PHI node with location");
4972 err2 = true;
4973 }
4974
4975 for (i = 0; i < gimple_phi_num_args (phi); i++)
4976 {
4977 tree arg = gimple_phi_arg_def (phi, i);
4978 tree addr = walk_tree (&arg, verify_node_sharing_1,
4979 &visited, NULL);
4980 if (addr)
4981 {
4982 error ("incorrect sharing of tree nodes");
4983 debug_generic_expr (addr);
4984 err2 |= true;
4985 }
4986 location_t loc = gimple_phi_arg_location (phi, i);
4987 if (virtual_operand_p (gimple_phi_result (phi))
4988 && loc != UNKNOWN_LOCATION)
4989 {
4990 error ("virtual PHI with argument locations");
4991 err2 = true;
4992 }
4993 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4994 if (addr)
4995 {
4996 debug_generic_expr (addr);
4997 err2 = true;
4998 }
4999 err2 |= verify_location (&blocks, loc);
5000 }
5001
5002 if (err2)
5003 debug_gimple_stmt (phi);
5004 err |= err2;
5005 }
5006
5007 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5008 {
5009 gimple *stmt = gsi_stmt (gsi);
5010 bool err2 = false;
5011 struct walk_stmt_info wi;
5012 tree addr;
5013 int lp_nr;
5014
5015 visited_stmts.add (stmt);
5016
5017 if (gimple_bb (stmt) != bb)
5018 {
5019 error ("gimple_bb (stmt) is set to a wrong basic block");
5020 err2 = true;
5021 }
5022
5023 err2 |= verify_gimple_stmt (stmt);
5024 err2 |= verify_location (&blocks, gimple_location (stmt));
5025
5026 memset (&wi, 0, sizeof (wi));
5027 wi.info = (void *) &visited;
5028 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5029 if (addr)
5030 {
5031 error ("incorrect sharing of tree nodes");
5032 debug_generic_expr (addr);
5033 err2 |= true;
5034 }
5035
5036 memset (&wi, 0, sizeof (wi));
5037 wi.info = (void *) &blocks;
5038 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5039 if (addr)
5040 {
5041 debug_generic_expr (addr);
5042 err2 |= true;
5043 }
5044
5045 /* ??? Instead of not checking these stmts at all, the walker
5046 should know its context via wi. */
5047 if (!is_gimple_debug (stmt)
5048 && !is_gimple_omp (stmt))
5049 {
5050 memset (&wi, 0, sizeof (wi));
5051 addr = walk_gimple_op (stmt, verify_expr, &wi);
5052 if (addr)
5053 {
5054 debug_generic_expr (addr);
5055 inform (gimple_location (stmt), "in statement");
5056 err2 |= true;
5057 }
5058 }
5059
5060 /* If the statement is marked as part of an EH region, then it is
5061 expected that the statement could throw. Verify that when
5062 optimizations simplify a statement so that we can prove it
5063 cannot throw, we also update the other data structures to
5064 match. */
5065 lp_nr = lookup_stmt_eh_lp (stmt);
5066 if (lp_nr > 0)
5067 {
5068 if (!stmt_could_throw_p (stmt))
5069 {
5070 if (verify_nothrow)
5071 {
5072 error ("statement marked for throw, but doesn%'t");
5073 err2 |= true;
5074 }
5075 }
5076 else if (!gsi_one_before_end_p (gsi))
5077 {
5078 error ("statement marked for throw in middle of block");
5079 err2 |= true;
5080 }
5081 }
5082
5083 if (err2)
5084 debug_gimple_stmt (stmt);
5085 err |= err2;
5086 }
5087 }
5088
5089 eh_error_found = false;
5090 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5091 if (eh_table)
5092 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5093 (&visited_stmts);
5094
5095 if (err || eh_error_found)
5096 internal_error ("verify_gimple failed");
5097
5098 verify_histograms ();
5099 timevar_pop (TV_TREE_STMT_VERIFY);
5100 }
5101
5102
5103 /* Verifies that the flow information is OK. */
5104
5105 static int
5106 gimple_verify_flow_info (void)
5107 {
5108 int err = 0;
5109 basic_block bb;
5110 gimple_stmt_iterator gsi;
5111 gimple *stmt;
5112 edge e;
5113 edge_iterator ei;
5114
5115 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5116 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5117 {
5118 error ("ENTRY_BLOCK has IL associated with it");
5119 err = 1;
5120 }
5121
5122 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5123 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5124 {
5125 error ("EXIT_BLOCK has IL associated with it");
5126 err = 1;
5127 }
5128
5129 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5130 if (e->flags & EDGE_FALLTHRU)
5131 {
5132 error ("fallthru to exit from bb %d", e->src->index);
5133 err = 1;
5134 }
5135
5136 FOR_EACH_BB_FN (bb, cfun)
5137 {
5138 bool found_ctrl_stmt = false;
5139
5140 stmt = NULL;
5141
5142 /* Skip labels on the start of basic block. */
5143 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5144 {
5145 tree label;
5146 gimple *prev_stmt = stmt;
5147
5148 stmt = gsi_stmt (gsi);
5149
5150 if (gimple_code (stmt) != GIMPLE_LABEL)
5151 break;
5152
5153 label = gimple_label_label (as_a <glabel *> (stmt));
5154 if (prev_stmt && DECL_NONLOCAL (label))
5155 {
5156 error ("nonlocal label ");
5157 print_generic_expr (stderr, label, 0);
5158 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5159 bb->index);
5160 err = 1;
5161 }
5162
5163 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5164 {
5165 error ("EH landing pad label ");
5166 print_generic_expr (stderr, label, 0);
5167 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5168 bb->index);
5169 err = 1;
5170 }
5171
5172 if (label_to_block (label) != bb)
5173 {
5174 error ("label ");
5175 print_generic_expr (stderr, label, 0);
5176 fprintf (stderr, " to block does not match in bb %d",
5177 bb->index);
5178 err = 1;
5179 }
5180
5181 if (decl_function_context (label) != current_function_decl)
5182 {
5183 error ("label ");
5184 print_generic_expr (stderr, label, 0);
5185 fprintf (stderr, " has incorrect context in bb %d",
5186 bb->index);
5187 err = 1;
5188 }
5189 }
5190
5191 /* Verify that the body of basic block BB is free of control flow. */
5192 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5193 {
5194 gimple *stmt = gsi_stmt (gsi);
5195
5196 if (found_ctrl_stmt)
5197 {
5198 error ("control flow in the middle of basic block %d",
5199 bb->index);
5200 err = 1;
5201 }
5202
5203 if (stmt_ends_bb_p (stmt))
5204 found_ctrl_stmt = true;
5205
5206 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5207 {
5208 error ("label ");
5209 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5210 fprintf (stderr, " in the middle of basic block %d", bb->index);
5211 err = 1;
5212 }
5213 }
5214
5215 gsi = gsi_last_bb (bb);
5216 if (gsi_end_p (gsi))
5217 continue;
5218
5219 stmt = gsi_stmt (gsi);
5220
5221 if (gimple_code (stmt) == GIMPLE_LABEL)
5222 continue;
5223
5224 err |= verify_eh_edges (stmt);
5225
5226 if (is_ctrl_stmt (stmt))
5227 {
5228 FOR_EACH_EDGE (e, ei, bb->succs)
5229 if (e->flags & EDGE_FALLTHRU)
5230 {
5231 error ("fallthru edge after a control statement in bb %d",
5232 bb->index);
5233 err = 1;
5234 }
5235 }
5236
5237 if (gimple_code (stmt) != GIMPLE_COND)
5238 {
5239 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5240 after anything other than a GIMPLE_COND statement. */
5241 FOR_EACH_EDGE (e, ei, bb->succs)
5242 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5243 {
5244 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5245 bb->index);
5246 err = 1;
5247 }
5248 }
5249
5250 switch (gimple_code (stmt))
5251 {
5252 case GIMPLE_COND:
5253 {
5254 edge true_edge;
5255 edge false_edge;
5256
5257 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5258
5259 if (!true_edge
5260 || !false_edge
5261 || !(true_edge->flags & EDGE_TRUE_VALUE)
5262 || !(false_edge->flags & EDGE_FALSE_VALUE)
5263 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5264 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5265 || EDGE_COUNT (bb->succs) >= 3)
5266 {
5267 error ("wrong outgoing edge flags at end of bb %d",
5268 bb->index);
5269 err = 1;
5270 }
5271 }
5272 break;
5273
5274 case GIMPLE_GOTO:
5275 if (simple_goto_p (stmt))
5276 {
5277 error ("explicit goto at end of bb %d", bb->index);
5278 err = 1;
5279 }
5280 else
5281 {
5282 /* FIXME. We should double check that the labels in the
5283 destination blocks have their address taken. */
5284 FOR_EACH_EDGE (e, ei, bb->succs)
5285 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5286 | EDGE_FALSE_VALUE))
5287 || !(e->flags & EDGE_ABNORMAL))
5288 {
5289 error ("wrong outgoing edge flags at end of bb %d",
5290 bb->index);
5291 err = 1;
5292 }
5293 }
5294 break;
5295
5296 case GIMPLE_CALL:
5297 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5298 break;
5299 /* ... fallthru ... */
5300 case GIMPLE_RETURN:
5301 if (!single_succ_p (bb)
5302 || (single_succ_edge (bb)->flags
5303 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5304 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5305 {
5306 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5307 err = 1;
5308 }
5309 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5310 {
5311 error ("return edge does not point to exit in bb %d",
5312 bb->index);
5313 err = 1;
5314 }
5315 break;
5316
5317 case GIMPLE_SWITCH:
5318 {
5319 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5320 tree prev;
5321 edge e;
5322 size_t i, n;
5323
5324 n = gimple_switch_num_labels (switch_stmt);
5325
5326 /* Mark all the destination basic blocks. */
5327 for (i = 0; i < n; ++i)
5328 {
5329 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5330 basic_block label_bb = label_to_block (lab);
5331 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5332 label_bb->aux = (void *)1;
5333 }
5334
5335 /* Verify that the case labels are sorted. */
5336 prev = gimple_switch_label (switch_stmt, 0);
5337 for (i = 1; i < n; ++i)
5338 {
5339 tree c = gimple_switch_label (switch_stmt, i);
5340 if (!CASE_LOW (c))
5341 {
5342 error ("found default case not at the start of "
5343 "case vector");
5344 err = 1;
5345 continue;
5346 }
5347 if (CASE_LOW (prev)
5348 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5349 {
5350 error ("case labels not sorted: ");
5351 print_generic_expr (stderr, prev, 0);
5352 fprintf (stderr," is greater than ");
5353 print_generic_expr (stderr, c, 0);
5354 fprintf (stderr," but comes before it.\n");
5355 err = 1;
5356 }
5357 prev = c;
5358 }
5359 /* VRP will remove the default case if it can prove it will
5360 never be executed. So do not verify here that a default
5361 case always exists. */
5362
5363 FOR_EACH_EDGE (e, ei, bb->succs)
5364 {
5365 if (!e->dest->aux)
5366 {
5367 error ("extra outgoing edge %d->%d",
5368 bb->index, e->dest->index);
5369 err = 1;
5370 }
5371
5372 e->dest->aux = (void *)2;
5373 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5374 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5375 {
5376 error ("wrong outgoing edge flags at end of bb %d",
5377 bb->index);
5378 err = 1;
5379 }
5380 }
5381
5382 /* Check that an edge was found for every case label. */
5383 for (i = 0; i < n; ++i)
5384 {
5385 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5386 basic_block label_bb = label_to_block (lab);
5387
5388 if (label_bb->aux != (void *)2)
5389 {
5390 error ("missing edge %i->%i", bb->index, label_bb->index);
5391 err = 1;
5392 }
5393 }
5394
5395 FOR_EACH_EDGE (e, ei, bb->succs)
5396 e->dest->aux = (void *)0;
5397 }
5398 break;
5399
5400 case GIMPLE_EH_DISPATCH:
5401 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5402 break;
5403
5404 default:
5405 break;
5406 }
5407 }
5408
5409 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5410 verify_dominators (CDI_DOMINATORS);
5411
5412 return err;
5413 }
5414
5415
5416 /* Updates phi nodes after creating a forwarder block joined
5417 by edge FALLTHRU. */
5418
5419 static void
5420 gimple_make_forwarder_block (edge fallthru)
5421 {
5422 edge e;
5423 edge_iterator ei;
5424 basic_block dummy, bb;
5425 tree var;
5426 gphi_iterator gsi;
5427
5428 dummy = fallthru->src;
5429 bb = fallthru->dest;
5430
5431 if (single_pred_p (bb))
5432 return;
5433
5434 /* If we redirected a branch we must create new PHI nodes at the
5435 start of BB. */
5436 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5437 {
5438 gphi *phi, *new_phi;
5439
5440 phi = gsi.phi ();
5441 var = gimple_phi_result (phi);
5442 new_phi = create_phi_node (var, bb);
5443 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5444 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5445 UNKNOWN_LOCATION);
5446 }
5447
5448 /* Add the arguments we have stored on edges. */
5449 FOR_EACH_EDGE (e, ei, bb->preds)
5450 {
5451 if (e == fallthru)
5452 continue;
5453
5454 flush_pending_stmts (e);
5455 }
5456 }
5457
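/* Sketch of the update (hypothetical names): if DUMMY falls through
   to BB and previously held  x_1 = PHI <a_2(e1), a_3(e2)>,  then
   afterwards DUMMY defines a fresh name,  x_4 = PHI <a_2(e1),
   a_3(e2)>,  and BB gains  x_1 = PHI <x_4(fallthru), ...>,  with the
   arguments for the remaining predecessor edges added by
   flush_pending_stmts.  */
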
5458
5459 /* Return a non-special label at the head of basic block BB.
5460 Create one if it doesn't exist. */
5461
5462 tree
5463 gimple_block_label (basic_block bb)
5464 {
5465 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5466 bool first = true;
5467 tree label;
5468 glabel *stmt;
5469
5470 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5471 {
5472 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5473 if (!stmt)
5474 break;
5475 label = gimple_label_label (stmt);
5476 if (!DECL_NONLOCAL (label))
5477 {
5478 if (!first)
5479 gsi_move_before (&i, &s);
5480 return label;
5481 }
5482 }
5483
5484 label = create_artificial_label (UNKNOWN_LOCATION);
5485 stmt = gimple_build_label (label);
5486 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5487 return label;
5488 }
5489
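/* A minimal usage sketch (mirroring gimple_redirect_edge_and_branch
   below): to retarget one case of a switch to basic block DEST, ask
   for DEST's label and store it in the case node:

	tree label = gimple_block_label (dest);
	CASE_LABEL (gimple_switch_label (switch_stmt, i)) = label;

   relying on this function to create an artificial label when DEST
   does not yet start with a usable one.  */
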
5490
5491 /* Attempt to perform edge redirection by replacing a possibly complex
5492 jump instruction by a goto or by removing the jump completely.
5493 This can apply only if all edges now point to the same block. The
5494 parameters and return values are equivalent to
5495 redirect_edge_and_branch. */
5496
5497 static edge
5498 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5499 {
5500 basic_block src = e->src;
5501 gimple_stmt_iterator i;
5502 gimple *stmt;
5503
5504 /* We can replace or remove a complex jump only when we have exactly
5505 two edges. */
5506 if (EDGE_COUNT (src->succs) != 2
5507 /* Verify that all targets will be TARGET. Specifically, the
5508 edge that is not E must also go to TARGET. */
5509 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5510 return NULL;
5511
5512 i = gsi_last_bb (src);
5513 if (gsi_end_p (i))
5514 return NULL;
5515
5516 stmt = gsi_stmt (i);
5517
5518 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5519 {
5520 gsi_remove (&i, true);
5521 e = ssa_redirect_edge (e, target);
5522 e->flags = EDGE_FALLTHRU;
5523 return e;
5524 }
5525
5526 return NULL;
5527 }
5528
5529
5530 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5531 edge representing the redirected branch. */
5532
5533 static edge
5534 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5535 {
5536 basic_block bb = e->src;
5537 gimple_stmt_iterator gsi;
5538 edge ret;
5539 gimple *stmt;
5540
5541 if (e->flags & EDGE_ABNORMAL)
5542 return NULL;
5543
5544 if (e->dest == dest)
5545 return NULL;
5546
5547 if (e->flags & EDGE_EH)
5548 return redirect_eh_edge (e, dest);
5549
5550 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5551 {
5552 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5553 if (ret)
5554 return ret;
5555 }
5556
5557 gsi = gsi_last_bb (bb);
5558 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5559
5560 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5561 {
5562 case GIMPLE_COND:
5563 /* For COND_EXPR, we only need to redirect the edge. */
5564 break;
5565
5566 case GIMPLE_GOTO:
5567 /* No non-abnormal edges should lead from a non-simple goto, and
5568 simple ones should be represented implicitly. */
5569 gcc_unreachable ();
5570
5571 case GIMPLE_SWITCH:
5572 {
5573 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5574 tree label = gimple_block_label (dest);
5575 tree cases = get_cases_for_edge (e, switch_stmt);
5576
5577 /* If we have a list of cases associated with E, then use it
5578 as it's a lot faster than walking the entire case vector. */
5579 if (cases)
5580 {
5581 edge e2 = find_edge (e->src, dest);
5582 tree last, first;
5583
5584 first = cases;
5585 while (cases)
5586 {
5587 last = cases;
5588 CASE_LABEL (cases) = label;
5589 cases = CASE_CHAIN (cases);
5590 }
5591
5592 /* If there was already an edge in the CFG, then we need
5593 to move all the cases associated with E to E2. */
5594 if (e2)
5595 {
5596 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5597
5598 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5599 CASE_CHAIN (cases2) = first;
5600 }
5601 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5602 }
5603 else
5604 {
5605 size_t i, n = gimple_switch_num_labels (switch_stmt);
5606
5607 for (i = 0; i < n; i++)
5608 {
5609 tree elt = gimple_switch_label (switch_stmt, i);
5610 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5611 CASE_LABEL (elt) = label;
5612 }
5613 }
5614 }
5615 break;
5616
5617 case GIMPLE_ASM:
5618 {
5619 gasm *asm_stmt = as_a <gasm *> (stmt);
5620 int i, n = gimple_asm_nlabels (asm_stmt);
5621 tree label = NULL;
5622
5623 for (i = 0; i < n; ++i)
5624 {
5625 tree cons = gimple_asm_label_op (asm_stmt, i);
5626 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5627 {
5628 if (!label)
5629 label = gimple_block_label (dest);
5630 TREE_VALUE (cons) = label;
5631 }
5632 }
5633
5634 /* If we didn't find any label matching the former edge in the
5635 asm labels, we must be redirecting the fallthrough
5636 edge. */
5637 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5638 }
5639 break;
5640
5641 case GIMPLE_RETURN:
5642 gsi_remove (&gsi, true);
5643 e->flags |= EDGE_FALLTHRU;
5644 break;
5645
5646 case GIMPLE_OMP_RETURN:
5647 case GIMPLE_OMP_CONTINUE:
5648 case GIMPLE_OMP_SECTIONS_SWITCH:
5649 case GIMPLE_OMP_FOR:
5650 /* The edges from OMP constructs can be simply redirected. */
5651 break;
5652
5653 case GIMPLE_EH_DISPATCH:
5654 if (!(e->flags & EDGE_FALLTHRU))
5655 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5656 break;
5657
5658 case GIMPLE_TRANSACTION:
5659 /* The ABORT edge has a stored label associated with it; otherwise
5660 the edges are simply redirectable. */
5661 if (e->flags == 0)
5662 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5663 gimple_block_label (dest));
5664 break;
5665
5666 default:
5667 /* Otherwise it must be a fallthru edge, and we don't need to
5668 do anything besides redirecting it. */
5669 gcc_assert (e->flags & EDGE_FALLTHRU);
5670 break;
5671 }
5672
5673 /* Update/insert PHI nodes as necessary. */
5674
5675 /* Now update the edges in the CFG. */
5676 e = ssa_redirect_edge (e, dest);
5677
5678 return e;
5679 }
5680
5681 /* Returns true if it is possible to remove edge E by redirecting
5682 it to the destination of the other edge from E->src. */
5683
5684 static bool
5685 gimple_can_remove_branch_p (const_edge e)
5686 {
5687 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5688 return false;
5689
5690 return true;
5691 }
5692
5693 /* Simple wrapper, as we can always redirect fallthru edges. */
5694
5695 static basic_block
5696 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5697 {
5698 e = gimple_redirect_edge_and_branch (e, dest);
5699 gcc_assert (e);
5700
5701 return NULL;
5702 }
5703
5704
5705 /* Splits basic block BB after statement STMT (but at least after the
5706 labels). If STMT is NULL, BB is split just after the labels. */
5707
5708 static basic_block
5709 gimple_split_block (basic_block bb, void *stmt)
5710 {
5711 gimple_stmt_iterator gsi;
5712 gimple_stmt_iterator gsi_tgt;
5713 gimple_seq list;
5714 basic_block new_bb;
5715 edge e;
5716 edge_iterator ei;
5717
5718 new_bb = create_empty_bb (bb);
5719
5720 /* Redirect the outgoing edges. */
5721 new_bb->succs = bb->succs;
5722 bb->succs = NULL;
5723 FOR_EACH_EDGE (e, ei, new_bb->succs)
5724 e->src = new_bb;
5725
5726 /* Get a stmt iterator pointing to the first stmt to move. */
5727 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5728 gsi = gsi_after_labels (bb);
5729 else
5730 {
5731 gsi = gsi_for_stmt ((gimple *) stmt);
5732 gsi_next (&gsi);
5733 }
5734
5735 /* Move everything from GSI to the new basic block. */
5736 if (gsi_end_p (gsi))
5737 return new_bb;
5738
5739 /* Split the statement list - avoid creating new containers, as this
5740 brings ugly quadratic memory consumption in the inliner.
5741 (We are still quadratic since we need to update stmt BB pointers,
5742 sadly.) */
5743 gsi_split_seq_before (&gsi, &list);
5744 set_bb_seq (new_bb, list);
5745 for (gsi_tgt = gsi_start (list);
5746 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5747 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5748
5749 return new_bb;
5750 }
5751
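/* Callers normally reach this hook through the cfghooks wrapper
   split_block, which returns the edge connecting the two halves;
   e.g. gimple_split_block_before_cond_jump below uses

	split_block (bb, split_point)->dest

   to obtain the new block holding everything after SPLIT_POINT.  */
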
5752
5753 /* Moves basic block BB after block AFTER. */
5754
5755 static bool
5756 gimple_move_block_after (basic_block bb, basic_block after)
5757 {
5758 if (bb->prev_bb == after)
5759 return true;
5760
5761 unlink_block (bb);
5762 link_block (bb, after);
5763
5764 return true;
5765 }
5766
5767
5768 /* Return TRUE if block BB has no executable statements, otherwise return
5769 FALSE. */
5770
5771 static bool
5772 gimple_empty_block_p (basic_block bb)
5773 {
5774 /* BB must have no executable statements. */
5775 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5776 if (phi_nodes (bb))
5777 return false;
5778 if (gsi_end_p (gsi))
5779 return true;
5780 if (is_gimple_debug (gsi_stmt (gsi)))
5781 gsi_next_nondebug (&gsi);
5782 return gsi_end_p (gsi);
5783 }
5784
5785
5786 /* Split a basic block if it ends with a conditional branch and if the
5787 other part of the block is not empty. */
5788
5789 static basic_block
5790 gimple_split_block_before_cond_jump (basic_block bb)
5791 {
5792 gimple *last, *split_point;
5793 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5794 if (gsi_end_p (gsi))
5795 return NULL;
5796 last = gsi_stmt (gsi);
5797 if (gimple_code (last) != GIMPLE_COND
5798 && gimple_code (last) != GIMPLE_SWITCH)
5799 return NULL;
5800 gsi_prev_nondebug (&gsi);
5801 split_point = gsi_stmt (gsi);
5802 return split_block (bb, split_point)->dest;
5803 }
5804
5805
5806 /* Return true if basic_block can be duplicated. */
5807
5808 static bool
5809 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5810 {
5811 return true;
5812 }
5813
5814 /* Create a duplicate of the basic block BB. NOTE: This does not
5815 preserve SSA form. */
5816
5817 static basic_block
5818 gimple_duplicate_bb (basic_block bb)
5819 {
5820 basic_block new_bb;
5821 gimple_stmt_iterator gsi_tgt;
5822
5823 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5824
5825 /* Copy the PHI nodes. We ignore PHI node arguments here because
5826 the incoming edges have not been set up yet. */
5827 for (gphi_iterator gpi = gsi_start_phis (bb);
5828 !gsi_end_p (gpi);
5829 gsi_next (&gpi))
5830 {
5831 gphi *phi, *copy;
5832 phi = gpi.phi ();
5833 copy = create_phi_node (NULL_TREE, new_bb);
5834 create_new_def_for (gimple_phi_result (phi), copy,
5835 gimple_phi_result_ptr (copy));
5836 gimple_set_uid (copy, gimple_uid (phi));
5837 }
5838
5839 gsi_tgt = gsi_start_bb (new_bb);
5840 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5841 !gsi_end_p (gsi);
5842 gsi_next (&gsi))
5843 {
5844 def_operand_p def_p;
5845 ssa_op_iter op_iter;
5846 tree lhs;
5847 gimple *stmt, *copy;
5848
5849 stmt = gsi_stmt (gsi);
5850 if (gimple_code (stmt) == GIMPLE_LABEL)
5851 continue;
5852
5853 /* Don't duplicate label debug stmts. */
5854 if (gimple_debug_bind_p (stmt)
5855 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5856 == LABEL_DECL)
5857 continue;
5858
5859 /* Create a new copy of STMT and duplicate STMT's virtual
5860 operands. */
5861 copy = gimple_copy (stmt);
5862 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5863
5864 maybe_duplicate_eh_stmt (copy, stmt);
5865 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5866
5867 /* When copying around a stmt writing into a local non-user
5868 aggregate, make sure it won't share a stack slot with other
5869 vars. */
5870 lhs = gimple_get_lhs (stmt);
5871 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5872 {
5873 tree base = get_base_address (lhs);
5874 if (base
5875 && (TREE_CODE (base) == VAR_DECL
5876 || TREE_CODE (base) == RESULT_DECL)
5877 && DECL_IGNORED_P (base)
5878 && !TREE_STATIC (base)
5879 && !DECL_EXTERNAL (base)
5880 && (TREE_CODE (base) != VAR_DECL
5881 || !DECL_HAS_VALUE_EXPR_P (base)))
5882 DECL_NONSHAREABLE (base) = 1;
5883 }
5884
5885 /* Create new names for all the definitions created by COPY and
5886 add replacement mappings for each new name. */
5887 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5888 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5889 }
5890
5891 return new_bb;
5892 }
5893
5894 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5895
5896 static void
5897 add_phi_args_after_copy_edge (edge e_copy)
5898 {
5899 basic_block bb, bb_copy = e_copy->src, dest;
5900 edge e;
5901 edge_iterator ei;
5902 gphi *phi, *phi_copy;
5903 tree def;
5904 gphi_iterator psi, psi_copy;
5905
5906 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5907 return;
5908
5909 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5910
5911 if (e_copy->dest->flags & BB_DUPLICATED)
5912 dest = get_bb_original (e_copy->dest);
5913 else
5914 dest = e_copy->dest;
5915
5916 e = find_edge (bb, dest);
5917 if (!e)
5918 {
5919 /* During loop unrolling the target of the latch edge is copied.
5920 In this case we are not looking for the edge to DEST, but for
5921 the edge to the duplicated block whose original was DEST. */
5922 FOR_EACH_EDGE (e, ei, bb->succs)
5923 {
5924 if ((e->dest->flags & BB_DUPLICATED)
5925 && get_bb_original (e->dest) == dest)
5926 break;
5927 }
5928
5929 gcc_assert (e != NULL);
5930 }
5931
5932 for (psi = gsi_start_phis (e->dest),
5933 psi_copy = gsi_start_phis (e_copy->dest);
5934 !gsi_end_p (psi);
5935 gsi_next (&psi), gsi_next (&psi_copy))
5936 {
5937 phi = psi.phi ();
5938 phi_copy = psi_copy.phi ();
5939 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5940 add_phi_arg (phi_copy, def, e_copy,
5941 gimple_phi_arg_location_from_edge (phi, e));
5942 }
5943 }
5944
5945
5946 /* Basic block BB_COPY was created by code duplication. Add phi node
5947 arguments for edges going out of BB_COPY. The blocks that were
5948 duplicated have BB_DUPLICATED set. */
5949
5950 void
5951 add_phi_args_after_copy_bb (basic_block bb_copy)
5952 {
5953 edge e_copy;
5954 edge_iterator ei;
5955
5956 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5957 {
5958 add_phi_args_after_copy_edge (e_copy);
5959 }
5960 }
5961
5962 /* Blocks in the REGION_COPY array of length N_REGION were created
5963 by duplication of basic blocks. Add phi node arguments for edges
5964 going from these blocks. If E_COPY is not NULL, also add
5965 phi node arguments for its destination. */
5966
5967 void
5968 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5969 edge e_copy)
5970 {
5971 unsigned i;
5972
5973 for (i = 0; i < n_region; i++)
5974 region_copy[i]->flags |= BB_DUPLICATED;
5975
5976 for (i = 0; i < n_region; i++)
5977 add_phi_args_after_copy_bb (region_copy[i]);
5978 if (e_copy)
5979 add_phi_args_after_copy_edge (e_copy);
5980
5981 for (i = 0; i < n_region; i++)
5982 region_copy[i]->flags &= ~BB_DUPLICATED;
5983 }
5984
5985 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5986 important exit edge EXIT. By important we mean that no SSA name defined
5987 inside the region is live over the other exit edges of the region. All entry
5988 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5989 to the duplicate of the region. Dominance and loop information is
5990 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5991 UPDATE_DOMINANCE is false then we assume that the caller will update the
5992 dominance information after calling this function. The new basic
5993 blocks are stored to REGION_COPY in the same order as they had in REGION,
5994 provided that REGION_COPY is not NULL.
5995 The function returns false if it is unable to copy the region,
5996 true otherwise. */
5997
5998 bool
5999 gimple_duplicate_sese_region (edge entry, edge exit,
6000 basic_block *region, unsigned n_region,
6001 basic_block *region_copy,
6002 bool update_dominance)
6003 {
6004 unsigned i;
6005 bool free_region_copy = false, copying_header = false;
6006 struct loop *loop = entry->dest->loop_father;
6007 edge exit_copy;
6008 vec<basic_block> doms;
6009 edge redirected;
6010 int total_freq = 0, entry_freq = 0;
6011 gcov_type total_count = 0, entry_count = 0;
6012
6013 if (!can_copy_bbs_p (region, n_region))
6014 return false;
6015
6016 /* Some sanity checking. Note that we do not check for all possible
6017 misuses of the function; i.e. if you ask to copy something weird,
6018 it will work, but the state of the structures probably will not be
6019 correct. */
6020 for (i = 0; i < n_region; i++)
6021 {
6022 /* We do not handle subloops, i.e. all the blocks must belong to the
6023 same loop. */
6024 if (region[i]->loop_father != loop)
6025 return false;
6026
6027 if (region[i] != entry->dest
6028 && region[i] == loop->header)
6029 return false;
6030 }
6031
6032 /* In case the function is used for loop header copying (which is the primary
6033 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6034 if (loop->header == entry->dest)
6035 {
6036 copying_header = true;
6037
6038 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6039 return false;
6040
6041 for (i = 0; i < n_region; i++)
6042 if (region[i] != exit->src
6043 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6044 return false;
6045 }
6046
6047 initialize_original_copy_tables ();
6048
6049 if (copying_header)
6050 set_loop_copy (loop, loop_outer (loop));
6051 else
6052 set_loop_copy (loop, loop);
6053
6054 if (!region_copy)
6055 {
6056 region_copy = XNEWVEC (basic_block, n_region);
6057 free_region_copy = true;
6058 }
6059
6060 /* Record blocks outside the region that are dominated by something
6061 inside. */
6062 if (update_dominance)
6063 {
6064 doms.create (0);
6065 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6066 }
6067
6068 if (entry->dest->count)
6069 {
6070 total_count = entry->dest->count;
6071 entry_count = entry->count;
6072 /* Fix up corner cases, to avoid division by zero or creation of negative
6073 frequencies. */
6074 if (entry_count > total_count)
6075 entry_count = total_count;
6076 }
6077 else
6078 {
6079 total_freq = entry->dest->frequency;
6080 entry_freq = EDGE_FREQUENCY (entry);
6081 /* Fix up corner cases, to avoid division by zero or creation of negative
6082 frequencies. */
6083 if (total_freq == 0)
6084 total_freq = 1;
6085 else if (entry_freq > total_freq)
6086 entry_freq = total_freq;
6087 }
6088
6089 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6090 split_edge_bb_loc (entry), update_dominance);
6091 if (total_count)
6092 {
6093 scale_bbs_frequencies_gcov_type (region, n_region,
6094 total_count - entry_count,
6095 total_count);
6096 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6097 total_count);
6098 }
6099 else
6100 {
6101 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6102 total_freq);
6103 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6104 }
6105
6106 if (copying_header)
6107 {
6108 loop->header = exit->dest;
6109 loop->latch = exit->src;
6110 }
6111
6112 /* Redirect the entry and add the phi node arguments. */
6113 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6114 gcc_assert (redirected != NULL);
6115 flush_pending_stmts (entry);
6116
6117 /* Concerning updating of dominators: We must recount dominators
6118 for the entry block and its copy. Anything outside the region
6119 that was dominated by something inside needs recounting as
6120 well. */
6121 if (update_dominance)
6122 {
6123 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6124 doms.safe_push (get_bb_original (entry->dest));
6125 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6126 doms.release ();
6127 }
6128
6129 /* Add the other PHI node arguments. */
6130 add_phi_args_after_copy (region_copy, n_region, NULL);
6131
6132 if (free_region_copy)
6133 free (region_copy);
6134
6135 free_original_copy_tables ();
6136 return true;
6137 }
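
/* Usage sketch for gimple_duplicate_sese_region (hedged; modelled on
   the loop-header-copying use mentioned in the comments above, with
   illustrative identifiers):

       basic_block region[1] = { loop->header };
       edge entry = loop_preheader_edge (loop);
       edge exit = header_exit_edge;
       if (gimple_duplicate_sese_region (entry, exit, region, 1,
                                         NULL, true))
         update_ssa (TODO_update_ssa);

   where header_exit_edge stands for whichever successor edge of the
   header leaves the loop.  The explicit update_ssa call is needed
   because, as documented above, the function can update dominance
   information but never the SSA web.  */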
6138
6139 /* Return true if BB is part of the region defined by N_REGION BBS. */
6140 static bool
6141 bb_part_of_region_p (basic_block bb, basic_block *bbs, unsigned n_region)
6142 {
6143 unsigned int n;
6144
6145 for (n = 0; n < n_region; n++)
6146 {
6147 if (bb == bbs[n])
6148 return true;
6149 }
6150 return false;
6151 }
6152
6153 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6154 are stored to REGION_COPY in the same order as they appear
6155 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6156 the region, EXIT an exit from it. The condition guarding EXIT
6157 is moved to ENTRY. Returns true if duplication succeeds, false
6158 otherwise.
6159
6160 For example,
6161
6162 some_code;
6163 if (cond)
6164 A;
6165 else
6166 B;
6167
6168 is transformed to
6169
6170 if (cond)
6171 {
6172 some_code;
6173 A;
6174 }
6175 else
6176 {
6177 some_code;
6178 B;
6179 }
6180 */
6181
6182 bool
6183 gimple_duplicate_sese_tail (edge entry, edge exit,
6184 basic_block *region, unsigned n_region,
6185 basic_block *region_copy)
6186 {
6187 unsigned i;
6188 bool free_region_copy = false;
6189 struct loop *loop = exit->dest->loop_father;
6190 struct loop *orig_loop = entry->dest->loop_father;
6191 basic_block switch_bb, entry_bb, nentry_bb;
6192 vec<basic_block> doms;
6193 int total_freq = 0, exit_freq = 0;
6194 gcov_type total_count = 0, exit_count = 0;
6195 edge exits[2], nexits[2], e;
6196 gimple_stmt_iterator gsi;
6197 gimple *cond_stmt;
6198 edge sorig, snew;
6199 basic_block exit_bb;
6200 gphi_iterator psi;
6201 gphi *phi;
6202 tree def;
6203 struct loop *target, *aloop, *cloop;
6204
6205 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6206 exits[0] = exit;
6207 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6208
6209 if (!can_copy_bbs_p (region, n_region))
6210 return false;
6211
6212 initialize_original_copy_tables ();
6213 set_loop_copy (orig_loop, loop);
6214
6215 target = loop;
6216 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6217 {
6218 if (bb_part_of_region_p (aloop->header, region, n_region))
6219 {
6220 cloop = duplicate_loop (aloop, target);
6221 duplicate_subloops (aloop, cloop);
6222 }
6223 }
6224
6225 if (!region_copy)
6226 {
6227 region_copy = XNEWVEC (basic_block, n_region);
6228 free_region_copy = true;
6229 }
6230
6231 gcc_assert (!need_ssa_update_p (cfun));
6232
6233 /* Record blocks outside the region that are dominated by something
6234 inside. */
6235 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6236
6237 if (exit->src->count)
6238 {
6239 total_count = exit->src->count;
6240 exit_count = exit->count;
6241 /* Fix up corner cases, to avoid division by zero or creation of negative
6242 frequencies. */
6243 if (exit_count > total_count)
6244 exit_count = total_count;
6245 }
6246 else
6247 {
6248 total_freq = exit->src->frequency;
6249 exit_freq = EDGE_FREQUENCY (exit);
6250 /* Fix up corner cases, to avoid division by zero or creation of negative
6251 frequencies. */
6252 if (total_freq == 0)
6253 total_freq = 1;
6254 if (exit_freq > total_freq)
6255 exit_freq = total_freq;
6256 }
6257
6258 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6259 split_edge_bb_loc (exit), true);
6260 if (total_count)
6261 {
6262 scale_bbs_frequencies_gcov_type (region, n_region,
6263 total_count - exit_count,
6264 total_count);
6265 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6266 total_count);
6267 }
6268 else
6269 {
6270 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6271 total_freq);
6272 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6273 }
6274
6275 /* Create the switch block, and put the exit condition to it. */
6276 entry_bb = entry->dest;
6277 nentry_bb = get_bb_copy (entry_bb);
6278 if (!last_stmt (entry->src)
6279 || !stmt_ends_bb_p (last_stmt (entry->src)))
6280 switch_bb = entry->src;
6281 else
6282 switch_bb = split_edge (entry);
6283 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6284
6285 gsi = gsi_last_bb (switch_bb);
6286 cond_stmt = last_stmt (exit->src);
6287 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6288 cond_stmt = gimple_copy (cond_stmt);
6289
6290 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6291
6292 sorig = single_succ_edge (switch_bb);
6293 sorig->flags = exits[1]->flags;
6294 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6295
6296 /* Register the new edge from SWITCH_BB in loop exit lists. */
6297 rescan_loop_exit (snew, true, false);
6298
6299 /* Add the PHI node arguments. */
6300 add_phi_args_after_copy (region_copy, n_region, snew);
6301
6302 /* Get rid of now superfluous conditions and associated edges (and phi node
6303 arguments). */
6304 exit_bb = exit->dest;
6305
6306 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6307 PENDING_STMT (e) = NULL;
6308
6309 /* The latch of ORIG_LOOP was copied, and so was the backedge
6310 to the original header. We redirect this backedge to EXIT_BB. */
6311 for (i = 0; i < n_region; i++)
6312 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6313 {
6314 gcc_assert (single_succ_edge (region_copy[i]));
6315 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6316 PENDING_STMT (e) = NULL;
6317 for (psi = gsi_start_phis (exit_bb);
6318 !gsi_end_p (psi);
6319 gsi_next (&psi))
6320 {
6321 phi = psi.phi ();
6322 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6323 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6324 }
6325 }
6326 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6327 PENDING_STMT (e) = NULL;
6328
6329 /* Anything outside the region that was dominated by something
6330 inside needs its dominance info updated. */
6331 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6332 doms.release ();
6333 /* Update the SSA web. */
6334 update_ssa (TODO_update_ssa);
6335
6336 if (free_region_copy)
6337 free (region_copy);
6338
6339 free_original_copy_tables ();
6340 return true;
6341 }
6342
6343 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6344 adding blocks when the dominator traversal reaches EXIT. This
6345 function silently assumes that ENTRY strictly dominates EXIT. */
6346
6347 void
6348 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6349 vec<basic_block> *bbs_p)
6350 {
6351 basic_block son;
6352
6353 for (son = first_dom_son (CDI_DOMINATORS, entry);
6354 son;
6355 son = next_dom_son (CDI_DOMINATORS, son))
6356 {
6357 bbs_p->safe_push (son);
6358 if (son != exit)
6359 gather_blocks_in_sese_region (son, exit, bbs_p);
6360 }
6361 }
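
/* For illustration (hedged), the idiom used by move_sese_region_to_fn
   further below to collect an SESE body; ENTRY_BB is pushed by hand
   because the walk only visits its dominator sons:

       vec<basic_block> bbs;
       bbs.create (0);
       bbs.safe_push (entry_bb);
       gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */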
6362
6363 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6364 The duplicates are recorded in VARS_MAP. */
6365
6366 static void
6367 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6368 tree to_context)
6369 {
6370 tree t = *tp, new_t;
6371 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6372
6373 if (DECL_CONTEXT (t) == to_context)
6374 return;
6375
6376 bool existed;
6377 tree &loc = vars_map->get_or_insert (t, &existed);
6378
6379 if (!existed)
6380 {
6381 if (SSA_VAR_P (t))
6382 {
6383 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6384 add_local_decl (f, new_t);
6385 }
6386 else
6387 {
6388 gcc_assert (TREE_CODE (t) == CONST_DECL);
6389 new_t = copy_node (t);
6390 }
6391 DECL_CONTEXT (new_t) = to_context;
6392
6393 loc = new_t;
6394 }
6395 else
6396 new_t = loc;
6397
6398 *tp = new_t;
6399 }
6400
6401
6402 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6403 VARS_MAP maps old ssa names and var_decls to the new ones. */
6404
6405 static tree
6406 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6407 tree to_context)
6408 {
6409 tree new_name;
6410
6411 gcc_assert (!virtual_operand_p (name));
6412
6413 tree *loc = vars_map->get (name);
6414
6415 if (!loc)
6416 {
6417 tree decl = SSA_NAME_VAR (name);
6418 if (decl)
6419 {
6420 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6421 replace_by_duplicate_decl (&decl, vars_map, to_context);
6422 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6423 decl, SSA_NAME_DEF_STMT (name));
6424 }
6425 else
6426 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6427 name, SSA_NAME_DEF_STMT (name));
6428
6429 /* Now that we've used the def stmt to define new_name, make sure it
6430 doesn't define name anymore. */
6431 SSA_NAME_DEF_STMT (name) = NULL;
6432
6433 vars_map->put (name, new_name);
6434 }
6435 else
6436 new_name = *loc;
6437
6438 return new_name;
6439 }
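
/* Illustrative walk-through (hedged): when a statement defining, say,
   i_3 is moved into TO_CONTEXT, the first call for i_3 duplicates the
   underlying decl `i' via replace_by_duplicate_decl, creates a fresh
   SSA name on the duplicate in the destination function, and records
   the pair in VARS_MAP; every later occurrence of i_3 in the moved
   region then hits the map and is rewritten to the same new name.  */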
6440
6441 struct move_stmt_d
6442 {
6443 tree orig_block;
6444 tree new_block;
6445 tree from_context;
6446 tree to_context;
6447 hash_map<tree, tree> *vars_map;
6448 htab_t new_label_map;
6449 hash_map<void *, void *> *eh_map;
6450 bool remap_decls_p;
6451 };
6452
6453 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6454 contained in *TP if it was previously ORIG_BLOCK, and change the
6455 DECL_CONTEXT of every local variable referenced in *TP. */
6456
6457 static tree
6458 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6459 {
6460 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6461 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6462 tree t = *tp;
6463
6464 if (EXPR_P (t))
6465 {
6466 tree block = TREE_BLOCK (t);
6467 if (block == p->orig_block
6468 || (p->orig_block == NULL_TREE
6469 && block != NULL_TREE))
6470 TREE_SET_BLOCK (t, p->new_block);
6471 else if (flag_checking && block != NULL_TREE)
6472 {
6473 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6474 block = BLOCK_SUPERCONTEXT (block);
6475 gcc_assert (block == p->orig_block);
6476 }
6477 }
6478 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6479 {
6480 if (TREE_CODE (t) == SSA_NAME)
6481 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6482 else if (TREE_CODE (t) == PARM_DECL
6483 && gimple_in_ssa_p (cfun))
6484 *tp = *(p->vars_map->get (t));
6485 else if (TREE_CODE (t) == LABEL_DECL)
6486 {
6487 if (p->new_label_map)
6488 {
6489 struct tree_map in, *out;
6490 in.base.from = t;
6491 out = (struct tree_map *)
6492 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6493 if (out)
6494 *tp = t = out->to;
6495 }
6496
6497 DECL_CONTEXT (t) = p->to_context;
6498 }
6499 else if (p->remap_decls_p)
6500 {
6501 /* Replace T with its duplicate. T should no longer appear in the
6502 parent function, so this looks wasteful; however, it may appear
6503 in referenced_vars, and more importantly, as virtual operands of
6504 statements, and in alias lists of other variables. It would be
6505 quite difficult to expunge it from all those places. ??? It might
6506 suffice to do this for addressable variables. */
6507 if ((TREE_CODE (t) == VAR_DECL
6508 && !is_global_var (t))
6509 || TREE_CODE (t) == CONST_DECL)
6510 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6511 }
6512 *walk_subtrees = 0;
6513 }
6514 else if (TYPE_P (t))
6515 *walk_subtrees = 0;
6516
6517 return NULL_TREE;
6518 }
6519
6520 /* Helper for move_stmt_r. Given an EH region number for the source
6521 function, map that to the duplicate EH region number in the dest. */
6522
6523 static int
6524 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6525 {
6526 eh_region old_r, new_r;
6527
6528 old_r = get_eh_region_from_number (old_nr);
6529 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6530
6531 return new_r->index;
6532 }
6533
6534 /* Similar, but operate on INTEGER_CSTs. */
6535
6536 static tree
6537 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6538 {
6539 int old_nr, new_nr;
6540
6541 old_nr = tree_to_shwi (old_t_nr);
6542 new_nr = move_stmt_eh_region_nr (old_nr, p);
6543
6544 return build_int_cst (integer_type_node, new_nr);
6545 }
6546
6547 /* Like move_stmt_op, but for gimple statements.
6548
6549 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6550 contained in the current statement in *GSI_P and change the
6551 DECL_CONTEXT of every local variable referenced in the current
6552 statement. */
6553
6554 static tree
6555 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6556 struct walk_stmt_info *wi)
6557 {
6558 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6559 gimple *stmt = gsi_stmt (*gsi_p);
6560 tree block = gimple_block (stmt);
6561
6562 if (block == p->orig_block
6563 || (p->orig_block == NULL_TREE
6564 && block != NULL_TREE))
6565 gimple_set_block (stmt, p->new_block);
6566
6567 switch (gimple_code (stmt))
6568 {
6569 case GIMPLE_CALL:
6570 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6571 {
6572 tree r, fndecl = gimple_call_fndecl (stmt);
6573 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6574 switch (DECL_FUNCTION_CODE (fndecl))
6575 {
6576 case BUILT_IN_EH_COPY_VALUES:
6577 r = gimple_call_arg (stmt, 1);
6578 r = move_stmt_eh_region_tree_nr (r, p);
6579 gimple_call_set_arg (stmt, 1, r);
6580 /* FALLTHRU */
6581
6582 case BUILT_IN_EH_POINTER:
6583 case BUILT_IN_EH_FILTER:
6584 r = gimple_call_arg (stmt, 0);
6585 r = move_stmt_eh_region_tree_nr (r, p);
6586 gimple_call_set_arg (stmt, 0, r);
6587 break;
6588
6589 default:
6590 break;
6591 }
6592 }
6593 break;
6594
6595 case GIMPLE_RESX:
6596 {
6597 gresx *resx_stmt = as_a <gresx *> (stmt);
6598 int r = gimple_resx_region (resx_stmt);
6599 r = move_stmt_eh_region_nr (r, p);
6600 gimple_resx_set_region (resx_stmt, r);
6601 }
6602 break;
6603
6604 case GIMPLE_EH_DISPATCH:
6605 {
6606 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6607 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6608 r = move_stmt_eh_region_nr (r, p);
6609 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6610 }
6611 break;
6612
6613 case GIMPLE_OMP_RETURN:
6614 case GIMPLE_OMP_CONTINUE:
6615 break;
6616 default:
6617 if (is_gimple_omp (stmt))
6618 {
6619 /* Do not remap variables inside OMP directives. Variables
6620 referenced in clauses and directive header belong to the
6621 parent function and should not be moved into the child
6622 function. */
6623 bool save_remap_decls_p = p->remap_decls_p;
6624 p->remap_decls_p = false;
6625 *handled_ops_p = true;
6626
6627 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6628 move_stmt_op, wi);
6629
6630 p->remap_decls_p = save_remap_decls_p;
6631 }
6632 break;
6633 }
6634
6635 return NULL_TREE;
6636 }
6637
6638 /* Move basic block BB from function CFUN to function DEST_FN. The
6639 block is moved out of the original linked list and placed after
6640 block AFTER in the new list. Also, the block is removed from the
6641 original array of blocks and placed in DEST_FN's array of blocks.
6642 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6643 updated to reflect the moved edges.
6644
6645 The local variables are remapped to new instances, VARS_MAP is used
6646 to record the mapping. */
6647
6648 static void
6649 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6650 basic_block after, bool update_edge_count_p,
6651 struct move_stmt_d *d)
6652 {
6653 struct control_flow_graph *cfg;
6654 edge_iterator ei;
6655 edge e;
6656 gimple_stmt_iterator si;
6657 unsigned old_len, new_len;
6658
6659 /* Remove BB from dominance structures. */
6660 delete_from_dominance_info (CDI_DOMINATORS, bb);
6661
6662 /* Move BB from its current loop to the copy in the new function. */
6663 if (current_loops)
6664 {
6665 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6666 if (new_loop)
6667 bb->loop_father = new_loop;
6668 }
6669
6670 /* Link BB to the new linked list. */
6671 move_block_after (bb, after);
6672
6673 /* Update the edge count in the corresponding flowgraphs. */
6674 if (update_edge_count_p)
6675 FOR_EACH_EDGE (e, ei, bb->succs)
6676 {
6677 cfun->cfg->x_n_edges--;
6678 dest_cfun->cfg->x_n_edges++;
6679 }
6680
6681 /* Remove BB from the original basic block array. */
6682 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6683 cfun->cfg->x_n_basic_blocks--;
6684
6685 /* Grow DEST_CFUN's basic block array if needed. */
6686 cfg = dest_cfun->cfg;
6687 cfg->x_n_basic_blocks++;
6688 if (bb->index >= cfg->x_last_basic_block)
6689 cfg->x_last_basic_block = bb->index + 1;
6690
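  /* The growth below adds roughly a quarter of slack beyond the needed
     length; illustrative arithmetic: x_last_basic_block == 20 gives
     new_len == 20 + (20 + 3) / 4 == 25, so repeated block moves do not
     reallocate every time.  */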
6691 old_len = vec_safe_length (cfg->x_basic_block_info);
6692 if ((unsigned) cfg->x_last_basic_block >= old_len)
6693 {
6694 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6695 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6696 }
6697
6698 (*cfg->x_basic_block_info)[bb->index] = bb;
6699
6700 /* Remap the variables in phi nodes. */
6701 for (gphi_iterator psi = gsi_start_phis (bb);
6702 !gsi_end_p (psi); )
6703 {
6704 gphi *phi = psi.phi ();
6705 use_operand_p use;
6706 tree op = PHI_RESULT (phi);
6707 ssa_op_iter oi;
6708 unsigned i;
6709
6710 if (virtual_operand_p (op))
6711 {
6712 /* Remove the phi nodes for virtual operands (alias analysis will be
6713 run for the new function, anyway). */
6714 remove_phi_node (&psi, true);
6715 continue;
6716 }
6717
6718 SET_PHI_RESULT (phi,
6719 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6720 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6721 {
6722 op = USE_FROM_PTR (use);
6723 if (TREE_CODE (op) == SSA_NAME)
6724 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6725 }
6726
6727 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6728 {
6729 location_t locus = gimple_phi_arg_location (phi, i);
6730 tree block = LOCATION_BLOCK (locus);
6731
6732 if (locus == UNKNOWN_LOCATION)
6733 continue;
6734 if (d->orig_block == NULL_TREE || block == d->orig_block)
6735 {
6736 if (d->new_block == NULL_TREE)
6737 locus = LOCATION_LOCUS (locus);
6738 else
6739 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6740 gimple_phi_arg_set_location (phi, i, locus);
6741 }
6742 }
6743
6744 gsi_next (&psi);
6745 }
6746
6747 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6748 {
6749 gimple *stmt = gsi_stmt (si);
6750 struct walk_stmt_info wi;
6751
6752 memset (&wi, 0, sizeof (wi));
6753 wi.info = d;
6754 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6755
6756 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6757 {
6758 tree label = gimple_label_label (label_stmt);
6759 int uid = LABEL_DECL_UID (label);
6760
6761 gcc_assert (uid > -1);
6762
6763 old_len = vec_safe_length (cfg->x_label_to_block_map);
6764 if (old_len <= (unsigned) uid)
6765 {
6766 new_len = 3 * uid / 2 + 1;
6767 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6768 }
6769
6770 (*cfg->x_label_to_block_map)[uid] = bb;
6771 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6772
6773 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6774
6775 if (uid >= dest_cfun->cfg->last_label_uid)
6776 dest_cfun->cfg->last_label_uid = uid + 1;
6777 }
6778
6779 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6780 remove_stmt_from_eh_lp_fn (cfun, stmt);
6781
6782 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6783 gimple_remove_stmt_histograms (cfun, stmt);
6784
6785 /* We cannot leave any operands allocated from the operand caches of
6786 the current function. */
6787 free_stmt_operands (cfun, stmt);
6788 push_cfun (dest_cfun);
6789 update_stmt (stmt);
6790 pop_cfun ();
6791 }
6792
6793 FOR_EACH_EDGE (e, ei, bb->succs)
6794 if (e->goto_locus != UNKNOWN_LOCATION)
6795 {
6796 tree block = LOCATION_BLOCK (e->goto_locus);
6797 if (d->orig_block == NULL_TREE
6798 || block == d->orig_block)
6799 e->goto_locus = d->new_block ?
6800 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6801 LOCATION_LOCUS (e->goto_locus);
6802 }
6803 }
6804
6805 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6806 the outermost EH region. Use REGION as the incoming base EH region. */
6807
6808 static eh_region
6809 find_outermost_region_in_block (struct function *src_cfun,
6810 basic_block bb, eh_region region)
6811 {
6812 gimple_stmt_iterator si;
6813
6814 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6815 {
6816 gimple *stmt = gsi_stmt (si);
6817 eh_region stmt_region;
6818 int lp_nr;
6819
6820 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6821 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6822 if (stmt_region)
6823 {
6824 if (region == NULL)
6825 region = stmt_region;
6826 else if (stmt_region != region)
6827 {
6828 region = eh_region_outermost (src_cfun, stmt_region, region);
6829 gcc_assert (region != NULL);
6830 }
6831 }
6832 }
6833
6834 return region;
6835 }
6836
6837 static tree
6838 new_label_mapper (tree decl, void *data)
6839 {
6840 htab_t hash = (htab_t) data;
6841 struct tree_map *m;
6842 void **slot;
6843
6844 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6845
6846 m = XNEW (struct tree_map);
6847 m->hash = DECL_UID (decl);
6848 m->base.from = decl;
6849 m->to = create_artificial_label (UNKNOWN_LOCATION);
6850 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6851 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6852 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6853
6854 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6855 gcc_assert (*slot == NULL);
6856
6857 *slot = m;
6858
6859 return m->to;
6860 }
6861
6862 /* Tree walker to replace the decls used inside value expressions by
6863 duplicates. */
6864
6865 static tree
6866 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6867 {
6868 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6869
6870 switch (TREE_CODE (*tp))
6871 {
6872 case VAR_DECL:
6873 case PARM_DECL:
6874 case RESULT_DECL:
6875 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6876 break;
6877 default:
6878 break;
6879 }
6880
6881 if (IS_TYPE_OR_DECL_P (*tp))
6882 *walk_subtrees = false;
6883
6884 return NULL;
6885 }
6886
6887 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6888 subblocks. */
6889
6890 static void
6891 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6892 tree to_context)
6893 {
6894 tree *tp, t;
6895
6896 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6897 {
6898 t = *tp;
6899 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6900 continue;
6901 replace_by_duplicate_decl (&t, vars_map, to_context);
6902 if (t != *tp)
6903 {
6904 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6905 {
6906 tree x = DECL_VALUE_EXPR (*tp);
6907 struct replace_decls_d rd = { vars_map, to_context };
6908 x = unshare_expr (x);
6909 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6910 SET_DECL_VALUE_EXPR (t, x);
6911 DECL_HAS_VALUE_EXPR_P (t) = 1;
6912 }
6913 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6914 *tp = t;
6915 }
6916 }
6917
6918 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6919 replace_block_vars_by_duplicates (block, vars_map, to_context);
6920 }
6921
6922 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6923 from FN1 to FN2. */
6924
6925 static void
6926 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6927 struct loop *loop)
6928 {
6929 /* Discard it from the old loop array. */
6930 (*get_loops (fn1))[loop->num] = NULL;
6931
6932 /* Place it in the new loop array, assigning it a new number. */
6933 loop->num = number_of_loops (fn2);
6934 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6935
6936 /* Recurse to children. */
6937 for (loop = loop->inner; loop; loop = loop->next)
6938 fixup_loop_arrays_after_move (fn1, fn2, loop);
6939 }
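
/* Example (illustrative): if FN2 already holds loops 0 and 1, a moved
   loop arrives when number_of_loops (fn2) == 2 and is renumbered 2;
   its subloops get 3, 4, ... as the recursion pushes them onto FN2's
   loop array, while their old slots in FN1's array are left NULL.  */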
6940
6941 /* Verify that the blocks in BBS_P form a single-entry, single-exit region
6942 delimited by ENTRY and EXIT, possibly containing noreturn blocks. */
6943
6944 DEBUG_FUNCTION void
6945 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6946 {
6947 basic_block bb;
6948 edge_iterator ei;
6949 edge e;
6950 bitmap bbs = BITMAP_ALLOC (NULL);
6951 int i;
6952
6953 gcc_assert (entry != NULL);
6954 gcc_assert (entry != exit);
6955 gcc_assert (bbs_p != NULL);
6956
6957 gcc_assert (bbs_p->length () > 0);
6958
6959 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6960 bitmap_set_bit (bbs, bb->index);
6961
6962 gcc_assert (bitmap_bit_p (bbs, entry->index));
6963 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6964
6965 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6966 {
6967 if (bb == entry)
6968 {
6969 gcc_assert (single_pred_p (entry));
6970 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6971 }
6972 else
6973 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6974 {
6975 e = ei_edge (ei);
6976 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6977 }
6978
6979 if (bb == exit)
6980 {
6981 gcc_assert (single_succ_p (exit));
6982 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6983 }
6984 else
6985 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6986 {
6987 e = ei_edge (ei);
6988 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6989 }
6990 }
6991
6992 BITMAP_FREE (bbs);
6993 }
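
/* Schematically (hedged ASCII sketch), verify_sese enforces a region
   shape like the following, where every edge crossing the region
   boundary does so through ENTRY or EXIT:

           outside pred
                |
              ENTRY        single predecessor outside BBS
              /    \
        ... internal blocks, all edges stay inside BBS ...
              \    /
               EXIT        single successor outside BBS
                |
           outside succ                                               */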
6994
6995 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
6996
6997 bool
6998 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
6999 {
7000 bitmap release_names = (bitmap)data;
7001
7002 if (TREE_CODE (from) != SSA_NAME)
7003 return true;
7004
7005 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7006 return true;
7007 }
7008
7009 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7010 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7011 single basic block in the original CFG and the new basic block is
7012 returned. DEST_CFUN must not have a CFG yet.
7013
7014 Note that the region need not be a pure SESE region. Blocks inside
7015 the region may contain calls to abort/exit. The only restriction
7016 is that ENTRY_BB should be the only entry point and it must
7017 dominate EXIT_BB.
7018
7019 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7020 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7021 to the new function.
7022
7023 All local variables referenced in the region are assumed to be in
7024 the corresponding BLOCK_VARS and unexpanded variable lists
7025 associated with DEST_CFUN.
7026
7027 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7028 reimplement move_sese_region_to_fn by duplicating the region rather than
7029 moving it. */
7030
7031 basic_block
7032 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7033 basic_block exit_bb, tree orig_block)
7034 {
7035 vec<basic_block> bbs, dom_bbs;
7036 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7037 basic_block after, bb, *entry_pred, *exit_succ, abb;
7038 struct function *saved_cfun = cfun;
7039 int *entry_flag, *exit_flag;
7040 unsigned *entry_prob, *exit_prob;
7041 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7042 edge e;
7043 edge_iterator ei;
7044 htab_t new_label_map;
7045 hash_map<void *, void *> *eh_map;
7046 struct loop *loop = entry_bb->loop_father;
7047 struct loop *loop0 = get_loop (saved_cfun, 0);
7048 struct move_stmt_d d;
7049
7050 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7051 region. */
7052 gcc_assert (entry_bb != exit_bb
7053 && (!exit_bb
7054 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7055
7056 /* Collect all the blocks in the region. Manually add ENTRY_BB
7057 because it won't be added by dfs_enumerate_from. */
7058 bbs.create (0);
7059 bbs.safe_push (entry_bb);
7060 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7061
7062 if (flag_checking)
7063 verify_sese (entry_bb, exit_bb, &bbs);
7064
7065 /* The blocks that used to be dominated by something in BBS will now be
7066 dominated by the new block. */
7067 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7068 bbs.address (),
7069 bbs.length ());
7070
7071 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7072 the predecessor edges to ENTRY_BB and the successor edges to
7073 EXIT_BB so that we can re-attach them to the new basic block that
7074 will replace the region. */
7075 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7076 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7077 entry_flag = XNEWVEC (int, num_entry_edges);
7078 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7079 i = 0;
7080 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7081 {
7082 entry_prob[i] = e->probability;
7083 entry_flag[i] = e->flags;
7084 entry_pred[i++] = e->src;
7085 remove_edge (e);
7086 }
7087
7088 if (exit_bb)
7089 {
7090 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7091 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7092 exit_flag = XNEWVEC (int, num_exit_edges);
7093 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7094 i = 0;
7095 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7096 {
7097 exit_prob[i] = e->probability;
7098 exit_flag[i] = e->flags;
7099 exit_succ[i++] = e->dest;
7100 remove_edge (e);
7101 }
7102 }
7103 else
7104 {
7105 num_exit_edges = 0;
7106 exit_succ = NULL;
7107 exit_flag = NULL;
7108 exit_prob = NULL;
7109 }
7110
7111 /* Switch context to the child function to initialize DEST_FN's CFG. */
7112 gcc_assert (dest_cfun->cfg == NULL);
7113 push_cfun (dest_cfun);
7114
7115 init_empty_tree_cfg ();
7116
7117 /* Initialize EH information for the new function. */
7118 eh_map = NULL;
7119 new_label_map = NULL;
7120 if (saved_cfun->eh)
7121 {
7122 eh_region region = NULL;
7123
7124 FOR_EACH_VEC_ELT (bbs, i, bb)
7125 region = find_outermost_region_in_block (saved_cfun, bb, region);
7126
7127 init_eh_for_function ();
7128 if (region != NULL)
7129 {
7130 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7131 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7132 new_label_mapper, new_label_map);
7133 }
7134 }
7135
7136 /* Initialize an empty loop tree. */
7137 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7138 init_loops_structure (dest_cfun, loops, 1);
7139 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7140 set_loops_for_fn (dest_cfun, loops);
7141
7142 /* Move the outlined loop tree part. */
7143 num_nodes = bbs.length ();
7144 FOR_EACH_VEC_ELT (bbs, i, bb)
7145 {
7146 if (bb->loop_father->header == bb)
7147 {
7148 struct loop *this_loop = bb->loop_father;
7149 struct loop *outer = loop_outer (this_loop);
7150 if (outer == loop
7151 /* If the SESE region contains some bbs ending with
7152 a noreturn call, those are considered to belong
7153 to the outermost loop in saved_cfun, rather than
7154 the entry_bb's loop_father. */
7155 || outer == loop0)
7156 {
7157 if (outer != loop)
7158 num_nodes -= this_loop->num_nodes;
7159 flow_loop_tree_node_remove (bb->loop_father);
7160 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7161 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7162 }
7163 }
7164 else if (bb->loop_father == loop0 && loop0 != loop)
7165 num_nodes--;
7166
7167 /* Remove loop exits from the outlined region. */
7168 if (loops_for_fn (saved_cfun)->exits)
7169 FOR_EACH_EDGE (e, ei, bb->succs)
7170 {
7171 struct loops *l = loops_for_fn (saved_cfun);
7172 loop_exit **slot
7173 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7174 NO_INSERT);
7175 if (slot)
7176 l->exits->clear_slot (slot);
7177 }
7178 }
7179
7180
7181 /* Adjust the number of blocks in the tree root of the outlined part. */
7182 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7183
7184 /* Set up a mapping to be used by move_block_to_fn. */
7185 loop->aux = current_loops->tree_root;
7186 loop0->aux = current_loops->tree_root;
7187
7188 pop_cfun ();
7189
7190 /* Move blocks from BBS into DEST_CFUN. */
7191 gcc_assert (bbs.length () >= 2);
7192 after = dest_cfun->cfg->x_entry_block_ptr;
7193 hash_map<tree, tree> vars_map;
7194
7195 memset (&d, 0, sizeof (d));
7196 d.orig_block = orig_block;
7197 d.new_block = DECL_INITIAL (dest_cfun->decl);
7198 d.from_context = cfun->decl;
7199 d.to_context = dest_cfun->decl;
7200 d.vars_map = &vars_map;
7201 d.new_label_map = new_label_map;
7202 d.eh_map = eh_map;
7203 d.remap_decls_p = true;
7204
7205 if (gimple_in_ssa_p (cfun))
7206 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7207 {
7208 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7209 set_ssa_default_def (dest_cfun, arg, narg);
7210 vars_map.put (arg, narg);
7211 }
7212
7213 FOR_EACH_VEC_ELT (bbs, i, bb)
7214 {
7215 /* No need to update edge counts on the last block. It has
7216 already been updated earlier when we detached the region from
7217 the original CFG. */
7218 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7219 after = bb;
7220 }
7221
7222 loop->aux = NULL;
7223 loop0->aux = NULL;
7224 /* Loop sizes are no longer correct, fix them up. */
7225 loop->num_nodes -= num_nodes;
7226 for (struct loop *outer = loop_outer (loop);
7227 outer; outer = loop_outer (outer))
7228 outer->num_nodes -= num_nodes;
7229 loop0->num_nodes -= bbs.length () - num_nodes;
7230
7231 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7232 {
7233 struct loop *aloop;
7234 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7235 if (aloop != NULL)
7236 {
7237 if (aloop->simduid)
7238 {
7239 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7240 d.to_context);
7241 dest_cfun->has_simduid_loops = true;
7242 }
7243 if (aloop->force_vectorize)
7244 dest_cfun->has_force_vectorize_loops = true;
7245 }
7246 }
7247
7248 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7249 if (orig_block)
7250 {
7251 tree block;
7252 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7253 == NULL_TREE);
7254 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7255 = BLOCK_SUBBLOCKS (orig_block);
7256 for (block = BLOCK_SUBBLOCKS (orig_block);
7257 block; block = BLOCK_CHAIN (block))
7258 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7259 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7260 }
7261
7262 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7263 &vars_map, dest_cfun->decl);
7264
7265 if (new_label_map)
7266 htab_delete (new_label_map);
7267 if (eh_map)
7268 delete eh_map;
7269
7270 if (gimple_in_ssa_p (cfun))
7271 {
7272 /* We need to release ssa-names in a defined order, so first find them,
7273 and then iterate in ascending version order. */
7274 bitmap release_names = BITMAP_ALLOC (NULL);
7275 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7276 bitmap_iterator bi;
7277 unsigned i;
7278 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7279 release_ssa_name (ssa_name (i));
7280 BITMAP_FREE (release_names);
7281 }
7282
7283 /* Rewire the entry and exit blocks. The successor to the entry
7284 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7285 the child function. Similarly, the predecessor of DEST_FN's
7286 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7287 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7288 various CFG manipulation functions get to the right CFG.
7289
7290 FIXME, this is silly. The CFG ought to become a parameter to
7291 these helpers. */
7292 push_cfun (dest_cfun);
7293 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7294 if (exit_bb)
7295 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7296 pop_cfun ();
7297
7298 /* Back in the original function, the SESE region has disappeared;
7299 create a new basic block in its place. */
7300 bb = create_empty_bb (entry_pred[0]);
7301 if (current_loops)
7302 add_bb_to_loop (bb, loop);
7303 for (i = 0; i < num_entry_edges; i++)
7304 {
7305 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7306 e->probability = entry_prob[i];
7307 }
7308
7309 for (i = 0; i < num_exit_edges; i++)
7310 {
7311 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7312 e->probability = exit_prob[i];
7313 }
7314
7315 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7316 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7317 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7318 dom_bbs.release ();
7319
7320 if (exit_bb)
7321 {
7322 free (exit_prob);
7323 free (exit_flag);
7324 free (exit_succ);
7325 }
7326 free (entry_prob);
7327 free (entry_flag);
7328 free (entry_pred);
7329 bbs.release ();
7330
7331 return bb;
7332 }
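
/* Usage sketch (hedged; modelled on the OMP outlining done in
   omp-low.c, identifiers are illustrative).  DEST_CFUN must be a
   freshly built function with no CFG yet:

       struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
       basic_block stub
         = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
                                   block);

   On return, STUB sits in the parent CFG where the region used to be,
   with the old entry/exit edges re-attached to it.  */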
7333
7334
7335 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7336 dumpfile.h). */
7337
7338 void
7339 dump_function_to_file (tree fndecl, FILE *file, int flags)
7340 {
7341 tree arg, var, old_current_fndecl = current_function_decl;
7342 struct function *dsf;
7343 bool ignore_topmost_bind = false, any_var = false;
7344 basic_block bb;
7345 tree chain;
7346 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7347 && decl_is_tm_clone (fndecl));
7348 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7349
7350 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7351 {
7352 fprintf (file, "__attribute__((");
7353
7354 bool first = true;
7355 tree chain;
7356 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7357 first = false, chain = TREE_CHAIN (chain))
7358 {
7359 if (!first)
7360 fprintf (file, ", ");
7361
7362 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7363 if (TREE_VALUE (chain) != NULL_TREE)
7364 {
7365 fprintf (file, " (");
7366 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7367 fprintf (file, ")");
7368 }
7369 }
7370
7371 fprintf (file, "))\n");
7372 }
7373
7374 current_function_decl = fndecl;
7375 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7376
7377 arg = DECL_ARGUMENTS (fndecl);
7378 while (arg)
7379 {
7380 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7381 fprintf (file, " ");
7382 print_generic_expr (file, arg, dump_flags);
7383 if (flags & TDF_VERBOSE)
7384 print_node (file, "", arg, 4);
7385 if (DECL_CHAIN (arg))
7386 fprintf (file, ", ");
7387 arg = DECL_CHAIN (arg);
7388 }
7389 fprintf (file, ")\n");
7390
7391 if (flags & TDF_VERBOSE)
7392 print_node (file, "", fndecl, 2);
7393
7394 dsf = DECL_STRUCT_FUNCTION (fndecl);
7395 if (dsf && (flags & TDF_EH))
7396 dump_eh_tree (file, dsf);
7397
7398 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7399 {
7400 dump_node (fndecl, TDF_SLIM | flags, file);
7401 current_function_decl = old_current_fndecl;
7402 return;
7403 }
7404
7405 /* When GIMPLE is lowered, the variables are no longer available in
7406 BIND_EXPRs, so display them separately. */
7407 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7408 {
7409 unsigned ix;
7410 ignore_topmost_bind = true;
7411
7412 fprintf (file, "{\n");
7413 if (!vec_safe_is_empty (fun->local_decls))
7414 FOR_EACH_LOCAL_DECL (fun, ix, var)
7415 {
7416 print_generic_decl (file, var, flags);
7417 if (flags & TDF_VERBOSE)
7418 print_node (file, "", var, 4);
7419 fprintf (file, "\n");
7420
7421 any_var = true;
7422 }
7423 if (gimple_in_ssa_p (cfun))
7424 for (ix = 1; ix < num_ssa_names; ++ix)
7425 {
7426 tree name = ssa_name (ix);
7427 if (name && !SSA_NAME_VAR (name))
7428 {
7429 fprintf (file, " ");
7430 print_generic_expr (file, TREE_TYPE (name), flags);
7431 fprintf (file, " ");
7432 print_generic_expr (file, name, flags);
7433 fprintf (file, ";\n");
7434
7435 any_var = true;
7436 }
7437 }
7438 }
7439
7440 if (fun && fun->decl == fndecl
7441 && fun->cfg
7442 && basic_block_info_for_fn (fun))
7443 {
7444 /* If the CFG has been built, emit a CFG-based dump. */
7445 if (!ignore_topmost_bind)
7446 fprintf (file, "{\n");
7447
7448 if (any_var && n_basic_blocks_for_fn (fun))
7449 fprintf (file, "\n");
7450
7451 FOR_EACH_BB_FN (bb, fun)
7452 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7453
7454 fprintf (file, "}\n");
7455 }
7456 else if (DECL_SAVED_TREE (fndecl) == NULL)
7457 {
7458 /* The function is now in GIMPLE form but the CFG has not been
7459 built yet. Emit the single sequence of GIMPLE statements
7460 that make up its body. */
7461 gimple_seq body = gimple_body (fndecl);
7462
7463 if (gimple_seq_first_stmt (body)
7464 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7465 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7466 print_gimple_seq (file, body, 0, flags);
7467 else
7468 {
7469 if (!ignore_topmost_bind)
7470 fprintf (file, "{\n");
7471
7472 if (any_var)
7473 fprintf (file, "\n");
7474
7475 print_gimple_seq (file, body, 2, flags);
7476 fprintf (file, "}\n");
7477 }
7478 }
7479 else
7480 {
7481 int indent;
7482
7483 /* Make a tree based dump. */
7484 chain = DECL_SAVED_TREE (fndecl);
7485 if (chain && TREE_CODE (chain) == BIND_EXPR)
7486 {
7487 if (ignore_topmost_bind)
7488 {
7489 chain = BIND_EXPR_BODY (chain);
7490 indent = 2;
7491 }
7492 else
7493 indent = 0;
7494 }
7495 else
7496 {
7497 if (!ignore_topmost_bind)
7498 {
7499 fprintf (file, "{\n");
7500 /* No topmost bind, pretend it's ignored for later. */
7501 ignore_topmost_bind = true;
7502 }
7503 indent = 2;
7504 }
7505
7506 if (any_var)
7507 fprintf (file, "\n");
7508
7509 print_generic_stmt_indented (file, chain, flags, indent);
7510 if (ignore_topmost_bind)
7511 fprintf (file, "}\n");
7512 }
7513
7514 if (flags & TDF_ENUMERATE_LOCALS)
7515 dump_enumerated_decls (file, flags);
7516 fprintf (file, "\n\n");
7517
7518 current_function_decl = old_current_fndecl;
7519 }
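
/* Illustrative shape of the CFG-based dump emitted above (not
   verbatim output; names made up):

       foo (int n)
       {
         int i;

         <bb 2>:
         ...
       }

   The signature comes from the print_generic_expr calls over
   DECL_ARGUMENTS, the declarations from FOR_EACH_LOCAL_DECL, and the
   per-block bodies from dump_bb.  */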
7520
7521 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7522
7523 DEBUG_FUNCTION void
7524 debug_function (tree fn, int flags)
7525 {
7526 dump_function_to_file (fn, stderr, flags);
7527 }
7528
7529
7530 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7531
7532 static void
7533 print_pred_bbs (FILE *file, basic_block bb)
7534 {
7535 edge e;
7536 edge_iterator ei;
7537
7538 FOR_EACH_EDGE (e, ei, bb->preds)
7539 fprintf (file, "bb_%d ", e->src->index);
7540 }
7541
7542
7543 /* Print on FILE the indexes for the successors of basic_block BB. */
7544
7545 static void
7546 print_succ_bbs (FILE *file, basic_block bb)
7547 {
7548 edge e;
7549 edge_iterator ei;
7550
7551 FOR_EACH_EDGE (e, ei, bb->succs)
7552 fprintf (file, "bb_%d ", e->dest->index);
7553 }
7554
7555 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7556
7557 void
7558 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7559 {
7560 char *s_indent = (char *) alloca ((size_t) indent + 1);
7561 memset ((void *) s_indent, ' ', (size_t) indent);
7562 s_indent[indent] = '\0';
7563
7564 /* Print basic_block's header. */
7565 if (verbosity >= 2)
7566 {
7567 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7568 print_pred_bbs (file, bb);
7569 fprintf (file, "}, succs = {");
7570 print_succ_bbs (file, bb);
7571 fprintf (file, "})\n");
7572 }
7573
7574 /* Print basic_block's body. */
7575 if (verbosity >= 3)
7576 {
7577 fprintf (file, "%s {\n", s_indent);
7578 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7579 fprintf (file, "%s }\n", s_indent);
7580 }
7581 }
7582
7583 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7584
7585 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7586 the VERBOSITY level, this outputs the contents of the loop, or just
7587 its structure. */
7588
7589 static void
7590 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7591 {
7592 char *s_indent;
7593 basic_block bb;
7594
7595 if (loop == NULL)
7596 return;
7597
7598 s_indent = (char *) alloca ((size_t) indent + 1);
7599 memset ((void *) s_indent, ' ', (size_t) indent);
7600 s_indent[indent] = '\0';
7601
7602 /* Print loop's header. */
7603 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7604 if (loop->header)
7605 fprintf (file, "header = %d", loop->header->index);
7606 else
7607 {
7608 fprintf (file, "deleted)\n");
7609 return;
7610 }
7611 if (loop->latch)
7612 fprintf (file, ", latch = %d", loop->latch->index);
7613 else
7614 fprintf (file, ", multiple latches");
7615 fprintf (file, ", niter = ");
7616 print_generic_expr (file, loop->nb_iterations, 0);
7617
7618 if (loop->any_upper_bound)
7619 {
7620 fprintf (file, ", upper_bound = ");
7621 print_decu (loop->nb_iterations_upper_bound, file);
7622 }
7623
7624 if (loop->any_estimate)
7625 {
7626 fprintf (file, ", estimate = ");
7627 print_decu (loop->nb_iterations_estimate, file);
7628 }
7629 fprintf (file, ")\n");
7630
7631 /* Print loop's body. */
7632 if (verbosity >= 1)
7633 {
7634 fprintf (file, "%s{\n", s_indent);
7635 FOR_EACH_BB_FN (bb, cfun)
7636 if (bb->loop_father == loop)
7637 print_loops_bb (file, bb, indent, verbosity);
7638
7639 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7640 fprintf (file, "%s}\n", s_indent);
7641 }
7642 }
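
/* Illustrative shape of the header line printed above for a simple
   counted loop (all values made up):

       loop_1 (header = 3, latch = 4, niter = 99, upper_bound = 99,
               estimate = 99)
*/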
7643
7644 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7645 spaces. Depending on the VERBOSITY level, this outputs the contents
7646 of the loop, or just its structure. */
7647
7648 static void
7649 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7650 int verbosity)
7651 {
7652 if (loop == NULL)
7653 return;
7654
7655 print_loop (file, loop, indent, verbosity);
7656 print_loop_and_siblings (file, loop->next, indent, verbosity);
7657 }
7658
7659 /* Follow a CFG edge from the entry point of the program, and on entry
7660 to a loop, pretty print the loop structure on FILE. */
7661
7662 void
7663 print_loops (FILE *file, int verbosity)
7664 {
7665 basic_block bb;
7666
7667 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7668 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7669 if (bb && bb->loop_father)
7670 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7671 }
7672
7673 /* Dump a loop. */
7674
7675 DEBUG_FUNCTION void
7676 debug (struct loop &ref)
7677 {
7678 print_loop (stderr, &ref, 0, /*verbosity*/0);
7679 }
7680
7681 DEBUG_FUNCTION void
7682 debug (struct loop *ptr)
7683 {
7684 if (ptr)
7685 debug (*ptr);
7686 else
7687 fprintf (stderr, "<nil>\n");
7688 }
7689
7690 /* Dump a loop verbosely. */
7691
7692 DEBUG_FUNCTION void
7693 debug_verbose (struct loop &ref)
7694 {
7695 print_loop (stderr, &ref, 0, /*verbosity*/3);
7696 }
7697
7698 DEBUG_FUNCTION void
7699 debug_verbose (struct loop *ptr)
7700 {
7701 if (ptr)
7702 debug_verbose (*ptr);
7703 else
7704 fprintf (stderr, "<nil>\n");
7705 }
7706
7707
7708 /* Debug the loop structure at tree level, at some VERBOSITY level. */
7709
7710 DEBUG_FUNCTION void
7711 debug_loops (int verbosity)
7712 {
7713 print_loops (stderr, verbosity);
7714 }
7715
7716 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7717
7718 DEBUG_FUNCTION void
7719 debug_loop (struct loop *loop, int verbosity)
7720 {
7721 print_loop (stderr, loop, 0, verbosity);
7722 }
7723
7724 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7725 level. */
7726
7727 DEBUG_FUNCTION void
7728 debug_loop_num (unsigned num, int verbosity)
7729 {
7730 debug_loop (get_loop (cfun, num), verbosity);
7731 }
7732
7733 /* Return true if BB ends with a call, possibly followed by some
7734 instructions that must stay with the call. Return false
7735 otherwise. */
7736
7737 static bool
7738 gimple_block_ends_with_call_p (basic_block bb)
7739 {
7740 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7741 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7742 }
7743
7744
7745 /* Return true if BB ends with a conditional branch. Return false
7746 otherwise. */
7747
7748 static bool
7749 gimple_block_ends_with_condjump_p (const_basic_block bb)
7750 {
7751 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7752 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7753 }
7754
7755
7756 /* Return true if we need to add a fake edge to exit at statement T.
7757 Helper function for gimple_flow_call_edges_add. */
7758
7759 static bool
7760 need_fake_edge_p (gimple *t)
7761 {
7762 tree fndecl = NULL_TREE;
7763 int call_flags = 0;
7764
7765 /* NORETURN and LONGJMP calls already have an edge to exit.
7766 CONST and PURE calls do not need one.
7767 We don't currently check for CONST and PURE here, although
7768 it would be a good idea, because those attributes are
7769 figured out from the RTL in mark_constant_function, and
7770 the counter-incrementing code from -fprofile-arcs
7771 leads to different results from -fbranch-probabilities. */
7772 if (is_gimple_call (t))
7773 {
7774 fndecl = gimple_call_fndecl (t);
7775 call_flags = gimple_call_flags (t);
7776 }
7777
7778 if (is_gimple_call (t)
7779 && fndecl
7780 && DECL_BUILT_IN (fndecl)
7781 && (call_flags & ECF_NOTHROW)
7782 && !(call_flags & ECF_RETURNS_TWICE)
7783 /* fork() doesn't really return twice, but the effect of
7784 wrapping it in __gcov_fork() which calls __gcov_flush()
7785 and clears the counters before forking has the same
7786 effect as returning twice. Force a fake edge. */
7787 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7788 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7789 return false;
7790
7791 if (is_gimple_call (t))
7792 {
7793 edge_iterator ei;
7794 edge e;
7795 basic_block bb;
7796
7797 if (!(call_flags & ECF_NORETURN))
7798 return true;
7799
7800 bb = gimple_bb (t);
7801 FOR_EACH_EDGE (e, ei, bb->succs)
7802 if ((e->flags & EDGE_FAKE) == 0)
7803 return true;
7804 }
7805
7806 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7807 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7808 return true;
7809
7810 return false;
7811 }
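
/* Example (illustrative): for a block whose statements are

       foo ();
       x_1 = 1;

   where foo is an ordinary non-const call that might call exit (),
   need_fake_edge_p is true for the call, so gimple_flow_call_edges_add
   below splits the block after the call and adds a fake edge to EXIT,
   making it explicit to the block profiler that `x_1 = 1' need not be
   executed.  */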
7812
7813
7814 /* Add fake edges to the function exit for any non-constant and
7815 non-noreturn calls (or noreturn calls with EH/abnormal edges) and
7816 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7817 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7818 that were split.
7819
7820 The goal is to expose cases in which entering a basic block does
7821 not imply that all subsequent instructions must be executed. */
7822
7823 static int
7824 gimple_flow_call_edges_add (sbitmap blocks)
7825 {
7826 int i;
7827 int blocks_split = 0;
7828 int last_bb = last_basic_block_for_fn (cfun);
7829 bool check_last_block = false;
7830
7831 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7832 return 0;
7833
7834 if (! blocks)
7835 check_last_block = true;
7836 else
7837 check_last_block = bitmap_bit_p (blocks,
7838 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7839
7840 /* In the last basic block, before epilogue generation, there will be
7841 a fallthru edge to EXIT. Special care is required if the last insn
7842 of the last basic block is a call, because make_edge folds duplicate
7843 edges: the fallthru edge would also be marked fake, and then
7844 removed by remove_fake_edges, which would leave behind an
7845 invalid CFG.
7846
7847 Moreover, we can't elide the outgoing fake edge, since the block
7848 profiler needs to take this into account in order to solve the minimal
7849 spanning tree in the case that the call doesn't return.
7850
7851 Handle this by adding a dummy instruction in a new last basic block. */
7852 if (check_last_block)
7853 {
7854 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7855 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7856 gimple *t = NULL;
7857
7858 if (!gsi_end_p (gsi))
7859 t = gsi_stmt (gsi);
7860
7861 if (t && need_fake_edge_p (t))
7862 {
7863 edge e;
7864
7865 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7866 if (e)
7867 {
7868 gsi_insert_on_edge (e, gimple_build_nop ());
7869 gsi_commit_edge_inserts ();
7870 }
7871 }
7872 }
7873
7874 /* Now add fake edges to the function exit for any non-constant
7875 calls, since there is no way that we can determine if they will
7876 return or not... */
7877 for (i = 0; i < last_bb; i++)
7878 {
7879 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7880 gimple_stmt_iterator gsi;
7881 gimple *stmt, *last_stmt;
7882
7883 if (!bb)
7884 continue;
7885
7886 if (blocks && !bitmap_bit_p (blocks, i))
7887 continue;
7888
7889 gsi = gsi_last_nondebug_bb (bb);
7890 if (!gsi_end_p (gsi))
7891 {
7892 last_stmt = gsi_stmt (gsi);
7893 do
7894 {
7895 stmt = gsi_stmt (gsi);
7896 if (need_fake_edge_p (stmt))
7897 {
7898 edge e;
7899
7900 /* The handling above of the final block before the
7901 epilogue should be enough to verify that there is
7902 no edge to the exit block in CFG already.
7903 Calling make_edge in such case would cause us to
7904 mark that edge as fake and remove it later. */
7905 if (flag_checking && stmt == last_stmt)
7906 {
7907 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7908 gcc_assert (e == NULL);
7909 }
7910
7911 /* Note that the following may create a new basic block
7912 and renumber the existing basic blocks. */
7913 if (stmt != last_stmt)
7914 {
7915 e = split_block (bb, stmt);
7916 if (e)
7917 blocks_split++;
7918 }
7919 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7920 }
7921 gsi_prev (&gsi);
7922 }
7923 while (!gsi_end_p (gsi));
7924 }
7925 }
7926
7927 if (blocks_split)
7928 verify_flow_info ();
7929
7930 return blocks_split;
7931 }
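
/* For illustration (a hypothetical sketch, not taken from the code above):
   given a block whose last statement is a call that may not return, this
   pass leaves the CFG looking roughly like

       bb_2:  x = foo ();        bb_2 --(EDGE_FAKE)--> EXIT
              ...                bb_2 --(fallthru)---> bb_3

   so that consumers such as the block profiler see that control may leave
   bb_2 without ever reaching bb_3. The fake edges are removed again by
   remove_fake_edges. */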
7932
7933 /* Removes edge E and all the blocks dominated by it, and updates dominance
7934 information. The IL in E->src needs to be updated separately.
7935 If dominance info is not available, only the edge E is removed. */
7936
7937 void
7938 remove_edge_and_dominated_blocks (edge e)
7939 {
7940 vec<basic_block> bbs_to_remove = vNULL;
7941 vec<basic_block> bbs_to_fix_dom = vNULL;
7942 bitmap df, df_idom;
7943 edge f;
7944 edge_iterator ei;
7945 bool none_removed = false;
7946 unsigned i;
7947 basic_block bb, dbb;
7948 bitmap_iterator bi;
7949
7950 /* If we are removing a path inside a non-root loop, that may change
7951 loop ownership of blocks or remove loops. Mark loops for fixup. */
7952 if (current_loops
7953 && loop_outer (e->src->loop_father) != NULL
7954 && e->src->loop_father == e->dest->loop_father)
7955 loops_state_set (LOOPS_NEED_FIXUP);
7956
7957 if (!dom_info_available_p (CDI_DOMINATORS))
7958 {
7959 remove_edge (e);
7960 return;
7961 }
7962
7963 /* No updating is needed for edges to exit. */
7964 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7965 {
7966 if (cfgcleanup_altered_bbs)
7967 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7968 remove_edge (e);
7969 return;
7970 }
7971
7972 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7973 that is not dominated by E->dest, then this set is empty. Otherwise,
7974 all the basic blocks dominated by E->dest are removed.
7975
7976 Also, to DF_IDOM we store the immediate dominators of the blocks in
7977 the dominance frontier of E (i.e., of the successors of the
7978 removed blocks, if there are any, and of E->dest otherwise). */
7979 FOR_EACH_EDGE (f, ei, e->dest->preds)
7980 {
7981 if (f == e)
7982 continue;
7983
7984 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7985 {
7986 none_removed = true;
7987 break;
7988 }
7989 }
7990
7991 df = BITMAP_ALLOC (NULL);
7992 df_idom = BITMAP_ALLOC (NULL);
7993
7994 if (none_removed)
7995 bitmap_set_bit (df_idom,
7996 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7997 else
7998 {
7999 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8000 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8001 {
8002 FOR_EACH_EDGE (f, ei, bb->succs)
8003 {
8004 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8005 bitmap_set_bit (df, f->dest->index);
8006 }
8007 }
8008 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8009 bitmap_clear_bit (df, bb->index);
8010
8011 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8012 {
8013 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8014 bitmap_set_bit (df_idom,
8015 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8016 }
8017 }
8018
8019 if (cfgcleanup_altered_bbs)
8020 {
8021 /* Record the set of the altered basic blocks. */
8022 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8023 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8024 }
8025
8026 /* Remove E and the cancelled blocks. */
8027 if (none_removed)
8028 remove_edge (e);
8029 else
8030 {
8031 /* Walk backwards so as to get a chance to substitute all
8032 released DEFs into debug stmts. See
8033 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8034 details. */
8035 for (i = bbs_to_remove.length (); i-- > 0; )
8036 delete_basic_block (bbs_to_remove[i]);
8037 }
8038
8039 /* Update the dominance information. The immediate dominator may change only
8040 for blocks whose immediate dominator belongs to DF_IDOM:
8041
8042 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8043 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8044 Z dominates X after the removal. Before removal, there exists a path P
8045 from Y to X that avoids Z. Let F be the last edge on P that is
8046 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8047 dominates W, and because of P, Z does not dominate W), and W belongs to
8048 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
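
/* As a concrete (hypothetical) example of the argument above: in the CFG

        Y
       / \
      W   Z
       \ /
        X

   removing the edge Y->W also removes W, whose successor X then appears
   in DF. Since idom(X) = Y, Y is recorded in DF_IDOM, and the walk below
   re-fixes the dominator sons of Y, yielding the new idom(X) = Z. */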
8049 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8050 {
8051 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8052 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8053 dbb;
8054 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8055 bbs_to_fix_dom.safe_push (dbb);
8056 }
8057
8058 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8059
8060 BITMAP_FREE (df);
8061 BITMAP_FREE (df_idom);
8062 bbs_to_remove.release ();
8063 bbs_to_fix_dom.release ();
8064 }
8065
8066 /* Purge dead EH edges from basic block BB. */
8067
8068 bool
8069 gimple_purge_dead_eh_edges (basic_block bb)
8070 {
8071 bool changed = false;
8072 edge e;
8073 edge_iterator ei;
8074 gimple *stmt = last_stmt (bb);
8075
8076 if (stmt && stmt_can_throw_internal (stmt))
8077 return false;
8078
8079 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8080 {
8081 if (e->flags & EDGE_EH)
8082 {
8083 remove_edge_and_dominated_blocks (e);
8084 changed = true;
8085 }
8086 else
8087 ei_next (&ei);
8088 }
8089
8090 return changed;
8091 }
8092
8093 /* Purge dead EH edges from basic blocks listed in BLOCKS. */
8094
8095 bool
8096 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8097 {
8098 bool changed = false;
8099 unsigned i;
8100 bitmap_iterator bi;
8101
8102 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8103 {
8104 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8105
8106 /* Earlier gimple_purge_dead_eh_edges could have removed
8107 this basic block already. */
8108 gcc_assert (bb || changed);
8109 if (bb != NULL)
8110 changed |= gimple_purge_dead_eh_edges (bb);
8111 }
8112
8113 return changed;
8114 }
8115
8116 /* Purge dead abnormal call edges from basic block BB. */
8117
8118 bool
8119 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8120 {
8121 bool changed = false;
8122 edge e;
8123 edge_iterator ei;
8124 gimple *stmt = last_stmt (bb);
8125
8126 if (!cfun->has_nonlocal_label
8127 && !cfun->calls_setjmp)
8128 return false;
8129
8130 if (stmt && stmt_can_make_abnormal_goto (stmt))
8131 return false;
8132
8133 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8134 {
8135 if (e->flags & EDGE_ABNORMAL)
8136 {
8137 if (e->flags & EDGE_FALLTHRU)
8138 e->flags &= ~EDGE_ABNORMAL;
8139 else
8140 remove_edge_and_dominated_blocks (e);
8141 changed = true;
8142 }
8143 else
8144 ei_next (&ei);
8145 }
8146
8147 return changed;
8148 }
8149
8150 /* Purge dead abnormal call edges from basic blocks listed in BLOCKS. */
8151
8152 bool
8153 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8154 {
8155 bool changed = false;
8156 unsigned i;
8157 bitmap_iterator bi;
8158
8159 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8160 {
8161 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8162
8163 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8164 this basic block already. */
8165 gcc_assert (bb || changed);
8166 if (bb != NULL)
8167 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8168 }
8169
8170 return changed;
8171 }
8172
8173 /* This function is called whenever a new edge is created or
8174 redirected. */
8175
8176 static void
8177 gimple_execute_on_growing_pred (edge e)
8178 {
8179 basic_block bb = e->dest;
8180
8181 if (!gimple_seq_empty_p (phi_nodes (bb)))
8182 reserve_phi_args_for_new_edge (bb);
8183 }
8184
8185 /* This function is called immediately before edge E is removed from
8186 the edge vector E->dest->preds. */
8187
8188 static void
8189 gimple_execute_on_shrinking_pred (edge e)
8190 {
8191 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8192 remove_phi_args (e);
8193 }
8194
8195 /*---------------------------------------------------------------------------
8196 Helper functions for Loop versioning
8197 ---------------------------------------------------------------------------*/
8198
8199 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8200 of 'first'. Both of them are dominated by 'new_head' basic block. When
8201 'new_head' was created by splitting 'second's incoming edge, split_edge()
8202 carried phi arguments over to the resulting 'new_head'-to-'second' edge.
8203 Later, an additional edge 'e' was created to connect 'new_head' and
8204 'first'. Now this routine adds to edge 'e' the same phi args that the
8205 'new_head'-to-'second' edge received as part of the edge splitting. */
8206
8207 static void
8208 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8209 basic_block new_head, edge e)
8210 {
8211 gphi *phi1, *phi2;
8212 gphi_iterator psi1, psi2;
8213 tree def;
8214 edge e2 = find_edge (new_head, second);
8215
8216 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8217 edge, we should always have an edge from NEW_HEAD to SECOND. */
8218 gcc_assert (e2 != NULL);
8219
8220 /* Browse all 'second' basic block phi nodes and add phi args to
8221 edge 'e' for 'first' head. PHI args are always in correct order. */
8222
8223 for (psi2 = gsi_start_phis (second),
8224 psi1 = gsi_start_phis (first);
8225 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8226 gsi_next (&psi2), gsi_next (&psi1))
8227 {
8228 phi1 = psi1.phi ();
8229 phi2 = psi2.phi ();
8230 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8231 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8232 }
8233 }
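
/* For example (hypothetical GIMPLE): if SECOND contains

     x_1 = PHI <a_2 (new_head), b_3 (latch)>

   the loop above copies the argument a_2, which arrives via the
   NEW_HEAD->SECOND edge E2, onto the new edge E into FIRST, so the
   corresponding PHI in FIRST also receives a_2 from NEW_HEAD. */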
8234
8235
8236 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8237 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8238 the destination of the ELSE part. */
8239
8240 static void
8241 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8242 basic_block second_head ATTRIBUTE_UNUSED,
8243 basic_block cond_bb, void *cond_e)
8244 {
8245 gimple_stmt_iterator gsi;
8246 gimple *new_cond_expr;
8247 tree cond_expr = (tree) cond_e;
8248 edge e0;
8249
8250 /* Build new conditional expr */
8251 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8252 NULL_TREE, NULL_TREE);
8253
8254 /* Add new cond in cond_bb. */
8255 gsi = gsi_last_bb (cond_bb);
8256 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8257
8258 /* Adjust edges appropriately to connect new head with first head
8259 as well as second head. */
8260 e0 = single_succ_edge (cond_bb);
8261 e0->flags &= ~EDGE_FALLTHRU;
8262 e0->flags |= EDGE_FALSE_VALUE;
8263 }
8264
8265
8266 /* Do book-keeping of basic block BB for the profile consistency checker.
8267 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8268 do post-pass accounting. Store the counts in RECORD. */
8269 static void
8270 gimple_account_profile_record (basic_block bb, int after_pass,
8271 struct profile_record *record)
8272 {
8273 gimple_stmt_iterator i;
8274 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8275 {
8276 record->size[after_pass]
8277 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8278 if (profile_status_for_fn (cfun) == PROFILE_READ)
8279 record->time[after_pass]
8280 += estimate_num_insns (gsi_stmt (i),
8281 &eni_time_weights) * bb->count;
8282 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8283 record->time[after_pass]
8284 += estimate_num_insns (gsi_stmt (i),
8285 &eni_time_weights) * bb->frequency;
8286 }
8287 }
8288
8289 struct cfg_hooks gimple_cfg_hooks = {
8290 "gimple",
8291 gimple_verify_flow_info,
8292 gimple_dump_bb, /* dump_bb */
8293 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8294 create_bb, /* create_basic_block */
8295 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8296 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8297 gimple_can_remove_branch_p, /* can_remove_branch_p */
8298 remove_bb, /* delete_basic_block */
8299 gimple_split_block, /* split_block */
8300 gimple_move_block_after, /* move_block_after */
8301 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8302 gimple_merge_blocks, /* merge_blocks */
8303 gimple_predict_edge, /* predict_edge */
8304 gimple_predicted_by_p, /* predicted_by_p */
8305 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8306 gimple_duplicate_bb, /* duplicate_block */
8307 gimple_split_edge, /* split_edge */
8308 gimple_make_forwarder_block, /* make_forwarder_block */
8309 NULL, /* tidy_fallthru_edge */
8310 NULL, /* force_nonfallthru */
8311 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8312 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8313 gimple_flow_call_edges_add, /* flow_call_edges_add */
8314 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8315 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8316 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8317 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8318 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8319 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8320 flush_pending_stmts, /* flush_pending_stmts */
8321 gimple_empty_block_p, /* block_empty_p */
8322 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8323 gimple_account_profile_record,
8324 };
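
/* A minimal sketch of how the table above is consumed (see cfghooks.c
   for the real dispatchers): generic CFG routines forward through the
   currently installed hooks, roughly

     ret = cfg_hooks->split_block (bb, i);   // gimple_split_block here

   so the same callers work unchanged on GIMPLE and RTL CFGs. */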
8325
8326
8327 /* Split all critical edges. */
8328
8329 unsigned int
8330 split_critical_edges (void)
8331 {
8332 basic_block bb;
8333 edge e;
8334 edge_iterator ei;
8335
8336 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8337 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8338 mappings around the calls to split_edge. */
8339 start_recording_case_labels ();
8340 FOR_ALL_BB_FN (bb, cfun)
8341 {
8342 FOR_EACH_EDGE (e, ei, bb->succs)
8343 {
8344 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8345 split_edge (e);
8346 /* PRE inserts statements on edges and expects that,
8347 since split_critical_edges was done beforehand, committing edge
8348 insertions will not split more edges. In addition to critical
8349 edges we must split edges whose source block has multiple
8350 successors and ends with a control flow statement, such as RESX.
8351 Go ahead and split them too. This matches the logic in
8352 gimple_find_edge_insert_loc. */
8353 else if ((!single_pred_p (e->dest)
8354 || !gimple_seq_empty_p (phi_nodes (e->dest))
8355 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8356 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8357 && !(e->flags & EDGE_ABNORMAL))
8358 {
8359 gimple_stmt_iterator gsi;
8360
8361 gsi = gsi_last_bb (e->src);
8362 if (!gsi_end_p (gsi)
8363 && stmt_ends_bb_p (gsi_stmt (gsi))
8364 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8365 && !gimple_call_builtin_p (gsi_stmt (gsi),
8366 BUILT_IN_RETURN)))
8367 split_edge (e);
8368 }
8369 }
8370 }
8371 end_recording_case_labels ();
8372 return 0;
8373 }
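
/* For reference, an illustrative sketch of what EDGE_CRITICAL_P tests:
   an edge is critical when its source has more than one successor and
   its destination has more than one predecessor, e.g. A->D in

       A   B
      / \ /
     C   D

   No statement can be placed on A->D without also affecting the A->C
   or B->D paths, so split_edge interposes a new block on it. */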
8374
8375 namespace {
8376
8377 const pass_data pass_data_split_crit_edges =
8378 {
8379 GIMPLE_PASS, /* type */
8380 "crited", /* name */
8381 OPTGROUP_NONE, /* optinfo_flags */
8382 TV_TREE_SPLIT_EDGES, /* tv_id */
8383 PROP_cfg, /* properties_required */
8384 PROP_no_crit_edges, /* properties_provided */
8385 0, /* properties_destroyed */
8386 0, /* todo_flags_start */
8387 0, /* todo_flags_finish */
8388 };
8389
8390 class pass_split_crit_edges : public gimple_opt_pass
8391 {
8392 public:
8393 pass_split_crit_edges (gcc::context *ctxt)
8394 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8395 {}
8396
8397 /* opt_pass methods: */
8398 virtual unsigned int execute (function *) { return split_critical_edges (); }
8399
8400 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8401 }; // class pass_split_crit_edges
8402
8403 } // anon namespace
8404
8405 gimple_opt_pass *
8406 make_pass_split_crit_edges (gcc::context *ctxt)
8407 {
8408 return new pass_split_crit_edges (ctxt);
8409 }
8410
8411
8412 /* Insert COND expression which is GIMPLE_COND after STMT
8413 in basic block BB with appropriate basic block split
8414 and creation of a new conditionally executed basic block.
8415 Return created basic block. */
8416 basic_block
8417 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8418 {
8419 edge fall = split_block (bb, stmt);
8420 gimple_stmt_iterator iter = gsi_last_bb (bb);
8421 basic_block new_bb;
8422
8423 /* Insert cond statement. */
8424 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8425 if (gsi_end_p (iter))
8426 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8427 else
8428 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8429
8430 /* Create conditionally executed block. */
8431 new_bb = create_empty_bb (bb);
8432 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8433 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8434
8435 /* Fix edge for split bb. */
8436 fall->flags = EDGE_FALSE_VALUE;
8437
8438 /* Update dominance info. */
8439 if (dom_info_available_p (CDI_DOMINATORS))
8440 {
8441 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8442 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8443 }
8444
8445 /* Update loop info. */
8446 if (current_loops)
8447 add_bb_to_loop (new_bb, bb->loop_father);
8448
8449 return new_bb;
8450 }
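
/* A hypothetical usage sketch (FLAG_VAR and the surrounding names are
   invented for illustration): to guard newly emitted code behind a
   runtime test placed after STMT, a caller might do

     gcond *cond = gimple_build_cond (NE_EXPR, flag_var,
                                      integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   Statements added to THEN_BB then execute only when the condition is
   true, and control rejoins the fallthrough block either way. */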
8451
8452 /* Build a ternary operation and gimplify it. Emit code before GSI.
8453 Return the gimple_val holding the result. */
8454
8455 tree
8456 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8457 tree type, tree a, tree b, tree c)
8458 {
8459 tree ret;
8460 location_t loc = gimple_location (gsi_stmt (*gsi));
8461
8462 ret = fold_build3_loc (loc, code, type, a, b, c);
8463 STRIP_NOPS (ret);
8464
8465 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8466 GSI_SAME_STMT);
8467 }
8468
8469 /* Build a binary operation and gimplify it. Emit code before GSI.
8470 Return the gimple_val holding the result. */
8471
8472 tree
8473 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8474 tree type, tree a, tree b)
8475 {
8476 tree ret;
8477
8478 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8479 STRIP_NOPS (ret);
8480
8481 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8482 GSI_SAME_STMT);
8483 }
8484
8485 /* Build a unary operation and gimplify it. Emit code before GSI.
8486 Return the gimple_val holding the result. */
8487
8488 tree
8489 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8490 tree a)
8491 {
8492 tree ret;
8493
8494 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8495 STRIP_NOPS (ret);
8496
8497 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8498 GSI_SAME_STMT);
8499 }
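
/* A small illustrative sketch (A, B and TYPE are hypothetical): these
   helpers fold first and gimplify only what remains, e.g.

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Any statements needed to materialize the values are emitted before
   the statement at GSI, and the returned trees are valid gimple
   values. */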
8500
8501
8502 \f
8503 /* Given a basic block B which ends with a conditional and has
8504 precisely two successors, determine which of the edges is taken if
8505 the conditional is true and which is taken if the conditional is
8506 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8507
8508 void
8509 extract_true_false_edges_from_block (basic_block b,
8510 edge *true_edge,
8511 edge *false_edge)
8512 {
8513 edge e = EDGE_SUCC (b, 0);
8514
8515 if (e->flags & EDGE_TRUE_VALUE)
8516 {
8517 *true_edge = e;
8518 *false_edge = EDGE_SUCC (b, 1);
8519 }
8520 else
8521 {
8522 *false_edge = e;
8523 *true_edge = EDGE_SUCC (b, 1);
8524 }
8525 }
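
/* Typical usage (a sketch): after verifying that B ends in a
   GIMPLE_COND, a caller can write

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   to obtain the destinations reached when the condition holds and when
   it fails. */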
8526
8527
8528 /* From a controlling predicate in the immediate dominator DOM of
8529 PHIBLOCK, determine the edges into PHIBLOCK that are taken when the
8530 predicate evaluates to true and to false, and store them in
8531 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8532 they are non-NULL. Return true if the edges can be determined,
8533 else false. */
8534
8535 bool
8536 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8537 edge *true_controlled_edge,
8538 edge *false_controlled_edge)
8539 {
8540 basic_block bb = phiblock;
8541 edge true_edge, false_edge, tem;
8542 edge e0 = NULL, e1 = NULL;
8543
8544 /* We have to verify that one edge into the PHI node is dominated
8545 by the true edge of the predicate block and the other edge
8546 dominated by the false edge. This ensures that the PHI argument
8547 we are going to take is completely determined by the path we
8548 take from the predicate block.
8549 We can only use BB dominance checks below if the destination of
8550 the true/false edges are dominated by their edge, thus only
8551 have a single predecessor. */
8552 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8553 tem = EDGE_PRED (bb, 0);
8554 if (tem == true_edge
8555 || (single_pred_p (true_edge->dest)
8556 && (tem->src == true_edge->dest
8557 || dominated_by_p (CDI_DOMINATORS,
8558 tem->src, true_edge->dest))))
8559 e0 = tem;
8560 else if (tem == false_edge
8561 || (single_pred_p (false_edge->dest)
8562 && (tem->src == false_edge->dest
8563 || dominated_by_p (CDI_DOMINATORS,
8564 tem->src, false_edge->dest))))
8565 e1 = tem;
8566 else
8567 return false;
8568 tem = EDGE_PRED (bb, 1);
8569 if (tem == true_edge
8570 || (single_pred_p (true_edge->dest)
8571 && (tem->src == true_edge->dest
8572 || dominated_by_p (CDI_DOMINATORS,
8573 tem->src, true_edge->dest))))
8574 e0 = tem;
8575 else if (tem == false_edge
8576 || (single_pred_p (false_edge->dest)
8577 && (tem->src == false_edge->dest
8578 || dominated_by_p (CDI_DOMINATORS,
8579 tem->src, false_edge->dest))))
8580 e1 = tem;
8581 else
8582 return false;
8583 if (!e0 || !e1)
8584 return false;
8585
8586 if (true_controlled_edge)
8587 *true_controlled_edge = e0;
8588 if (false_controlled_edge)
8589 *false_controlled_edge = e1;
8590
8591 return true;
8592 }
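
/* An illustrative (hypothetical) diamond for the checks above:

           DOM: if (p_1)
             /        \
        true_edge  false_edge
            |          |
          T_BB        F_BB
             \        /
             PHIBLOCK

   Each of PHIBLOCK's two incoming edges is dominated by exactly one of
   DOM's outgoing edges, so the PHI argument selected in PHIBLOCK is
   fully determined by the value of the predicate p_1. */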
8593
8594
8595
8596 /* Emit return warnings. */
8597
8598 namespace {
8599
8600 const pass_data pass_data_warn_function_return =
8601 {
8602 GIMPLE_PASS, /* type */
8603 "*warn_function_return", /* name */
8604 OPTGROUP_NONE, /* optinfo_flags */
8605 TV_NONE, /* tv_id */
8606 PROP_cfg, /* properties_required */
8607 0, /* properties_provided */
8608 0, /* properties_destroyed */
8609 0, /* todo_flags_start */
8610 0, /* todo_flags_finish */
8611 };
8612
8613 class pass_warn_function_return : public gimple_opt_pass
8614 {
8615 public:
8616 pass_warn_function_return (gcc::context *ctxt)
8617 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8618 {}
8619
8620 /* opt_pass methods: */
8621 virtual unsigned int execute (function *);
8622
8623 }; // class pass_warn_function_return
8624
8625 unsigned int
8626 pass_warn_function_return::execute (function *fun)
8627 {
8628 source_location location;
8629 gimple *last;
8630 edge e;
8631 edge_iterator ei;
8632
8633 if (!targetm.warn_func_return (fun->decl))
8634 return 0;
8635
8636 /* If we have a path to EXIT, then we do return. */
8637 if (TREE_THIS_VOLATILE (fun->decl)
8638 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8639 {
8640 location = UNKNOWN_LOCATION;
8641 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8642 {
8643 last = last_stmt (e->src);
8644 if ((gimple_code (last) == GIMPLE_RETURN
8645 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8646 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8647 break;
8648 }
8649 if (location == UNKNOWN_LOCATION)
8650 location = cfun->function_end_locus;
8651 warning_at (location, 0, "%<noreturn%> function does return");
8652 }
8653
8654 /* If we see "return;" in some basic block, then we do reach the end
8655 without returning a value. */
8656 else if (warn_return_type
8657 && !TREE_NO_WARNING (fun->decl)
8658 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8659 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8660 {
8661 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8662 {
8663 gimple *last = last_stmt (e->src);
8664 greturn *return_stmt = dyn_cast <greturn *> (last);
8665 if (return_stmt
8666 && gimple_return_retval (return_stmt) == NULL
8667 && !gimple_no_warning_p (last))
8668 {
8669 location = gimple_location (last);
8670 if (location == UNKNOWN_LOCATION)
8671 location = fun->function_end_locus;
8672 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8673 TREE_NO_WARNING (fun->decl) = 1;
8674 break;
8675 }
8676 }
8677 }
8678 return 0;
8679 }
8680
8681 } // anon namespace
8682
8683 gimple_opt_pass *
8684 make_pass_warn_function_return (gcc::context *ctxt)
8685 {
8686 return new pass_warn_function_return (ctxt);
8687 }
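
/* For example, the pass above diagnoses source such as

     int f (int x) { if (x) return 1; }
     // warning: control reaches end of non-void function [-Wreturn-type]

   because gimplification adds an implicit valueless return on the
   fallthrough path. For a function declared noreturn whose exit block
   is reachable, it instead warns that the noreturn function does
   return. */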
8688
8689 /* Walk a gimplified function and warn about calls whose return value is
8690 ignored when the callee has attribute((warn_unused_result)) set. This
8691 is done before inlining, so we don't have to worry about that. */
8692
8693 static void
8694 do_warn_unused_result (gimple_seq seq)
8695 {
8696 tree fdecl, ftype;
8697 gimple_stmt_iterator i;
8698
8699 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8700 {
8701 gimple *g = gsi_stmt (i);
8702
8703 switch (gimple_code (g))
8704 {
8705 case GIMPLE_BIND:
8706 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8707 break;
8708 case GIMPLE_TRY:
8709 do_warn_unused_result (gimple_try_eval (g));
8710 do_warn_unused_result (gimple_try_cleanup (g));
8711 break;
8712 case GIMPLE_CATCH:
8713 do_warn_unused_result (gimple_catch_handler (
8714 as_a <gcatch *> (g)));
8715 break;
8716 case GIMPLE_EH_FILTER:
8717 do_warn_unused_result (gimple_eh_filter_failure (g));
8718 break;
8719
8720 case GIMPLE_CALL:
8721 if (gimple_call_lhs (g))
8722 break;
8723 if (gimple_call_internal_p (g))
8724 break;
8725
8726 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8727 LHS. All calls whose value is ignored should be
8728 represented like this. Look for the attribute. */
8729 fdecl = gimple_call_fndecl (g);
8730 ftype = gimple_call_fntype (g);
8731
8732 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8733 {
8734 location_t loc = gimple_location (g);
8735
8736 if (fdecl)
8737 warning_at (loc, OPT_Wunused_result,
8738 "ignoring return value of %qD, "
8739 "declared with attribute warn_unused_result",
8740 fdecl);
8741 else
8742 warning_at (loc, OPT_Wunused_result,
8743 "ignoring return value of function "
8744 "declared with attribute warn_unused_result");
8745 }
8746 break;
8747
8748 default:
8749 /* Not a container, not a call, or a call whose value is used. */
8750 break;
8751 }
8752 }
8753 }
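
/* For example, given

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }
     // warning: ignoring return value of 'f' [-Wunused-result]

   the bare call to f is a GIMPLE_CALL without an LHS, so the attribute
   lookup above fires. Using the result, e.g. int r = f ();, keeps the
   call out of the naked-call case and no warning is issued. */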
8754
8755 namespace {
8756
8757 const pass_data pass_data_warn_unused_result =
8758 {
8759 GIMPLE_PASS, /* type */
8760 "*warn_unused_result", /* name */
8761 OPTGROUP_NONE, /* optinfo_flags */
8762 TV_NONE, /* tv_id */
8763 PROP_gimple_any, /* properties_required */
8764 0, /* properties_provided */
8765 0, /* properties_destroyed */
8766 0, /* todo_flags_start */
8767 0, /* todo_flags_finish */
8768 };
8769
8770 class pass_warn_unused_result : public gimple_opt_pass
8771 {
8772 public:
8773 pass_warn_unused_result (gcc::context *ctxt)
8774 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8775 {}
8776
8777 /* opt_pass methods: */
8778 virtual bool gate (function *) { return flag_warn_unused_result; }
8779 virtual unsigned int execute (function *)
8780 {
8781 do_warn_unused_result (gimple_body (current_function_decl));
8782 return 0;
8783 }
8784
8785 }; // class pass_warn_unused_result
8786
8787 } // anon namespace
8788
8789 gimple_opt_pass *
8790 make_pass_warn_unused_result (gcc::context *ctxt)
8791 {
8792 return new pass_warn_unused_result (ctxt);
8793 }
8794
8795 /* IPA passes, compilation of earlier functions or inlining
8796 might have changed some properties, such as marking functions nothrow,
8797 pure, const or noreturn.
8798 Remove redundant edges and basic blocks, and create new ones if necessary.
8799
8800 This pass can't be executed as a standalone pass from the pass manager,
8801 because between inlining and this fixup verify_flow_info would fail. */
8802
8803 unsigned int
8804 execute_fixup_cfg (void)
8805 {
8806 basic_block bb;
8807 gimple_stmt_iterator gsi;
8808 int todo = 0;
8809 gcov_type count_scale;
8810 edge e;
8811 edge_iterator ei;
8812
8813 count_scale
8814 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8815 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8816
8817 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8818 cgraph_node::get (current_function_decl)->count;
8819 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8820 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8821 count_scale);
8822
8823 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8824 e->count = apply_scale (e->count, count_scale);
8825
8826 FOR_EACH_BB_FN (bb, cfun)
8827 {
8828 bb->count = apply_scale (bb->count, count_scale);
8829 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8830 {
8831 gimple *stmt = gsi_stmt (gsi);
8832 tree decl = is_gimple_call (stmt)
8833 ? gimple_call_fndecl (stmt)
8834 : NULL;
8835 if (decl)
8836 {
8837 int flags = gimple_call_flags (stmt);
8838 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8839 {
8840 if (gimple_purge_dead_abnormal_call_edges (bb))
8841 todo |= TODO_cleanup_cfg;
8842
8843 if (gimple_in_ssa_p (cfun))
8844 {
8845 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8846 update_stmt (stmt);
8847 }
8848 }
8849
8850 if (flags & ECF_NORETURN
8851 && fixup_noreturn_call (stmt))
8852 todo |= TODO_cleanup_cfg;
8853 }
8854
8855 /* Remove stores to variables we marked write-only.
8856 Keep the access when the store has a side effect, i.e. when the source
8857 is volatile. */
8858 if (gimple_store_p (stmt)
8859 && !gimple_has_side_effects (stmt))
8860 {
8861 tree lhs = get_base_address (gimple_get_lhs (stmt));
8862
8863 if (TREE_CODE (lhs) == VAR_DECL
8864 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8865 && varpool_node::get (lhs)->writeonly)
8866 {
8867 unlink_stmt_vdef (stmt);
8868 gsi_remove (&gsi, true);
8869 release_defs (stmt);
8870 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8871 continue;
8872 }
8873 }
8874 /* For calls we can simply remove the LHS when it is known
8875 to be write-only. */
8876 if (is_gimple_call (stmt)
8877 && gimple_get_lhs (stmt))
8878 {
8879 tree lhs = get_base_address (gimple_get_lhs (stmt));
8880
8881 if (TREE_CODE (lhs) == VAR_DECL
8882 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8883 && varpool_node::get (lhs)->writeonly)
8884 {
8885 gimple_call_set_lhs (stmt, NULL);
8886 update_stmt (stmt);
8887 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8888 }
8889 }
8890
8891 if (maybe_clean_eh_stmt (stmt)
8892 && gimple_purge_dead_eh_edges (bb))
8893 todo |= TODO_cleanup_cfg;
8894 gsi_next (&gsi);
8895 }
8896
8897 FOR_EACH_EDGE (e, ei, bb->succs)
8898 e->count = apply_scale (e->count, count_scale);
8899
8900 /* If we have a basic block with no successors that does not
8901 end with a control statement or a noreturn call, end it with
8902 a call to __builtin_unreachable. This situation can occur
8903 when inlining a noreturn call that does in fact return. */
8904 if (EDGE_COUNT (bb->succs) == 0)
8905 {
8906 gimple *stmt = last_stmt (bb);
8907 if (!stmt
8908 || (!is_ctrl_stmt (stmt)
8909 && (!is_gimple_call (stmt)
8910 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8911 {
8912 if (stmt && is_gimple_call (stmt))
8913 gimple_call_set_ctrl_altering (stmt, false);
8914 stmt = gimple_build_call
8915 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8916 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8917 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8918 }
8919 }
8920 }
8921 if (count_scale != REG_BR_PROB_BASE)
8922 compute_function_frequency ();
8923
8924 if (current_loops
8925 && (todo & TODO_cleanup_cfg))
8926 loops_state_set (LOOPS_NEED_FIXUP);
8927
8928 return todo;
8929 }
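
/* As an illustration (hypothetical source) of the __builtin_unreachable
   insertion above: after inlining

     void f (void) __attribute__ ((noreturn));
     void f (void) { }   // declared noreturn, but actually returns

   into a caller, the inlined body can leave a block with no successors
   that ends in neither a control statement nor a noreturn call; the
   loop above terminates such a block with __builtin_unreachable (). */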
8930
8931 namespace {
8932
8933 const pass_data pass_data_fixup_cfg =
8934 {
8935 GIMPLE_PASS, /* type */
8936 "fixup_cfg", /* name */
8937 OPTGROUP_NONE, /* optinfo_flags */
8938 TV_NONE, /* tv_id */
8939 PROP_cfg, /* properties_required */
8940 0, /* properties_provided */
8941 0, /* properties_destroyed */
8942 0, /* todo_flags_start */
8943 0, /* todo_flags_finish */
8944 };
8945
8946 class pass_fixup_cfg : public gimple_opt_pass
8947 {
8948 public:
8949 pass_fixup_cfg (gcc::context *ctxt)
8950 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8951 {}
8952
8953 /* opt_pass methods: */
8954 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8955 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8956
8957 }; // class pass_fixup_cfg
8958
8959 } // anon namespace
8960
8961 gimple_opt_pass *
8962 make_pass_fixup_cfg (gcc::context *ctxt)
8963 {
8964 return new pass_fixup_cfg (ctxt);
8965 }
8966
8967 /* Garbage collection support for edge_def. */
8968
8969 extern void gt_ggc_mx (tree&);
8970 extern void gt_ggc_mx (gimple *&);
8971 extern void gt_ggc_mx (rtx&);
8972 extern void gt_ggc_mx (basic_block&);
8973
8974 static void
8975 gt_ggc_mx (rtx_insn *& x)
8976 {
8977 if (x)
8978 gt_ggc_mx_rtx_def ((void *) x);
8979 }
8980
8981 void
8982 gt_ggc_mx (edge_def *e)
8983 {
8984 tree block = LOCATION_BLOCK (e->goto_locus);
8985 gt_ggc_mx (e->src);
8986 gt_ggc_mx (e->dest);
8987 if (current_ir_type () == IR_GIMPLE)
8988 gt_ggc_mx (e->insns.g);
8989 else
8990 gt_ggc_mx (e->insns.r);
8991 gt_ggc_mx (block);
8992 }
8993
8994 /* PCH support for edge_def. */
8995
8996 extern void gt_pch_nx (tree&);
8997 extern void gt_pch_nx (gimple *&);
8998 extern void gt_pch_nx (rtx&);
8999 extern void gt_pch_nx (basic_block&);
9000
9001 static void
9002 gt_pch_nx (rtx_insn *& x)
9003 {
9004 if (x)
9005 gt_pch_nx_rtx_def ((void *) x);
9006 }
9007
9008 void
9009 gt_pch_nx (edge_def *e)
9010 {
9011 tree block = LOCATION_BLOCK (e->goto_locus);
9012 gt_pch_nx (e->src);
9013 gt_pch_nx (e->dest);
9014 if (current_ir_type () == IR_GIMPLE)
9015 gt_pch_nx (e->insns.g);
9016 else
9017 gt_pch_nx (e->insns.r);
9018 gt_pch_nx (block);
9019 }
9020
9021 void
9022 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9023 {
9024 tree block = LOCATION_BLOCK (e->goto_locus);
9025 op (&(e->src), cookie);
9026 op (&(e->dest), cookie);
9027 if (current_ir_type () == IR_GIMPLE)
9028 op (&(e->insns.g), cookie);
9029 else
9030 op (&(e->insns.r), cookie);
9031 op (&(block), cookie);
9032 }