/* Control flow functions for trees.
   Copyright (C) 2001-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

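/* For illustration (this example is not part of the original sources):
   a loop such as

       #pragma GCC ivdep
       for (i = 0; i < n; i++)
	 a[i] = b[i] + c[i];

   reaches the CFG builder with an IFN_ANNOTATE call wrapping the loop
   condition, roughly

       _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
       if (_2 != 0) goto <body>; else goto <exit>;

   The function above folds the call into a plain copy (_2 = _1) and
   records the hint on the loop, here loop->safelen = INT_MAX.  */
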
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

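/* For illustration (invented example, not from the original sources): in

       void *p = &&lab;
       goto *p;
     lab:;

   the GIMPLE_GOTO built for "goto *p" has a gimple_goto_dest that is
   an SSA name rather than a LABEL_DECL, so computed_goto_p returns
   true; a plain "goto lab;" yields false.  */
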
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

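/* For illustration (invented sequence): a sequence such as

       lab:
       x = {CLOBBER};
       __builtin_unreachable ();

   satisfies gimple_seq_unreachable_p, since everything before the
   final call is a label, a debug stmt or a clobber; any other kind of
   statement in between makes the function return false.  */
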
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


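/* Illustrative examples (not from the original sources): after the
   initialization above, calls such as

       abort ();			(ECF_NORETURN)
       __builtin_return (addr);		(acts like a return statement)

   carry GF_CALL_CTRL_ALTERING and therefore end their basic block,
   while an ordinary call like foo (x) does not.  */
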
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
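  /* The array grows by roughly 25% at a time; e.g. from the initial
     capacity of 20 to 20 + (20 + 3) / 4 = 25 slots, then to 32.  */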
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

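/* Illustrative sketch of the factoring done above (SSA names and the
   block layout are invented): three computed gotos

       goto *p_1;      goto *q_2;      goto *r_3;

   are each rewritten as an assignment to the shared variable plus a
   fallthru edge into a single dispatcher block

       gotovar = p_1;  ...  gotovar = r_3;
     <dispatcher>:
       goto gotovar;

   so with N computed gotos and M possible label destinations the CFG
   carries N + M edges instead of N * M.  */
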
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

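/* For illustration (invented example): given

       if (p) x = 1; else x = 2;

   on a single source line, the blocks for the two arms share that
   line, and assign_discriminators below hands them distinct
   discriminators (e.g. 1 and 2) so a sample-based profiler can
   attribute counts to each arm separately rather than lumping them
   into the shared line.  */
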
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}

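/* For illustration (invented GIMPLE): lowering

       if (a_1 > 0) goto <L1>; else goto <L2>;

   yields an EDGE_TRUE_VALUE edge to the block holding L1 and an
   EDGE_FALSE_VALUE edge to the block holding L2; the labels on the
   GIMPLE_COND itself are then cleared, since the edges now carry the
   targets.  */
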
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}

1833 /* Checks whether we can merge block B into block A. */
1834
1835 static bool
1836 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1837 {
1838 gimple *stmt;
1839
1840 if (!single_succ_p (a))
1841 return false;
1842
1843 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1844 return false;
1845
1846 if (single_succ (a) != b)
1847 return false;
1848
1849 if (!single_pred_p (b))
1850 return false;
1851
1852 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1853 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1854 return false;
1855
1856 /* If A ends by a statement causing exceptions or something similar, we
1857 cannot merge the blocks. */
1858 stmt = last_stmt (a);
1859 if (stmt && stmt_ends_bb_p (stmt))
1860 return false;
1861
1862 /* Do not allow a block with only a non-local label to be merged. */
1863 if (stmt)
1864 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1865 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1866 return false;
1867
1868 /* Examine the labels at the beginning of B. */
1869 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1870 gsi_next (&gsi))
1871 {
1872 tree lab;
1873 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1874 if (!label_stmt)
1875 break;
1876 lab = gimple_label_label (label_stmt);
1877
1878 /* Do not remove user-forced labels or, at -O0, any user labels. */
1879 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1880 return false;
1881 }
1882
1883 /* Protect simple loop latches. We only want to avoid merging
1884 the latch with the loop header or with a block in another
1885 loop in this case. */
1886 if (current_loops
1887 && b->loop_father->latch == b
1888 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1889 && (b->loop_father->header == a
1890 || b->loop_father != a->loop_father))
1891 return false;
1892
1893 /* It must be possible to eliminate all phi nodes in B. If ssa form
1894 is not up-to-date and a name-mapping is registered, we cannot eliminate
1895 any phis. Symbols marked for renaming are never a problem though. */
1896 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1897 gsi_next (&gsi))
1898 {
1899 gphi *phi = gsi.phi ();
1900 /* Technically only new names matter. */
1901 if (name_registered_for_update_p (PHI_RESULT (phi)))
1902 return false;
1903 }
1904
1905 /* When not optimizing, don't merge if we'd lose goto_locus. */
1906 if (!optimize
1907 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1908 {
1909 location_t goto_locus = single_succ_edge (a)->goto_locus;
1910 gimple_stmt_iterator prev, next;
1911 prev = gsi_last_nondebug_bb (a);
1912 next = gsi_after_labels (b);
1913 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1914 gsi_next_nondebug (&next);
1915 if ((gsi_end_p (prev)
1916 || gimple_location (gsi_stmt (prev)) != goto_locus)
1917 && (gsi_end_p (next)
1918 || gimple_location (gsi_stmt (next)) != goto_locus))
1919 return false;
1920 }
1921
1922 return true;
1923 }
1924
1925 /* Replaces all uses of NAME by VAL. */
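/* For example, if NAME is the SSA name x_1 and VAL is the constant 5,

     y_2 = x_1 + 1;                     y_2 = 5 + 1;
     if (x_1 > 9)         become        if (5 > 9)

   after which each touched statement is folded, its EH information is
   cleaned up if needed, and its operands are updated.  */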
1926
1927 void
1928 replace_uses_by (tree name, tree val)
1929 {
1930 imm_use_iterator imm_iter;
1931 use_operand_p use;
1932 gimple *stmt;
1933 edge e;
1934
1935 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1936 {
1937 /* Mark the block if we change the last stmt in it. */
1938 if (cfgcleanup_altered_bbs
1939 && stmt_ends_bb_p (stmt))
1940 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1941
1942 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1943 {
1944 replace_exp (use, val);
1945
1946 if (gimple_code (stmt) == GIMPLE_PHI)
1947 {
1948 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1949 PHI_ARG_INDEX_FROM_USE (use));
1950 if (e->flags & EDGE_ABNORMAL
1951 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1952 {
1953 /* This can only occur for virtual operands, since
1954 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1955 would prevent replacement. */
1956 gcc_checking_assert (virtual_operand_p (name));
1957 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1958 }
1959 }
1960 }
1961
1962 if (gimple_code (stmt) != GIMPLE_PHI)
1963 {
1964 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1965 gimple *orig_stmt = stmt;
1966 size_t i;
1967
1968 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1969 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1970 only change something from non-invariant to invariant, and only
1971 when propagating constants. */
1972 if (is_gimple_min_invariant (val))
1973 for (i = 0; i < gimple_num_ops (stmt); i++)
1974 {
1975 tree op = gimple_op (stmt, i);
1976 /* Operands may be empty here. For example, the labels
1977 of a GIMPLE_COND are nulled out following the creation
1978 of the corresponding CFG edges. */
1979 if (op && TREE_CODE (op) == ADDR_EXPR)
1980 recompute_tree_invariant_for_addr_expr (op);
1981 }
1982
1983 if (fold_stmt (&gsi))
1984 stmt = gsi_stmt (gsi);
1985
1986 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1987 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1988
1989 update_stmt (stmt);
1990 }
1991 }
1992
1993 gcc_checking_assert (has_zero_uses (name));
1994
1995 /* Also update the trees stored in loop structures. */
1996 if (current_loops)
1997 {
1998 struct loop *loop;
1999
2000 FOR_EACH_LOOP (loop, 0)
2001 {
2002 substitute_in_loop_info (loop, name, val);
2003 }
2004 }
2005 }
2006
2007 /* Merge block B into block A. */
2008
2009 static void
2010 gimple_merge_blocks (basic_block a, basic_block b)
2011 {
2012 gimple_stmt_iterator last, gsi;
2013 gphi_iterator psi;
2014
2015 if (dump_file)
2016 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2017
2018 /* Remove all single-valued PHI nodes from block B of the form
2019 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2020 gsi = gsi_last_bb (a);
2021 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2022 {
2023 gimple *phi = gsi_stmt (psi);
2024 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2025 gimple *copy;
2026 bool may_replace_uses = (virtual_operand_p (def)
2027 || may_propagate_copy (def, use));
2028
2029 /* If we maintain loop-closed SSA form, do not propagate arguments
2030 of loop exit phi nodes. */
2031 if (current_loops
2032 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2033 && !virtual_operand_p (def)
2034 && TREE_CODE (use) == SSA_NAME
2035 && a->loop_father != b->loop_father)
2036 may_replace_uses = false;
2037
2038 if (!may_replace_uses)
2039 {
2040 gcc_assert (!virtual_operand_p (def));
2041
2042 /* Note that just emitting the copies is fine -- there is no problem
2043 with ordering of phi nodes. This is because A is the single
2044 predecessor of B, therefore results of the phi nodes cannot
2045 appear as arguments of the phi nodes. */
2046 copy = gimple_build_assign (def, use);
2047 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2048 remove_phi_node (&psi, false);
2049 }
2050 else
2051 {
2052 /* If we deal with a PHI for virtual operands, we can simply
2053 propagate these without fussing with folding or updating
2054 the stmt. */
2055 if (virtual_operand_p (def))
2056 {
2057 imm_use_iterator iter;
2058 use_operand_p use_p;
2059 gimple *stmt;
2060
2061 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2062 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2063 SET_USE (use_p, use);
2064
2065 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2066 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2067 }
2068 else
2069 replace_uses_by (def, use);
2070
2071 remove_phi_node (&psi, true);
2072 }
2073 }
2074
2075 /* Ensure that B follows A. */
2076 move_block_after (b, a);
2077
2078 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2079 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2080
2081 /* Remove labels from B and set gimple_bb to A for other statements. */
2082 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2083 {
2084 gimple *stmt = gsi_stmt (gsi);
2085 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2086 {
2087 tree label = gimple_label_label (label_stmt);
2088 int lp_nr;
2089
2090 gsi_remove (&gsi, false);
2091
2092 /* Now that we can thread computed gotos, we might have
2093 a situation where we have a forced label in block B.
2094 However, the label at the start of block B might still be
2095 used in other ways (think about the runtime checking for
2096 Fortran assigned gotos). So we cannot just delete the
2097 label. Instead we move the label to the start of block A. */
2098 if (FORCED_LABEL (label))
2099 {
2100 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2101 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2102 }
2103 /* Other user labels are kept around in the form of a debug stmt. */
2104 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2105 {
2106 gimple *dbg = gimple_build_debug_bind (label,
2107 integer_zero_node,
2108 stmt);
2109 gimple_debug_bind_reset_value (dbg);
2110 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2111 }
2112
2113 lp_nr = EH_LANDING_PAD_NR (label);
2114 if (lp_nr)
2115 {
2116 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2117 lp->post_landing_pad = NULL;
2118 }
2119 }
2120 else
2121 {
2122 gimple_set_bb (stmt, a);
2123 gsi_next (&gsi);
2124 }
2125 }
2126
2127 /* When merging two BBs, if their counts are different, the larger count
2128 is selected as the new bb count. This is to handle inconsistent
2129 profiles. */
2130 if (a->loop_father == b->loop_father)
2131 {
2132 a->count = a->count.merge (b->count);
2133 }
2134
2135 /* Merge the sequences. */
2136 last = gsi_last_bb (a);
2137 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2138 set_bb_seq (b, NULL);
2139
2140 if (cfgcleanup_altered_bbs)
2141 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2142 }
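
/* A small example of the merge performed above, with A the single
   predecessor of B:

     A:                                 A:
       x_1 = a_2 + 1;                     x_1 = a_2 + 1;
     B:                   becomes         y_3 = x_1 * 2;
       # v_4 = PHI <x_1>                  return y_3;
       y_3 = v_4 * 2;
       return y_3;

   The single-argument PHI is eliminated by propagating x_1 into the
   uses of v_4, after which B's statements are appended to A.  */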
2143
2144
2145 /* Of the two successors of BB, return the one that is not reachable
2146 by a complex edge, if there is one. Otherwise, return BB. We use
2147 this in optimizations that use post-dominators for their heuristics,
2148 to catch the cases in C++ where function calls are involved. */
2149
2150 basic_block
2151 single_noncomplex_succ (basic_block bb)
2152 {
2153 edge e0, e1;
2154 if (EDGE_COUNT (bb->succs) != 2)
2155 return bb;
2156
2157 e0 = EDGE_SUCC (bb, 0);
2158 e1 = EDGE_SUCC (bb, 1);
2159 if (e0->flags & EDGE_COMPLEX)
2160 return e1->dest;
2161 if (e1->flags & EDGE_COMPLEX)
2162 return e0->dest;
2163
2164 return bb;
2165 }
2166
2167 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags accordingly. */
2168
2169 void
2170 notice_special_calls (gcall *call)
2171 {
2172 int flags = gimple_call_flags (call);
2173
2174 if (flags & ECF_MAY_BE_ALLOCA)
2175 cfun->calls_alloca = true;
2176 if (flags & ECF_RETURNS_TWICE)
2177 cfun->calls_setjmp = true;
2178 }
2179
2180
2181 /* Clear flags set by notice_special_calls. Used by dead code removal
2182 to update the flags. */
2183
2184 void
2185 clear_special_calls (void)
2186 {
2187 cfun->calls_alloca = false;
2188 cfun->calls_setjmp = false;
2189 }
2190
2191 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2192
2193 static void
2194 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2195 {
2196 /* Since this block is no longer reachable, we can just delete all
2197 of its PHI nodes. */
2198 remove_phi_nodes (bb);
2199
2200 /* Remove edges to BB's successors. */
2201 while (EDGE_COUNT (bb->succs) > 0)
2202 remove_edge (EDGE_SUCC (bb, 0));
2203 }
2204
2205
2206 /* Remove statements of basic block BB. */
2207
2208 static void
2209 remove_bb (basic_block bb)
2210 {
2211 gimple_stmt_iterator i;
2212
2213 if (dump_file)
2214 {
2215 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2216 if (dump_flags & TDF_DETAILS)
2217 {
2218 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2219 fprintf (dump_file, "\n");
2220 }
2221 }
2222
2223 if (current_loops)
2224 {
2225 struct loop *loop = bb->loop_father;
2226
2227 /* If a loop gets removed, clean up the information associated
2228 with it. */
2229 if (loop->latch == bb
2230 || loop->header == bb)
2231 free_numbers_of_iterations_estimates (loop);
2232 }
2233
2234 /* Remove all the instructions in the block. */
2235 if (bb_seq (bb) != NULL)
2236 {
2237 /* Walk backwards so as to get a chance to substitute all
2238 released DEFs into debug stmts. See
2239 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2240 details. */
2241 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2242 {
2243 gimple *stmt = gsi_stmt (i);
2244 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2245 if (label_stmt
2246 && (FORCED_LABEL (gimple_label_label (label_stmt))
2247 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2248 {
2249 basic_block new_bb;
2250 gimple_stmt_iterator new_gsi;
2251
2252 /* A non-reachable non-local label may still be referenced.
2253 But it no longer needs to carry the extra semantics of
2254 non-locality. */
2255 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2256 {
2257 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2258 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2259 }
2260
2261 new_bb = bb->prev_bb;
2262 /* Don't move any labels into ENTRY block. */
2263 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2264 {
2265 new_bb = single_succ (new_bb);
2266 gcc_assert (new_bb != bb);
2267 }
2268 new_gsi = gsi_start_bb (new_bb);
2269 gsi_remove (&i, false);
2270 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2271 }
2272 else
2273 {
2274 /* Release SSA definitions. */
2275 release_defs (stmt);
2276 gsi_remove (&i, true);
2277 }
2278
2279 if (gsi_end_p (i))
2280 i = gsi_last_bb (bb);
2281 else
2282 gsi_prev (&i);
2283 }
2284 }
2285
2286 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2287 bb->il.gimple.seq = NULL;
2288 bb->il.gimple.phi_nodes = NULL;
2289 }
2290
2291
2292 /* Given a basic block BB and a value VAL for use in the final statement
2293 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2294 the edge that will be taken out of the block.
2295 If VAL is NULL_TREE, then the current value of the final statement's
2296 predicate or index is used.
2297 If the value does not match a unique edge, NULL is returned. */
2298
2299 edge
2300 find_taken_edge (basic_block bb, tree val)
2301 {
2302 gimple *stmt;
2303
2304 stmt = last_stmt (bb);
2305
2306 /* Handle ENTRY and EXIT. */
2307 if (!stmt)
2308 return NULL;
2309
2310 if (gimple_code (stmt) == GIMPLE_COND)
2311 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2312
2313 if (gimple_code (stmt) == GIMPLE_SWITCH)
2314 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2315
2316 if (computed_goto_p (stmt))
2317 {
2318 /* Only optimize if the argument is a label; if the argument is
2319 not a label then we cannot construct a proper CFG.
2320
2321 It may be the case that we only need to allow the LABEL_REF to
2322 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2323 appear inside a LABEL_EXPR just to be safe. */
2324 if (val
2325 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2326 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2327 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2328 }
2329
2330 /* Otherwise we only know the taken successor edge if it's unique. */
2331 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2332 }
2333
2334 /* Given a constant value VAL and the basic block BB that ends in a
2335 computed GOTO, determine which of the outgoing edges will be taken
2336 out of the block. Return NULL if any edge may be taken. */
2337
2338 static edge
2339 find_taken_edge_computed_goto (basic_block bb, tree val)
2340 {
2341 basic_block dest;
2342 edge e = NULL;
2343
2344 dest = label_to_block (cfun, val);
2345 if (dest)
2346 e = find_edge (bb, dest);
2347
2348 /* It's possible for find_edge to return NULL here on invalid code
2349 that abuses the labels-as-values extension (e.g. code that attempts to
2350 jump *between* functions via stored labels-as-values; PR 84136).
2351 If so, then we simply return that NULL for the edge.
2352 We don't currently have a way of detecting such invalid code, so we
2353 can't assert that it was the case when a NULL edge occurs here. */
2354
2355 return e;
2356 }
2357
2358 /* Given COND_STMT and a constant value VAL for use as the predicate,
2359 determine which of the two edges will be taken out of
2360 the statement's block. Return NULL if either edge may be taken.
2361 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2362 is used. */
2363
2364 static edge
2365 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2366 {
2367 edge true_edge, false_edge;
2368
2369 if (val == NULL_TREE)
2370 {
2371 /* Use the current value of the predicate. */
2372 if (gimple_cond_true_p (cond_stmt))
2373 val = integer_one_node;
2374 else if (gimple_cond_false_p (cond_stmt))
2375 val = integer_zero_node;
2376 else
2377 return NULL;
2378 }
2379 else if (TREE_CODE (val) != INTEGER_CST)
2380 return NULL;
2381
2382 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2383 &true_edge, &false_edge);
2384
2385 return (integer_zerop (val) ? false_edge : true_edge);
2386 }
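
/* For example, for the statement if (x_1 > 4), calling this function
   with VAL equal to the constant 0 returns the edge flagged
   EDGE_FALSE_VALUE, while VAL equal to 1 (or any other nonzero
   constant) returns the EDGE_TRUE_VALUE edge.  */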
2387
2388 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2389 which edge will be taken out of the statement's block. Return NULL if any
2390 edge may be taken.
2391 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2392 is used. */
2393
2394 edge
2395 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2396 {
2397 basic_block dest_bb;
2398 edge e;
2399 tree taken_case;
2400
2401 if (gimple_switch_num_labels (switch_stmt) == 1)
2402 taken_case = gimple_switch_default_label (switch_stmt);
2403 else
2404 {
2405 if (val == NULL_TREE)
2406 val = gimple_switch_index (switch_stmt);
2407 if (TREE_CODE (val) != INTEGER_CST)
2408 return NULL;
2409 else
2410 taken_case = find_case_label_for_value (switch_stmt, val);
2411 }
2412 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2413
2414 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2415 gcc_assert (e);
2416 return e;
2417 }
2418
2419
2420 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2421 We can make optimal use here of the fact that the case labels are
2422 sorted: we can do a binary search for a case matching VAL. */
2423
2424 tree
2425 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2426 {
2427 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2428 tree default_case = gimple_switch_default_label (switch_stmt);
2429
2430 for (low = 0, high = n; high - low > 1; )
2431 {
2432 size_t i = (high + low) / 2;
2433 tree t = gimple_switch_label (switch_stmt, i);
2434 int cmp;
2435
2436 /* Cache the result of comparing CASE_LOW and val. */
2437 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2438
2439 if (cmp > 0)
2440 high = i;
2441 else
2442 low = i;
2443
2444 if (CASE_HIGH (t) == NULL)
2445 {
2446 /* A single-valued case label. */
2447 if (cmp == 0)
2448 return t;
2449 }
2450 else
2451 {
2452 /* A case range. We can only handle integer ranges. */
2453 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2454 return t;
2455 }
2456 }
2457
2458 return default_case;
2459 }
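
/* A worked example of the search above: for a case vector holding
   { default, case 1, case 5 ... 8, case 10 } (so n == 4) and VAL == 6,
   the first probe is i == (0 + 4) / 2 == 2, which is the range label
   5 ... 8; its CASE_LOW compares below VAL and its CASE_HIGH compares
   above VAL, so that label is returned after a single probe.  */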
2460
2461
2462 /* Dump a basic block on stderr. */
2463
2464 void
2465 gimple_debug_bb (basic_block bb)
2466 {
2467 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2468 }
2469
2470
2471 /* Dump basic block with index N on stderr. */
2472
2473 basic_block
2474 gimple_debug_bb_n (int n)
2475 {
2476 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2477 return BASIC_BLOCK_FOR_FN (cfun, n);
2478 }
2479
2480
2481 /* Dump the CFG on stderr.
2482
2483 FLAGS are the same as those used by the tree dumping functions
2484 (see TDF_* in dumpfile.h). */
2485
2486 void
2487 gimple_debug_cfg (dump_flags_t flags)
2488 {
2489 gimple_dump_cfg (stderr, flags);
2490 }
2491
2492
2493 /* Dump the program showing basic block boundaries on the given FILE.
2494
2495 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2496 tree.h). */
2497
2498 void
2499 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2500 {
2501 if (flags & TDF_DETAILS)
2502 {
2503 dump_function_header (file, current_function_decl, flags);
2504 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2505 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2506 last_basic_block_for_fn (cfun));
2507
2508 brief_dump_cfg (file, flags);
2509 fprintf (file, "\n");
2510 }
2511
2512 if (flags & TDF_STATS)
2513 dump_cfg_stats (file);
2514
2515 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2516 }
2517
2518
2519 /* Dump CFG statistics on FILE. */
2520
2521 void
2522 dump_cfg_stats (FILE *file)
2523 {
2524 static long max_num_merged_labels = 0;
2525 unsigned long size, total = 0;
2526 long num_edges;
2527 basic_block bb;
2528 const char * const fmt_str = "%-30s%-13s%12s\n";
2529 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2530 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2531 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2532 const char *funcname = current_function_name ();
2533
2534 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2535
2536 fprintf (file, "---------------------------------------------------------\n");
2537 fprintf (file, fmt_str, "", " Number of ", "Memory");
2538 fprintf (file, fmt_str, "", " instances ", "used ");
2539 fprintf (file, "---------------------------------------------------------\n");
2540
2541 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2542 total += size;
2543 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2544 SIZE_AMOUNT (size));
2545
2546 num_edges = 0;
2547 FOR_EACH_BB_FN (bb, cfun)
2548 num_edges += EDGE_COUNT (bb->succs);
2549 size = num_edges * sizeof (struct edge_def);
2550 total += size;
2551 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2552
2553 fprintf (file, "---------------------------------------------------------\n");
2554 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2555 SIZE_AMOUNT (total));
2556 fprintf (file, "---------------------------------------------------------\n");
2557 fprintf (file, "\n");
2558
2559 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2560 max_num_merged_labels = cfg_stats.num_merged_labels;
2561
2562 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2563 cfg_stats.num_merged_labels, max_num_merged_labels);
2564
2565 fprintf (file, "\n");
2566 }
2567
2568
2569 /* Dump CFG statistics on stderr. Keep extern so that it's always
2570 linked in the final executable. */
2571
2572 DEBUG_FUNCTION void
2573 debug_cfg_stats (void)
2574 {
2575 dump_cfg_stats (stderr);
2576 }
2577
2578 /*---------------------------------------------------------------------------
2579 Miscellaneous helpers
2580 ---------------------------------------------------------------------------*/
2581
2582 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2583 flow. Transfers of control flow associated with EH are excluded. */
2584
2585 static bool
2586 call_can_make_abnormal_goto (gimple *t)
2587 {
2588 /* If the function has no non-local labels, then a call cannot make an
2589 abnormal transfer of control. */
2590 if (!cfun->has_nonlocal_label
2591 && !cfun->calls_setjmp)
2592 return false;
2593
2594 /* Likewise if the call has no side effects. */
2595 if (!gimple_has_side_effects (t))
2596 return false;
2597
2598 /* Likewise if the called function is leaf. */
2599 if (gimple_call_flags (t) & ECF_LEAF)
2600 return false;
2601
2602 return true;
2603 }
2604
2605
2606 /* Return true if T can make an abnormal transfer of control flow.
2607 Transfers of control flow associated with EH are excluded. */
2608
2609 bool
2610 stmt_can_make_abnormal_goto (gimple *t)
2611 {
2612 if (computed_goto_p (t))
2613 return true;
2614 if (is_gimple_call (t))
2615 return call_can_make_abnormal_goto (t);
2616 return false;
2617 }
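
/* For illustration (g is a hypothetical external function): in a
   function that calls setjmp, a later side-effecting call such as
   g () below may execute longjmp (env, 1), making control reappear
   at the setjmp call; that is the abnormal transfer modeled here.

     #include <setjmp.h>

     static jmp_buf env;
     extern void g (void);

     int f (void)
     {
       int ret = setjmp (env);
       if (ret == 0)
         g ();
       return ret;
     }

   Calls like g () therefore end their basic block and receive
   abnormal outgoing edges when the CFG is built.  */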
2618
2619
2620 /* Return true if T represents a stmt that always transfers control. */
2621
2622 bool
2623 is_ctrl_stmt (gimple *t)
2624 {
2625 switch (gimple_code (t))
2626 {
2627 case GIMPLE_COND:
2628 case GIMPLE_SWITCH:
2629 case GIMPLE_GOTO:
2630 case GIMPLE_RETURN:
2631 case GIMPLE_RESX:
2632 return true;
2633 default:
2634 return false;
2635 }
2636 }
2637
2638
2639 /* Return true if T is a statement that may alter the flow of control
2640 (e.g., a call to a non-returning function). */
2641
2642 bool
2643 is_ctrl_altering_stmt (gimple *t)
2644 {
2645 gcc_assert (t);
2646
2647 switch (gimple_code (t))
2648 {
2649 case GIMPLE_CALL:
2650 /* The per-stmt call flag indicates whether the call could alter
2651 control flow. */
2652 if (gimple_call_ctrl_altering_p (t))
2653 return true;
2654 break;
2655
2656 case GIMPLE_EH_DISPATCH:
2657 /* EH_DISPATCH branches to the individual catch handlers at
2658 this level of a try or allowed-exceptions region. It can
2659 fallthru to the next statement as well. */
2660 return true;
2661
2662 case GIMPLE_ASM:
2663 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2664 return true;
2665 break;
2666
2667 CASE_GIMPLE_OMP:
2668 /* OpenMP directives alter control flow. */
2669 return true;
2670
2671 case GIMPLE_TRANSACTION:
2672 /* A transaction start alters control flow. */
2673 return true;
2674
2675 default:
2676 break;
2677 }
2678
2679 /* If a statement can throw, it alters control flow. */
2680 return stmt_can_throw_internal (cfun, t);
2681 }
2682
2683
2684 /* Return true if T is a simple local goto. */
2685
2686 bool
2687 simple_goto_p (gimple *t)
2688 {
2689 return (gimple_code (t) == GIMPLE_GOTO
2690 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2691 }
2692
2693
2694 /* Return true if STMT should start a new basic block. PREV_STMT is
2695 the statement preceding STMT. It is used when STMT is a label or a
2696 case label. Labels should only start a new basic block if their
2697 previous statement wasn't a label. Otherwise, a sequence of labels
2698 would generate unnecessary basic blocks that only contain a single
2699 label. */
2700
2701 static inline bool
2702 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2703 {
2704 if (stmt == NULL)
2705 return false;
2706
2707 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2708 any nondebug stmts in the block. We don't want to start another
2709 block in this case: the debug stmt will already have started the
2710 one STMT would start if we weren't outputting debug stmts. */
2711 if (prev_stmt && is_gimple_debug (prev_stmt))
2712 return false;
2713
2714 /* Labels start a new basic block only if the preceding statement
2715 wasn't a label of the same type. This prevents the creation of
2716 consecutive blocks that have nothing but a single label. */
2717 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2718 {
2719 /* Nonlocal and computed GOTO targets always start a new block. */
2720 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2721 || FORCED_LABEL (gimple_label_label (label_stmt)))
2722 return true;
2723
2724 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2725 {
2726 if (DECL_NONLOCAL (gimple_label_label (
2727 as_a <glabel *> (prev_stmt))))
2728 return true;
2729
2730 cfg_stats.num_merged_labels++;
2731 return false;
2732 }
2733 else
2734 return true;
2735 }
2736 else if (gimple_code (stmt) == GIMPLE_CALL)
2737 {
2738 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2739 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2740 start a new block. */
2741 return true;
2742 if (gimple_call_internal_p (stmt, IFN_PHI)
2743 && prev_stmt
2744 && gimple_code (prev_stmt) != GIMPLE_LABEL
2745 && (gimple_code (prev_stmt) != GIMPLE_CALL
2746 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2747 /* PHI nodes start a new block unless preceded by a label
2748 or another PHI. */
2749 return true;
2750 }
2751
2752 return false;
2753 }
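
/* For example, in the statement sequence

     L1:
     L2:
       x_1 = a_2 + 1;
     L3:
       y_3 = setjmp (&env);

   L1 starts a new block; L2 does not, because it immediately follows
   another label (the pair is counted in num_merged_labels); L3 starts
   a new block because a nondebug statement precedes it; and the
   returns-twice call to setjmp starts yet another block of its own.  */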
2754
2755
2756 /* Return true if T should end a basic block. */
2757
2758 bool
2759 stmt_ends_bb_p (gimple *t)
2760 {
2761 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2762 }
2763
2764 /* Remove block annotations and other data structures. */
2765
2766 void
2767 delete_tree_cfg_annotations (struct function *fn)
2768 {
2769 vec_free (label_to_block_map_for_fn (fn));
2770 }
2771
2772 /* Return the virtual phi in BB. */
2773
2774 gphi *
2775 get_virtual_phi (basic_block bb)
2776 {
2777 for (gphi_iterator gsi = gsi_start_phis (bb);
2778 !gsi_end_p (gsi);
2779 gsi_next (&gsi))
2780 {
2781 gphi *phi = gsi.phi ();
2782
2783 if (virtual_operand_p (PHI_RESULT (phi)))
2784 return phi;
2785 }
2786
2787 return NULL;
2788 }
2789
2790 /* Return the first statement in basic block BB. */
2791
2792 gimple *
2793 first_stmt (basic_block bb)
2794 {
2795 gimple_stmt_iterator i = gsi_start_bb (bb);
2796 gimple *stmt = NULL;
2797
2798 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2799 {
2800 gsi_next (&i);
2801 stmt = NULL;
2802 }
2803 return stmt;
2804 }
2805
2806 /* Return the first non-label statement in basic block BB. */
2807
2808 static gimple *
2809 first_non_label_stmt (basic_block bb)
2810 {
2811 gimple_stmt_iterator i = gsi_start_bb (bb);
2812 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2813 gsi_next (&i);
2814 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2815 }
2816
2817 /* Return the last statement in basic block BB. */
2818
2819 gimple *
2820 last_stmt (basic_block bb)
2821 {
2822 gimple_stmt_iterator i = gsi_last_bb (bb);
2823 gimple *stmt = NULL;
2824
2825 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2826 {
2827 gsi_prev (&i);
2828 stmt = NULL;
2829 }
2830 return stmt;
2831 }
2832
2833 /* Return the last statement of an otherwise empty block. Return NULL
2834 if the block is totally empty, or if it contains more than one
2835 statement. */
2836
2837 gimple *
2838 last_and_only_stmt (basic_block bb)
2839 {
2840 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2841 gimple *last, *prev;
2842
2843 if (gsi_end_p (i))
2844 return NULL;
2845
2846 last = gsi_stmt (i);
2847 gsi_prev_nondebug (&i);
2848 if (gsi_end_p (i))
2849 return last;
2850
2851 /* Empty statements should no longer appear in the instruction stream.
2852 Everything that might have appeared before should be deleted by
2853 remove_useless_stmts, and the optimizers should just gsi_remove
2854 instead of smashing with build_empty_stmt.
2855
2856 Thus the only thing that should appear here in a block containing
2857 one executable statement is a label. */
2858 prev = gsi_stmt (i);
2859 if (gimple_code (prev) == GIMPLE_LABEL)
2860 return last;
2861 else
2862 return NULL;
2863 }
2864
2865 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2866
2867 static void
2868 reinstall_phi_args (edge new_edge, edge old_edge)
2869 {
2870 edge_var_map *vm;
2871 int i;
2872 gphi_iterator phis;
2873
2874 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2875 if (!v)
2876 return;
2877
2878 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2879 v->iterate (i, &vm) && !gsi_end_p (phis);
2880 i++, gsi_next (&phis))
2881 {
2882 gphi *phi = phis.phi ();
2883 tree result = redirect_edge_var_map_result (vm);
2884 tree arg = redirect_edge_var_map_def (vm);
2885
2886 gcc_assert (result == gimple_phi_result (phi));
2887
2888 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2889 }
2890
2891 redirect_edge_var_map_clear (old_edge);
2892 }
2893
2894 /* Returns the basic block after which the new basic block created
2895 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2896 near its "logical" location. This is of most help to humans looking
2897 at debugging dumps. */
2898
2899 basic_block
2900 split_edge_bb_loc (edge edge_in)
2901 {
2902 basic_block dest = edge_in->dest;
2903 basic_block dest_prev = dest->prev_bb;
2904
2905 if (dest_prev)
2906 {
2907 edge e = find_edge (dest_prev, dest);
2908 if (e && !(e->flags & EDGE_COMPLEX))
2909 return edge_in->src;
2910 }
2911 return dest_prev;
2912 }
2913
2914 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2915 Abort on abnormal edges. */
2916
2917 static basic_block
2918 gimple_split_edge (edge edge_in)
2919 {
2920 basic_block new_bb, after_bb, dest;
2921 edge new_edge, e;
2922
2923 /* Abnormal edges cannot be split. */
2924 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2925
2926 dest = edge_in->dest;
2927
2928 after_bb = split_edge_bb_loc (edge_in);
2929
2930 new_bb = create_empty_bb (after_bb);
2931 new_bb->count = edge_in->count ();
2932
2933 e = redirect_edge_and_branch (edge_in, new_bb);
2934 gcc_assert (e == edge_in);
2935
2936 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2937 reinstall_phi_args (new_edge, e);
2938
2939 return new_bb;
2940 }
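
/* For example, splitting the critical edge A->C in

     A --> C                   A --> N --> C
     |     ^        yields     |           ^
     v     |                   v           |
     B ----+                   B ----------+

   redirects A's branch to the new empty block N and adds a fallthru
   edge N->C; the PHI arguments that C had for the edge from A are
   reinstalled on the new N->C edge.  */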
2941
2942
2943 /* Verify properties of the address expression T whose base should be
2944 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2945
2946 static bool
2947 verify_address (tree t, bool verify_addressable)
2948 {
2949 bool old_constant;
2950 bool old_side_effects;
2951 bool new_constant;
2952 bool new_side_effects;
2953
2954 old_constant = TREE_CONSTANT (t);
2955 old_side_effects = TREE_SIDE_EFFECTS (t);
2956
2957 recompute_tree_invariant_for_addr_expr (t);
2958 new_side_effects = TREE_SIDE_EFFECTS (t);
2959 new_constant = TREE_CONSTANT (t);
2960
2961 if (old_constant != new_constant)
2962 {
2963 error ("constant not recomputed when ADDR_EXPR changed");
2964 return true;
2965 }
2966 if (old_side_effects != new_side_effects)
2967 {
2968 error ("side effects not recomputed when ADDR_EXPR changed");
2969 return true;
2970 }
2971
2972 tree base = TREE_OPERAND (t, 0);
2973 while (handled_component_p (base))
2974 base = TREE_OPERAND (base, 0);
2975
2976 if (!(VAR_P (base)
2977 || TREE_CODE (base) == PARM_DECL
2978 || TREE_CODE (base) == RESULT_DECL))
2979 return false;
2980
2981 if (DECL_GIMPLE_REG_P (base))
2982 {
2983 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2984 return true;
2985 }
2986
2987 if (verify_addressable && !TREE_ADDRESSABLE (base))
2988 {
2989 error ("address taken, but ADDRESSABLE bit not set");
2990 return true;
2991 }
2992
2993 return false;
2994 }
2995
2996
2997 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2998 Returns true if there is an error, otherwise false. */
2999
3000 static bool
3001 verify_types_in_gimple_min_lval (tree expr)
3002 {
3003 tree op;
3004
3005 if (is_gimple_id (expr))
3006 return false;
3007
3008 if (TREE_CODE (expr) != TARGET_MEM_REF
3009 && TREE_CODE (expr) != MEM_REF)
3010 {
3011 error ("invalid expression for min lvalue");
3012 return true;
3013 }
3014
3015 /* TARGET_MEM_REFs are strange beasts. */
3016 if (TREE_CODE (expr) == TARGET_MEM_REF)
3017 return false;
3018
3019 op = TREE_OPERAND (expr, 0);
3020 if (!is_gimple_val (op))
3021 {
3022 error ("invalid operand in indirect reference");
3023 debug_generic_stmt (op);
3024 return true;
3025 }
3026 /* Memory references now generally can involve a value conversion. */
3027
3028 return false;
3029 }
3030
3031 /* Verify if EXPR is a valid GIMPLE reference expression. If
3032 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3033 if there is an error, otherwise false. */
3034
3035 static bool
3036 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3037 {
3038 if (TREE_CODE (expr) == REALPART_EXPR
3039 || TREE_CODE (expr) == IMAGPART_EXPR
3040 || TREE_CODE (expr) == BIT_FIELD_REF)
3041 {
3042 tree op = TREE_OPERAND (expr, 0);
3043 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3044 {
3045 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3046 return true;
3047 }
3048
3049 if (TREE_CODE (expr) == BIT_FIELD_REF)
3050 {
3051 tree t1 = TREE_OPERAND (expr, 1);
3052 tree t2 = TREE_OPERAND (expr, 2);
3053 poly_uint64 size, bitpos;
3054 if (!poly_int_tree_p (t1, &size)
3055 || !poly_int_tree_p (t2, &bitpos)
3056 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3057 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3058 {
3059 error ("invalid position or size operand to BIT_FIELD_REF");
3060 return true;
3061 }
3062 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3063 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3064 {
3065 error ("integral result type precision does not match "
3066 "field size of BIT_FIELD_REF");
3067 return true;
3068 }
3069 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3070 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3071 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3072 size))
3073 {
3074 error ("mode size of non-integral result does not "
3075 "match field size of BIT_FIELD_REF");
3076 return true;
3077 }
3078 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3079 && !type_has_mode_precision_p (TREE_TYPE (op)))
3080 {
3081 error ("BIT_FIELD_REF of non-mode-precision operand");
3082 return true;
3083 }
3084 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3085 && maybe_gt (size + bitpos,
3086 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3087 {
3088 error ("position plus size exceeds size of referenced object in "
3089 "BIT_FIELD_REF");
3090 return true;
3091 }
3092 }
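
      /* For example, BIT_FIELD_REF <x_1, 8, 16> reads 8 bits starting
         at bit 16 of x_1: the checks above require the size and
         position operands to be bitsizetype constants, an integral
         result type to have precision exactly 8, and 16 + 8 not to
         run past TYPE_SIZE of x_1's type unless that type is an
         aggregate.  */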
3093
3094 if ((TREE_CODE (expr) == REALPART_EXPR
3095 || TREE_CODE (expr) == IMAGPART_EXPR)
3096 && !useless_type_conversion_p (TREE_TYPE (expr),
3097 TREE_TYPE (TREE_TYPE (op))))
3098 {
3099 error ("type mismatch in real/imagpart reference");
3100 debug_generic_stmt (TREE_TYPE (expr));
3101 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3102 return true;
3103 }
3104 expr = op;
3105 }
3106
3107 while (handled_component_p (expr))
3108 {
3109 if (TREE_CODE (expr) == REALPART_EXPR
3110 || TREE_CODE (expr) == IMAGPART_EXPR
3111 || TREE_CODE (expr) == BIT_FIELD_REF)
3112 {
3113 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3114 return true;
3115 }
3116
3117 tree op = TREE_OPERAND (expr, 0);
3118
3119 if (TREE_CODE (expr) == ARRAY_REF
3120 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3121 {
3122 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3123 || (TREE_OPERAND (expr, 2)
3124 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3125 || (TREE_OPERAND (expr, 3)
3126 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3127 {
3128 error ("invalid operands to array reference");
3129 debug_generic_stmt (expr);
3130 return true;
3131 }
3132 }
3133
3134 /* Verify if the reference array element types are compatible. */
3135 if (TREE_CODE (expr) == ARRAY_REF
3136 && !useless_type_conversion_p (TREE_TYPE (expr),
3137 TREE_TYPE (TREE_TYPE (op))))
3138 {
3139 error ("type mismatch in array reference");
3140 debug_generic_stmt (TREE_TYPE (expr));
3141 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3142 return true;
3143 }
3144 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3145 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3146 TREE_TYPE (TREE_TYPE (op))))
3147 {
3148 error ("type mismatch in array range reference");
3149 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3151 return true;
3152 }
3153
3154 if (TREE_CODE (expr) == COMPONENT_REF)
3155 {
3156 if (TREE_OPERAND (expr, 2)
3157 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3158 {
3159 error ("invalid COMPONENT_REF offset operator");
3160 return true;
3161 }
3162 if (!useless_type_conversion_p (TREE_TYPE (expr),
3163 TREE_TYPE (TREE_OPERAND (expr, 1))))
3164 {
3165 error ("type mismatch in component reference");
3166 debug_generic_stmt (TREE_TYPE (expr));
3167 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3168 return true;
3169 }
3170 }
3171
3172 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3173 {
3174 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3175 that their operand is not an SSA name or an invariant when
3176 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3177 bug). Otherwise there is nothing to verify, gross mismatches at
3178 most invoke undefined behavior. */
3179 if (require_lvalue
3180 && (TREE_CODE (op) == SSA_NAME
3181 || is_gimple_min_invariant (op)))
3182 {
3183 error ("conversion of an SSA_NAME on the left hand side");
3184 debug_generic_stmt (expr);
3185 return true;
3186 }
3187 else if (TREE_CODE (op) == SSA_NAME
3188 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3189 {
3190 error ("conversion of register to a different size");
3191 debug_generic_stmt (expr);
3192 return true;
3193 }
3194 else if (!handled_component_p (op))
3195 return false;
3196 }
3197
3198 expr = op;
3199 }
3200
3201 if (TREE_CODE (expr) == MEM_REF)
3202 {
3203 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3204 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3205 && verify_address (TREE_OPERAND (expr, 0), false)))
3206 {
3207 error ("invalid address operand in MEM_REF");
3208 debug_generic_stmt (expr);
3209 return true;
3210 }
3211 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3212 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3213 {
3214 error ("invalid offset operand in MEM_REF");
3215 debug_generic_stmt (expr);
3216 return true;
3217 }
3218 }
3219 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3220 {
3221 if (!TMR_BASE (expr)
3222 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3223 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3224 && verify_address (TMR_BASE (expr), false)))
3225 {
3226 error ("invalid address operand in TARGET_MEM_REF");
3227 return true;
3228 }
3229 if (!TMR_OFFSET (expr)
3230 || !poly_int_tree_p (TMR_OFFSET (expr))
3231 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3232 {
3233 error ("invalid offset operand in TARGET_MEM_REF");
3234 debug_generic_stmt (expr);
3235 return true;
3236 }
3237 }
3238 else if (TREE_CODE (expr) == INDIRECT_REF)
3239 {
3240 error ("INDIRECT_REF in gimple IL");
3241 debug_generic_stmt (expr);
3242 return true;
3243 }
3244
3245 return ((require_lvalue || !is_gimple_min_invariant (expr))
3246 && verify_types_in_gimple_min_lval (expr));
3247 }
3248
3249 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3250 list of pointer-to types that is trivially convertible to DEST. */
3251
3252 static bool
3253 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3254 {
3255 tree src;
3256
3257 if (!TYPE_POINTER_TO (src_obj))
3258 return true;
3259
3260 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3261 if (useless_type_conversion_p (dest, src))
3262 return true;
3263
3264 return false;
3265 }
3266
3267 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3268 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3269
3270 static bool
3271 valid_fixed_convert_types_p (tree type1, tree type2)
3272 {
3273 return (FIXED_POINT_TYPE_P (type1)
3274 && (INTEGRAL_TYPE_P (type2)
3275 || SCALAR_FLOAT_TYPE_P (type2)
3276 || FIXED_POINT_TYPE_P (type2)));
3277 }
3278
3279 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3280 is a problem, otherwise false. */
3281
3282 static bool
3283 verify_gimple_call (gcall *stmt)
3284 {
3285 tree fn = gimple_call_fn (stmt);
3286 tree fntype, fndecl;
3287 unsigned i;
3288
3289 if (gimple_call_internal_p (stmt))
3290 {
3291 if (fn)
3292 {
3293 error ("gimple call has two targets");
3294 debug_generic_stmt (fn);
3295 return true;
3296 }
3297 }
3298 else
3299 {
3300 if (!fn)
3301 {
3302 error ("gimple call has no target");
3303 return true;
3304 }
3305 }
3306
3307 if (fn && !is_gimple_call_addr (fn))
3308 {
3309 error ("invalid function in gimple call");
3310 debug_generic_stmt (fn);
3311 return true;
3312 }
3313
3314 if (fn
3315 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3316 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3317 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3318 {
3319 error ("non-function in gimple call");
3320 return true;
3321 }
3322
3323 fndecl = gimple_call_fndecl (stmt);
3324 if (fndecl
3325 && TREE_CODE (fndecl) == FUNCTION_DECL
3326 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3327 && !DECL_PURE_P (fndecl)
3328 && !TREE_READONLY (fndecl))
3329 {
3330 error ("invalid pure const state for function");
3331 return true;
3332 }
3333
3334 tree lhs = gimple_call_lhs (stmt);
3335 if (lhs
3336 && (!is_gimple_lvalue (lhs)
3337 || verify_types_in_gimple_reference (lhs, true)))
3338 {
3339 error ("invalid LHS in gimple call");
3340 return true;
3341 }
3342
3343 if (gimple_call_ctrl_altering_p (stmt)
3344 && gimple_call_noreturn_p (stmt)
3345 && should_remove_lhs_p (lhs))
3346 {
3347 error ("LHS in noreturn call");
3348 return true;
3349 }
3350
3351 fntype = gimple_call_fntype (stmt);
3352 if (fntype
3353 && lhs
3354 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3355 /* ??? At least C++ misses conversions at assignments from
3356 void * call results.
3357 For now simply allow arbitrary pointer type conversions. */
3358 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3359 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3360 {
3361 error ("invalid conversion in gimple call");
3362 debug_generic_stmt (TREE_TYPE (lhs));
3363 debug_generic_stmt (TREE_TYPE (fntype));
3364 return true;
3365 }
3366
3367 if (gimple_call_chain (stmt)
3368 && !is_gimple_val (gimple_call_chain (stmt)))
3369 {
3370 error ("invalid static chain in gimple call");
3371 debug_generic_stmt (gimple_call_chain (stmt));
3372 return true;
3373 }
3374
3375 /* If there is a static chain argument, the call should either be
3376 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3377 if (gimple_call_chain (stmt)
3378 && fndecl
3379 && !DECL_STATIC_CHAIN (fndecl))
3380 {
3381 error ("static chain with function that doesn%'t use one");
3382 return true;
3383 }
3384
3385 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3386 {
3387 switch (DECL_FUNCTION_CODE (fndecl))
3388 {
3389 case BUILT_IN_UNREACHABLE:
3390 case BUILT_IN_TRAP:
3391 if (gimple_call_num_args (stmt) > 0)
3392 {
3393 /* Built-in unreachable with parameters might not be caught by
3394 undefined behavior sanitizer. Front ends do check that users do
3395 not call them that way, but we also produce calls to
3396 __builtin_unreachable internally, for example when IPA figures
3397 out a call cannot happen in a legal program. In such cases,
3398 we must make sure arguments are stripped off. */
3399 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3400 "with arguments");
3401 return true;
3402 }
3403 break;
3404 default:
3405 break;
3406 }
3407 }
3408
3409 /* ??? The C frontend passes unpromoted arguments in case it
3410 didn't see a function declaration before the call. So for now
3411 leave the call arguments mostly unverified. Once we gimplify
3412 unit-at-a-time we have a chance to fix this. */
3413
3414 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3415 {
3416 tree arg = gimple_call_arg (stmt, i);
3417 if ((is_gimple_reg_type (TREE_TYPE (arg))
3418 && !is_gimple_val (arg))
3419 || (!is_gimple_reg_type (TREE_TYPE (arg))
3420 && !is_gimple_lvalue (arg)))
3421 {
3422 error ("invalid argument to gimple call");
3423 debug_generic_expr (arg);
3424 return true;
3425 }
3426 }
3427
3428 return false;
3429 }
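
/* For example, a call to a nested function that refers to its parent's
   frame carries a static chain operand, shown in GIMPLE dumps roughly as

     bar (i_1) [static chain: &FRAME.1];

   (the exact FRAME name varies); the check above rejects such a chain
   when bar's decl is known and does not have DECL_STATIC_CHAIN set.  */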
3430
3431 /* Verifies the gimple comparison with the result type TYPE, the
3432 operands OP0 and OP1, and the comparison code CODE. */
3433
3434 static bool
3435 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3436 {
3437 tree op0_type = TREE_TYPE (op0);
3438 tree op1_type = TREE_TYPE (op1);
3439
3440 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3441 {
3442 error ("invalid operands in gimple comparison");
3443 return true;
3444 }
3445
3446 /* For comparisons we do not have the operation's type as the
3447 effective type the comparison is carried out in. Instead
3448 we require that either the first operand is trivially
3449 convertible into the second, or the other way around.
3450 Because we special-case pointers to void we allow
3451 comparisons of pointers with the same mode as well. */
3452 if (!useless_type_conversion_p (op0_type, op1_type)
3453 && !useless_type_conversion_p (op1_type, op0_type)
3454 && (!POINTER_TYPE_P (op0_type)
3455 || !POINTER_TYPE_P (op1_type)
3456 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3457 {
3458 error ("mismatching comparison operand types");
3459 debug_generic_expr (op0_type);
3460 debug_generic_expr (op1_type);
3461 return true;
3462 }
3463
3464 /* The resulting type of a comparison may be an effective boolean type. */
3465 if (INTEGRAL_TYPE_P (type)
3466 && (TREE_CODE (type) == BOOLEAN_TYPE
3467 || TYPE_PRECISION (type) == 1))
3468 {
3469 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3470 || TREE_CODE (op1_type) == VECTOR_TYPE)
3471 && code != EQ_EXPR && code != NE_EXPR
3472 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3473 && !VECTOR_INTEGER_TYPE_P (op0_type))
3474 {
3475 error ("unsupported operation or type for vector comparison"
3476 " returning a boolean");
3477 debug_generic_expr (op0_type);
3478 debug_generic_expr (op1_type);
3479 return true;
3480 }
3481 }
3482 /* Or a boolean vector type with the same element count
3483 as the comparison operand types. */
3484 else if (TREE_CODE (type) == VECTOR_TYPE
3485 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3486 {
3487 if (TREE_CODE (op0_type) != VECTOR_TYPE
3488 || TREE_CODE (op1_type) != VECTOR_TYPE)
3489 {
3490 error ("non-vector operands in vector comparison");
3491 debug_generic_expr (op0_type);
3492 debug_generic_expr (op1_type);
3493 return true;
3494 }
3495
3496 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3497 TYPE_VECTOR_SUBPARTS (op0_type)))
3498 {
3499 error ("invalid vector comparison resulting type");
3500 debug_generic_expr (type);
3501 return true;
3502 }
3503 }
3504 else
3505 {
3506 error ("bogus comparison result type");
3507 debug_generic_expr (type);
3508 return true;
3509 }
3510
3511 return false;
3512 }
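
/* For example, _1 = a_2 < b_3 with int operands may produce a boolean
   (or single-bit integral) result, while an element-wise comparison of
   two vector(4) int operands must produce a boolean vector type with
   four elements; the checks above reject a scalar boolean result for
   such a vector comparison unless the code is EQ_EXPR or NE_EXPR or
   the operands are boolean or integer vectors.  */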
3513
3514 /* Verify a gimple assignment statement STMT with a unary rhs.
3515 Returns true if anything is wrong. */
3516
3517 static bool
3518 verify_gimple_assign_unary (gassign *stmt)
3519 {
3520 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3521 tree lhs = gimple_assign_lhs (stmt);
3522 tree lhs_type = TREE_TYPE (lhs);
3523 tree rhs1 = gimple_assign_rhs1 (stmt);
3524 tree rhs1_type = TREE_TYPE (rhs1);
3525
3526 if (!is_gimple_reg (lhs))
3527 {
3528 error ("non-register as LHS of unary operation");
3529 return true;
3530 }
3531
3532 if (!is_gimple_val (rhs1))
3533 {
3534 error ("invalid operand in unary operation");
3535 return true;
3536 }
3537
3538 /* First handle conversions. */
3539 switch (rhs_code)
3540 {
3541 CASE_CONVERT:
3542 {
3543 /* Allow conversions from pointer type to integral type only if
3544 there is no sign or zero extension involved.
3545 For targets where the precision of ptrofftype doesn't match that
3546 of pointers we need to allow arbitrary conversions to ptrofftype. */
3547 if ((POINTER_TYPE_P (lhs_type)
3548 && INTEGRAL_TYPE_P (rhs1_type))
3549 || (POINTER_TYPE_P (rhs1_type)
3550 && INTEGRAL_TYPE_P (lhs_type)
3551 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3552 || ptrofftype_p (lhs_type))))
3553 return false;
3554
3555 /* Allow conversion from integral to offset type and vice versa. */
3556 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3557 && INTEGRAL_TYPE_P (rhs1_type))
3558 || (INTEGRAL_TYPE_P (lhs_type)
3559 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3560 return false;
3561
3562 /* Otherwise assert we are converting between types of the
3563 same kind. */
3564 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3565 {
3566 error ("invalid types in nop conversion");
3567 debug_generic_expr (lhs_type);
3568 debug_generic_expr (rhs1_type);
3569 return true;
3570 }
3571
3572 return false;
3573 }
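
      /* For example, truncating a 64-bit pointer to a 32-bit integer, or
         converting it to a same-sized integer, is accepted here, while
         widening a 32-bit pointer into a 64-bit integer in one step is
         not, since that would leave the kind of extension implicit; the
         exception is a conversion to ptrofftype on targets where pointer
         and ptrofftype precision differ.  */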
3574
3575 case ADDR_SPACE_CONVERT_EXPR:
3576 {
3577 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3578 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3579 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3580 {
3581 error ("invalid types in address space conversion");
3582 debug_generic_expr (lhs_type);
3583 debug_generic_expr (rhs1_type);
3584 return true;
3585 }
3586
3587 return false;
3588 }
3589
3590 case FIXED_CONVERT_EXPR:
3591 {
3592 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3593 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3594 {
3595 error ("invalid types in fixed-point conversion");
3596 debug_generic_expr (lhs_type);
3597 debug_generic_expr (rhs1_type);
3598 return true;
3599 }
3600
3601 return false;
3602 }
3603
3604 case FLOAT_EXPR:
3605 {
3606 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3607 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3608 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3609 {
3610 error ("invalid types in conversion to floating point");
3611 debug_generic_expr (lhs_type);
3612 debug_generic_expr (rhs1_type);
3613 return true;
3614 }
3615
3616 return false;
3617 }
3618
3619 case FIX_TRUNC_EXPR:
3620 {
3621 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3622 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3623 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3624 {
3625 error ("invalid types in conversion to integer");
3626 debug_generic_expr (lhs_type);
3627 debug_generic_expr (rhs1_type);
3628 return true;
3629 }
3630
3631 return false;
3632 }
3633
3634 case VEC_UNPACK_HI_EXPR:
3635 case VEC_UNPACK_LO_EXPR:
3636 case VEC_UNPACK_FLOAT_HI_EXPR:
3637 case VEC_UNPACK_FLOAT_LO_EXPR:
3638 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3639 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3640 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3641 || TREE_CODE (lhs_type) != VECTOR_TYPE
3642 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3643 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3644 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3645 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3646 || ((rhs_code == VEC_UNPACK_HI_EXPR
3647 || rhs_code == VEC_UNPACK_LO_EXPR)
3648 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3649 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3650 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3651 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3652 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3653 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3654 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3655 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3656 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3657 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3658 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3659 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3660 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3661 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3662 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3663 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3664 {
3665 error ("type mismatch in vector unpack expression");
3666 debug_generic_expr (lhs_type);
3667 debug_generic_expr (rhs1_type);
3668 return true;
3669 }
3670
3671 return false;
3672
3673 case NEGATE_EXPR:
3674 case ABS_EXPR:
3675 case BIT_NOT_EXPR:
3676 case PAREN_EXPR:
3677 case CONJ_EXPR:
3678 break;
3679
3680 case ABSU_EXPR:
3681 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3682 || !TYPE_UNSIGNED (lhs_type)
3683 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3684 || TYPE_UNSIGNED (rhs1_type)
3685 || element_precision (lhs_type) != element_precision (rhs1_type))
3686 {
3687 error ("invalid types for ABSU_EXPR");
3688 debug_generic_expr (lhs_type);
3689 debug_generic_expr (rhs1_type);
3690 return true;
3691 }
3692 return false;
3693
3694 case VEC_DUPLICATE_EXPR:
3695 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3696 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3697 {
3698 error ("vec_duplicate should be from a scalar to a like vector");
3699 debug_generic_expr (lhs_type);
3700 debug_generic_expr (rhs1_type);
3701 return true;
3702 }
3703 return false;
3704
3705 default:
3706 gcc_unreachable ();
3707 }
3708
3709 /* For the remaining codes assert there is no conversion involved. */
3710 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3711 {
3712 error ("non-trivial conversion in unary operation");
3713 debug_generic_expr (lhs_type);
3714 debug_generic_expr (rhs1_type);
3715 return true;
3716 }
3717
3718 return false;
3719 }
3720
3721 /* Verify a gimple assignment statement STMT with a binary rhs.
3722 Returns true if anything is wrong. */
3723
3724 static bool
3725 verify_gimple_assign_binary (gassign *stmt)
3726 {
3727 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3728 tree lhs = gimple_assign_lhs (stmt);
3729 tree lhs_type = TREE_TYPE (lhs);
3730 tree rhs1 = gimple_assign_rhs1 (stmt);
3731 tree rhs1_type = TREE_TYPE (rhs1);
3732 tree rhs2 = gimple_assign_rhs2 (stmt);
3733 tree rhs2_type = TREE_TYPE (rhs2);
3734
3735 if (!is_gimple_reg (lhs))
3736 {
3737 error ("non-register as LHS of binary operation");
3738 return true;
3739 }
3740
3741 if (!is_gimple_val (rhs1)
3742 || !is_gimple_val (rhs2))
3743 {
3744 error ("invalid operands in binary operation");
3745 return true;
3746 }
3747
3748 /* First handle operations that involve different types. */
3749 switch (rhs_code)
3750 {
3751 case COMPLEX_EXPR:
3752 {
3753 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3754 || !(INTEGRAL_TYPE_P (rhs1_type)
3755 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3756 || !(INTEGRAL_TYPE_P (rhs2_type)
3757 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3758 {
3759 error ("type mismatch in complex expression");
3760 debug_generic_expr (lhs_type);
3761 debug_generic_expr (rhs1_type);
3762 debug_generic_expr (rhs2_type);
3763 return true;
3764 }
3765
3766 return false;
3767 }
3768
3769 case LSHIFT_EXPR:
3770 case RSHIFT_EXPR:
3771 case LROTATE_EXPR:
3772 case RROTATE_EXPR:
3773 {
3774 /* Shifts and rotates are ok on integral types, fixed point
3775 types and integer vector types. */
3776 if ((!INTEGRAL_TYPE_P (rhs1_type)
3777 && !FIXED_POINT_TYPE_P (rhs1_type)
3778 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3779 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3780 || (!INTEGRAL_TYPE_P (rhs2_type)
3781 /* Vector shifts of vectors are also ok. */
3782 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3783 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3784 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3785 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3786 || !useless_type_conversion_p (lhs_type, rhs1_type))
3787 {
3788 error ("type mismatch in shift expression");
3789 debug_generic_expr (lhs_type);
3790 debug_generic_expr (rhs1_type);
3791 debug_generic_expr (rhs2_type);
3792 return true;
3793 }
3794
3795 return false;
3796 }
3797
3798 case WIDEN_LSHIFT_EXPR:
3799 {
3800 if (!INTEGRAL_TYPE_P (lhs_type)
3801 || !INTEGRAL_TYPE_P (rhs1_type)
3802 || TREE_CODE (rhs2) != INTEGER_CST
3803 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3804 {
3805 error ("type mismatch in widening shift expression");
3806 debug_generic_expr (lhs_type);
3807 debug_generic_expr (rhs1_type);
3808 debug_generic_expr (rhs2_type);
3809 return true;
3810 }
3811
3812 return false;
3813 }
3814
3815 case VEC_WIDEN_LSHIFT_HI_EXPR:
3816 case VEC_WIDEN_LSHIFT_LO_EXPR:
3817 {
3818 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3819 || TREE_CODE (lhs_type) != VECTOR_TYPE
3820 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3821 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3822 || TREE_CODE (rhs2) != INTEGER_CST
3823 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3824 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3825 {
3826 error ("type mismatch in widening vector shift expression");
3827 debug_generic_expr (lhs_type);
3828 debug_generic_expr (rhs1_type);
3829 debug_generic_expr (rhs2_type);
3830 return true;
3831 }
3832
3833 return false;
3834 }
3835
3836 case PLUS_EXPR:
3837 case MINUS_EXPR:
3838 {
3839 tree lhs_etype = lhs_type;
3840 tree rhs1_etype = rhs1_type;
3841 tree rhs2_etype = rhs2_type;
3842 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3843 {
3844 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3845 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3846 {
3847 error ("invalid non-vector operands to vector valued plus");
3848 return true;
3849 }
3850 lhs_etype = TREE_TYPE (lhs_type);
3851 rhs1_etype = TREE_TYPE (rhs1_type);
3852 rhs2_etype = TREE_TYPE (rhs2_type);
3853 }
3854 if (POINTER_TYPE_P (lhs_etype)
3855 || POINTER_TYPE_P (rhs1_etype)
3856 || POINTER_TYPE_P (rhs2_etype))
3857 {
3858 error ("invalid (pointer) operands to plus/minus");
3859 return true;
3860 }
3861
3862 /* Continue with generic binary expression handling. */
3863 break;
3864 }
3865
3866 case POINTER_PLUS_EXPR:
3867 {
3868 if (!POINTER_TYPE_P (rhs1_type)
3869 || !useless_type_conversion_p (lhs_type, rhs1_type)
3870 || !ptrofftype_p (rhs2_type))
3871 {
3872 error ("type mismatch in pointer plus expression");
3873 debug_generic_stmt (lhs_type);
3874 debug_generic_stmt (rhs1_type);
3875 debug_generic_stmt (rhs2_type);
3876 return true;
3877 }
3878
3879 return false;
3880 }
3881
3882 case POINTER_DIFF_EXPR:
3883 {
3884 if (!POINTER_TYPE_P (rhs1_type)
3885 || !POINTER_TYPE_P (rhs2_type)
3886 /* Because we special-case pointers to void we allow difference
3887 of arbitrary pointers with the same mode. */
3888 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3889 || TREE_CODE (lhs_type) != INTEGER_TYPE
3890 || TYPE_UNSIGNED (lhs_type)
3891 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3892 {
3893 error ("type mismatch in pointer diff expression");
3894 debug_generic_stmt (lhs_type);
3895 debug_generic_stmt (rhs1_type);
3896 debug_generic_stmt (rhs2_type);
3897 return true;
3898 }
3899
3900 return false;
3901 }
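/* E.g. (a hypothetical sketch), because pointers to void are
special-cased,
d_1 = p_2 - q_3;
verifies as a POINTER_DIFF_EXPR even when p_2 is an int * and q_3 a
void *, provided both pointer types have the same mode and d_1 is a
signed integer of matching precision. */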
3902
3903 case TRUTH_ANDIF_EXPR:
3904 case TRUTH_ORIF_EXPR:
3905 case TRUTH_AND_EXPR:
3906 case TRUTH_OR_EXPR:
3907 case TRUTH_XOR_EXPR:
3908
3909 gcc_unreachable ();
3910
3911 case LT_EXPR:
3912 case LE_EXPR:
3913 case GT_EXPR:
3914 case GE_EXPR:
3915 case EQ_EXPR:
3916 case NE_EXPR:
3917 case UNORDERED_EXPR:
3918 case ORDERED_EXPR:
3919 case UNLT_EXPR:
3920 case UNLE_EXPR:
3921 case UNGT_EXPR:
3922 case UNGE_EXPR:
3923 case UNEQ_EXPR:
3924 case LTGT_EXPR:
3925 /* Comparisons are also binary, but the result type is not
3926 connected to the operand types. */
3927 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3928
3929 case WIDEN_MULT_EXPR:
3930 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3931 return true;
3932 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3933 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3934
3935 case WIDEN_SUM_EXPR:
3936 {
3937 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3938 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3939 && ((!INTEGRAL_TYPE_P (rhs1_type)
3940 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3941 || (!INTEGRAL_TYPE_P (lhs_type)
3942 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3943 || !useless_type_conversion_p (lhs_type, rhs2_type)
3944 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3945 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3946 {
3947 error ("type mismatch in widening sum reduction");
3948 debug_generic_expr (lhs_type);
3949 debug_generic_expr (rhs1_type);
3950 debug_generic_expr (rhs2_type);
3951 return true;
3952 }
3953 return false;
3954 }
3955
3956 case VEC_WIDEN_MULT_HI_EXPR:
3957 case VEC_WIDEN_MULT_LO_EXPR:
3958 case VEC_WIDEN_MULT_EVEN_EXPR:
3959 case VEC_WIDEN_MULT_ODD_EXPR:
3960 {
3961 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3962 || TREE_CODE (lhs_type) != VECTOR_TYPE
3963 || !types_compatible_p (rhs1_type, rhs2_type)
3964 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3965 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3966 {
3967 error ("type mismatch in vector widening multiplication");
3968 debug_generic_expr (lhs_type);
3969 debug_generic_expr (rhs1_type);
3970 debug_generic_expr (rhs2_type);
3971 return true;
3972 }
3973 return false;
3974 }
3975
3976 case VEC_PACK_TRUNC_EXPR:
3977 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
3978 vector boolean types. */
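/* Sketch with hypothetical mask types: given two compatible boolean
vectors with N subparts each,
r_1 = VEC_PACK_TRUNC_EXPR <m1_2, m2_3>;
is accepted when the result is a boolean vector with 2*N subparts,
which is what the known_eq test below verifies. */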
3979 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
3980 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3981 && types_compatible_p (rhs1_type, rhs2_type)
3982 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3983 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
3984 return false;
3985
3986 /* Fallthru. */
3987 case VEC_PACK_SAT_EXPR:
3988 case VEC_PACK_FIX_TRUNC_EXPR:
3989 {
3990 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3991 || TREE_CODE (lhs_type) != VECTOR_TYPE
3992 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
3993 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3994 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
3995 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3996 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
3997 || !types_compatible_p (rhs1_type, rhs2_type)
3998 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
3999 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4000 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4001 TYPE_VECTOR_SUBPARTS (lhs_type)))
4002 {
4003 error ("type mismatch in vector pack expression");
4004 debug_generic_expr (lhs_type);
4005 debug_generic_expr (rhs1_type);
4006 debug_generic_expr (rhs2_type);
4007 return true;
4008 }
4009
4010 return false;
4011 }
4012
4013 case VEC_PACK_FLOAT_EXPR:
4014 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4015 || TREE_CODE (lhs_type) != VECTOR_TYPE
4016 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4017 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4018 || !types_compatible_p (rhs1_type, rhs2_type)
4019 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4020 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4021 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4022 TYPE_VECTOR_SUBPARTS (lhs_type)))
4023 {
4024 error ("type mismatch in vector pack expression");
4025 debug_generic_expr (lhs_type);
4026 debug_generic_expr (rhs1_type);
4027 debug_generic_expr (rhs2_type);
4028 return true;
4029 }
4030
4031 return false;
4032
4033 case MULT_EXPR:
4034 case MULT_HIGHPART_EXPR:
4035 case TRUNC_DIV_EXPR:
4036 case CEIL_DIV_EXPR:
4037 case FLOOR_DIV_EXPR:
4038 case ROUND_DIV_EXPR:
4039 case TRUNC_MOD_EXPR:
4040 case CEIL_MOD_EXPR:
4041 case FLOOR_MOD_EXPR:
4042 case ROUND_MOD_EXPR:
4043 case RDIV_EXPR:
4044 case EXACT_DIV_EXPR:
4045 case MIN_EXPR:
4046 case MAX_EXPR:
4047 case BIT_IOR_EXPR:
4048 case BIT_XOR_EXPR:
4049 case BIT_AND_EXPR:
4050 /* Continue with generic binary expression handling. */
4051 break;
4052
4053 case VEC_SERIES_EXPR:
4054 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4055 {
4056 error ("type mismatch in series expression");
4057 debug_generic_expr (rhs1_type);
4058 debug_generic_expr (rhs2_type);
4059 return true;
4060 }
4061 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4062 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4063 {
4064 error ("vector type expected in series expression");
4065 debug_generic_expr (lhs_type);
4066 return true;
4067 }
4068 return false;
4069
4070 default:
4071 gcc_unreachable ();
4072 }
4073
4074 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4075 || !useless_type_conversion_p (lhs_type, rhs2_type))
4076 {
4077 error ("type mismatch in binary expression");
4078 debug_generic_stmt (lhs_type);
4079 debug_generic_stmt (rhs1_type);
4080 debug_generic_stmt (rhs2_type);
4081 return true;
4082 }
4083
4084 return false;
4085 }
4086
4087 /* Verify a gimple assignment statement STMT with a ternary rhs.
4088 Returns true if anything is wrong. */
4089
4090 static bool
4091 verify_gimple_assign_ternary (gassign *stmt)
4092 {
4093 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4094 tree lhs = gimple_assign_lhs (stmt);
4095 tree lhs_type = TREE_TYPE (lhs);
4096 tree rhs1 = gimple_assign_rhs1 (stmt);
4097 tree rhs1_type = TREE_TYPE (rhs1);
4098 tree rhs2 = gimple_assign_rhs2 (stmt);
4099 tree rhs2_type = TREE_TYPE (rhs2);
4100 tree rhs3 = gimple_assign_rhs3 (stmt);
4101 tree rhs3_type = TREE_TYPE (rhs3);
4102
4103 if (!is_gimple_reg (lhs))
4104 {
4105 error ("non-register as LHS of ternary operation");
4106 return true;
4107 }
4108
4109 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4110 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4111 || !is_gimple_val (rhs2)
4112 || !is_gimple_val (rhs3))
4113 {
4114 error ("invalid operands in ternary operation");
4115 return true;
4116 }
4117
4118 /* First handle operations that involve different types. */
4119 switch (rhs_code)
4120 {
4121 case WIDEN_MULT_PLUS_EXPR:
4122 case WIDEN_MULT_MINUS_EXPR:
4123 if ((!INTEGRAL_TYPE_P (rhs1_type)
4124 && !FIXED_POINT_TYPE_P (rhs1_type))
4125 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4126 || !useless_type_conversion_p (lhs_type, rhs3_type)
4127 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4128 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4129 {
4130 error ("type mismatch in widening multiply-accumulate expression");
4131 debug_generic_expr (lhs_type);
4132 debug_generic_expr (rhs1_type);
4133 debug_generic_expr (rhs2_type);
4134 debug_generic_expr (rhs3_type);
4135 return true;
4136 }
4137 break;
4138
4139 case VEC_COND_EXPR:
4140 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4141 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4142 TYPE_VECTOR_SUBPARTS (lhs_type)))
4143 {
4144 error ("the first argument of a VEC_COND_EXPR must be of a "
4145 "boolean vector type with the same number of elements "
4146 "as the result");
4147 debug_generic_expr (lhs_type);
4148 debug_generic_expr (rhs1_type);
4149 return true;
4150 }
4151 /* Fallthrough. */
4152 case COND_EXPR:
4153 if (!is_gimple_val (rhs1)
4154 && verify_gimple_comparison (TREE_TYPE (rhs1),
4155 TREE_OPERAND (rhs1, 0),
4156 TREE_OPERAND (rhs1, 1),
4157 TREE_CODE (rhs1)))
4158 return true;
4159 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4160 || !useless_type_conversion_p (lhs_type, rhs3_type))
4161 {
4162 error ("type mismatch in conditional expression");
4163 debug_generic_expr (lhs_type);
4164 debug_generic_expr (rhs2_type);
4165 debug_generic_expr (rhs3_type);
4166 return true;
4167 }
4168 break;
4169
4170 case VEC_PERM_EXPR:
4171 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4172 || !useless_type_conversion_p (lhs_type, rhs2_type))
4173 {
4174 error ("type mismatch in vector permute expression");
4175 debug_generic_expr (lhs_type);
4176 debug_generic_expr (rhs1_type);
4177 debug_generic_expr (rhs2_type);
4178 debug_generic_expr (rhs3_type);
4179 return true;
4180 }
4181
4182 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4183 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4184 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4185 {
4186 error ("vector types expected in vector permute expression");
4187 debug_generic_expr (lhs_type);
4188 debug_generic_expr (rhs1_type);
4189 debug_generic_expr (rhs2_type);
4190 debug_generic_expr (rhs3_type);
4191 return true;
4192 }
4193
4194 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4195 TYPE_VECTOR_SUBPARTS (rhs2_type))
4196 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4197 TYPE_VECTOR_SUBPARTS (rhs3_type))
4198 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4199 TYPE_VECTOR_SUBPARTS (lhs_type)))
4200 {
4201 error ("vectors with different numbers of elements found "
4202 "in vector permute expression");
4203 debug_generic_expr (lhs_type);
4204 debug_generic_expr (rhs1_type);
4205 debug_generic_expr (rhs2_type);
4206 debug_generic_expr (rhs3_type);
4207 return true;
4208 }
4209
4210 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4211 || (TREE_CODE (rhs3) != VECTOR_CST
4212 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4213 (TREE_TYPE (rhs3_type)))
4214 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4215 (TREE_TYPE (rhs1_type))))))
4216 {
4217 error ("invalid mask type in vector permute expression");
4218 debug_generic_expr (lhs_type);
4219 debug_generic_expr (rhs1_type);
4220 debug_generic_expr (rhs2_type);
4221 debug_generic_expr (rhs3_type);
4222 return true;
4223 }
4224
4225 return false;
4226
4227 case SAD_EXPR:
4228 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4229 || !useless_type_conversion_p (lhs_type, rhs3_type)
4230 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4231 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4232 {
4233 error ("type mismatch in sad expression");
4234 debug_generic_expr (lhs_type);
4235 debug_generic_expr (rhs1_type);
4236 debug_generic_expr (rhs2_type);
4237 debug_generic_expr (rhs3_type);
4238 return true;
4239 }
4240
4241 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4242 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4243 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4244 {
4245 error ("vector types expected in sad expression");
4246 debug_generic_expr (lhs_type);
4247 debug_generic_expr (rhs1_type);
4248 debug_generic_expr (rhs2_type);
4249 debug_generic_expr (rhs3_type);
4250 return true;
4251 }
4252
4253 return false;
4254
4255 case BIT_INSERT_EXPR:
4256 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4257 {
4258 error ("type mismatch in BIT_INSERT_EXPR");
4259 debug_generic_expr (lhs_type);
4260 debug_generic_expr (rhs1_type);
4261 return true;
4262 }
4263 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4264 && INTEGRAL_TYPE_P (rhs2_type))
4265 || (VECTOR_TYPE_P (rhs1_type)
4266 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4267 {
4268 error ("invalid type combination in BIT_INSERT_EXPR");
4269 debug_generic_expr (rhs1_type);
4270 debug_generic_expr (rhs2_type);
4271 return true;
4272 }
4273 if (! tree_fits_uhwi_p (rhs3)
4274 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4275 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4276 {
4277 error ("invalid position or size in BIT_INSERT_EXPR");
4278 return true;
4279 }
4280 if (INTEGRAL_TYPE_P (rhs1_type)
4281 && !type_has_mode_precision_p (rhs1_type))
4282 {
4283 error ("BIT_INSERT_EXPR into non-mode-precision operand");
4284 return true;
4285 }
4286 if (INTEGRAL_TYPE_P (rhs1_type))
4287 {
4288 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4289 if (bitpos >= TYPE_PRECISION (rhs1_type)
4290 || (bitpos + TYPE_PRECISION (rhs2_type)
4291 > TYPE_PRECISION (rhs1_type)))
4292 {
4293 error ("insertion out of range in BIT_INSERT_EXPR");
4294 return true;
4295 }
4296 }
4297 else if (VECTOR_TYPE_P (rhs1_type))
4298 {
4299 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4300 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4301 if (bitpos % bitsize != 0)
4302 {
4303 error ("vector insertion not at element boundary");
4304 return true;
4305 }
4306 }
4307 return false;
4308
4309 case DOT_PROD_EXPR:
4310 {
4311 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4312 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4313 && ((!INTEGRAL_TYPE_P (rhs1_type)
4314 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4315 || (!INTEGRAL_TYPE_P (lhs_type)
4316 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4317 || !types_compatible_p (rhs1_type, rhs2_type)
4318 || !useless_type_conversion_p (lhs_type, rhs3_type)
4319 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4320 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4321 {
4322 error ("type mismatch in dot product reduction");
4323 debug_generic_expr (lhs_type);
4324 debug_generic_expr (rhs1_type);
4325 debug_generic_expr (rhs2_type);
4326 return true;
4327 }
4328 return false;
4329 }
4330
4331 case REALIGN_LOAD_EXPR:
4332 /* FIXME. */
4333 return false;
4334
4335 default:
4336 gcc_unreachable ();
4337 }
4338 return false;
4339 }
4340
4341 /* Verify a gimple assignment statement STMT with a single rhs.
4342 Returns true if anything is wrong. */
4343
4344 static bool
4345 verify_gimple_assign_single (gassign *stmt)
4346 {
4347 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4348 tree lhs = gimple_assign_lhs (stmt);
4349 tree lhs_type = TREE_TYPE (lhs);
4350 tree rhs1 = gimple_assign_rhs1 (stmt);
4351 tree rhs1_type = TREE_TYPE (rhs1);
4352 bool res = false;
4353
4354 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4355 {
4356 error ("non-trivial conversion at assignment");
4357 debug_generic_expr (lhs_type);
4358 debug_generic_expr (rhs1_type);
4359 return true;
4360 }
4361
4362 if (gimple_clobber_p (stmt)
4363 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4364 {
4365 error ("non-decl/MEM_REF LHS in clobber statement");
4366 debug_generic_expr (lhs);
4367 return true;
4368 }
4369
4370 if (handled_component_p (lhs)
4371 || TREE_CODE (lhs) == MEM_REF
4372 || TREE_CODE (lhs) == TARGET_MEM_REF)
4373 res |= verify_types_in_gimple_reference (lhs, true);
4374
4375 /* Special codes we cannot handle via their class. */
4376 switch (rhs_code)
4377 {
4378 case ADDR_EXPR:
4379 {
4380 tree op = TREE_OPERAND (rhs1, 0);
4381 if (!is_gimple_addressable (op))
4382 {
4383 error ("invalid operand in unary expression");
4384 return true;
4385 }
4386
4387 /* Technically there is no longer a need for matching types, but
4388 gimple hygiene asks for this check. In LTO we can combine
4389 incompatible units and thus end up with addresses of globals
4390 whose type has been changed to a common one. */
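/* As a hypothetical example, one translation unit may declare
"extern int g[];" while another defines "int g[4];"; after LTO
merging, TREE_TYPE of the ADDR_EXPR &g need no longer match the
type of the merged declaration exactly, hence the !in_lto_p
guard below. */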
4391 if (!in_lto_p
4392 && !types_compatible_p (TREE_TYPE (op),
4393 TREE_TYPE (TREE_TYPE (rhs1)))
4394 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4395 TREE_TYPE (op)))
4396 {
4397 error ("type mismatch in address expression");
4398 debug_generic_stmt (TREE_TYPE (rhs1));
4399 debug_generic_stmt (TREE_TYPE (op));
4400 return true;
4401 }
4402
4403 return (verify_address (rhs1, true)
4404 || verify_types_in_gimple_reference (op, true));
4405 }
4406
4407 /* tcc_reference */
4408 case INDIRECT_REF:
4409 error ("INDIRECT_REF in gimple IL");
4410 return true;
4411
4412 case COMPONENT_REF:
4413 case BIT_FIELD_REF:
4414 case ARRAY_REF:
4415 case ARRAY_RANGE_REF:
4416 case VIEW_CONVERT_EXPR:
4417 case REALPART_EXPR:
4418 case IMAGPART_EXPR:
4419 case TARGET_MEM_REF:
4420 case MEM_REF:
4421 if (!is_gimple_reg (lhs)
4422 && is_gimple_reg_type (TREE_TYPE (lhs)))
4423 {
4424 error ("invalid rhs for gimple memory store");
4425 debug_generic_stmt (lhs);
4426 debug_generic_stmt (rhs1);
4427 return true;
4428 }
4429 return res || verify_types_in_gimple_reference (rhs1, false);
4430
4431 /* tcc_constant */
4432 case SSA_NAME:
4433 case INTEGER_CST:
4434 case REAL_CST:
4435 case FIXED_CST:
4436 case COMPLEX_CST:
4437 case VECTOR_CST:
4438 case STRING_CST:
4439 return res;
4440
4441 /* tcc_declaration */
4442 case CONST_DECL:
4443 return res;
4444 case VAR_DECL:
4445 case PARM_DECL:
4446 if (!is_gimple_reg (lhs)
4447 && !is_gimple_reg (rhs1)
4448 && is_gimple_reg_type (TREE_TYPE (lhs)))
4449 {
4450 error ("invalid rhs for gimple memory store");
4451 debug_generic_stmt (lhs);
4452 debug_generic_stmt (rhs1);
4453 return true;
4454 }
4455 return res;
4456
4457 case CONSTRUCTOR:
4458 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4459 {
4460 unsigned int i;
4461 tree elt_i, elt_v, elt_t = NULL_TREE;
4462
4463 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4464 return res;
4465 /* For vector CONSTRUCTORs we require that either it is an empty
4466 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4467 (then the element count must be correct to cover the whole
4468 outer vector and the index must be NULL on all elements), or it
4469 is a CONSTRUCTOR of scalar elements, where as an exception we
4470 allow a smaller number of elements (assuming zero filling) and
4471 consecutive indexes as compared to NULL indexes (such
4472 CONSTRUCTORs can appear in the IL from FEs). */
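/* Illustrative sketch (hypothetical types and names): for a V4SI
result both
{ v2si_1, v2si_2 } two V2SI halves, NULL indexes
{ i_1, i_2, i_3 } scalars, zero-filled up to 4 elements
are accepted, while mixing element types, supplying explicit
non-consecutive indexes, or exceeding the subpart count of the
outer vector is rejected by the checks below. */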
4473 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4474 {
4475 if (elt_t == NULL_TREE)
4476 {
4477 elt_t = TREE_TYPE (elt_v);
4478 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4479 {
4480 tree elt_t = TREE_TYPE (elt_v);
4481 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4482 TREE_TYPE (elt_t)))
4483 {
4484 error ("incorrect type of vector CONSTRUCTOR"
4485 " elements");
4486 debug_generic_stmt (rhs1);
4487 return true;
4488 }
4489 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4490 * TYPE_VECTOR_SUBPARTS (elt_t),
4491 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4492 {
4493 error ("incorrect number of vector CONSTRUCTOR"
4494 " elements");
4495 debug_generic_stmt (rhs1);
4496 return true;
4497 }
4498 }
4499 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4500 elt_t))
4501 {
4502 error ("incorrect type of vector CONSTRUCTOR elements");
4503 debug_generic_stmt (rhs1);
4504 return true;
4505 }
4506 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4507 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4508 {
4509 error ("incorrect number of vector CONSTRUCTOR elements");
4510 debug_generic_stmt (rhs1);
4511 return true;
4512 }
4513 }
4514 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4515 {
4516 error ("incorrect type of vector CONSTRUCTOR elements");
4517 debug_generic_stmt (rhs1);
4518 return true;
4519 }
4520 if (elt_i != NULL_TREE
4521 && (TREE_CODE (elt_t) == VECTOR_TYPE
4522 || TREE_CODE (elt_i) != INTEGER_CST
4523 || compare_tree_int (elt_i, i) != 0))
4524 {
4525 error ("vector CONSTRUCTOR with non-NULL element index");
4526 debug_generic_stmt (rhs1);
4527 return true;
4528 }
4529 if (!is_gimple_val (elt_v))
4530 {
4531 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4532 debug_generic_stmt (rhs1);
4533 return true;
4534 }
4535 }
4536 }
4537 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4538 {
4539 error ("non-vector CONSTRUCTOR with elements");
4540 debug_generic_stmt (rhs1);
4541 return true;
4542 }
4543 return res;
4544
4545 case ASSERT_EXPR:
4546 /* FIXME. */
4547 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4548 if (rhs1 == boolean_false_node)
4549 {
4550 error ("ASSERT_EXPR with an always-false condition");
4551 debug_generic_stmt (rhs1);
4552 return true;
4553 }
4554 break;
4555
4556 case OBJ_TYPE_REF:
4557 case WITH_SIZE_EXPR:
4558 /* FIXME. */
4559 return res;
4560
4561 default:;
4562 }
4563
4564 return res;
4565 }
4566
4567 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4568 is a problem, otherwise false. */
4569
4570 static bool
4571 verify_gimple_assign (gassign *stmt)
4572 {
4573 switch (gimple_assign_rhs_class (stmt))
4574 {
4575 case GIMPLE_SINGLE_RHS:
4576 return verify_gimple_assign_single (stmt);
4577
4578 case GIMPLE_UNARY_RHS:
4579 return verify_gimple_assign_unary (stmt);
4580
4581 case GIMPLE_BINARY_RHS:
4582 return verify_gimple_assign_binary (stmt);
4583
4584 case GIMPLE_TERNARY_RHS:
4585 return verify_gimple_assign_ternary (stmt);
4586
4587 default:
4588 gcc_unreachable ();
4589 }
4590 }
4591
4592 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4593 is a problem, otherwise false. */
4594
4595 static bool
4596 verify_gimple_return (greturn *stmt)
4597 {
4598 tree op = gimple_return_retval (stmt);
4599 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4600
4601 /* We cannot test for the presence of return values as we do not fix
4602 up missing return values from the original source. */
4603 if (op == NULL)
4604 return false;
4605
4606 if (!is_gimple_val (op)
4607 && TREE_CODE (op) != RESULT_DECL)
4608 {
4609 error ("invalid operand in return statement");
4610 debug_generic_stmt (op);
4611 return true;
4612 }
4613
4614 if ((TREE_CODE (op) == RESULT_DECL
4615 && DECL_BY_REFERENCE (op))
4616 || (TREE_CODE (op) == SSA_NAME
4617 && SSA_NAME_VAR (op)
4618 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4619 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4620 op = TREE_TYPE (op);
4621
4622 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4623 {
4624 error ("invalid conversion in return statement");
4625 debug_generic_stmt (restype);
4626 debug_generic_stmt (TREE_TYPE (op));
4627 return true;
4628 }
4629
4630 return false;
4631 }
4632
4633
4634 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4635 is a problem, otherwise false. */
4636
4637 static bool
4638 verify_gimple_goto (ggoto *stmt)
4639 {
4640 tree dest = gimple_goto_dest (stmt);
4641
4642 /* ??? We have two canonical forms of direct goto destinations, a
4643 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
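/* Sketch: a direct "goto lab;" carries the bare LABEL_DECL (or an
ADDR_EXPR of it), while a computed "goto *p_1;" carries a
POINTER_TYPE gimple value; anything else is diagnosed below. */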
4644 if (TREE_CODE (dest) != LABEL_DECL
4645 && (!is_gimple_val (dest)
4646 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4647 {
4648 error ("goto destination is neither a label nor a pointer");
4649 return true;
4650 }
4651
4652 return false;
4653 }
4654
4655 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4656 is a problem, otherwise false. */
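/* Sketch of the invariants checked below (hypothetical labels): the
label vector must look like
default:, case 1:, case 3 ... 5:, case 9:
i.e. the default label first with no CASE_LOW/HIGH/CHAIN, every
other label with a CASE_LOW of one common type, ranges with
CASE_LOW < CASE_HIGH, and each label strictly above the previous
upper bound. */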
4657
4658 static bool
4659 verify_gimple_switch (gswitch *stmt)
4660 {
4661 unsigned int i, n;
4662 tree elt, prev_upper_bound = NULL_TREE;
4663 tree index_type, elt_type = NULL_TREE;
4664
4665 if (!is_gimple_val (gimple_switch_index (stmt)))
4666 {
4667 error ("invalid operand to switch statement");
4668 debug_generic_stmt (gimple_switch_index (stmt));
4669 return true;
4670 }
4671
4672 index_type = TREE_TYPE (gimple_switch_index (stmt));
4673 if (! INTEGRAL_TYPE_P (index_type))
4674 {
4675 error ("non-integral type switch statement");
4676 debug_generic_expr (index_type);
4677 return true;
4678 }
4679
4680 elt = gimple_switch_label (stmt, 0);
4681 if (CASE_LOW (elt) != NULL_TREE
4682 || CASE_HIGH (elt) != NULL_TREE
4683 || CASE_CHAIN (elt) != NULL_TREE)
4684 {
4685 error ("invalid default case label in switch statement");
4686 debug_generic_expr (elt);
4687 return true;
4688 }
4689
4690 n = gimple_switch_num_labels (stmt);
4691 for (i = 1; i < n; i++)
4692 {
4693 elt = gimple_switch_label (stmt, i);
4694
4695 if (CASE_CHAIN (elt))
4696 {
4697 error ("invalid CASE_CHAIN");
4698 debug_generic_expr (elt);
4699 return true;
4700 }
4701 if (! CASE_LOW (elt))
4702 {
4703 error ("invalid case label in switch statement");
4704 debug_generic_expr (elt);
4705 return true;
4706 }
4707 if (CASE_HIGH (elt)
4708 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4709 {
4710 error ("invalid case range in switch statement");
4711 debug_generic_expr (elt);
4712 return true;
4713 }
4714
4715 if (elt_type)
4716 {
4717 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4718 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4719 {
4720 error ("type mismatch for case label in switch statement");
4721 debug_generic_expr (elt);
4722 return true;
4723 }
4724 }
4725 else
4726 {
4727 elt_type = TREE_TYPE (CASE_LOW (elt));
4728 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4729 {
4730 error ("type precision mismatch in switch statement");
4731 return true;
4732 }
4733 }
4734
4735 if (prev_upper_bound)
4736 {
4737 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4738 {
4739 error ("case labels not sorted in switch statement");
4740 return true;
4741 }
4742 }
4743
4744 prev_upper_bound = CASE_HIGH (elt);
4745 if (! prev_upper_bound)
4746 prev_upper_bound = CASE_LOW (elt);
4747 }
4748
4749 return false;
4750 }
4751
4752 /* Verify a gimple debug statement STMT.
4753 Returns true if anything is wrong. */
4754
4755 static bool
4756 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4757 {
4758 /* There isn't much that could be wrong in a gimple debug stmt. A
4759 gimple debug bind stmt, for example, maps a tree (usually a
4760 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4761 member of an aggregate type) to another tree that can be an
4762 arbitrary expression. These stmts expand into debug insns, and
4763 are converted to debug notes by var-tracking.c. */
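/* E.g. (hypothetical), after the user variable x is optimized away
one may see
# DEBUG x => y_1 + 1
binding x to an arbitrary value expression. */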
4764 return false;
4765 }
4766
4767 /* Verify a gimple label statement STMT.
4768 Returns true if anything is wrong. */
4769
4770 static bool
4771 verify_gimple_label (glabel *stmt)
4772 {
4773 tree decl = gimple_label_label (stmt);
4774 int uid;
4775 bool err = false;
4776
4777 if (TREE_CODE (decl) != LABEL_DECL)
4778 return true;
4779 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4780 && DECL_CONTEXT (decl) != current_function_decl)
4781 {
4782 error ("label%'s context is not the current function decl");
4783 err |= true;
4784 }
4785
4786 uid = LABEL_DECL_UID (decl);
4787 if (cfun->cfg
4788 && (uid == -1
4789 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4790 {
4791 error ("incorrect entry in label_to_block_map");
4792 err |= true;
4793 }
4794
4795 uid = EH_LANDING_PAD_NR (decl);
4796 if (uid)
4797 {
4798 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4799 if (decl != lp->post_landing_pad)
4800 {
4801 error ("incorrect setting of landing pad number");
4802 err |= true;
4803 }
4804 }
4805
4806 return err;
4807 }
4808
4809 /* Verify a gimple cond statement STMT.
4810 Returns true if anything is wrong. */
4811
4812 static bool
4813 verify_gimple_cond (gcond *stmt)
4814 {
4815 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4816 {
4817 error ("invalid comparison code in gimple cond");
4818 return true;
4819 }
4820 if (!(!gimple_cond_true_label (stmt)
4821 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4822 || !(!gimple_cond_false_label (stmt)
4823 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4824 {
4825 error ("invalid labels in gimple cond");
4826 return true;
4827 }
4828
4829 return verify_gimple_comparison (boolean_type_node,
4830 gimple_cond_lhs (stmt),
4831 gimple_cond_rhs (stmt),
4832 gimple_cond_code (stmt));
4833 }
4834
4835 /* Verify the GIMPLE statement STMT. Returns true if there is an
4836 error, otherwise false. */
4837
4838 static bool
4839 verify_gimple_stmt (gimple *stmt)
4840 {
4841 switch (gimple_code (stmt))
4842 {
4843 case GIMPLE_ASSIGN:
4844 return verify_gimple_assign (as_a <gassign *> (stmt));
4845
4846 case GIMPLE_LABEL:
4847 return verify_gimple_label (as_a <glabel *> (stmt));
4848
4849 case GIMPLE_CALL:
4850 return verify_gimple_call (as_a <gcall *> (stmt));
4851
4852 case GIMPLE_COND:
4853 return verify_gimple_cond (as_a <gcond *> (stmt));
4854
4855 case GIMPLE_GOTO:
4856 return verify_gimple_goto (as_a <ggoto *> (stmt));
4857
4858 case GIMPLE_SWITCH:
4859 return verify_gimple_switch (as_a <gswitch *> (stmt));
4860
4861 case GIMPLE_RETURN:
4862 return verify_gimple_return (as_a <greturn *> (stmt));
4863
4864 case GIMPLE_ASM:
4865 return false;
4866
4867 case GIMPLE_TRANSACTION:
4868 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4869
4870 /* Tuples that do not have tree operands. */
4871 case GIMPLE_NOP:
4872 case GIMPLE_PREDICT:
4873 case GIMPLE_RESX:
4874 case GIMPLE_EH_DISPATCH:
4875 case GIMPLE_EH_MUST_NOT_THROW:
4876 return false;
4877
4878 CASE_GIMPLE_OMP:
4879 /* OpenMP directives are validated by the FE and never operated
4880 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4881 non-gimple expressions when the main index variable has had
4882 its address taken. This does not affect the loop itself
4883 because the header of a GIMPLE_OMP_FOR is merely used to determine
4884 how to set up the parallel iteration. */
4885 return false;
4886
4887 case GIMPLE_DEBUG:
4888 return verify_gimple_debug (stmt);
4889
4890 default:
4891 gcc_unreachable ();
4892 }
4893 }
4894
4895 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4896 and false otherwise. */
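/* Sketch (hypothetical names): a PHI for a scalar such as
x_3 = PHI <x_1(2), x_2(3)>
needs an SSA name result and gimple values of a compatible type on
all edges, while a virtual PHI
.MEM_4 = PHI <.MEM_1(2), .MEM_2(3)>
must use only SSA names of the one virtual operand gimple_vop (cfun). */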
4897
4898 static bool
4899 verify_gimple_phi (gphi *phi)
4900 {
4901 bool err = false;
4902 unsigned i;
4903 tree phi_result = gimple_phi_result (phi);
4904 bool virtual_p;
4905
4906 if (!phi_result)
4907 {
4908 error ("invalid PHI result");
4909 return true;
4910 }
4911
4912 virtual_p = virtual_operand_p (phi_result);
4913 if (TREE_CODE (phi_result) != SSA_NAME
4914 || (virtual_p
4915 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4916 {
4917 error ("invalid PHI result");
4918 err = true;
4919 }
4920
4921 for (i = 0; i < gimple_phi_num_args (phi); i++)
4922 {
4923 tree t = gimple_phi_arg_def (phi, i);
4924
4925 if (!t)
4926 {
4927 error ("missing PHI def");
4928 err |= true;
4929 continue;
4930 }
4931 /* Addressable variables do have SSA_NAMEs but they
4932 are not considered gimple values. */
4933 else if ((TREE_CODE (t) == SSA_NAME
4934 && virtual_p != virtual_operand_p (t))
4935 || (virtual_p
4936 && (TREE_CODE (t) != SSA_NAME
4937 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4938 || (!virtual_p
4939 && !is_gimple_val (t)))
4940 {
4941 error ("invalid PHI argument");
4942 debug_generic_expr (t);
4943 err |= true;
4944 }
4945 #ifdef ENABLE_TYPES_CHECKING
4946 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4947 {
4948 error ("incompatible types in PHI argument %u", i);
4949 debug_generic_stmt (TREE_TYPE (phi_result));
4950 debug_generic_stmt (TREE_TYPE (t));
4951 err |= true;
4952 }
4953 #endif
4954 }
4955
4956 return err;
4957 }
4958
4959 /* Verify the GIMPLE statements inside the sequence STMTS. */
4960
4961 static bool
4962 verify_gimple_in_seq_2 (gimple_seq stmts)
4963 {
4964 gimple_stmt_iterator ittr;
4965 bool err = false;
4966
4967 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4968 {
4969 gimple *stmt = gsi_stmt (ittr);
4970
4971 switch (gimple_code (stmt))
4972 {
4973 case GIMPLE_BIND:
4974 err |= verify_gimple_in_seq_2 (
4975 gimple_bind_body (as_a <gbind *> (stmt)));
4976 break;
4977
4978 case GIMPLE_TRY:
4979 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4980 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4981 break;
4982
4983 case GIMPLE_EH_FILTER:
4984 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4985 break;
4986
4987 case GIMPLE_EH_ELSE:
4988 {
4989 geh_else *eh_else = as_a <geh_else *> (stmt);
4990 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4991 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4992 }
4993 break;
4994
4995 case GIMPLE_CATCH:
4996 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4997 as_a <gcatch *> (stmt)));
4998 break;
4999
5000 case GIMPLE_TRANSACTION:
5001 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5002 break;
5003
5004 default:
5005 {
5006 bool err2 = verify_gimple_stmt (stmt);
5007 if (err2)
5008 debug_gimple_stmt (stmt);
5009 err |= err2;
5010 }
5011 }
5012 }
5013
5014 return err;
5015 }
5016
5017 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5018 is a problem, otherwise false. */
5019
5020 static bool
5021 verify_gimple_transaction (gtransaction *stmt)
5022 {
5023 tree lab;
5024
5025 lab = gimple_transaction_label_norm (stmt);
5026 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5027 return true;
5028 lab = gimple_transaction_label_uninst (stmt);
5029 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5030 return true;
5031 lab = gimple_transaction_label_over (stmt);
5032 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5033 return true;
5034
5035 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5036 }
5037
5038
5039 /* Verify the GIMPLE statements inside the statement list STMTS. */
5040
5041 DEBUG_FUNCTION void
5042 verify_gimple_in_seq (gimple_seq stmts)
5043 {
5044 timevar_push (TV_TREE_STMT_VERIFY);
5045 if (verify_gimple_in_seq_2 (stmts))
5046 internal_error ("verify_gimple failed");
5047 timevar_pop (TV_TREE_STMT_VERIFY);
5048 }
5049
5050 /* Return true when T can be shared. */
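/* E.g. an INTEGER_CST, an SSA_NAME or a type node may be referenced
from many statements, whereas a MEM_REF or COMPONENT_REF tree must
be unshared for each use. */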
5051
5052 static bool
5053 tree_node_can_be_shared (tree t)
5054 {
5055 if (IS_TYPE_OR_DECL_P (t)
5056 || TREE_CODE (t) == SSA_NAME
5057 || TREE_CODE (t) == IDENTIFIER_NODE
5058 || TREE_CODE (t) == CASE_LABEL_EXPR
5059 || is_gimple_min_invariant (t))
5060 return true;
5061
5062 if (t == error_mark_node)
5063 return true;
5064
5065 return false;
5066 }
5067
5068 /* Called via walk_tree. Verify tree sharing. */
5069
5070 static tree
5071 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5072 {
5073 hash_set<void *> *visited = (hash_set<void *> *) data;
5074
5075 if (tree_node_can_be_shared (*tp))
5076 {
5077 *walk_subtrees = false;
5078 return NULL;
5079 }
5080
5081 if (visited->add (*tp))
5082 return *tp;
5083
5084 return NULL;
5085 }
5086
5087 /* Called via walk_gimple_stmt. Verify tree sharing. */
5088
5089 static tree
5090 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5091 {
5092 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5093 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5094 }
5095
5096 static bool eh_error_found;
5097 bool
5098 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5099 hash_set<gimple *> *visited)
5100 {
5101 if (!visited->contains (stmt))
5102 {
5103 error ("dead STMT in EH table");
5104 debug_gimple_stmt (stmt);
5105 eh_error_found = true;
5106 }
5107 return true;
5108 }
5109
5110 /* Verify that the block of location LOC is in BLOCKS. */
5111
5112 static bool
5113 verify_location (hash_set<tree> *blocks, location_t loc)
5114 {
5115 tree block = LOCATION_BLOCK (loc);
5116 if (block != NULL_TREE
5117 && !blocks->contains (block))
5118 {
5119 error ("location references block not in block tree");
5120 return true;
5121 }
5122 if (block != NULL_TREE)
5123 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5124 return false;
5125 }
5126
5127 /* Called via walk_tree. Verify that expressions have no blocks. */
5128
5129 static tree
5130 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5131 {
5132 if (!EXPR_P (*tp))
5133 {
5134 *walk_subtrees = false;
5135 return NULL;
5136 }
5137
5138 location_t loc = EXPR_LOCATION (*tp);
5139 if (LOCATION_BLOCK (loc) != NULL)
5140 return *tp;
5141
5142 return NULL;
5143 }
5144
5145 /* Called via walk_tree. Verify locations of expressions. */
5146
5147 static tree
5148 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5149 {
5150 hash_set<tree> *blocks = (hash_set<tree> *) data;
5151 tree t = *tp;
5152
5153 /* ??? This doesn't really belong here but there's no good place to
5154 stick this remainder of old verify_expr. */
5155 /* ??? This barfs on debug stmts which contain binds to vars with
5156 different function context. */
5157 #if 0
5158 if (VAR_P (t)
5159 || TREE_CODE (t) == PARM_DECL
5160 || TREE_CODE (t) == RESULT_DECL)
5161 {
5162 tree context = decl_function_context (t);
5163 if (context != cfun->decl
5164 && !SCOPE_FILE_SCOPE_P (context)
5165 && !TREE_STATIC (t)
5166 && !DECL_EXTERNAL (t))
5167 {
5168 error ("local declaration from a different function");
5169 return t;
5170 }
5171 }
5172 #endif
5173
5174 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5175 {
5176 tree x = DECL_DEBUG_EXPR (t);
5177 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5178 if (addr)
5179 return addr;
5180 }
5181 if ((VAR_P (t)
5182 || TREE_CODE (t) == PARM_DECL
5183 || TREE_CODE (t) == RESULT_DECL)
5184 && DECL_HAS_VALUE_EXPR_P (t))
5185 {
5186 tree x = DECL_VALUE_EXPR (t);
5187 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5188 if (addr)
5189 return addr;
5190 }
5191
5192 if (!EXPR_P (t))
5193 {
5194 *walk_subtrees = false;
5195 return NULL;
5196 }
5197
5198 location_t loc = EXPR_LOCATION (t);
5199 if (verify_location (blocks, loc))
5200 return t;
5201
5202 return NULL;
5203 }
5204
5205 /* Called via walk_gimple_op. Verify locations of expressions. */
5206
5207 static tree
5208 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5209 {
5210 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5211 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5212 }
5213
5214 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5215
5216 static void
5217 collect_subblocks (hash_set<tree> *blocks, tree block)
5218 {
5219 tree t;
5220 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5221 {
5222 blocks->add (t);
5223 collect_subblocks (blocks, t);
5224 }
5225 }
5226
5227 /* Verify the GIMPLE statements in the CFG of FN. */
5228
5229 DEBUG_FUNCTION void
5230 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5231 {
5232 basic_block bb;
5233 bool err = false;
5234
5235 timevar_push (TV_TREE_STMT_VERIFY);
5236 hash_set<void *> visited;
5237 hash_set<gimple *> visited_throwing_stmts;
5238
5239 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5240 hash_set<tree> blocks;
5241 if (DECL_INITIAL (fn->decl))
5242 {
5243 blocks.add (DECL_INITIAL (fn->decl));
5244 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5245 }
5246
5247 FOR_EACH_BB_FN (bb, fn)
5248 {
5249 gimple_stmt_iterator gsi;
5250 edge_iterator ei;
5251 edge e;
5252
5253 for (gphi_iterator gpi = gsi_start_phis (bb);
5254 !gsi_end_p (gpi);
5255 gsi_next (&gpi))
5256 {
5257 gphi *phi = gpi.phi ();
5258 bool err2 = false;
5259 unsigned i;
5260
5261 if (gimple_bb (phi) != bb)
5262 {
5263 error ("gimple_bb (phi) is set to a wrong basic block");
5264 err2 = true;
5265 }
5266
5267 err2 |= verify_gimple_phi (phi);
5268
5269 /* Only PHI arguments have locations. */
5270 if (gimple_location (phi) != UNKNOWN_LOCATION)
5271 {
5272 error ("PHI node with location");
5273 err2 = true;
5274 }
5275
5276 for (i = 0; i < gimple_phi_num_args (phi); i++)
5277 {
5278 tree arg = gimple_phi_arg_def (phi, i);
5279 tree addr = walk_tree (&arg, verify_node_sharing_1,
5280 &visited, NULL);
5281 if (addr)
5282 {
5283 error ("incorrect sharing of tree nodes");
5284 debug_generic_expr (addr);
5285 err2 |= true;
5286 }
5287 location_t loc = gimple_phi_arg_location (phi, i);
5288 if (virtual_operand_p (gimple_phi_result (phi))
5289 && loc != UNKNOWN_LOCATION)
5290 {
5291 error ("virtual PHI with argument locations");
5292 err2 = true;
5293 }
5294 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5295 if (addr)
5296 {
5297 debug_generic_expr (addr);
5298 err2 = true;
5299 }
5300 err2 |= verify_location (&blocks, loc);
5301 }
5302
5303 if (err2)
5304 debug_gimple_stmt (phi);
5305 err |= err2;
5306 }
5307
5308 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5309 {
5310 gimple *stmt = gsi_stmt (gsi);
5311 bool err2 = false;
5312 struct walk_stmt_info wi;
5313 tree addr;
5314 int lp_nr;
5315
5316 if (gimple_bb (stmt) != bb)
5317 {
5318 error ("gimple_bb (stmt) is set to a wrong basic block");
5319 err2 = true;
5320 }
5321
5322 err2 |= verify_gimple_stmt (stmt);
5323 err2 |= verify_location (&blocks, gimple_location (stmt));
5324
5325 memset (&wi, 0, sizeof (wi));
5326 wi.info = (void *) &visited;
5327 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5328 if (addr)
5329 {
5330 error ("incorrect sharing of tree nodes");
5331 debug_generic_expr (addr);
5332 err2 |= true;
5333 }
5334
5335 memset (&wi, 0, sizeof (wi));
5336 wi.info = (void *) &blocks;
5337 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5338 if (addr)
5339 {
5340 debug_generic_expr (addr);
5341 err2 |= true;
5342 }
5343
5344 /* If the statement is marked as part of an EH region, then it is
5345 expected that the statement could throw. Verify that when an
5346 optimization simplifies a statement such that we can prove it
5347 cannot throw, the other data structures are updated to
5348 match. */
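/* For instance, if folding turns a potentially-throwing call into
one known not to throw, the transforming pass is expected to purge
the stale EH table entry (e.g. via remove_stmt_from_eh_lp) instead
of leaving lp_nr pointing at the statement. */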
5349 lp_nr = lookup_stmt_eh_lp (stmt);
5350 if (lp_nr != 0)
5351 visited_throwing_stmts.add (stmt);
5352 if (lp_nr > 0)
5353 {
5354 if (!stmt_could_throw_p (cfun, stmt))
5355 {
5356 if (verify_nothrow)
5357 {
5358 error ("statement marked for throw, but doesn%'t");
5359 err2 |= true;
5360 }
5361 }
5362 else if (!gsi_one_before_end_p (gsi))
5363 {
5364 error ("statement marked for throw in middle of block");
5365 err2 |= true;
5366 }
5367 }
5368
5369 if (err2)
5370 debug_gimple_stmt (stmt);
5371 err |= err2;
5372 }
5373
5374 FOR_EACH_EDGE (e, ei, bb->succs)
5375 if (e->goto_locus != UNKNOWN_LOCATION)
5376 err |= verify_location (&blocks, e->goto_locus);
5377 }
5378
5379 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5380 eh_error_found = false;
5381 if (eh_table)
5382 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5383 (&visited_throwing_stmts);
5384
5385 if (err || eh_error_found)
5386 internal_error ("verify_gimple failed");
5387
5388 verify_histograms ();
5389 timevar_pop (TV_TREE_STMT_VERIFY);
5390 }
5391
5392
5393 /* Verifies that the flow information is OK. */
5394
5395 static int
5396 gimple_verify_flow_info (void)
5397 {
5398 int err = 0;
5399 basic_block bb;
5400 gimple_stmt_iterator gsi;
5401 gimple *stmt;
5402 edge e;
5403 edge_iterator ei;
5404
5405 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5406 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5407 {
5408 error ("ENTRY_BLOCK has IL associated with it");
5409 err = 1;
5410 }
5411
5412 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5413 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5414 {
5415 error ("EXIT_BLOCK has IL associated with it");
5416 err = 1;
5417 }
5418
5419 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5420 if (e->flags & EDGE_FALLTHRU)
5421 {
5422 error ("fallthru to exit from bb %d", e->src->index);
5423 err = 1;
5424 }
5425
5426 FOR_EACH_BB_FN (bb, cfun)
5427 {
5428 bool found_ctrl_stmt = false;
5429
5430 stmt = NULL;
5431
5432 /* Skip labels at the start of the basic block. */
5433 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5434 {
5435 tree label;
5436 gimple *prev_stmt = stmt;
5437
5438 stmt = gsi_stmt (gsi);
5439
5440 if (gimple_code (stmt) != GIMPLE_LABEL)
5441 break;
5442
5443 label = gimple_label_label (as_a <glabel *> (stmt));
5444 if (prev_stmt && DECL_NONLOCAL (label))
5445 {
5446 error ("nonlocal label ");
5447 print_generic_expr (stderr, label);
5448 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5449 bb->index);
5450 err = 1;
5451 }
5452
5453 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5454 {
5455 error ("EH landing pad label ");
5456 print_generic_expr (stderr, label);
5457 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5458 bb->index);
5459 err = 1;
5460 }
5461
5462 if (label_to_block (cfun, label) != bb)
5463 {
5464 error ("label ");
5465 print_generic_expr (stderr, label);
5466 fprintf (stderr, " to block does not match in bb %d",
5467 bb->index);
5468 err = 1;
5469 }
5470
5471 if (decl_function_context (label) != current_function_decl)
5472 {
5473 error ("label ");
5474 print_generic_expr (stderr, label);
5475 fprintf (stderr, " has incorrect context in bb %d",
5476 bb->index);
5477 err = 1;
5478 }
5479 }
5480
5481 /* Verify that the body of basic block BB is free of control flow. */
5482 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5483 {
5484 gimple *stmt = gsi_stmt (gsi);
5485
5486 if (found_ctrl_stmt)
5487 {
5488 error ("control flow in the middle of basic block %d",
5489 bb->index);
5490 err = 1;
5491 }
5492
5493 if (stmt_ends_bb_p (stmt))
5494 found_ctrl_stmt = true;
5495
5496 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5497 {
5498 error ("label ");
5499 print_generic_expr (stderr, gimple_label_label (label_stmt));
5500 fprintf (stderr, " in the middle of basic block %d", bb->index);
5501 err = 1;
5502 }
5503 }
5504
5505 gsi = gsi_last_nondebug_bb (bb);
5506 if (gsi_end_p (gsi))
5507 continue;
5508
5509 stmt = gsi_stmt (gsi);
5510
5511 if (gimple_code (stmt) == GIMPLE_LABEL)
5512 continue;
5513
5514 err |= verify_eh_edges (stmt);
5515
5516 if (is_ctrl_stmt (stmt))
5517 {
5518 FOR_EACH_EDGE (e, ei, bb->succs)
5519 if (e->flags & EDGE_FALLTHRU)
5520 {
5521 error ("fallthru edge after a control statement in bb %d",
5522 bb->index);
5523 err = 1;
5524 }
5525 }
5526
5527 if (gimple_code (stmt) != GIMPLE_COND)
5528 {
5529 /* Verify that no edges with EDGE_TRUE/FALSE_VALUE set follow
5530 anything other than a GIMPLE_COND. */
5531 FOR_EACH_EDGE (e, ei, bb->succs)
5532 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5533 {
5534 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5535 bb->index);
5536 err = 1;
5537 }
5538 }
5539
5540 switch (gimple_code (stmt))
5541 {
5542 case GIMPLE_COND:
5543 {
5544 edge true_edge;
5545 edge false_edge;
5546
5547 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5548
5549 if (!true_edge
5550 || !false_edge
5551 || !(true_edge->flags & EDGE_TRUE_VALUE)
5552 || !(false_edge->flags & EDGE_FALSE_VALUE)
5553 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5554 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5555 || EDGE_COUNT (bb->succs) >= 3)
5556 {
5557 error ("wrong outgoing edge flags at end of bb %d",
5558 bb->index);
5559 err = 1;
5560 }
5561 }
5562 break;
5563
5564 case GIMPLE_GOTO:
5565 if (simple_goto_p (stmt))
5566 {
5567 error ("explicit goto at end of bb %d", bb->index);
5568 err = 1;
5569 }
5570 else
5571 {
5572 /* FIXME. We should double check that the labels in the
5573 destination blocks have their address taken. */
5574 FOR_EACH_EDGE (e, ei, bb->succs)
5575 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5576 | EDGE_FALSE_VALUE))
5577 || !(e->flags & EDGE_ABNORMAL))
5578 {
5579 error ("wrong outgoing edge flags at end of bb %d",
5580 bb->index);
5581 err = 1;
5582 }
5583 }
5584 break;
5585
5586 case GIMPLE_CALL:
5587 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5588 break;
5589 /* fallthru */
5590 case GIMPLE_RETURN:
5591 if (!single_succ_p (bb)
5592 || (single_succ_edge (bb)->flags
5593 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5594 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5595 {
5596 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5597 err = 1;
5598 }
5599 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5600 {
5601 error ("return edge does not point to exit in bb %d",
5602 bb->index);
5603 err = 1;
5604 }
5605 break;
5606
5607 case GIMPLE_SWITCH:
5608 {
5609 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5610 tree prev;
5611 edge e;
5612 size_t i, n;
5613
5614 n = gimple_switch_num_labels (switch_stmt);
5615
5616 /* Mark all the destination basic blocks. */
5617 for (i = 0; i < n; ++i)
5618 {
5619 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5620 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5621 label_bb->aux = (void *)1;
5622 }
5623
5624 /* Verify that the case labels are sorted. */
5625 prev = gimple_switch_label (switch_stmt, 0);
5626 for (i = 1; i < n; ++i)
5627 {
5628 tree c = gimple_switch_label (switch_stmt, i);
5629 if (!CASE_LOW (c))
5630 {
5631 error ("found default case not at the start of "
5632 "case vector");
5633 err = 1;
5634 continue;
5635 }
5636 if (CASE_LOW (prev)
5637 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5638 {
5639 error ("case labels not sorted: ");
5640 print_generic_expr (stderr, prev);
5641 fprintf (stderr, " is greater than ");
5642 print_generic_expr (stderr, c);
5643 fprintf (stderr, " but comes before it.\n");
5644 err = 1;
5645 }
5646 prev = c;
5647 }
5648 /* VRP will remove the default case if it can prove it will
5649 never be executed. So do not verify there always exists
5650 a default case here. */
5651
5652 FOR_EACH_EDGE (e, ei, bb->succs)
5653 {
5654 if (!e->dest->aux)
5655 {
5656 error ("extra outgoing edge %d->%d",
5657 bb->index, e->dest->index);
5658 err = 1;
5659 }
5660
5661 e->dest->aux = (void *)2;
5662 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5663 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5664 {
5665 error ("wrong outgoing edge flags at end of bb %d",
5666 bb->index);
5667 err = 1;
5668 }
5669 }
5670
5671 /* Check that we have all of them. */
5672 for (i = 0; i < n; ++i)
5673 {
5674 basic_block label_bb = gimple_switch_label_bb (cfun,
5675 switch_stmt, i);
5676
5677 if (label_bb->aux != (void *)2)
5678 {
5679 error ("missing edge %i->%i", bb->index, label_bb->index);
5680 err = 1;
5681 }
5682 }
5683
5684 FOR_EACH_EDGE (e, ei, bb->succs)
5685 e->dest->aux = (void *)0;
5686 }
5687 break;
5688
5689 case GIMPLE_EH_DISPATCH:
5690 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5691 break;
5692
5693 default:
5694 break;
5695 }
5696 }
5697
5698 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5699 verify_dominators (CDI_DOMINATORS);
5700
5701 return err;
5702 }
5703
5704
5705 /* Update PHI nodes after creating a forwarder block joined
5706 by edge FALLTHRU. */
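/* Sketch of the intent (hypothetical SSA names): the original
x_1 = PHI <a_2(E1), b_3(E2)>
is kept in the forwarder DUMMY but renamed to a fresh result, say
x_4, and BB receives a new
x_1 = PHI <x_4(FALLTHRU), ...>
whose remaining arguments are flushed from the values pending on
BB's other incoming edges. */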
5707
5708 static void
5709 gimple_make_forwarder_block (edge fallthru)
5710 {
5711 edge e;
5712 edge_iterator ei;
5713 basic_block dummy, bb;
5714 tree var;
5715 gphi_iterator gsi;
5716
5717 dummy = fallthru->src;
5718 bb = fallthru->dest;
5719
5720 if (single_pred_p (bb))
5721 return;
5722
5723 /* If we redirected a branch we must create new PHI nodes at the
5724 start of BB. */
5725 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5726 {
5727 gphi *phi, *new_phi;
5728
5729 phi = gsi.phi ();
5730 var = gimple_phi_result (phi);
5731 new_phi = create_phi_node (var, bb);
5732 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5733 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5734 UNKNOWN_LOCATION);
5735 }
5736
5737 /* Add the arguments we have stored on edges. */
5738 FOR_EACH_EDGE (e, ei, bb->preds)
5739 {
5740 if (e == fallthru)
5741 continue;
5742
5743 flush_pending_stmts (e);
5744 }
5745 }
5746
5747
5748 /* Return a non-special label at the head of basic block BB.
5749 Create one if it doesn't exist. */
5750
5751 tree
5752 gimple_block_label (basic_block bb)
5753 {
5754 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5755 bool first = true;
5756 tree label;
5757 glabel *stmt;
5758
5759 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5760 {
5761 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5762 if (!stmt)
5763 break;
5764 label = gimple_label_label (stmt);
5765 if (!DECL_NONLOCAL (label))
5766 {
5767 if (!first)
5768 gsi_move_before (&i, &s);
5769 return label;
5770 }
5771 }
5772
5773 label = create_artificial_label (UNKNOWN_LOCATION);
5774 stmt = gimple_build_label (label);
5775 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5776 return label;
5777 }
5778
5779
5780 /* Attempt to perform edge redirection by replacing a possibly complex
5781 jump instruction by a goto or by removing the jump completely.
5782 This can apply only if all edges now point to the same block. The
5783 parameters and return values are equivalent to
5784 redirect_edge_and_branch. */
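/* Sketch (hypothetical CFG): if SRC ends in "if (x_1 > 0)" but both
the true and the false edge would now reach TARGET, the GIMPLE_COND
is removed and the surviving edge becomes a plain fallthru to
TARGET. */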
5785
5786 static edge
5787 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5788 {
5789 basic_block src = e->src;
5790 gimple_stmt_iterator i;
5791 gimple *stmt;
5792
5793 /* We can replace or remove a complex jump only when we have exactly
5794 two edges. */
5795 if (EDGE_COUNT (src->succs) != 2
5796 /* Verify that all targets will be TARGET. Specifically, the
5797 edge that is not E must also go to TARGET. */
5798 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5799 return NULL;
5800
5801 i = gsi_last_bb (src);
5802 if (gsi_end_p (i))
5803 return NULL;
5804
5805 stmt = gsi_stmt (i);
5806
5807 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5808 {
5809 gsi_remove (&i, true);
5810 e = ssa_redirect_edge (e, target);
5811 e->flags = EDGE_FALLTHRU;
5812 return e;
5813 }
5814
5815 return NULL;
5816 }
5817
5818
5819 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5820 edge representing the redirected branch. */
5821
5822 static edge
5823 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5824 {
5825 basic_block bb = e->src;
5826 gimple_stmt_iterator gsi;
5827 edge ret;
5828 gimple *stmt;
5829
5830 if (e->flags & EDGE_ABNORMAL)
5831 return NULL;
5832
5833 if (e->dest == dest)
5834 return NULL;
5835
5836 if (e->flags & EDGE_EH)
5837 return redirect_eh_edge (e, dest);
5838
5839 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5840 {
5841 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5842 if (ret)
5843 return ret;
5844 }
5845
5846 gsi = gsi_last_nondebug_bb (bb);
5847 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5848
5849 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5850 {
5851 case GIMPLE_COND:
5852 /* For COND_EXPR, we only need to redirect the edge. */
5853 break;
5854
5855 case GIMPLE_GOTO:
5856 /* No non-abnormal edges should lead from a non-simple goto, and
5857 simple ones should be represented implicitly. */
5858 gcc_unreachable ();
5859
5860 case GIMPLE_SWITCH:
5861 {
5862 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5863 tree label = gimple_block_label (dest);
5864 tree cases = get_cases_for_edge (e, switch_stmt);
5865
5866 /* If we have a list of cases associated with E, then use it
5867 as it's a lot faster than walking the entire case vector. */
5868 if (cases)
5869 {
5870 edge e2 = find_edge (e->src, dest);
5871 tree last, first;
5872
5873 first = cases;
5874 while (cases)
5875 {
5876 last = cases;
5877 CASE_LABEL (cases) = label;
5878 cases = CASE_CHAIN (cases);
5879 }
5880
5881 /* If there was already an edge in the CFG, then we need
5882 to move all the cases associated with E to E2. */
5883 if (e2)
5884 {
5885 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5886
5887 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5888 CASE_CHAIN (cases2) = first;
5889 }
5890 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5891 }
5892 else
5893 {
5894 size_t i, n = gimple_switch_num_labels (switch_stmt);
5895
5896 for (i = 0; i < n; i++)
5897 {
5898 tree elt = gimple_switch_label (switch_stmt, i);
5899 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5900 CASE_LABEL (elt) = label;
5901 }
5902 }
5903 }
5904 break;
5905
5906 case GIMPLE_ASM:
5907 {
5908 gasm *asm_stmt = as_a <gasm *> (stmt);
5909 int i, n = gimple_asm_nlabels (asm_stmt);
5910 tree label = NULL;
5911
5912 for (i = 0; i < n; ++i)
5913 {
5914 tree cons = gimple_asm_label_op (asm_stmt, i);
5915 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5916 {
5917 if (!label)
5918 label = gimple_block_label (dest);
5919 TREE_VALUE (cons) = label;
5920 }
5921 }
5922
5923 /* If we didn't find any label matching the former edge in the
5924 asm labels, we must be redirecting the fallthrough
5925 edge. */
5926 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5927 }
5928 break;
5929
5930 case GIMPLE_RETURN:
5931 gsi_remove (&gsi, true);
5932 e->flags |= EDGE_FALLTHRU;
5933 break;
5934
5935 case GIMPLE_OMP_RETURN:
5936 case GIMPLE_OMP_CONTINUE:
5937 case GIMPLE_OMP_SECTIONS_SWITCH:
5938 case GIMPLE_OMP_FOR:
5939 /* The edges from OMP constructs can be simply redirected. */
5940 break;
5941
5942 case GIMPLE_EH_DISPATCH:
5943 if (!(e->flags & EDGE_FALLTHRU))
5944 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5945 break;
5946
5947 case GIMPLE_TRANSACTION:
5948 if (e->flags & EDGE_TM_ABORT)
5949 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5950 gimple_block_label (dest));
5951 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5952 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5953 gimple_block_label (dest));
5954 else
5955 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5956 gimple_block_label (dest));
5957 break;
5958
5959 default:
5960 /* Otherwise it must be a fallthru edge, and we don't need to
5961 do anything besides redirecting it. */
5962 gcc_assert (e->flags & EDGE_FALLTHRU);
5963 break;
5964 }
5965
5966 /* Update/insert PHI nodes as necessary. */
5967
5968 /* Now update the edges in the CFG. */
5969 e = ssa_redirect_edge (e, dest);
5970
5971 return e;
5972 }
5973
5974 /* Returns true if it is possible to remove edge E by redirecting
5975 it to the destination of the other edge from E->src. */
5976
5977 static bool
5978 gimple_can_remove_branch_p (const_edge e)
5979 {
5980 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5981 return false;
5982
5983 return true;
5984 }
5985
5986 /* Simple wrapper, as we can always redirect fallthru edges. */
5987
5988 static basic_block
5989 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5990 {
5991 e = gimple_redirect_edge_and_branch (e, dest);
5992 gcc_assert (e);
5993
5994 return NULL;
5995 }
5996
5997
5998 /* Splits basic block BB after statement STMT (but at least after the
5999 labels). If STMT is NULL, BB is split just after the labels. */
6000
6001 static basic_block
6002 gimple_split_block (basic_block bb, void *stmt)
6003 {
6004 gimple_stmt_iterator gsi;
6005 gimple_stmt_iterator gsi_tgt;
6006 gimple_seq list;
6007 basic_block new_bb;
6008 edge e;
6009 edge_iterator ei;
6010
6011 new_bb = create_empty_bb (bb);
6012
6013 /* Redirect the outgoing edges. */
6014 new_bb->succs = bb->succs;
6015 bb->succs = NULL;
6016 FOR_EACH_EDGE (e, ei, new_bb->succs)
6017 e->src = new_bb;
6018
6019 /* Get a stmt iterator pointing to the first stmt to move. */
6020 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6021 gsi = gsi_after_labels (bb);
6022 else
6023 {
6024 gsi = gsi_for_stmt ((gimple *) stmt);
6025 gsi_next (&gsi);
6026 }
6027
6028 /* Move everything from GSI to the new basic block. */
6029 if (gsi_end_p (gsi))
6030 return new_bb;
6031
6032 /* Split the statement list - avoid re-creating new containers as this
6033 brings ugly quadratic memory consumption in the inliner.
6034 (We are still quadratic since we need to update stmt BB pointers,
6035 sadly.) */
6036 gsi_split_seq_before (&gsi, &list);
6037 set_bb_seq (new_bb, list);
6038 for (gsi_tgt = gsi_start (list);
6039 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6040 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6041
6042 return new_bb;
6043 }
6044
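/* A minimal usage sketch (hypothetical values), going through the
   split_block CFG hook: the statements following STMT end up in the
   returned edge's destination block:

     edge e = split_block (bb, stmt);
     basic_block rest = e->dest;  // holds everything after STMT

   gimple_split_block_before_cond_jump below uses exactly this pattern
   to isolate a trailing GIMPLE_COND or GIMPLE_SWITCH.  */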
6045
6046 /* Moves basic block BB after block AFTER. */
6047
6048 static bool
6049 gimple_move_block_after (basic_block bb, basic_block after)
6050 {
6051 if (bb->prev_bb == after)
6052 return true;
6053
6054 unlink_block (bb);
6055 link_block (bb, after);
6056
6057 return true;
6058 }
6059
6060
6061 /* Return TRUE if block BB has no executable statements, otherwise return
6062 FALSE. */
6063
6064 static bool
6065 gimple_empty_block_p (basic_block bb)
6066 {
6067 /* BB must have no executable statements. */
6068 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6069 if (phi_nodes (bb))
6070 return false;
6071 while (!gsi_end_p (gsi))
6072 {
6073 gimple *stmt = gsi_stmt (gsi);
6074 if (is_gimple_debug (stmt))
6075 ;
6076 else if (gimple_code (stmt) == GIMPLE_NOP
6077 || gimple_code (stmt) == GIMPLE_PREDICT)
6078 ;
6079 else
6080 return false;
6081 gsi_next (&gsi);
6082 }
6083 return true;
6084 }
6085
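/* For example (illustrative), a block holding only a label, debug binds
   such as

     # DEBUG x => 42

   and GIMPLE_NOP or GIMPLE_PREDICT statements counts as empty, whereas
   a single PHI node or any other real statement makes this return
   false.  */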
6086
6087 /* Split a basic block if it ends with a conditional branch and if the
6088 other part of the block is not empty. */
6089
6090 static basic_block
6091 gimple_split_block_before_cond_jump (basic_block bb)
6092 {
6093 gimple *last, *split_point;
6094 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6095 if (gsi_end_p (gsi))
6096 return NULL;
6097 last = gsi_stmt (gsi);
6098 if (gimple_code (last) != GIMPLE_COND
6099 && gimple_code (last) != GIMPLE_SWITCH)
6100 return NULL;
6101 gsi_prev (&gsi);
6102 split_point = gsi_stmt (gsi);
6103 return split_block (bb, split_point)->dest;
6104 }
6105
6106
6107 /* Return true if basic block BB can be duplicated. */
6108
6109 static bool
6110 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6111 {
6112 return true;
6113 }
6114
6115 /* Create a duplicate of the basic block BB. NOTE: This does not
6116 preserve SSA form. */
6117
6118 static basic_block
6119 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6120 {
6121 basic_block new_bb;
6122 gimple_stmt_iterator gsi_tgt;
6123
6124 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6125
6126 /* Copy the PHI nodes. We ignore PHI node arguments here because
6127 the incoming edges have not been set up yet. */
6128 for (gphi_iterator gpi = gsi_start_phis (bb);
6129 !gsi_end_p (gpi);
6130 gsi_next (&gpi))
6131 {
6132 gphi *phi, *copy;
6133 phi = gpi.phi ();
6134 copy = create_phi_node (NULL_TREE, new_bb);
6135 create_new_def_for (gimple_phi_result (phi), copy,
6136 gimple_phi_result_ptr (copy));
6137 gimple_set_uid (copy, gimple_uid (phi));
6138 }
6139
6140 gsi_tgt = gsi_start_bb (new_bb);
6141 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6142 !gsi_end_p (gsi);
6143 gsi_next (&gsi))
6144 {
6145 def_operand_p def_p;
6146 ssa_op_iter op_iter;
6147 tree lhs;
6148 gimple *stmt, *copy;
6149
6150 stmt = gsi_stmt (gsi);
6151 if (gimple_code (stmt) == GIMPLE_LABEL)
6152 continue;
6153
6154 /* Don't duplicate label debug stmts. */
6155 if (gimple_debug_bind_p (stmt)
6156 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6157 == LABEL_DECL)
6158 continue;
6159
6160 /* Create a new copy of STMT and duplicate STMT's virtual
6161 operands. */
6162 copy = gimple_copy (stmt);
6163 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6164
6165 maybe_duplicate_eh_stmt (copy, stmt);
6166 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6167
6168 /* When copying around a stmt writing into a local non-user
6169 aggregate, make sure it won't share stack slot with other
6170 vars. */
6171 lhs = gimple_get_lhs (stmt);
6172 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6173 {
6174 tree base = get_base_address (lhs);
6175 if (base
6176 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6177 && DECL_IGNORED_P (base)
6178 && !TREE_STATIC (base)
6179 && !DECL_EXTERNAL (base)
6180 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6181 DECL_NONSHAREABLE (base) = 1;
6182 }
6183
6184 /* If requested remap dependence info of cliques brought in
6185 via inlining. */
6186 if (id)
6187 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6188 {
6189 tree op = gimple_op (copy, i);
6190 if (!op)
6191 continue;
6192 if (TREE_CODE (op) == ADDR_EXPR
6193 || TREE_CODE (op) == WITH_SIZE_EXPR)
6194 op = TREE_OPERAND (op, 0);
6195 while (handled_component_p (op))
6196 op = TREE_OPERAND (op, 0);
6197 if ((TREE_CODE (op) == MEM_REF
6198 || TREE_CODE (op) == TARGET_MEM_REF)
6199 && MR_DEPENDENCE_CLIQUE (op) > 1
6200 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6201 {
6202 if (!id->dependence_map)
6203 id->dependence_map = new hash_map<dependence_hash,
6204 unsigned short>;
6205 bool existed;
6206 unsigned short &newc = id->dependence_map->get_or_insert
6207 (MR_DEPENDENCE_CLIQUE (op), &existed);
6208 if (!existed)
6209 {
6210 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6211 newc = ++cfun->last_clique;
6212 }
6213 MR_DEPENDENCE_CLIQUE (op) = newc;
6214 }
6215 }
6216
6217 /* Create new names for all the definitions created by COPY and
6218 add replacement mappings for each new name. */
6219 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6220 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6221 }
6222
6223 return new_bb;
6224 }
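
/* A caller sketch (hypothetical, assuming the original/copy tables set
   up by initialize_original_copy_tables are in place): the copied PHI
   nodes have fresh results but no arguments yet, so after creating the
   incoming edges of the copy one typically does

     basic_block copy = duplicate_block (bb, NULL, NULL);
     ...create or redirect edges into COPY...
     add_phi_args_after_copy_bb (copy);

   using add_phi_args_after_copy_bb defined below.  */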
6225
6226 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6227
6228 static void
6229 add_phi_args_after_copy_edge (edge e_copy)
6230 {
6231 basic_block bb, bb_copy = e_copy->src, dest;
6232 edge e;
6233 edge_iterator ei;
6234 gphi *phi, *phi_copy;
6235 tree def;
6236 gphi_iterator psi, psi_copy;
6237
6238 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6239 return;
6240
6241 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6242
6243 if (e_copy->dest->flags & BB_DUPLICATED)
6244 dest = get_bb_original (e_copy->dest);
6245 else
6246 dest = e_copy->dest;
6247
6248 e = find_edge (bb, dest);
6249 if (!e)
6250 {
6251 /* During loop unrolling the target of the latch edge is copied.
6252 In this case we are not looking for an edge to DEST, but for
6253 an edge to the duplicated block whose original was DEST. */
6254 FOR_EACH_EDGE (e, ei, bb->succs)
6255 {
6256 if ((e->dest->flags & BB_DUPLICATED)
6257 && get_bb_original (e->dest) == dest)
6258 break;
6259 }
6260
6261 gcc_assert (e != NULL);
6262 }
6263
6264 for (psi = gsi_start_phis (e->dest),
6265 psi_copy = gsi_start_phis (e_copy->dest);
6266 !gsi_end_p (psi);
6267 gsi_next (&psi), gsi_next (&psi_copy))
6268 {
6269 phi = psi.phi ();
6270 phi_copy = psi_copy.phi ();
6271 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6272 add_phi_arg (phi_copy, def, e_copy,
6273 gimple_phi_arg_location_from_edge (phi, e));
6274 }
6275 }
6276
6277
6278 /* Basic block BB_COPY was created by code duplication. Add phi node
6279 arguments for edges going out of BB_COPY. The blocks that were
6280 duplicated have BB_DUPLICATED set. */
6281
6282 void
6283 add_phi_args_after_copy_bb (basic_block bb_copy)
6284 {
6285 edge e_copy;
6286 edge_iterator ei;
6287
6288 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6289 {
6290 add_phi_args_after_copy_edge (e_copy);
6291 }
6292 }
6293
6294 /* Blocks in REGION_COPY array of length N_REGION were created by
6295 duplication of basic blocks. Add phi node arguments for edges
6296 going from these blocks. If E_COPY is not NULL, also add
6297 phi node arguments for its destination. */
6298
6299 void
6300 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6301 edge e_copy)
6302 {
6303 unsigned i;
6304
6305 for (i = 0; i < n_region; i++)
6306 region_copy[i]->flags |= BB_DUPLICATED;
6307
6308 for (i = 0; i < n_region; i++)
6309 add_phi_args_after_copy_bb (region_copy[i]);
6310 if (e_copy)
6311 add_phi_args_after_copy_edge (e_copy);
6312
6313 for (i = 0; i < n_region; i++)
6314 region_copy[i]->flags &= ~BB_DUPLICATED;
6315 }
6316
6317 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6318 important exit edge EXIT. By important we mean that no SSA name defined
6319 inside the region is live over the other exit edges of the region. All entry
6320 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6321 to the duplicate of the region. Dominance and loop information is
6322 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6323 UPDATE_DOMINANCE is false then we assume that the caller will update the
6324 dominance information after calling this function. The new basic
6325 blocks are stored to REGION_COPY in the same order as they had in REGION,
6326 provided that REGION_COPY is not NULL.
6327 The function returns false if it is unable to copy the region,
6328 true otherwise. */
6329
6330 bool
6331 gimple_duplicate_sese_region (edge entry, edge exit,
6332 basic_block *region, unsigned n_region,
6333 basic_block *region_copy,
6334 bool update_dominance)
6335 {
6336 unsigned i;
6337 bool free_region_copy = false, copying_header = false;
6338 struct loop *loop = entry->dest->loop_father;
6339 edge exit_copy;
6340 vec<basic_block> doms = vNULL;
6341 edge redirected;
6342 profile_count total_count = profile_count::uninitialized ();
6343 profile_count entry_count = profile_count::uninitialized ();
6344
6345 if (!can_copy_bbs_p (region, n_region))
6346 return false;
6347
6348 /* Some sanity checking. Note that we do not check for all possible
6349 misuses of the function. That is, if you ask to copy something weird,
6350 it will work, but the state of the structures probably will not be
6351 correct. */
6352 for (i = 0; i < n_region; i++)
6353 {
6354 /* We do not handle subloops, i.e. all the blocks must belong to the
6355 same loop. */
6356 if (region[i]->loop_father != loop)
6357 return false;
6358
6359 if (region[i] != entry->dest
6360 && region[i] == loop->header)
6361 return false;
6362 }
6363
6364 /* In case the function is used for loop header copying (which is the primary
6365 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6366 if (loop->header == entry->dest)
6367 {
6368 copying_header = true;
6369
6370 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6371 return false;
6372
6373 for (i = 0; i < n_region; i++)
6374 if (region[i] != exit->src
6375 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6376 return false;
6377 }
6378
6379 initialize_original_copy_tables ();
6380
6381 if (copying_header)
6382 set_loop_copy (loop, loop_outer (loop));
6383 else
6384 set_loop_copy (loop, loop);
6385
6386 if (!region_copy)
6387 {
6388 region_copy = XNEWVEC (basic_block, n_region);
6389 free_region_copy = true;
6390 }
6391
6392 /* Record blocks outside the region that are dominated by something
6393 inside. */
6394 if (update_dominance)
6395 {
6396 doms.create (0);
6397 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6398 }
6399
6400 if (entry->dest->count.initialized_p ())
6401 {
6402 total_count = entry->dest->count;
6403 entry_count = entry->count ();
6404 /* Fix up corner cases, to avoid division by zero or creation of negative
6405 frequencies. */
6406 if (entry_count > total_count)
6407 entry_count = total_count;
6408 }
6409
6410 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6411 split_edge_bb_loc (entry), update_dominance);
6412 if (total_count.initialized_p () && entry_count.initialized_p ())
6413 {
6414 scale_bbs_frequencies_profile_count (region, n_region,
6415 total_count - entry_count,
6416 total_count);
6417 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6418 total_count);
6419 }
6420
6421 if (copying_header)
6422 {
6423 loop->header = exit->dest;
6424 loop->latch = exit->src;
6425 }
6426
6427 /* Redirect the entry and add the phi node arguments. */
6428 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6429 gcc_assert (redirected != NULL);
6430 flush_pending_stmts (entry);
6431
6432 /* Concerning updating of dominators: We must recount dominators
6433 for entry block and its copy. Anything that is outside of the
6434 region, but was dominated by something inside needs recounting as
6435 well. */
6436 if (update_dominance)
6437 {
6438 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6439 doms.safe_push (get_bb_original (entry->dest));
6440 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6441 doms.release ();
6442 }
6443
6444 /* Add the other PHI node arguments. */
6445 add_phi_args_after_copy (region_copy, n_region, NULL);
6446
6447 if (free_region_copy)
6448 free (region_copy);
6449
6450 free_original_copy_tables ();
6451 return true;
6452 }
6453
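/* For the primary use, loop header copying, the net effect is the
   classic rotation (illustrative source-level view):

     while (cond)          =>    if (cond)
       body;                       do
                                     body;
                                   while (cond);

   The duplicated header acts as the guard, and after the call EXIT's
   source is the new latch of the loop.  */
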
6454 /* Checks if BB is part of the region defined by the N_REGION blocks in BBS. */
6455 static bool
6456 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6457 {
6458 unsigned int n;
6459
6460 for (n = 0; n < n_region; n++)
6461 {
6462 if (bb == bbs[n])
6463 return true;
6464 }
6465 return false;
6466 }
6467
6468 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6469 are stored to REGION_COPY in the same order in which they appear
6470 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6471 the region, EXIT an exit from it. The condition guarding EXIT
6472 is moved to ENTRY. Returns true if duplication succeeds, false
6473 otherwise.
6474
6475 For example,
6476
6477 some_code;
6478 if (cond)
6479 A;
6480 else
6481 B;
6482
6483 is transformed to
6484
6485 if (cond)
6486 {
6487 some_code;
6488 A;
6489 }
6490 else
6491 {
6492 some_code;
6493 B;
6494 }
6495 */
6496
6497 bool
6498 gimple_duplicate_sese_tail (edge entry, edge exit,
6499 basic_block *region, unsigned n_region,
6500 basic_block *region_copy)
6501 {
6502 unsigned i;
6503 bool free_region_copy = false;
6504 struct loop *loop = exit->dest->loop_father;
6505 struct loop *orig_loop = entry->dest->loop_father;
6506 basic_block switch_bb, entry_bb, nentry_bb;
6507 vec<basic_block> doms;
6508 profile_count total_count = profile_count::uninitialized (),
6509 exit_count = profile_count::uninitialized ();
6510 edge exits[2], nexits[2], e;
6511 gimple_stmt_iterator gsi;
6512 gimple *cond_stmt;
6513 edge sorig, snew;
6514 basic_block exit_bb;
6515 gphi_iterator psi;
6516 gphi *phi;
6517 tree def;
6518 struct loop *target, *aloop, *cloop;
6519
6520 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6521 exits[0] = exit;
6522 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6523
6524 if (!can_copy_bbs_p (region, n_region))
6525 return false;
6526
6527 initialize_original_copy_tables ();
6528 set_loop_copy (orig_loop, loop);
6529
6530 target = loop;
6531 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6532 {
6533 if (bb_part_of_region_p (aloop->header, region, n_region))
6534 {
6535 cloop = duplicate_loop (aloop, target);
6536 duplicate_subloops (aloop, cloop);
6537 }
6538 }
6539
6540 if (!region_copy)
6541 {
6542 region_copy = XNEWVEC (basic_block, n_region);
6543 free_region_copy = true;
6544 }
6545
6546 gcc_assert (!need_ssa_update_p (cfun));
6547
6548 /* Record blocks outside the region that are dominated by something
6549 inside. */
6550 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6551
6552 total_count = exit->src->count;
6553 exit_count = exit->count ();
6554 /* Fix up corner cases, to avoid division by zero or creation of negative
6555 frequencies. */
6556 if (exit_count > total_count)
6557 exit_count = total_count;
6558
6559 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6560 split_edge_bb_loc (exit), true);
6561 if (total_count.initialized_p () && exit_count.initialized_p ())
6562 {
6563 scale_bbs_frequencies_profile_count (region, n_region,
6564 total_count - exit_count,
6565 total_count);
6566 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6567 total_count);
6568 }
6569
6570 /* Create the switch block, and put the exit condition into it. */
6571 entry_bb = entry->dest;
6572 nentry_bb = get_bb_copy (entry_bb);
6573 if (!last_stmt (entry->src)
6574 || !stmt_ends_bb_p (last_stmt (entry->src)))
6575 switch_bb = entry->src;
6576 else
6577 switch_bb = split_edge (entry);
6578 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6579
6580 gsi = gsi_last_bb (switch_bb);
6581 cond_stmt = last_stmt (exit->src);
6582 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6583 cond_stmt = gimple_copy (cond_stmt);
6584
6585 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6586
6587 sorig = single_succ_edge (switch_bb);
6588 sorig->flags = exits[1]->flags;
6589 sorig->probability = exits[1]->probability;
6590 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6591 snew->probability = exits[0]->probability;
6592 
6594 /* Register the new edge from SWITCH_BB in loop exit lists. */
6595 rescan_loop_exit (snew, true, false);
6596
6597 /* Add the PHI node arguments. */
6598 add_phi_args_after_copy (region_copy, n_region, snew);
6599
6600 /* Get rid of now superfluous conditions and associated edges (and phi node
6601 arguments). */
6602 exit_bb = exit->dest;
6603
6604 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6605 PENDING_STMT (e) = NULL;
6606
6607 /* The latch of ORIG_LOOP was copied, and so was the backedge
6608 to the original header. We redirect this backedge to EXIT_BB. */
6609 for (i = 0; i < n_region; i++)
6610 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6611 {
6612 gcc_assert (single_succ_edge (region_copy[i]));
6613 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6614 PENDING_STMT (e) = NULL;
6615 for (psi = gsi_start_phis (exit_bb);
6616 !gsi_end_p (psi);
6617 gsi_next (&psi))
6618 {
6619 phi = psi.phi ();
6620 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6621 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6622 }
6623 }
6624 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6625 PENDING_STMT (e) = NULL;
6626
6627 /* Anything that is outside of the region, but was dominated by something
6628 inside needs to update dominance info. */
6629 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6630 doms.release ();
6631 /* Update the SSA web. */
6632 update_ssa (TODO_update_ssa);
6633
6634 if (free_region_copy)
6635 free (region_copy);
6636
6637 free_original_copy_tables ();
6638 return true;
6639 }
6640
6641 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6642 adding blocks when the dominator traversal reaches EXIT. This
6643 function silently assumes that ENTRY strictly dominates EXIT. */
6644
6645 void
6646 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6647 vec<basic_block> *bbs_p)
6648 {
6649 basic_block son;
6650
6651 for (son = first_dom_son (CDI_DOMINATORS, entry);
6652 son;
6653 son = next_dom_son (CDI_DOMINATORS, son))
6654 {
6655 bbs_p->safe_push (son);
6656 if (son != exit)
6657 gather_blocks_in_sese_region (son, exit, bbs_p);
6658 }
6659 }
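
/* Illustrative only: since the traversal pushes only dominated sons,
   callers seed the vector with ENTRY themselves, as
   move_sese_region_to_fn below does:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */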
6660
6661 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6662 The duplicates are recorded in VARS_MAP. */
6663
6664 static void
6665 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6666 tree to_context)
6667 {
6668 tree t = *tp, new_t;
6669 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6670
6671 if (DECL_CONTEXT (t) == to_context)
6672 return;
6673
6674 bool existed;
6675 tree &loc = vars_map->get_or_insert (t, &existed);
6676
6677 if (!existed)
6678 {
6679 if (SSA_VAR_P (t))
6680 {
6681 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6682 add_local_decl (f, new_t);
6683 }
6684 else
6685 {
6686 gcc_assert (TREE_CODE (t) == CONST_DECL);
6687 new_t = copy_node (t);
6688 }
6689 DECL_CONTEXT (new_t) = to_context;
6690
6691 loc = new_t;
6692 }
6693 else
6694 new_t = loc;
6695
6696 *tp = new_t;
6697 }
6698
6699
6700 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6701 VARS_MAP maps old ssa names and var_decls to the new ones. */
6702
6703 static tree
6704 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6705 tree to_context)
6706 {
6707 tree new_name;
6708
6709 gcc_assert (!virtual_operand_p (name));
6710
6711 tree *loc = vars_map->get (name);
6712
6713 if (!loc)
6714 {
6715 tree decl = SSA_NAME_VAR (name);
6716 if (decl)
6717 {
6718 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6719 replace_by_duplicate_decl (&decl, vars_map, to_context);
6720 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6721 decl, SSA_NAME_DEF_STMT (name));
6722 }
6723 else
6724 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6725 name, SSA_NAME_DEF_STMT (name));
6726
6727 /* Now that we've used the def stmt to define new_name, make sure it
6728 doesn't define name anymore. */
6729 SSA_NAME_DEF_STMT (name) = NULL;
6730
6731 vars_map->put (name, new_name);
6732 }
6733 else
6734 new_name = *loc;
6735
6736 return new_name;
6737 }
6738
6739 struct move_stmt_d
6740 {
6741 tree orig_block;
6742 tree new_block;
6743 tree from_context;
6744 tree to_context;
6745 hash_map<tree, tree> *vars_map;
6746 htab_t new_label_map;
6747 hash_map<void *, void *> *eh_map;
6748 bool remap_decls_p;
6749 };
6750
6751 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6752 contained in *TP if it was previously ORIG_BLOCK, and change the
6753 DECL_CONTEXT of every local variable referenced in *TP. */
6754
6755 static tree
6756 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6757 {
6758 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6759 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6760 tree t = *tp;
6761
6762 if (EXPR_P (t))
6763 {
6764 tree block = TREE_BLOCK (t);
6765 if (block == NULL_TREE)
6766 ;
6767 else if (block == p->orig_block
6768 || p->orig_block == NULL_TREE)
6769 {
6770 /* tree_node_can_be_shared says we can share invariant
6771 addresses but unshare_expr copies them anyway. Make sure
6772 to unshare before adjusting the block in place - we do not
6773 always see a copy here. */
6774 if (TREE_CODE (t) == ADDR_EXPR
6775 && is_gimple_min_invariant (t))
6776 *tp = t = unshare_expr (t);
6777 TREE_SET_BLOCK (t, p->new_block);
6778 }
6779 else if (flag_checking)
6780 {
6781 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6782 block = BLOCK_SUPERCONTEXT (block);
6783 gcc_assert (block == p->orig_block);
6784 }
6785 }
6786 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6787 {
6788 if (TREE_CODE (t) == SSA_NAME)
6789 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6790 else if (TREE_CODE (t) == PARM_DECL
6791 && gimple_in_ssa_p (cfun))
6792 *tp = *(p->vars_map->get (t));
6793 else if (TREE_CODE (t) == LABEL_DECL)
6794 {
6795 if (p->new_label_map)
6796 {
6797 struct tree_map in, *out;
6798 in.base.from = t;
6799 out = (struct tree_map *)
6800 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6801 if (out)
6802 *tp = t = out->to;
6803 }
6804
6805 /* For FORCED_LABELs we can end up with references from other
6806 functions if some SESE regions are outlined. It is UB to
6807 jump in between them, but they could be used just for printing
6808 addresses etc. In that case, DECL_CONTEXT on the label should
6809 be the function containing the glabel stmt with that LABEL_DECL,
6810 rather than whatever function last saw a reference to the
6811 label. */
6812 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6813 DECL_CONTEXT (t) = p->to_context;
6814 }
6815 else if (p->remap_decls_p)
6816 {
6817 /* Replace T with its duplicate. T should no longer appear in the
6818 parent function, so this looks wasteful; however, it may appear
6819 in referenced_vars, and more importantly, as virtual operands of
6820 statements, and in alias lists of other variables. It would be
6821 quite difficult to expunge it from all those places. ??? It might
6822 suffice to do this for addressable variables. */
6823 if ((VAR_P (t) && !is_global_var (t))
6824 || TREE_CODE (t) == CONST_DECL)
6825 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6826 }
6827 *walk_subtrees = 0;
6828 }
6829 else if (TYPE_P (t))
6830 *walk_subtrees = 0;
6831
6832 return NULL_TREE;
6833 }
6834
6835 /* Helper for move_stmt_r. Given an EH region number for the source
6836 function, map that to the duplicate EH region number in the dest. */
6837
6838 static int
6839 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6840 {
6841 eh_region old_r, new_r;
6842
6843 old_r = get_eh_region_from_number (old_nr);
6844 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6845
6846 return new_r->index;
6847 }
6848
6849 /* Similar, but operate on INTEGER_CSTs. */
6850
6851 static tree
6852 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6853 {
6854 int old_nr, new_nr;
6855
6856 old_nr = tree_to_shwi (old_t_nr);
6857 new_nr = move_stmt_eh_region_nr (old_nr, p);
6858
6859 return build_int_cst (integer_type_node, new_nr);
6860 }
6861
6862 /* Like move_stmt_op, but for gimple statements.
6863
6864 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6865 contained in the current statement in *GSI_P and change the
6866 DECL_CONTEXT of every local variable referenced in the current
6867 statement. */
6868
6869 static tree
6870 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6871 struct walk_stmt_info *wi)
6872 {
6873 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6874 gimple *stmt = gsi_stmt (*gsi_p);
6875 tree block = gimple_block (stmt);
6876
6877 if (block == p->orig_block
6878 || (p->orig_block == NULL_TREE
6879 && block != NULL_TREE))
6880 gimple_set_block (stmt, p->new_block);
6881
6882 switch (gimple_code (stmt))
6883 {
6884 case GIMPLE_CALL:
6885 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6886 {
6887 tree r, fndecl = gimple_call_fndecl (stmt);
6888 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6889 switch (DECL_FUNCTION_CODE (fndecl))
6890 {
6891 case BUILT_IN_EH_COPY_VALUES:
6892 r = gimple_call_arg (stmt, 1);
6893 r = move_stmt_eh_region_tree_nr (r, p);
6894 gimple_call_set_arg (stmt, 1, r);
6895 /* FALLTHRU */
6896
6897 case BUILT_IN_EH_POINTER:
6898 case BUILT_IN_EH_FILTER:
6899 r = gimple_call_arg (stmt, 0);
6900 r = move_stmt_eh_region_tree_nr (r, p);
6901 gimple_call_set_arg (stmt, 0, r);
6902 break;
6903
6904 default:
6905 break;
6906 }
6907 }
6908 break;
6909
6910 case GIMPLE_RESX:
6911 {
6912 gresx *resx_stmt = as_a <gresx *> (stmt);
6913 int r = gimple_resx_region (resx_stmt);
6914 r = move_stmt_eh_region_nr (r, p);
6915 gimple_resx_set_region (resx_stmt, r);
6916 }
6917 break;
6918
6919 case GIMPLE_EH_DISPATCH:
6920 {
6921 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6922 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6923 r = move_stmt_eh_region_nr (r, p);
6924 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6925 }
6926 break;
6927
6928 case GIMPLE_OMP_RETURN:
6929 case GIMPLE_OMP_CONTINUE:
6930 break;
6931
6932 case GIMPLE_LABEL:
6933 {
6934 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6935 so that such labels can be referenced from other regions.
6936 Make sure to update it when seeing a GIMPLE_LABEL though,
6937 that is the owner of the label. */
6938 walk_gimple_op (stmt, move_stmt_op, wi);
6939 *handled_ops_p = true;
6940 tree label = gimple_label_label (as_a <glabel *> (stmt));
6941 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6942 DECL_CONTEXT (label) = p->to_context;
6943 }
6944 break;
6945
6946 default:
6947 if (is_gimple_omp (stmt))
6948 {
6949 /* Do not remap variables inside OMP directives. Variables
6950 referenced in clauses and directive header belong to the
6951 parent function and should not be moved into the child
6952 function. */
6953 bool save_remap_decls_p = p->remap_decls_p;
6954 p->remap_decls_p = false;
6955 *handled_ops_p = true;
6956
6957 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6958 move_stmt_op, wi);
6959
6960 p->remap_decls_p = save_remap_decls_p;
6961 }
6962 break;
6963 }
6964
6965 return NULL_TREE;
6966 }
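
/* Illustrative only: move_block_to_fn below drives both walkers over
   every statement of a block via

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = d;  // the move_stmt_d describing the move
     walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

   so move_stmt_r handles statement-level updates (blocks, EH regions)
   and move_stmt_op rewrites the individual operands.  */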
6967
6968 /* Move basic block BB from function CFUN to function DEST_FN. The
6969 block is moved out of the original linked list and placed after
6970 block AFTER in the new list. Also, the block is removed from the
6971 original array of blocks and placed in DEST_FN's array of blocks.
6972 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6973 updated to reflect the moved edges.
6974
6975 The local variables are remapped to new instances, VARS_MAP is used
6976 to record the mapping. */
6977
6978 static void
6979 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6980 basic_block after, bool update_edge_count_p,
6981 struct move_stmt_d *d)
6982 {
6983 struct control_flow_graph *cfg;
6984 edge_iterator ei;
6985 edge e;
6986 gimple_stmt_iterator si;
6987 unsigned old_len, new_len;
6988
6989 /* Remove BB from dominance structures. */
6990 delete_from_dominance_info (CDI_DOMINATORS, bb);
6991
6992 /* Move BB from its current loop to the copy in the new function. */
6993 if (current_loops)
6994 {
6995 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6996 if (new_loop)
6997 bb->loop_father = new_loop;
6998 }
6999
7000 /* Link BB to the new linked list. */
7001 move_block_after (bb, after);
7002
7003 /* Update the edge count in the corresponding flowgraphs. */
7004 if (update_edge_count_p)
7005 FOR_EACH_EDGE (e, ei, bb->succs)
7006 {
7007 cfun->cfg->x_n_edges--;
7008 dest_cfun->cfg->x_n_edges++;
7009 }
7010
7011 /* Remove BB from the original basic block array. */
7012 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7013 cfun->cfg->x_n_basic_blocks--;
7014
7015 /* Grow DEST_CFUN's basic block array if needed. */
7016 cfg = dest_cfun->cfg;
7017 cfg->x_n_basic_blocks++;
7018 if (bb->index >= cfg->x_last_basic_block)
7019 cfg->x_last_basic_block = bb->index + 1;
7020
7021 old_len = vec_safe_length (cfg->x_basic_block_info);
7022 if ((unsigned) cfg->x_last_basic_block >= old_len)
7023 {
7024 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7025 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7026 }
7027
7028 (*cfg->x_basic_block_info)[bb->index] = bb;
7029
7030 /* Remap the variables in phi nodes. */
7031 for (gphi_iterator psi = gsi_start_phis (bb);
7032 !gsi_end_p (psi); )
7033 {
7034 gphi *phi = psi.phi ();
7035 use_operand_p use;
7036 tree op = PHI_RESULT (phi);
7037 ssa_op_iter oi;
7038 unsigned i;
7039
7040 if (virtual_operand_p (op))
7041 {
7042 /* Remove the phi nodes for virtual operands (alias analysis will be
7043 run for the new function, anyway). */
7044 remove_phi_node (&psi, true);
7045 continue;
7046 }
7047
7048 SET_PHI_RESULT (phi,
7049 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7050 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7051 {
7052 op = USE_FROM_PTR (use);
7053 if (TREE_CODE (op) == SSA_NAME)
7054 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7055 }
7056
7057 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7058 {
7059 location_t locus = gimple_phi_arg_location (phi, i);
7060 tree block = LOCATION_BLOCK (locus);
7061
7062 if (locus == UNKNOWN_LOCATION)
7063 continue;
7064 if (d->orig_block == NULL_TREE || block == d->orig_block)
7065 {
7066 locus = set_block (locus, d->new_block);
7067 gimple_phi_arg_set_location (phi, i, locus);
7068 }
7069 }
7070
7071 gsi_next (&psi);
7072 }
7073
7074 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7075 {
7076 gimple *stmt = gsi_stmt (si);
7077 struct walk_stmt_info wi;
7078
7079 memset (&wi, 0, sizeof (wi));
7080 wi.info = d;
7081 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7082
7083 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7084 {
7085 tree label = gimple_label_label (label_stmt);
7086 int uid = LABEL_DECL_UID (label);
7087
7088 gcc_assert (uid > -1);
7089
7090 old_len = vec_safe_length (cfg->x_label_to_block_map);
7091 if (old_len <= (unsigned) uid)
7092 {
7093 new_len = 3 * uid / 2 + 1;
7094 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7095 }
7096
7097 (*cfg->x_label_to_block_map)[uid] = bb;
7098 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7099
7100 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7101
7102 if (uid >= dest_cfun->cfg->last_label_uid)
7103 dest_cfun->cfg->last_label_uid = uid + 1;
7104 }
7105
7106 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7107 remove_stmt_from_eh_lp_fn (cfun, stmt);
7108
7109 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7110 gimple_remove_stmt_histograms (cfun, stmt);
7111
7112 /* We cannot leave any operands allocated from the operand caches of
7113 the current function. */
7114 free_stmt_operands (cfun, stmt);
7115 push_cfun (dest_cfun);
7116 update_stmt (stmt);
7117 pop_cfun ();
7118 }
7119
7120 FOR_EACH_EDGE (e, ei, bb->succs)
7121 if (e->goto_locus != UNKNOWN_LOCATION)
7122 {
7123 tree block = LOCATION_BLOCK (e->goto_locus);
7124 if (d->orig_block == NULL_TREE
7125 || block == d->orig_block)
7126 e->goto_locus = set_block (e->goto_locus, d->new_block);
7127 }
7128 }
7129
7130 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7131 the outermost EH region. Use REGION as the incoming base EH region.
7132 If there is no single outermost region, return NULL and set *ALL to
7133 true. */
7134
7135 static eh_region
7136 find_outermost_region_in_block (struct function *src_cfun,
7137 basic_block bb, eh_region region,
7138 bool *all)
7139 {
7140 gimple_stmt_iterator si;
7141
7142 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7143 {
7144 gimple *stmt = gsi_stmt (si);
7145 eh_region stmt_region;
7146 int lp_nr;
7147
7148 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7149 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7150 if (stmt_region)
7151 {
7152 if (region == NULL)
7153 region = stmt_region;
7154 else if (stmt_region != region)
7155 {
7156 region = eh_region_outermost (src_cfun, stmt_region, region);
7157 if (region == NULL)
7158 {
7159 *all = true;
7160 return NULL;
7161 }
7162 }
7163 }
7164 }
7165
7166 return region;
7167 }
7168
7169 static tree
7170 new_label_mapper (tree decl, void *data)
7171 {
7172 htab_t hash = (htab_t) data;
7173 struct tree_map *m;
7174 void **slot;
7175
7176 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7177
7178 m = XNEW (struct tree_map);
7179 m->hash = DECL_UID (decl);
7180 m->base.from = decl;
7181 m->to = create_artificial_label (UNKNOWN_LOCATION);
7182 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7183 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7184 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7185
7186 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7187 gcc_assert (*slot == NULL);
7188
7189 *slot = m;
7190
7191 return m->to;
7192 }
7193
7194 /* Tree walker to replace the decls used inside value expressions by
7195 duplicates. */
7196
7197 static tree
7198 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7199 {
7200 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7201
7202 switch (TREE_CODE (*tp))
7203 {
7204 case VAR_DECL:
7205 case PARM_DECL:
7206 case RESULT_DECL:
7207 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7208 break;
7209 default:
7210 break;
7211 }
7212
7213 if (IS_TYPE_OR_DECL_P (*tp))
7214 *walk_subtrees = false;
7215
7216 return NULL;
7217 }
7218
7219 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7220 subblocks. */
7221
7222 static void
7223 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7224 tree to_context)
7225 {
7226 tree *tp, t;
7227
7228 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7229 {
7230 t = *tp;
7231 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7232 continue;
7233 replace_by_duplicate_decl (&t, vars_map, to_context);
7234 if (t != *tp)
7235 {
7236 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7237 {
7238 tree x = DECL_VALUE_EXPR (*tp);
7239 struct replace_decls_d rd = { vars_map, to_context };
7240 x = unshare_expr (x);
7241 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7242 SET_DECL_VALUE_EXPR (t, x);
7243 DECL_HAS_VALUE_EXPR_P (t) = 1;
7244 }
7245 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7246 *tp = t;
7247 }
7248 }
7249
7250 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7251 replace_block_vars_by_duplicates (block, vars_map, to_context);
7252 }
7253
7254 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7255 from FN1 to FN2. */
7256
7257 static void
7258 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7259 struct loop *loop)
7260 {
7261 /* Discard it from the old loop array. */
7262 (*get_loops (fn1))[loop->num] = NULL;
7263
7264 /* Place it in the new loop array, assigning it a new number. */
7265 loop->num = number_of_loops (fn2);
7266 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7267
7268 /* Recurse to children. */
7269 for (loop = loop->inner; loop; loop = loop->next)
7270 fixup_loop_arrays_after_move (fn1, fn2, loop);
7271 }
7272
7273 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7274 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7275
7276 DEBUG_FUNCTION void
7277 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7278 {
7279 basic_block bb;
7280 edge_iterator ei;
7281 edge e;
7282 bitmap bbs = BITMAP_ALLOC (NULL);
7283 int i;
7284
7285 gcc_assert (entry != NULL);
7286 gcc_assert (entry != exit);
7287 gcc_assert (bbs_p != NULL);
7288
7289 gcc_assert (bbs_p->length () > 0);
7290
7291 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7292 bitmap_set_bit (bbs, bb->index);
7293
7294 gcc_assert (bitmap_bit_p (bbs, entry->index));
7295 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7296
7297 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7298 {
7299 if (bb == entry)
7300 {
7301 gcc_assert (single_pred_p (entry));
7302 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7303 }
7304 else
7305 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7306 {
7307 e = ei_edge (ei);
7308 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7309 }
7310
7311 if (bb == exit)
7312 {
7313 gcc_assert (single_succ_p (exit));
7314 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7315 }
7316 else
7317 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7318 {
7319 e = ei_edge (ei);
7320 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7321 }
7322 }
7323
7324 BITMAP_FREE (bbs);
7325 }
7326
7327 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7328
7329 bool
7330 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7331 {
7332 bitmap release_names = (bitmap)data;
7333
7334 if (TREE_CODE (from) != SSA_NAME)
7335 return true;
7336
7337 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7338 return true;
7339 }
7340
7341 /* Return LOOP_DIST_ALIAS call if present in BB. */
7342
7343 static gimple *
7344 find_loop_dist_alias (basic_block bb)
7345 {
7346 gimple *g = last_stmt (bb);
7347 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7348 return NULL;
7349
7350 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7351 gsi_prev (&gsi);
7352 if (gsi_end_p (gsi))
7353 return NULL;
7354
7355 g = gsi_stmt (gsi);
7356 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7357 return g;
7358 return NULL;
7359 }
7360
7361 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7362 to VALUE and update any immediate uses of its LHS. */
7363
7364 void
7365 fold_loop_internal_call (gimple *g, tree value)
7366 {
7367 tree lhs = gimple_call_lhs (g);
7368 use_operand_p use_p;
7369 imm_use_iterator iter;
7370 gimple *use_stmt;
7371 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7372
7373 update_call_from_tree (&gsi, value);
7374 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7375 {
7376 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7377 SET_USE (use_p, value);
7378 update_stmt (use_stmt);
7379 }
7380 }
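
/* For example (illustrative GIMPLE), given

     _1 = .LOOP_VECTORIZED (1, 2);
     if (_1 != 0) goto <bb 3>; else goto <bb 4>;

   calling fold_loop_internal_call with VALUE = boolean_true_node
   replaces the call by an assignment of the constant and rewrites the
   use in the condition, after which CFG cleanup can delete the dead
   arm.  */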
7381
7382 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7383 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7384 single basic block in the original CFG and the new basic block is
7385 returned. DEST_CFUN must not have a CFG yet.
7386
7387 Note that the region need not be a pure SESE region. Blocks inside
7388 the region may contain calls to abort/exit. The only restriction
7389 is that ENTRY_BB should be the only entry point and it must
7390 dominate EXIT_BB.
7391
7392 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7393 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7394 to the new function.
7395
7396 All local variables referenced in the region are assumed to be in
7397 the corresponding BLOCK_VARS and unexpanded variable lists
7398 associated with DEST_CFUN.
7399
7400 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7401 reimplement move_sese_region_to_fn by duplicating the region rather than
7402 moving it. */
7403
7404 basic_block
7405 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7406 basic_block exit_bb, tree orig_block)
7407 {
7408 vec<basic_block> bbs, dom_bbs;
7409 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7410 basic_block after, bb, *entry_pred, *exit_succ, abb;
7411 struct function *saved_cfun = cfun;
7412 int *entry_flag, *exit_flag;
7413 profile_probability *entry_prob, *exit_prob;
7414 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7415 edge e;
7416 edge_iterator ei;
7417 htab_t new_label_map;
7418 hash_map<void *, void *> *eh_map;
7419 struct loop *loop = entry_bb->loop_father;
7420 struct loop *loop0 = get_loop (saved_cfun, 0);
7421 struct move_stmt_d d;
7422
7423 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7424 region. */
7425 gcc_assert (entry_bb != exit_bb
7426 && (!exit_bb
7427 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7428
7429 /* Collect all the blocks in the region. Manually add ENTRY_BB
7430 because it won't be added by dfs_enumerate_from. */
7431 bbs.create (0);
7432 bbs.safe_push (entry_bb);
7433 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7434
7435 if (flag_checking)
7436 verify_sese (entry_bb, exit_bb, &bbs);
7437
7438 /* The blocks that used to be dominated by something in BBS will now be
7439 dominated by the new block. */
7440 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7441 bbs.address (),
7442 bbs.length ());
7443
7444 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7445 the predecessor edges to ENTRY_BB and the successor edges to
7446 EXIT_BB so that we can re-attach them to the new basic block that
7447 will replace the region. */
7448 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7449 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7450 entry_flag = XNEWVEC (int, num_entry_edges);
7451 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7452 i = 0;
7453 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7454 {
7455 entry_prob[i] = e->probability;
7456 entry_flag[i] = e->flags;
7457 entry_pred[i++] = e->src;
7458 remove_edge (e);
7459 }
7460
7461 if (exit_bb)
7462 {
7463 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7464 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7465 exit_flag = XNEWVEC (int, num_exit_edges);
7466 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7467 i = 0;
7468 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7469 {
7470 exit_prob[i] = e->probability;
7471 exit_flag[i] = e->flags;
7472 exit_succ[i++] = e->dest;
7473 remove_edge (e);
7474 }
7475 }
7476 else
7477 {
7478 num_exit_edges = 0;
7479 exit_succ = NULL;
7480 exit_flag = NULL;
7481 exit_prob = NULL;
7482 }
7483
7484 /* Switch context to the child function to initialize DEST_FN's CFG. */
7485 gcc_assert (dest_cfun->cfg == NULL);
7486 push_cfun (dest_cfun);
7487
7488 init_empty_tree_cfg ();
7489
7490 /* Initialize EH information for the new function. */
7491 eh_map = NULL;
7492 new_label_map = NULL;
7493 if (saved_cfun->eh)
7494 {
7495 eh_region region = NULL;
7496 bool all = false;
7497
7498 FOR_EACH_VEC_ELT (bbs, i, bb)
7499 {
7500 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7501 if (all)
7502 break;
7503 }
7504
7505 init_eh_for_function ();
7506 if (region != NULL || all)
7507 {
7508 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7509 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7510 new_label_mapper, new_label_map);
7511 }
7512 }
7513
7514 /* Initialize an empty loop tree. */
7515 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7516 init_loops_structure (dest_cfun, loops, 1);
7517 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7518 set_loops_for_fn (dest_cfun, loops);
7519
7520 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7521
7522 /* Move the outlined loop tree part. */
7523 num_nodes = bbs.length ();
7524 FOR_EACH_VEC_ELT (bbs, i, bb)
7525 {
7526 if (bb->loop_father->header == bb)
7527 {
7528 struct loop *this_loop = bb->loop_father;
7529 struct loop *outer = loop_outer (this_loop);
7530 if (outer == loop
7531 /* If the SESE region contains some bbs ending with
7532 a noreturn call, those are considered to belong
7533 to the outermost loop in saved_cfun, rather than
7534 the entry_bb's loop_father. */
7535 || outer == loop0)
7536 {
7537 if (outer != loop)
7538 num_nodes -= this_loop->num_nodes;
7539 flow_loop_tree_node_remove (bb->loop_father);
7540 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7541 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7542 }
7543 }
7544 else if (bb->loop_father == loop0 && loop0 != loop)
7545 num_nodes--;
7546
7547 /* Remove loop exits from the outlined region. */
7548 if (loops_for_fn (saved_cfun)->exits)
7549 FOR_EACH_EDGE (e, ei, bb->succs)
7550 {
7551 struct loops *l = loops_for_fn (saved_cfun);
7552 loop_exit **slot
7553 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7554 NO_INSERT);
7555 if (slot)
7556 l->exits->clear_slot (slot);
7557 }
7558 }
7559
7560 /* Adjust the number of blocks in the tree root of the outlined part. */
7561 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7562
7563 /* Setup a mapping to be used by move_block_to_fn. */
7564 loop->aux = current_loops->tree_root;
7565 loop0->aux = current_loops->tree_root;
7566
7567 /* Fix up orig_loop_num. If the block referenced in it has been moved
7568 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7569 struct loop *dloop;
7570 signed char *moved_orig_loop_num = NULL;
7571 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7572 if (dloop->orig_loop_num)
7573 {
7574 if (moved_orig_loop_num == NULL)
7575 moved_orig_loop_num
7576 = XCNEWVEC (signed char, vec_safe_length (larray));
7577 if ((*larray)[dloop->orig_loop_num] != NULL
7578 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7579 {
7580 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7581 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7582 moved_orig_loop_num[dloop->orig_loop_num]++;
7583 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7584 }
7585 else
7586 {
7587 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7588 dloop->orig_loop_num = 0;
7589 }
7590 }
7591 pop_cfun ();
7592
7593 if (moved_orig_loop_num)
7594 {
7595 FOR_EACH_VEC_ELT (bbs, i, bb)
7596 {
7597 gimple *g = find_loop_dist_alias (bb);
7598 if (g == NULL)
7599 continue;
7600
7601 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7602 gcc_assert (orig_loop_num
7603 && (unsigned) orig_loop_num < vec_safe_length (larray));
7604 if (moved_orig_loop_num[orig_loop_num] == 2)
7605 {
7606 /* If we have moved both loops with this orig_loop_num into
7607 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7608 too, update the first argument. */
7609 gcc_assert ((*larray)[orig_loop_num] != NULL
7610 && (get_loop (saved_cfun, orig_loop_num)
7611 == NULL));
7612 tree t = build_int_cst (integer_type_node,
7613 (*larray)[orig_loop_num]->num);
7614 gimple_call_set_arg (g, 0, t);
7615 update_stmt (g);
7616 /* Make sure the following loop will not update it. */
7617 moved_orig_loop_num[orig_loop_num] = 0;
7618 }
7619 else
7620 /* Otherwise at least one of the loops stayed in saved_cfun.
7621 Remove the LOOP_DIST_ALIAS call. */
7622 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7623 }
7624 FOR_EACH_BB_FN (bb, saved_cfun)
7625 {
7626 gimple *g = find_loop_dist_alias (bb);
7627 if (g == NULL)
7628 continue;
7629 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7630 gcc_assert (orig_loop_num
7631 && (unsigned) orig_loop_num < vec_safe_length (larray));
7632 if (moved_orig_loop_num[orig_loop_num])
7633 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7634 of the corresponding loops was moved, remove it. */
7635 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7636 }
7637 XDELETEVEC (moved_orig_loop_num);
7638 }
7639 ggc_free (larray);
7640
7641 /* Move blocks from BBS into DEST_CFUN. */
7642 gcc_assert (bbs.length () >= 2);
7643 after = dest_cfun->cfg->x_entry_block_ptr;
7644 hash_map<tree, tree> vars_map;
7645
7646 memset (&d, 0, sizeof (d));
7647 d.orig_block = orig_block;
7648 d.new_block = DECL_INITIAL (dest_cfun->decl);
7649 d.from_context = cfun->decl;
7650 d.to_context = dest_cfun->decl;
7651 d.vars_map = &vars_map;
7652 d.new_label_map = new_label_map;
7653 d.eh_map = eh_map;
7654 d.remap_decls_p = true;
7655
7656 if (gimple_in_ssa_p (cfun))
7657 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7658 {
7659 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7660 set_ssa_default_def (dest_cfun, arg, narg);
7661 vars_map.put (arg, narg);
7662 }
7663
7664 FOR_EACH_VEC_ELT (bbs, i, bb)
7665 {
7666 /* No need to update edge counts on the last block. It has
7667 already been updated earlier when we detached the region from
7668 the original CFG. */
7669 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7670 after = bb;
7671 }
7672
7673 loop->aux = NULL;
7674 loop0->aux = NULL;
7675 /* Loop sizes are no longer correct, fix them up. */
7676 loop->num_nodes -= num_nodes;
7677 for (struct loop *outer = loop_outer (loop);
7678 outer; outer = loop_outer (outer))
7679 outer->num_nodes -= num_nodes;
7680 loop0->num_nodes -= bbs.length () - num_nodes;
7681
7682 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7683 {
7684 struct loop *aloop;
7685 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7686 if (aloop != NULL)
7687 {
7688 if (aloop->simduid)
7689 {
7690 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7691 d.to_context);
7692 dest_cfun->has_simduid_loops = true;
7693 }
7694 if (aloop->force_vectorize)
7695 dest_cfun->has_force_vectorize_loops = true;
7696 }
7697 }
7698
7699 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7700 if (orig_block)
7701 {
7702 tree block;
7703 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7704 == NULL_TREE);
7705 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7706 = BLOCK_SUBBLOCKS (orig_block);
7707 for (block = BLOCK_SUBBLOCKS (orig_block);
7708 block; block = BLOCK_CHAIN (block))
7709 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7710 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7711 }
7712
7713 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7714 &vars_map, dest_cfun->decl);
7715
7716 if (new_label_map)
7717 htab_delete (new_label_map);
7718 if (eh_map)
7719 delete eh_map;
7720
7721 if (gimple_in_ssa_p (cfun))
7722 {
7723 /* We need to release ssa-names in a defined order, so first find them,
7724 and then iterate in ascending version order. */
7725 bitmap release_names = BITMAP_ALLOC (NULL);
7726 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7727 bitmap_iterator bi;
7728 unsigned i;
7729 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7730 release_ssa_name (ssa_name (i));
7731 BITMAP_FREE (release_names);
7732 }
7733
7734 /* Rewire the entry and exit blocks. The successor to the entry
7735 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7736 the child function. Similarly, the predecessor of DEST_FN's
7737 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7738 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7739 various CFG manipulation functions get to the right CFG.
7740
7741 FIXME, this is silly. The CFG ought to become a parameter to
7742 these helpers. */
7743 push_cfun (dest_cfun);
7744 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7745 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7746 if (exit_bb)
7747 {
7748 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7749 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7750 }
7751 else
7752 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7753 pop_cfun ();
7754
7755 /* Back in the original function, the SESE region has disappeared;
7756 create a new basic block in its place. */
7757 bb = create_empty_bb (entry_pred[0]);
7758 if (current_loops)
7759 add_bb_to_loop (bb, loop);
7760 for (i = 0; i < num_entry_edges; i++)
7761 {
7762 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7763 e->probability = entry_prob[i];
7764 }
7765
7766 for (i = 0; i < num_exit_edges; i++)
7767 {
7768 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7769 e->probability = exit_prob[i];
7770 }
7771
7772 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7773 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7774 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7775 dom_bbs.release ();
7776
7777 if (exit_bb)
7778 {
7779 free (exit_prob);
7780 free (exit_flag);
7781 free (exit_succ);
7782 }
7783 free (entry_prob);
7784 free (entry_flag);
7785 free (entry_pred);
7786 bbs.release ();
7787
7788 return bb;
7789 }
7790
7791 /* Dump default def DEF to file FILE using FLAGS and indentation
7792 SPC. */
7793
7794 static void
7795 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7796 {
7797 for (int i = 0; i < spc; ++i)
7798 fprintf (file, " ");
7799 dump_ssaname_info_to_file (file, def, spc);
7800
7801 print_generic_expr (file, TREE_TYPE (def), flags);
7802 fprintf (file, " ");
7803 print_generic_expr (file, def, flags);
7804 fprintf (file, " = ");
7805 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7806 fprintf (file, ";\n");
7807 }
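/* Illustrative sketch (hypothetical parameter): for an int parameter
   "n" whose default definition is the SSA name n_1(D), the routine
   above emits a line shaped like

       int n_1(D) = n;

   i.e. the type, the SSA name, and the variable it defaults from.  */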
7808
7809 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7810
7811 static void
7812 print_no_sanitize_attr_value (FILE *file, tree value)
7813 {
7814 unsigned int flags = tree_to_uhwi (value);
7815 bool first = true;
7816 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7817 {
7818 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7819 {
7820 if (!first)
7821 fprintf (file, " | ");
7822 fprintf (file, "%s", sanitizer_opts[i].name);
7823 first = false;
7824 }
7825 }
7826 }
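/* Illustration: the attribute value is a bit mask, and the routine
   above prints the names of all sanitizers whose bits are fully covered
   by it, separated by " | ".  E.g. a mask covering just
   SANITIZE_ADDRESS prints simply "address"; the names come from the
   sanitizer_opts table in opts.c.  */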
7827
7828 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7829 dumpfile.h). */
7830
7831 void
7832 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7833 {
7834 tree arg, var, old_current_fndecl = current_function_decl;
7835 struct function *dsf;
7836 bool ignore_topmost_bind = false, any_var = false;
7837 basic_block bb;
7838 tree chain;
7839 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7840 && decl_is_tm_clone (fndecl));
7841 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7842
7843 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7844 {
7845 fprintf (file, "__attribute__((");
7846
7847 bool first = true;
7848 tree chain;
7849 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7850 first = false, chain = TREE_CHAIN (chain))
7851 {
7852 if (!first)
7853 fprintf (file, ", ");
7854
7855 tree name = get_attribute_name (chain);
7856 print_generic_expr (file, name, dump_flags);
7857 if (TREE_VALUE (chain) != NULL_TREE)
7858 {
7859 fprintf (file, " (");
7860
7861 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7862 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7863 else
7864 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7865 fprintf (file, ")");
7866 }
7867 }
7868
7869 fprintf (file, "))\n");
7870 }
7871
7872 current_function_decl = fndecl;
7873 if (flags & TDF_GIMPLE)
7874 {
7875 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7876 dump_flags | TDF_SLIM);
7877 fprintf (file, " __GIMPLE (%s)\n%s (",
7878 (fun->curr_properties & PROP_ssa) ? "ssa"
7879 : (fun->curr_properties & PROP_cfg) ? "cfg"
7880 : "",
7881 function_name (fun));
7882 }
7883 else
7884 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7885
7886 arg = DECL_ARGUMENTS (fndecl);
7887 while (arg)
7888 {
7889 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7890 fprintf (file, " ");
7891 print_generic_expr (file, arg, dump_flags);
7892 if (DECL_CHAIN (arg))
7893 fprintf (file, ", ");
7894 arg = DECL_CHAIN (arg);
7895 }
7896 fprintf (file, ")\n");
7897
7898 dsf = DECL_STRUCT_FUNCTION (fndecl);
7899 if (dsf && (flags & TDF_EH))
7900 dump_eh_tree (file, dsf);
7901
7902 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7903 {
7904 dump_node (fndecl, TDF_SLIM | flags, file);
7905 current_function_decl = old_current_fndecl;
7906 return;
7907 }
7908
7909 /* When GIMPLE is lowered, the variables are no longer available in
7910 BIND_EXPRs, so display them separately. */
7911 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7912 {
7913 unsigned ix;
7914 ignore_topmost_bind = true;
7915
7916 fprintf (file, "{\n");
7917 if (gimple_in_ssa_p (fun)
7918 && (flags & TDF_ALIAS))
7919 {
7920 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7921 arg = DECL_CHAIN (arg))
7922 {
7923 tree def = ssa_default_def (fun, arg);
7924 if (def)
7925 dump_default_def (file, def, 2, flags);
7926 }
7927
7928 tree res = DECL_RESULT (fun->decl);
7929 if (res != NULL_TREE
7930 && DECL_BY_REFERENCE (res))
7931 {
7932 tree def = ssa_default_def (fun, res);
7933 if (def)
7934 dump_default_def (file, def, 2, flags);
7935 }
7936
7937 tree static_chain = fun->static_chain_decl;
7938 if (static_chain != NULL_TREE)
7939 {
7940 tree def = ssa_default_def (fun, static_chain);
7941 if (def)
7942 dump_default_def (file, def, 2, flags);
7943 }
7944 }
7945
7946 if (!vec_safe_is_empty (fun->local_decls))
7947 FOR_EACH_LOCAL_DECL (fun, ix, var)
7948 {
7949 print_generic_decl (file, var, flags);
7950 fprintf (file, "\n");
7951
7952 any_var = true;
7953 }
7954
7955 tree name;
7956
7957 if (gimple_in_ssa_p (cfun))
7958 FOR_EACH_SSA_NAME (ix, name, cfun)
7959 {
7960 if (!SSA_NAME_VAR (name))
7961 {
7962 fprintf (file, " ");
7963 print_generic_expr (file, TREE_TYPE (name), flags);
7964 fprintf (file, " ");
7965 print_generic_expr (file, name, flags);
7966 fprintf (file, ";\n");
7967
7968 any_var = true;
7969 }
7970 }
7971 }
7972
7973 if (fun && fun->decl == fndecl
7974 && fun->cfg
7975 && basic_block_info_for_fn (fun))
7976 {
7977 /* If the CFG has been built, emit a CFG-based dump. */
7978 if (!ignore_topmost_bind)
7979 fprintf (file, "{\n");
7980
7981 if (any_var && n_basic_blocks_for_fn (fun))
7982 fprintf (file, "\n");
7983
7984 FOR_EACH_BB_FN (bb, fun)
7985 dump_bb (file, bb, 2, flags);
7986
7987 fprintf (file, "}\n");
7988 }
7989 else if (fun->curr_properties & PROP_gimple_any)
7990 {
7991 /* The function is now in GIMPLE form but the CFG has not been
7992 built yet. Emit the single sequence of GIMPLE statements
7993 that make up its body. */
7994 gimple_seq body = gimple_body (fndecl);
7995
7996 if (gimple_seq_first_stmt (body)
7997 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7998 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7999 print_gimple_seq (file, body, 0, flags);
8000 else
8001 {
8002 if (!ignore_topmost_bind)
8003 fprintf (file, "{\n");
8004
8005 if (any_var)
8006 fprintf (file, "\n");
8007
8008 print_gimple_seq (file, body, 2, flags);
8009 fprintf (file, "}\n");
8010 }
8011 }
8012 else
8013 {
8014 int indent;
8015
8016 /* Make a tree-based dump. */
8017 chain = DECL_SAVED_TREE (fndecl);
8018 if (chain && TREE_CODE (chain) == BIND_EXPR)
8019 {
8020 if (ignore_topmost_bind)
8021 {
8022 chain = BIND_EXPR_BODY (chain);
8023 indent = 2;
8024 }
8025 else
8026 indent = 0;
8027 }
8028 else
8029 {
8030 if (!ignore_topmost_bind)
8031 {
8032 fprintf (file, "{\n");
8033 /* No topmost bind; pretend it's ignored for later. */
8034 ignore_topmost_bind = true;
8035 }
8036 indent = 2;
8037 }
8038
8039 if (any_var)
8040 fprintf (file, "\n");
8041
8042 print_generic_stmt_indented (file, chain, flags, indent);
8043 if (ignore_topmost_bind)
8044 fprintf (file, "}\n");
8045 }
8046
8047 if (flags & TDF_ENUMERATE_LOCALS)
8048 dump_enumerated_decls (file, flags);
8049 fprintf (file, "\n\n");
8050
8051 current_function_decl = old_current_fndecl;
8052 }
8053
8054 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8055
8056 DEBUG_FUNCTION void
8057 debug_function (tree fn, dump_flags_t flags)
8058 {
8059 dump_function_to_file (fn, stderr, flags);
8060 }
8061
8062
8063 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8064
8065 static void
8066 print_pred_bbs (FILE *file, basic_block bb)
8067 {
8068 edge e;
8069 edge_iterator ei;
8070
8071 FOR_EACH_EDGE (e, ei, bb->preds)
8072 fprintf (file, "bb_%d ", e->src->index);
8073 }
8074
8075
8076 /* Print on FILE the indexes for the successors of basic_block BB. */
8077
8078 static void
8079 print_succ_bbs (FILE *file, basic_block bb)
8080 {
8081 edge e;
8082 edge_iterator ei;
8083
8084 FOR_EACH_EDGE (e, ei, bb->succs)
8085 fprintf (file, "bb_%d ", e->dest->index);
8086 }
8087
8088 /* Print to FILE the basic block BB according to the VERBOSITY level. */
8089
8090 void
8091 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8092 {
8093 char *s_indent = (char *) alloca ((size_t) indent + 1);
8094 memset ((void *) s_indent, ' ', (size_t) indent);
8095 s_indent[indent] = '\0';
8096
8097 /* Print basic_block's header. */
8098 if (verbosity >= 2)
8099 {
8100 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8101 print_pred_bbs (file, bb);
8102 fprintf (file, "}, succs = {");
8103 print_succ_bbs (file, bb);
8104 fprintf (file, "})\n");
8105 }
8106
8107 /* Print basic_block's body. */
8108 if (verbosity >= 3)
8109 {
8110 fprintf (file, "%s {\n", s_indent);
8111 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8112 fprintf (file, "%s }\n", s_indent);
8113 }
8114 }
8115
8116 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8117
8118 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8119 VERBOSITY level, this outputs the contents of the loop, or just its
8120 structure. */
8121
8122 static void
8123 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8124 {
8125 char *s_indent;
8126 basic_block bb;
8127
8128 if (loop == NULL)
8129 return;
8130
8131 s_indent = (char *) alloca ((size_t) indent + 1);
8132 memset ((void *) s_indent, ' ', (size_t) indent);
8133 s_indent[indent] = '\0';
8134
8135 /* Print loop's header. */
8136 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8137 if (loop->header)
8138 fprintf (file, "header = %d", loop->header->index);
8139 else
8140 {
8141 fprintf (file, "deleted)\n");
8142 return;
8143 }
8144 if (loop->latch)
8145 fprintf (file, ", latch = %d", loop->latch->index);
8146 else
8147 fprintf (file, ", multiple latches");
8148 fprintf (file, ", niter = ");
8149 print_generic_expr (file, loop->nb_iterations);
8150
8151 if (loop->any_upper_bound)
8152 {
8153 fprintf (file, ", upper_bound = ");
8154 print_decu (loop->nb_iterations_upper_bound, file);
8155 }
8156 if (loop->any_likely_upper_bound)
8157 {
8158 fprintf (file, ", likely_upper_bound = ");
8159 print_decu (loop->nb_iterations_likely_upper_bound, file);
8160 }
8161
8162 if (loop->any_estimate)
8163 {
8164 fprintf (file, ", estimate = ");
8165 print_decu (loop->nb_iterations_estimate, file);
8166 }
8167 if (loop->unroll)
8168 fprintf (file, ", unroll = %d", loop->unroll);
8169 fprintf (file, ")\n");
8170
8171 /* Print loop's body. */
8172 if (verbosity >= 1)
8173 {
8174 fprintf (file, "%s{\n", s_indent);
8175 FOR_EACH_BB_FN (bb, cfun)
8176 if (bb->loop_father == loop)
8177 print_loops_bb (file, bb, indent, verbosity);
8178
8179 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8180 fprintf (file, "%s}\n", s_indent);
8181 }
8182 }
8183
8184 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8185 spaces. Depending on the VERBOSITY level, this outputs the contents
8186 of the loop, or just its structure. */
8187
8188 static void
8189 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8190 int verbosity)
8191 {
8192 if (loop == NULL)
8193 return;
8194
8195 print_loop (file, loop, indent, verbosity);
8196 print_loop_and_siblings (file, loop->next, indent, verbosity);
8197 }
8198
8199 /* Follow a CFG edge from the entry point of the function, and on entry
8200 to a loop, pretty print the loop structure on FILE. */
8201
8202 void
8203 print_loops (FILE *file, int verbosity)
8204 {
8205 basic_block bb;
8206
8207 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8208 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8209 if (bb && bb->loop_father)
8210 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8211 }
8212
8213 /* Dump a loop. */
8214
8215 DEBUG_FUNCTION void
8216 debug (struct loop &ref)
8217 {
8218 print_loop (stderr, &ref, 0, /*verbosity*/0);
8219 }
8220
8221 DEBUG_FUNCTION void
8222 debug (struct loop *ptr)
8223 {
8224 if (ptr)
8225 debug (*ptr);
8226 else
8227 fprintf (stderr, "<nil>\n");
8228 }
8229
8230 /* Dump a loop verbosely. */
8231
8232 DEBUG_FUNCTION void
8233 debug_verbose (struct loop &ref)
8234 {
8235 print_loop (stderr, &ref, 0, /*verbosity*/3);
8236 }
8237
8238 DEBUG_FUNCTION void
8239 debug_verbose (struct loop *ptr)
8240 {
8241 if (ptr)
8242 debug_verbose (*ptr);
8243 else
8244 fprintf (stderr, "<nil>\n");
8245 }
8246
8247
8248 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8249
8250 DEBUG_FUNCTION void
8251 debug_loops (int verbosity)
8252 {
8253 print_loops (stderr, verbosity);
8254 }
8255
8256 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8257
8258 DEBUG_FUNCTION void
8259 debug_loop (struct loop *loop, int verbosity)
8260 {
8261 print_loop (stderr, loop, 0, verbosity);
8262 }
8263
8264 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8265 level. */
8266
8267 DEBUG_FUNCTION void
8268 debug_loop_num (unsigned num, int verbosity)
8269 {
8270 debug_loop (get_loop (cfun, num), verbosity);
8271 }
8272
8273 /* Return true if BB ends with a call, possibly followed by some
8274 instructions that must stay with the call. Return false
8275 otherwise. */
8276
8277 static bool
8278 gimple_block_ends_with_call_p (basic_block bb)
8279 {
8280 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8281 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8282 }
8283
8284
8285 /* Return true if BB ends with a conditional branch. Return false
8286 otherwise. */
8287
8288 static bool
8289 gimple_block_ends_with_condjump_p (const_basic_block bb)
8290 {
8291 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8292 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8293 }
8294
8295
8296 /* Return true if statement T may terminate execution of BB in ways not
8297 explicitly represented in the CFG. */
8298
8299 bool
8300 stmt_can_terminate_bb_p (gimple *t)
8301 {
8302 tree fndecl = NULL_TREE;
8303 int call_flags = 0;
8304
8305 /* An EH exception not handled internally terminates execution of the whole
8306 function. */
8307 if (stmt_can_throw_external (cfun, t))
8308 return true;
8309
8310 /* NORETURN and LONGJMP calls already have an edge to exit.
8311 CONST and PURE calls do not need one.
8312 We don't currently check for CONST and PURE here, although
8313 it would be a good idea, because those attributes are
8314 figured out from the RTL in mark_constant_function, and
8315 the counter incrementation code from -fprofile-arcs
8316 leads to different results from -fbranch-probabilities. */
8317 if (is_gimple_call (t))
8318 {
8319 fndecl = gimple_call_fndecl (t);
8320 call_flags = gimple_call_flags (t);
8321 }
8322
8323 if (is_gimple_call (t)
8324 && fndecl
8325 && fndecl_built_in_p (fndecl)
8326 && (call_flags & ECF_NOTHROW)
8327 && !(call_flags & ECF_RETURNS_TWICE)
8328 /* fork() doesn't really return twice, but the effect of
8329 wrapping it in __gcov_fork() which calls __gcov_flush()
8330 and clears the counters before forking has the same
8331 effect as returning twice. Force a fake edge. */
8332 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8333 return false;
8334
8335 if (is_gimple_call (t))
8336 {
8337 edge_iterator ei;
8338 edge e;
8339 basic_block bb;
8340
8341 if (call_flags & (ECF_PURE | ECF_CONST)
8342 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8343 return false;
8344
8345 /* A function call may do a longjmp, terminate the program, or do other
8346 things. Special-case noreturn calls with non-abnormal edges out, as then
8347 the fact is sufficiently represented by the lack of edges out of T. */
8348 if (!(call_flags & ECF_NORETURN))
8349 return true;
8350
8351 bb = gimple_bb (t);
8352 FOR_EACH_EDGE (e, ei, bb->succs)
8353 if ((e->flags & EDGE_FAKE) == 0)
8354 return true;
8355 }
8356
8357 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8358 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8359 return true;
8360
8361 return false;
8362 }
8363
8364
8365 /* Add fake edges to the function exit for any non-constant and
8366 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8367 volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
8368 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8369 that were split.
8370
8371 The goal is to expose cases in which entering a basic block does
8372 not imply that all subsequent instructions must be executed. */
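/* (Illustration with hypothetical source: in

       foo ();       // foo may call exit () or longjmp ()
       t = 1;

   reaching the block does not guarantee that "t = 1" executes, so a
   fake EDGE_FAKE edge from the call's block to EXIT is added below to
   keep the profiler's spanning tree honest.)  */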
8373
8374 static int
8375 gimple_flow_call_edges_add (sbitmap blocks)
8376 {
8377 int i;
8378 int blocks_split = 0;
8379 int last_bb = last_basic_block_for_fn (cfun);
8380 bool check_last_block = false;
8381
8382 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8383 return 0;
8384
8385 if (! blocks)
8386 check_last_block = true;
8387 else
8388 check_last_block = bitmap_bit_p (blocks,
8389 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8390
8391 /* In the last basic block, before epilogue generation, there will be
8392 a fallthru edge to EXIT. Special care is required if the last insn
8393 of the last basic block is a call because make_edge folds duplicate
8394 edges, which would result in the fallthru edge also being marked
8395 fake, which would result in the fallthru edge being removed by
8396 remove_fake_edges, which would result in an invalid CFG.
8397
8398 Moreover, we can't elide the outgoing fake edge, since the block
8399 profiler needs to take this into account in order to solve the minimal
8400 spanning tree in the case that the call doesn't return.
8401
8402 Handle this by adding a dummy instruction in a new last basic block. */
8403 if (check_last_block)
8404 {
8405 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8406 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8407 gimple *t = NULL;
8408
8409 if (!gsi_end_p (gsi))
8410 t = gsi_stmt (gsi);
8411
8412 if (t && stmt_can_terminate_bb_p (t))
8413 {
8414 edge e;
8415
8416 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8417 if (e)
8418 {
8419 gsi_insert_on_edge (e, gimple_build_nop ());
8420 gsi_commit_edge_inserts ();
8421 }
8422 }
8423 }
8424
8425 /* Now add fake edges to the function exit for any non-constant
8426 calls, since there is no way to determine whether they will
8427 return or not... */
8428 for (i = 0; i < last_bb; i++)
8429 {
8430 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8431 gimple_stmt_iterator gsi;
8432 gimple *stmt, *last_stmt;
8433
8434 if (!bb)
8435 continue;
8436
8437 if (blocks && !bitmap_bit_p (blocks, i))
8438 continue;
8439
8440 gsi = gsi_last_nondebug_bb (bb);
8441 if (!gsi_end_p (gsi))
8442 {
8443 last_stmt = gsi_stmt (gsi);
8444 do
8445 {
8446 stmt = gsi_stmt (gsi);
8447 if (stmt_can_terminate_bb_p (stmt))
8448 {
8449 edge e;
8450
8451 /* The handling above of the final block before the
8452 epilogue should be enough to verify that there is
8453 no edge to the exit block in CFG already.
8454 Calling make_edge in such case would cause us to
8455 mark that edge as fake and remove it later. */
8456 if (flag_checking && stmt == last_stmt)
8457 {
8458 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8459 gcc_assert (e == NULL);
8460 }
8461
8462 /* Note that the following may create a new basic block
8463 and renumber the existing basic blocks. */
8464 if (stmt != last_stmt)
8465 {
8466 e = split_block (bb, stmt);
8467 if (e)
8468 blocks_split++;
8469 }
8470 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8471 e->probability = profile_probability::guessed_never ();
8472 }
8473 gsi_prev (&gsi);
8474 }
8475 while (!gsi_end_p (gsi));
8476 }
8477 }
8478
8479 if (blocks_split)
8480 checking_verify_flow_info ();
8481
8482 return blocks_split;
8483 }
8484
8485 /* Removes edge E and all the blocks dominated by it, and updates dominance
8486 information. The IL in E->src needs to be updated separately.
8487 If dominance info is not available, only the edge E is removed. */
8488
8489 void
8490 remove_edge_and_dominated_blocks (edge e)
8491 {
8492 vec<basic_block> bbs_to_remove = vNULL;
8493 vec<basic_block> bbs_to_fix_dom = vNULL;
8494 edge f;
8495 edge_iterator ei;
8496 bool none_removed = false;
8497 unsigned i;
8498 basic_block bb, dbb;
8499 bitmap_iterator bi;
8500
8501 /* If we are removing a path inside a non-root loop, that may change
8502 loop ownership of blocks or remove loops; mark loops for fixup. */
8503 if (current_loops
8504 && loop_outer (e->src->loop_father) != NULL
8505 && e->src->loop_father == e->dest->loop_father)
8506 loops_state_set (LOOPS_NEED_FIXUP);
8507
8508 if (!dom_info_available_p (CDI_DOMINATORS))
8509 {
8510 remove_edge (e);
8511 return;
8512 }
8513
8514 /* No updating is needed for edges to exit. */
8515 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8516 {
8517 if (cfgcleanup_altered_bbs)
8518 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8519 remove_edge (e);
8520 return;
8521 }
8522
8523 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8524 that is not dominated by E->dest, then this set is empty. Otherwise,
8525 all the basic blocks dominated by E->dest are removed.
8526
8527 Also, to DF_IDOM we store the immediate dominators of the blocks in
8528 the dominance frontier of E (i.e., of the successors of the
8529 removed blocks, if there are any, and of E->dest otherwise). */
8530 FOR_EACH_EDGE (f, ei, e->dest->preds)
8531 {
8532 if (f == e)
8533 continue;
8534
8535 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8536 {
8537 none_removed = true;
8538 break;
8539 }
8540 }
8541
8542 auto_bitmap df, df_idom;
8543 if (none_removed)
8544 bitmap_set_bit (df_idom,
8545 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8546 else
8547 {
8548 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8549 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8550 {
8551 FOR_EACH_EDGE (f, ei, bb->succs)
8552 {
8553 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8554 bitmap_set_bit (df, f->dest->index);
8555 }
8556 }
8557 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8558 bitmap_clear_bit (df, bb->index);
8559
8560 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8561 {
8562 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8563 bitmap_set_bit (df_idom,
8564 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8565 }
8566 }
8567
8568 if (cfgcleanup_altered_bbs)
8569 {
8570 /* Record the set of the altered basic blocks. */
8571 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8572 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8573 }
8574
8575 /* Remove E and the cancelled blocks. */
8576 if (none_removed)
8577 remove_edge (e);
8578 else
8579 {
8580 /* Walk backwards so as to get a chance to substitute all
8581 released DEFs into debug stmts. See
8582 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8583 details. */
8584 for (i = bbs_to_remove.length (); i-- > 0; )
8585 delete_basic_block (bbs_to_remove[i]);
8586 }
8587
8588 /* Update the dominance information. The immediate dominator may change only
8589 for blocks whose immediate dominator belongs to DF_IDOM:
8590
8591 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8592 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8593 Z dominates X after the removal. Before removal, there exists a path P
8594 from Y to X that avoids Z. Let F be the last edge on P that is
8595 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8596 dominates W, and because of P, Z does not dominate W), and W belongs to
8597 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8598 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8599 {
8600 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8601 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8602 dbb;
8603 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8604 bbs_to_fix_dom.safe_push (dbb);
8605 }
8606
8607 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8608
8609 bbs_to_remove.release ();
8610 bbs_to_fix_dom.release ();
8611 }
8612
8613 /* Purge dead EH edges from basic block BB. */
8614
8615 bool
8616 gimple_purge_dead_eh_edges (basic_block bb)
8617 {
8618 bool changed = false;
8619 edge e;
8620 edge_iterator ei;
8621 gimple *stmt = last_stmt (bb);
8622
8623 if (stmt && stmt_can_throw_internal (cfun, stmt))
8624 return false;
8625
8626 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8627 {
8628 if (e->flags & EDGE_EH)
8629 {
8630 remove_edge_and_dominated_blocks (e);
8631 changed = true;
8632 }
8633 else
8634 ei_next (&ei);
8635 }
8636
8637 return changed;
8638 }
8639
8640 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8641
8642 bool
8643 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8644 {
8645 bool changed = false;
8646 unsigned i;
8647 bitmap_iterator bi;
8648
8649 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8650 {
8651 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8652
8653 /* Earlier gimple_purge_dead_eh_edges could have removed
8654 this basic block already. */
8655 gcc_assert (bb || changed);
8656 if (bb != NULL)
8657 changed |= gimple_purge_dead_eh_edges (bb);
8658 }
8659
8660 return changed;
8661 }
8662
8663 /* Purge dead abnormal call edges from basic block BB. */
8664
8665 bool
8666 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8667 {
8668 bool changed = false;
8669 edge e;
8670 edge_iterator ei;
8671 gimple *stmt = last_stmt (bb);
8672
8673 if (!cfun->has_nonlocal_label
8674 && !cfun->calls_setjmp)
8675 return false;
8676
8677 if (stmt && stmt_can_make_abnormal_goto (stmt))
8678 return false;
8679
8680 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8681 {
8682 if (e->flags & EDGE_ABNORMAL)
8683 {
8684 if (e->flags & EDGE_FALLTHRU)
8685 e->flags &= ~EDGE_ABNORMAL;
8686 else
8687 remove_edge_and_dominated_blocks (e);
8688 changed = true;
8689 }
8690 else
8691 ei_next (&ei);
8692 }
8693
8694 return changed;
8695 }
8696
8697 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8698
8699 bool
8700 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8701 {
8702 bool changed = false;
8703 unsigned i;
8704 bitmap_iterator bi;
8705
8706 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8707 {
8708 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8709
8710 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8711 this basic block already. */
8712 gcc_assert (bb || changed);
8713 if (bb != NULL)
8714 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8715 }
8716
8717 return changed;
8718 }
8719
8720 /* This function is called whenever a new edge is created or
8721 redirected. */
8722
8723 static void
8724 gimple_execute_on_growing_pred (edge e)
8725 {
8726 basic_block bb = e->dest;
8727
8728 if (!gimple_seq_empty_p (phi_nodes (bb)))
8729 reserve_phi_args_for_new_edge (bb);
8730 }
8731
8732 /* This function is called immediately before edge E is removed from
8733 the edge vector E->dest->preds. */
8734
8735 static void
8736 gimple_execute_on_shrinking_pred (edge e)
8737 {
8738 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8739 remove_phi_args (e);
8740 }
8741
8742 /*---------------------------------------------------------------------------
8743 Helper functions for Loop versioning
8744 ---------------------------------------------------------------------------*/
8745
8746 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8747 of 'first'. Both of them are dominated by 'new_head' basic block. When
8748 'new_head' was created by splitting 'second's incoming edge, it received
8749 phi arguments on that edge from split_edge(). Later, an additional edge
8750 'e' was created to connect 'new_head' and 'first'. This routine now adds,
8751 on this additional edge 'e', the phi args that the new_head-to-second
8752 edge received as part of the edge splitting. */
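/* Sketch of the (hypothetical) CFG shape handled here:

                 new_head
                 /      \
              e /        \ e2
            first       second

   FIRST's PHIs are positional copies of SECOND's, so the argument that
   each PHI2 carries on E2 is copied onto E for the matching PHI1.  */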
8753
8754 static void
8755 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8756 basic_block new_head, edge e)
8757 {
8758 gphi *phi1, *phi2;
8759 gphi_iterator psi1, psi2;
8760 tree def;
8761 edge e2 = find_edge (new_head, second);
8762
8763 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8764 edge, we should always have an edge from NEW_HEAD to SECOND. */
8765 gcc_assert (e2 != NULL);
8766
8767 /* Browse all 'second' basic block phi nodes and add phi args to
8768 edge 'e' for 'first' head. PHI args are always in correct order. */
8769
8770 for (psi2 = gsi_start_phis (second),
8771 psi1 = gsi_start_phis (first);
8772 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8773 gsi_next (&psi2), gsi_next (&psi1))
8774 {
8775 phi1 = psi1.phi ();
8776 phi2 = psi2.phi ();
8777 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8778 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8779 }
8780 }
8781
8782
8783 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8784 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8785 the destination of the ELSE part. */
8786
8787 static void
8788 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8789 basic_block second_head ATTRIBUTE_UNUSED,
8790 basic_block cond_bb, void *cond_e)
8791 {
8792 gimple_stmt_iterator gsi;
8793 gimple *new_cond_expr;
8794 tree cond_expr = (tree) cond_e;
8795 edge e0;
8796
8797 /* Build new conditional expr */
8798 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8799 NULL_TREE, NULL_TREE);
8800
8801 /* Add new cond in cond_bb. */
8802 gsi = gsi_last_bb (cond_bb);
8803 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8804
8805 /* Adjust edges appropriately to connect new head with first head
8806 as well as second head. */
8807 e0 = single_succ_edge (cond_bb);
8808 e0->flags &= ~EDGE_FALLTHRU;
8809 e0->flags |= EDGE_FALSE_VALUE;
8810 }
8811
8812
8813 /* Do book-keeping of basic block BB for the profile consistency checker.
8814 Store the counts in RECORD. */
8815 static void
8816 gimple_account_profile_record (basic_block bb,
8817 struct profile_record *record)
8818 {
8819 gimple_stmt_iterator i;
8820 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8821 {
8822 record->size
8823 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8824 if (bb->count.initialized_p ())
8825 record->time
8826 += estimate_num_insns (gsi_stmt (i),
8827 &eni_time_weights) * bb->count.to_gcov_type ();
8828 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8829 record->time
8830 += estimate_num_insns (gsi_stmt (i),
8831 &eni_time_weights) * bb->count.to_frequency (cfun);
8832 }
8833 }
8834
8835 struct cfg_hooks gimple_cfg_hooks = {
8836 "gimple",
8837 gimple_verify_flow_info,
8838 gimple_dump_bb, /* dump_bb */
8839 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8840 create_bb, /* create_basic_block */
8841 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8842 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8843 gimple_can_remove_branch_p, /* can_remove_branch_p */
8844 remove_bb, /* delete_basic_block */
8845 gimple_split_block, /* split_block */
8846 gimple_move_block_after, /* move_block_after */
8847 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8848 gimple_merge_blocks, /* merge_blocks */
8849 gimple_predict_edge, /* predict_edge */
8850 gimple_predicted_by_p, /* predicted_by_p */
8851 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8852 gimple_duplicate_bb, /* duplicate_block */
8853 gimple_split_edge, /* split_edge */
8854 gimple_make_forwarder_block, /* make_forwarder_block */
8855 NULL, /* tidy_fallthru_edge */
8856 NULL, /* force_nonfallthru */
8857 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8858 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8859 gimple_flow_call_edges_add, /* flow_call_edges_add */
8860 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8861 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8862 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8863 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8864 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8865 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8866 flush_pending_stmts, /* flush_pending_stmts */
8867 gimple_empty_block_p, /* block_empty_p */
8868 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8869 gimple_account_profile_record,
8870 };
8871
8872
8873 /* Split all critical edges. */
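/* (Illustration: an edge A->B is critical when A has more than one
   successor and B has more than one predecessor.  Code cannot be
   inserted on such an edge without also executing it on an unrelated
   path, so split_edge interposes a fresh block:  A -> NEW -> B.)  */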
8874
8875 unsigned int
8876 split_critical_edges (void)
8877 {
8878 basic_block bb;
8879 edge e;
8880 edge_iterator ei;
8881
8882 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8883 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8884 mappings around the calls to split_edge. */
8885 start_recording_case_labels ();
8886 FOR_ALL_BB_FN (bb, cfun)
8887 {
8888 FOR_EACH_EDGE (e, ei, bb->succs)
8889 {
8890 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8891 split_edge (e);
8892 /* PRE inserts statements to edges and expects that
8893 since split_critical_edges was done beforehand, committing edge
8894 insertions will not split more edges. In addition to critical
8895 edges we must split edges that have multiple successors and
8896 end by control flow statements, such as RESX.
8897 Go ahead and split them too. This matches the logic in
8898 gimple_find_edge_insert_loc. */
8899 else if ((!single_pred_p (e->dest)
8900 || !gimple_seq_empty_p (phi_nodes (e->dest))
8901 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8902 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8903 && !(e->flags & EDGE_ABNORMAL))
8904 {
8905 gimple_stmt_iterator gsi;
8906
8907 gsi = gsi_last_bb (e->src);
8908 if (!gsi_end_p (gsi)
8909 && stmt_ends_bb_p (gsi_stmt (gsi))
8910 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8911 && !gimple_call_builtin_p (gsi_stmt (gsi),
8912 BUILT_IN_RETURN)))
8913 split_edge (e);
8914 }
8915 }
8916 }
8917 end_recording_case_labels ();
8918 return 0;
8919 }
8920
8921 namespace {
8922
8923 const pass_data pass_data_split_crit_edges =
8924 {
8925 GIMPLE_PASS, /* type */
8926 "crited", /* name */
8927 OPTGROUP_NONE, /* optinfo_flags */
8928 TV_TREE_SPLIT_EDGES, /* tv_id */
8929 PROP_cfg, /* properties_required */
8930 PROP_no_crit_edges, /* properties_provided */
8931 0, /* properties_destroyed */
8932 0, /* todo_flags_start */
8933 0, /* todo_flags_finish */
8934 };
8935
8936 class pass_split_crit_edges : public gimple_opt_pass
8937 {
8938 public:
8939 pass_split_crit_edges (gcc::context *ctxt)
8940 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8941 {}
8942
8943 /* opt_pass methods: */
8944 virtual unsigned int execute (function *) { return split_critical_edges (); }
8945
8946 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8947 }; // class pass_split_crit_edges
8948
8949 } // anon namespace
8950
8951 gimple_opt_pass *
8952 make_pass_split_crit_edges (gcc::context *ctxt)
8953 {
8954 return new pass_split_crit_edges (ctxt);
8955 }
8956
8957
8958 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8959 in basic block BB, splitting the block appropriately and
8960 creating a new conditionally executed basic block.
8961 Update the profile so the new bb is visited with probability PROB.
8962 Return the created basic block. */
8963 basic_block
8964 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8965 profile_probability prob)
8966 {
8967 edge fall = split_block (bb, stmt);
8968 gimple_stmt_iterator iter = gsi_last_bb (bb);
8969 basic_block new_bb;
8970
8971 /* Insert cond statement. */
8972 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8973 if (gsi_end_p (iter))
8974 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8975 else
8976 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8977
8978 /* Create conditionally executed block. */
8979 new_bb = create_empty_bb (bb);
8980 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8981 e->probability = prob;
8982 new_bb->count = e->count ();
8983 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8984
8985 /* Fix edge for split bb. */
8986 fall->flags = EDGE_FALSE_VALUE;
8987 fall->probability -= e->probability;
8988
8989 /* Update dominance info. */
8990 if (dom_info_available_p (CDI_DOMINATORS))
8991 {
8992 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8993 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8994 }
8995
8996 /* Update loop info. */
8997 if (current_loops)
8998 add_bb_to_loop (new_bb, bb->loop_father);
8999
9000 return new_bb;
9001 }
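/* Illustrative sketch of the transformation above, on a hypothetical
   block "BB: ...; STMT; rest":

       BB:     ...; STMT; if (COND) goto NEW_BB; else goto REST;
       NEW_BB: ;                        // taken with probability PROB
               goto REST;
       REST:   rest

   NEW_BB is returned empty; callers fill in the conditionally executed
   statements.  */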
9002
9003 /* Build a ternary operation and gimplify it. Emit code before GSI.
9004 Return the gimple_val holding the result. */
9005
9006 tree
9007 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9008 tree type, tree a, tree b, tree c)
9009 {
9010 tree ret;
9011 location_t loc = gimple_location (gsi_stmt (*gsi));
9012
9013 ret = fold_build3_loc (loc, code, type, a, b, c);
9014 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9015 GSI_SAME_STMT);
9016 }
9017
9018 /* Build a binary operation and gimplify it. Emit code before GSI.
9019 Return the gimple_val holding the result. */
9020
9021 tree
9022 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9023 tree type, tree a, tree b)
9024 {
9025 tree ret;
9026
9027 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9028 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9029 GSI_SAME_STMT);
9030 }
9031
9032 /* Build a unary operation and gimplify it. Emit code before GSI.
9033 Return the gimple_val holding the result. */
9034
9035 tree
9036 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9037 tree a)
9038 {
9039 tree ret;
9040
9041 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9042 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9043 GSI_SAME_STMT);
9044 }
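/* Hypothetical usage sketch of the helpers above: materializing
   (a + b) * c as gimple values before the statement at *GSI:

       tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
       tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression and forces the result into a gimple
   value, emitting any needed statements before *GSI.  */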
9045
9046
9047 \f
9048 /* Given a basic block B which ends with a conditional and has
9049 precisely two successors, determine which of the edges is taken if
9050 the conditional is true and which is taken if the conditional is
9051 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9052
9053 void
9054 extract_true_false_edges_from_block (basic_block b,
9055 edge *true_edge,
9056 edge *false_edge)
9057 {
9058 edge e = EDGE_SUCC (b, 0);
9059
9060 if (e->flags & EDGE_TRUE_VALUE)
9061 {
9062 *true_edge = e;
9063 *false_edge = EDGE_SUCC (b, 1);
9064 }
9065 else
9066 {
9067 *false_edge = e;
9068 *true_edge = EDGE_SUCC (b, 1);
9069 }
9070 }
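/* Hypothetical usage of the helper above, for a block COND_BB ending
   in "if (x < y)":

       edge true_e, false_e;
       extract_true_false_edges_from_block (cond_bb, &true_e, &false_e);

   Afterwards true_e->dest is reached when x < y holds and false_e->dest
   otherwise.  */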
9071
9072
9073 /* From a controlling predicate in the immediate dominator DOM of
9074 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9075 predicate evaluates to true and false and store them to
9076 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9077 they are non-NULL. Returns true if the edges can be determined,
9078 else return false. */
9079
9080 bool
9081 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9082 edge *true_controlled_edge,
9083 edge *false_controlled_edge)
9084 {
9085 basic_block bb = phiblock;
9086 edge true_edge, false_edge, tem;
9087 edge e0 = NULL, e1 = NULL;
9088
9089 /* We have to verify that one edge into the PHI node is dominated
9090 by the true edge of the predicate block and the other edge
9091 dominated by the false edge. This ensures that the PHI argument
9092 we are going to take is completely determined by the path we
9093 take from the predicate block.
9094 We can only use BB dominance checks below if the destination of
9095 the true/false edges are dominated by their edge, thus only
9096 have a single predecessor. */
9097 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9098 tem = EDGE_PRED (bb, 0);
9099 if (tem == true_edge
9100 || (single_pred_p (true_edge->dest)
9101 && (tem->src == true_edge->dest
9102 || dominated_by_p (CDI_DOMINATORS,
9103 tem->src, true_edge->dest))))
9104 e0 = tem;
9105 else if (tem == false_edge
9106 || (single_pred_p (false_edge->dest)
9107 && (tem->src == false_edge->dest
9108 || dominated_by_p (CDI_DOMINATORS,
9109 tem->src, false_edge->dest))))
9110 e1 = tem;
9111 else
9112 return false;
9113 tem = EDGE_PRED (bb, 1);
9114 if (tem == true_edge
9115 || (single_pred_p (true_edge->dest)
9116 && (tem->src == true_edge->dest
9117 || dominated_by_p (CDI_DOMINATORS,
9118 tem->src, true_edge->dest))))
9119 e0 = tem;
9120 else if (tem == false_edge
9121 || (single_pred_p (false_edge->dest)
9122 && (tem->src == false_edge->dest
9123 || dominated_by_p (CDI_DOMINATORS,
9124 tem->src, false_edge->dest))))
9125 e1 = tem;
9126 else
9127 return false;
9128 if (!e0 || !e1)
9129 return false;
9130
9131 if (true_controlled_edge)
9132 *true_controlled_edge = e0;
9133 if (false_controlled_edge)
9134 *false_controlled_edge = e1;
9135
9136 return true;
9137 }
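/* Sketch of the (hypothetical) diamond the routine above analyzes:

            DOM:  if (P)
                 /      \
          true_edge    false_edge
               |            |
              ...          ...
                \          /
                 PHIBLOCK        (exactly two preds)

   Each predecessor edge of PHIBLOCK must be dominated by exactly one of
   DOM's outgoing edges for the selected PHI argument to be fully
   determined by P.  */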
9138
9139 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9140 range [low, high]. Place the associated stmts before the last stmt of BB. */
9141
9142 void
9143 generate_range_test (basic_block bb, tree index, tree low, tree high,
9144 tree *lhs, tree *rhs)
9145 {
9146 tree type = TREE_TYPE (index);
9147 tree utype = unsigned_type_for (type);
9148
9149 low = fold_convert (utype, low);
9150 high = fold_convert (utype, high);
9151
9152 gimple_seq seq = NULL;
9153 index = gimple_convert (&seq, utype, index);
9154 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9155 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9156
9157 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9158 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9159 }
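/* Illustrative note: the test built above relies on the classic
   unsigned wrap-around trick, folding the two comparisons

       low <= index && index <= high

   into the single unsigned comparison

       (utype) (index - low) <= (utype) (high - low)

   E.g. for [low, high] = [3, 7] and index = 2, index - low wraps to
   the maximum value of utype, which exceeds high - low = 4, so 2 is
   correctly rejected.  */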
9160
9161 /* Return the basic block that belongs to label numbered INDEX
9162 of a switch statement. */
9163
9164 basic_block
9165 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9166 {
9167 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9168 }
9169
9170 /* Return the default basic block of a switch statement. */
9171
9172 basic_block
9173 gimple_switch_default_bb (function *ifun, gswitch *gs)
9174 {
9175 return gimple_switch_label_bb (ifun, gs, 0);
9176 }
9177
9178 /* Return the edge that belongs to label numbered INDEX
9179 of a switch statement. */
9180
9181 edge
9182 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9183 {
9184 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9185 }
9186
9187 /* Return the default edge of a switch statement. */
9188
9189 edge
9190 gimple_switch_default_edge (function *ifun, gswitch *gs)
9191 {
9192 return gimple_switch_edge (ifun, gs, 0);
9193 }
9194
9195
9196 /* Emit return warnings. */
9197
9198 namespace {
9199
9200 const pass_data pass_data_warn_function_return =
9201 {
9202 GIMPLE_PASS, /* type */
9203 "*warn_function_return", /* name */
9204 OPTGROUP_NONE, /* optinfo_flags */
9205 TV_NONE, /* tv_id */
9206 PROP_cfg, /* properties_required */
9207 0, /* properties_provided */
9208 0, /* properties_destroyed */
9209 0, /* todo_flags_start */
9210 0, /* todo_flags_finish */
9211 };
9212
9213 class pass_warn_function_return : public gimple_opt_pass
9214 {
9215 public:
9216 pass_warn_function_return (gcc::context *ctxt)
9217 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9218 {}
9219
9220 /* opt_pass methods: */
9221 virtual unsigned int execute (function *);
9222
9223 }; // class pass_warn_function_return
9224
9225 unsigned int
9226 pass_warn_function_return::execute (function *fun)
9227 {
9228 location_t location;
9229 gimple *last;
9230 edge e;
9231 edge_iterator ei;
9232
9233 if (!targetm.warn_func_return (fun->decl))
9234 return 0;
9235
9236 /* If we have a path to EXIT, then we do return. */
9237 if (TREE_THIS_VOLATILE (fun->decl)
9238 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9239 {
9240 location = UNKNOWN_LOCATION;
9241 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9242 (e = ei_safe_edge (ei)); )
9243 {
9244 last = last_stmt (e->src);
9245 if ((gimple_code (last) == GIMPLE_RETURN
9246 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9247 && location == UNKNOWN_LOCATION
9248 && ((location = LOCATION_LOCUS (gimple_location (last)))
9249 != UNKNOWN_LOCATION)
9250 && !optimize)
9251 break;
9252 /* When optimizing, replace return stmts in noreturn functions
9253 with a __builtin_unreachable () call. */
9254 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9255 {
9256 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9257 gimple *new_stmt = gimple_build_call (fndecl, 0);
9258 gimple_set_location (new_stmt, gimple_location (last));
9259 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9260 gsi_replace (&gsi, new_stmt, true);
9261 remove_edge (e);
9262 }
9263 else
9264 ei_next (&ei);
9265 }
9266 if (location == UNKNOWN_LOCATION)
9267 location = cfun->function_end_locus;
9268 warning_at (location, 0, "%<noreturn%> function does return");
9269 }
9270
9271 /* If we see "return;" in some basic block, then we do reach the end
9272 without returning a value. */
9273 else if (warn_return_type > 0
9274 && !TREE_NO_WARNING (fun->decl)
9275 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9276 {
9277 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9278 {
9279 gimple *last = last_stmt (e->src);
9280 greturn *return_stmt = dyn_cast <greturn *> (last);
9281 if (return_stmt
9282 && gimple_return_retval (return_stmt) == NULL
9283 && !gimple_no_warning_p (last))
9284 {
9285 location = gimple_location (last);
9286 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9287 location = fun->function_end_locus;
9288 if (warning_at (location, OPT_Wreturn_type,
9289 "control reaches end of non-void function"))
9290 TREE_NO_WARNING (fun->decl) = 1;
9291 break;
9292 }
9293 }
9294 /* The C++ FE turns fallthrough from the end of non-void function
9295 into __builtin_unreachable () call with BUILTINS_LOCATION.
9296 Recognize those too. */
9297 basic_block bb;
9298 if (!TREE_NO_WARNING (fun->decl))
9299 FOR_EACH_BB_FN (bb, fun)
9300 if (EDGE_COUNT (bb->succs) == 0)
9301 {
9302 gimple *last = last_stmt (bb);
9303 const enum built_in_function ubsan_missing_ret
9304 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9305 if (last
9306 && ((LOCATION_LOCUS (gimple_location (last))
9307 == BUILTINS_LOCATION
9308 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9309 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9310 {
9311 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9312 gsi_prev_nondebug (&gsi);
9313 gimple *prev = gsi_stmt (gsi);
9314 if (prev == NULL)
9315 location = UNKNOWN_LOCATION;
9316 else
9317 location = gimple_location (prev);
9318 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9319 location = fun->function_end_locus;
9320 if (warning_at (location, OPT_Wreturn_type,
9321 "control reaches end of non-void function"))
9322 TREE_NO_WARNING (fun->decl) = 1;
9323 break;
9324 }
9325 }
9326 }
9327 return 0;
9328 }
9329
9330 } // anon namespace
9331
9332 gimple_opt_pass *
9333 make_pass_warn_function_return (gcc::context *ctxt)
9334 {
9335 return new pass_warn_function_return (ctxt);
9336 }
9337
9338 /* Walk a gimplified function and warn for calls whose return value is
9339 ignored and whose callee has attribute((warn_unused_result)) set. This
9340 is done before inlining, so we don't have to worry about that. */
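/* For illustration (hypothetical declarations):

       __attribute__ ((warn_unused_result)) int f (void);
       f ();              // warned: naked call, result dropped
       int x = f ();      // not warned: the call has an LHS

   Internal calls and calls with an LHS are skipped by the walk below.  */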
9341
9342 static void
9343 do_warn_unused_result (gimple_seq seq)
9344 {
9345 tree fdecl, ftype;
9346 gimple_stmt_iterator i;
9347
9348 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9349 {
9350 gimple *g = gsi_stmt (i);
9351
9352 switch (gimple_code (g))
9353 {
9354 case GIMPLE_BIND:
9355 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9356 break;
9357 case GIMPLE_TRY:
9358 do_warn_unused_result (gimple_try_eval (g));
9359 do_warn_unused_result (gimple_try_cleanup (g));
9360 break;
9361 case GIMPLE_CATCH:
9362 do_warn_unused_result (gimple_catch_handler (
9363 as_a <gcatch *> (g)));
9364 break;
9365 case GIMPLE_EH_FILTER:
9366 do_warn_unused_result (gimple_eh_filter_failure (g));
9367 break;
9368
9369 case GIMPLE_CALL:
9370 if (gimple_call_lhs (g))
9371 break;
9372 if (gimple_call_internal_p (g))
9373 break;
9374
9375 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9376 LHS. All calls whose value is ignored should be
9377 represented like this. Look for the attribute. */
9378 fdecl = gimple_call_fndecl (g);
9379 ftype = gimple_call_fntype (g);
9380
9381 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9382 {
9383 location_t loc = gimple_location (g);
9384
9385 if (fdecl)
9386 warning_at (loc, OPT_Wunused_result,
9387 "ignoring return value of %qD, "
9388 "declared with attribute warn_unused_result",
9389 fdecl);
9390 else
9391 warning_at (loc, OPT_Wunused_result,
9392 "ignoring return value of function "
9393 "declared with attribute warn_unused_result");
9394 }
9395 break;
9396
9397 default:
9398 /* Not a container, not a call, or a call whose value is used. */
9399 break;
9400 }
9401 }
9402 }
9403
9404 namespace {
9405
9406 const pass_data pass_data_warn_unused_result =
9407 {
9408 GIMPLE_PASS, /* type */
9409 "*warn_unused_result", /* name */
9410 OPTGROUP_NONE, /* optinfo_flags */
9411 TV_NONE, /* tv_id */
9412 PROP_gimple_any, /* properties_required */
9413 0, /* properties_provided */
9414 0, /* properties_destroyed */
9415 0, /* todo_flags_start */
9416 0, /* todo_flags_finish */
9417 };
9418
9419 class pass_warn_unused_result : public gimple_opt_pass
9420 {
9421 public:
9422 pass_warn_unused_result (gcc::context *ctxt)
9423 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9424 {}
9425
9426 /* opt_pass methods: */
9427 virtual bool gate (function *) { return flag_warn_unused_result; }
9428 virtual unsigned int execute (function *)
9429 {
9430 do_warn_unused_result (gimple_body (current_function_decl));
9431 return 0;
9432 }
9433
9434 }; // class pass_warn_unused_result
9435
9436 } // anon namespace
9437
9438 gimple_opt_pass *
9439 make_pass_warn_unused_result (gcc::context *ctxt)
9440 {
9441 return new pass_warn_unused_result (ctxt);
9442 }
9443
9444 /* IPA passes, compilation of earlier functions or inlining
9445 might have changed some properties, such as marking functions nothrow,
9446 pure, const or noreturn.
9447 Remove redundant edges and basic blocks, and create new ones if necessary.
9448
9449 This pass can't be executed as a standalone pass from the pass manager,
9450 because in between inlining and this fixup, verify_flow_info would fail. */
9451
9452 unsigned int
9453 execute_fixup_cfg (void)
9454 {
9455 basic_block bb;
9456 gimple_stmt_iterator gsi;
9457 int todo = 0;
9458 cgraph_node *node = cgraph_node::get (current_function_decl);
9459 profile_count num = node->count;
9460 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9461 bool scale = num.initialized_p () && !(num == den);
9462
9463 if (scale)
9464 {
9465 profile_count::adjust_for_ipa_scaling (&num, &den);
9466 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9467 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9468 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9469 }
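/* (Illustration: if IPA transforms left NODE with a count of 50 while
   the body still carries an entry count of 100, NUM/DEN is 50/100 and
   every block count below is scaled by one half.)  */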
9470
9471 FOR_EACH_BB_FN (bb, cfun)
9472 {
9473 if (scale)
9474 bb->count = bb->count.apply_scale (num, den);
9475 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9476 {
9477 gimple *stmt = gsi_stmt (gsi);
9478 tree decl = is_gimple_call (stmt)
9479 ? gimple_call_fndecl (stmt)
9480 : NULL;
9481 if (decl)
9482 {
9483 int flags = gimple_call_flags (stmt);
9484 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9485 {
9486 if (gimple_purge_dead_abnormal_call_edges (bb))
9487 todo |= TODO_cleanup_cfg;
9488
9489 if (gimple_in_ssa_p (cfun))
9490 {
9491 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9492 update_stmt (stmt);
9493 }
9494 }
9495
9496 if (flags & ECF_NORETURN
9497 && fixup_noreturn_call (stmt))
9498 todo |= TODO_cleanup_cfg;
9499 }
9500
9501 /* Remove stores to variables we marked write-only.
9502 Keep the access when the store has a side effect, i.e. when the
9503 source is volatile. */
9504 if (gimple_store_p (stmt)
9505 && !gimple_has_side_effects (stmt))
9506 {
9507 tree lhs = get_base_address (gimple_get_lhs (stmt));
9508
9509 if (VAR_P (lhs)
9510 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9511 && varpool_node::get (lhs)->writeonly)
9512 {
9513 unlink_stmt_vdef (stmt);
9514 gsi_remove (&gsi, true);
9515 release_defs (stmt);
9516 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9517 continue;
9518 }
9519 }
9520 /* For calls we can simply remove LHS when it is known
9521 to be write-only. */
9522 if (is_gimple_call (stmt)
9523 && gimple_get_lhs (stmt))
9524 {
9525 tree lhs = get_base_address (gimple_get_lhs (stmt));
9526
9527 if (VAR_P (lhs)
9528 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9529 && varpool_node::get (lhs)->writeonly)
9530 {
9531 gimple_call_set_lhs (stmt, NULL);
9532 update_stmt (stmt);
9533 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9534 }
9535 }
9536
9537 if (maybe_clean_eh_stmt (stmt)
9538 && gimple_purge_dead_eh_edges (bb))
9539 todo |= TODO_cleanup_cfg;
9540 gsi_next (&gsi);
9541 }
9542
9543 /* If we have a basic block with no successors that does not
9544 end with a control statement or a noreturn call, end it with
9545 a call to __builtin_unreachable. This situation can occur
9546 when inlining a noreturn call that does in fact return. */
9547 if (EDGE_COUNT (bb->succs) == 0)
9548 {
9549 gimple *stmt = last_stmt (bb);
9550 if (!stmt
9551 || (!is_ctrl_stmt (stmt)
9552 && (!is_gimple_call (stmt)
9553 || !gimple_call_noreturn_p (stmt))))
9554 {
9555 if (stmt && is_gimple_call (stmt))
9556 gimple_call_set_ctrl_altering (stmt, false);
9557 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9558 stmt = gimple_build_call (fndecl, 0);
9559 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9560 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9561 if (!cfun->after_inlining)
9562 {
9563 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9564 node->create_edge (cgraph_node::get_create (fndecl),
9565 call_stmt, bb->count);
9566 }
9567 }
9568 }
9569 }
9570 if (scale)
9571 compute_function_frequency ();
9572
9573 if (current_loops
9574 && (todo & TODO_cleanup_cfg))
9575 loops_state_set (LOOPS_NEED_FIXUP);
9576
9577 return todo;
9578 }

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

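/* edge_def is a GTY((user)) structure, so gengtype does not emit
   marking routines for it; the functions below walk its fields by
   hand.  E->insns is a union whose active member depends on whether
   the function is still in GIMPLE or already in RTL, hence the
   current_ir_type () checks.  */
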
extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}


#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.  Each BB in our simple chain
     should be dominated by the one before it.  */
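  /* Note that get_dominated_by returns a heap-allocated vec which the
     caller must release, hence the explicit release () calls below.  */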
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C, in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where each of A, B, C, D below has an
   edge to every other node, and also to itself).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
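  /* We expect the n * n edges within the subgraph (every node has an
     edge to every node, itself included) plus the ENTRY and EXIT links
     created above.  */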
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

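/* A sketch for the "something that jumps to itself" item in the TODO
   at the end of this file: a single block A with a self-edge, i.e.
     ENTRY -> A -> EXIT, plus A -> A.
   This is an illustrative addition in the style of the tests above,
   not part of the original test suite.  */

static void
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create a single empty block and give it a self-edge.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* ENTRY and EXIT plus A itself.  */
  ASSERT_EQ (3, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (3, n_edges_for_fn (fun));
  /* A's preds are ENTRY and A; its succs are A and EXIT.  */
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  /* The self-edge must not affect A's immediate dominator (ENTRY) or
     its immediate post-dominator (EXIT).  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS, bb_a));
  free_dominance_info (CDI_DOMINATORS);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
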
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
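  /* The illustrative self-loop sketch added above.  */
  test_self_loop ();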
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */

#endif /* CHECKING_P */