/* Control flow functions for trees.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store the last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

/* Initialize the CFG data structures for function FN, leaving it with
   just the ENTRY and EXIT blocks.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
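
/* For illustration only, not part of the original file: a source loop
   written as

       #pragma GCC ivdep
       for (i = 0; i < n; i++)
	 a[i] = b[i] + c[i];

   arrives here with an IFN_ANNOTATE call of kind annot_expr_ivdep_kind
   immediately before the loop condition; the function above records
   loop->safelen = INT_MAX and rewrites the call into a plain copy of
   its first argument.  */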

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Build the CFG for the current function, then clean it up and
   initialize the loop structures.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
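
/* For illustration only, not part of the original file: with the GNU
   labels-as-values extension,

       void *dests[] = { &&l1, &&l2 };
       goto *dests[i];

   the goto destination is an expression rather than a LABEL_DECL, so
   computed_goto_p returns true; for a plain "goto l1;" it returns
   false.  */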

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
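
/* For illustration only, not part of the original file: for source like

       if (p) foo (); else bar ();   // all on one source line

   the then and else blocks share a line, so assign_discriminators
   below hands one of them a fresh discriminator from this counter,
   letting sample-based profiles attribute counts to the correct
   branch even though both blocks share a locus.  */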

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.
   FROM is the expansion of LOCUS1, precomputed by the caller.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge-to-cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings reasonably accurate.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
					 last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label, label_for_bb);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;
  hash_set<tree> *removed_labels = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL
	  || base_bb == default_bb
	  || (removed_labels
	      && removed_labels->contains (CASE_LABEL (base_case))))
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
		  ? CASE_HIGH (base_case)
		  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && (removed_labels == NULL
		  || !removed_labels->contains (CASE_LABEL (merge_case)))
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high
		= (CASE_HIGH (merge_case)
		   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    {
	      for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
		   !gsi_end_p (gsi); gsi_next (&gsi))
		if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
		  {
		    if (FORCED_LABEL (gimple_label_label (stmt))
			|| DECL_NONLOCAL (gimple_label_label (stmt)))
		      {
			/* Forced/non-local labels aren't going to be removed,
			   but they will be moved to some neighbouring basic
			   block.  If some later case label refers to one of
			   those labels, we should throw that case away rather
			   than keeping it around and referring to some random
			   other basic block without an edge to it.  */
			if (removed_labels == NULL)
			  removed_labels = new hash_set<tree>;
			removed_labels->add (gimple_label_label (stmt));
		      }
		  }
		else
		  break;
	      remove_edge_and_dominated_blocks (base_edge);
	    }
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  delete removed_labels;
  return new_size < old_size;
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
1864
1865 /* Checks whether we can merge block B into block A. */
1866
1867 static bool
1868 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1869 {
1870 gimple *stmt;
1871
1872 if (!single_succ_p (a))
1873 return false;
1874
1875 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1876 return false;
1877
1878 if (single_succ (a) != b)
1879 return false;
1880
1881 if (!single_pred_p (b))
1882 return false;
1883
1884 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1885 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1886 return false;
1887
1888 /* If A ends by a statement causing exceptions or something similar, we
1889 cannot merge the blocks. */
1890 stmt = last_stmt (a);
1891 if (stmt && stmt_ends_bb_p (stmt))
1892 return false;
1893
1894 /* Do not allow a block with only a non-local label to be merged. */
1895 if (stmt)
1896 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1897 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1898 return false;
1899
1900 /* Examine the labels at the beginning of B. */
1901 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1902 gsi_next (&gsi))
1903 {
1904 tree lab;
1905 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1906 if (!label_stmt)
1907 break;
1908 lab = gimple_label_label (label_stmt);
1909
1910 /* Do not remove user forced labels or for -O0 any user labels. */
1911 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1912 return false;
1913 }
1914
1915 /* Protect simple loop latches. We only want to avoid merging
1916 the latch with the loop header or with a block in another
1917 loop in this case. */
1918 if (current_loops
1919 && b->loop_father->latch == b
1920 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1921 && (b->loop_father->header == a
1922 || b->loop_father != a->loop_father))
1923 return false;
1924
1925 /* It must be possible to eliminate all phi nodes in B. If ssa form
1926 is not up-to-date and a name-mapping is registered, we cannot eliminate
1927 any phis. Symbols marked for renaming are never a problem though. */
1928 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1929 gsi_next (&gsi))
1930 {
1931 gphi *phi = gsi.phi ();
1932 /* Technically only new names matter. */
1933 if (name_registered_for_update_p (PHI_RESULT (phi)))
1934 return false;
1935 }
1936
1937 /* When not optimizing, don't merge if we'd lose goto_locus. */
1938 if (!optimize
1939 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1940 {
1941 location_t goto_locus = single_succ_edge (a)->goto_locus;
1942 gimple_stmt_iterator prev, next;
1943 prev = gsi_last_nondebug_bb (a);
1944 next = gsi_after_labels (b);
1945 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1946 gsi_next_nondebug (&next);
1947 if ((gsi_end_p (prev)
1948 || gimple_location (gsi_stmt (prev)) != goto_locus)
1949 && (gsi_end_p (next)
1950 || gimple_location (gsi_stmt (next)) != goto_locus))
1951 return false;
1952 }
1953
1954 return true;
1955 }
1956
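/* For instance (a hypothetical CFG sketch), merging is allowed when A
   unconditionally falls through to B and B has no other predecessors:

     A: x_1 = a_2 + 1;        A: x_1 = a_2 + 1;
        goto <B>;        =>      y_3 = x_1 * 2;
     B: y_3 = x_1 * 2;

   but it is refused if, e.g., B starts with a user label at -O0, or B
   is the simple latch of a loop whose header is A.  */
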
1957 /* Replaces all uses of NAME by VAL. */
1958
1959 void
1960 replace_uses_by (tree name, tree val)
1961 {
1962 imm_use_iterator imm_iter;
1963 use_operand_p use;
1964 gimple *stmt;
1965 edge e;
1966
1967 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1968 {
1969 /* Mark the block if we change the last stmt in it. */
1970 if (cfgcleanup_altered_bbs
1971 && stmt_ends_bb_p (stmt))
1972 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1973
1974 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1975 {
1976 replace_exp (use, val);
1977
1978 if (gimple_code (stmt) == GIMPLE_PHI)
1979 {
1980 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1981 PHI_ARG_INDEX_FROM_USE (use));
1982 if (e->flags & EDGE_ABNORMAL
1983 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1984 {
1985 /* This can only occur for virtual operands, since
1986 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1987 would prevent replacement. */
1988 gcc_checking_assert (virtual_operand_p (name));
1989 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1990 }
1991 }
1992 }
1993
1994 if (gimple_code (stmt) != GIMPLE_PHI)
1995 {
1996 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1997 gimple *orig_stmt = stmt;
1998 size_t i;
1999
2000 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2001 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2002 only change sth from non-invariant to invariant, and only
2003 when propagating constants. */
2004 if (is_gimple_min_invariant (val))
2005 for (i = 0; i < gimple_num_ops (stmt); i++)
2006 {
2007 tree op = gimple_op (stmt, i);
2008 /* Operands may be empty here. For example, the labels
2009 of a GIMPLE_COND are nulled out following the creation
2010 of the corresponding CFG edges. */
2011 if (op && TREE_CODE (op) == ADDR_EXPR)
2012 recompute_tree_invariant_for_addr_expr (op);
2013 }
2014
2015 if (fold_stmt (&gsi))
2016 stmt = gsi_stmt (gsi);
2017
2018 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2019 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2020
2021 update_stmt (stmt);
2022 }
2023 }
2024
2025 gcc_checking_assert (has_zero_uses (name));
2026
2027 /* Also update the trees stored in loop structures. */
2028 if (current_loops)
2029 {
2030 class loop *loop;
2031
2032 FOR_EACH_LOOP (loop, 0)
2033 {
2034 substitute_in_loop_info (loop, name, val);
2035 }
2036 }
2037 }
2038
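/* A small illustrative sketch (SSA names hypothetical): given

     z_3 = x_1 + 1;
     if (x_1 > 0) goto <bb 3>; else goto <bb 4>;

   replace_uses_by (x_1, y_2) rewrites every use of x_1, yielding

     z_3 = y_2 + 1;
     if (y_2 > 0) goto <bb 3>; else goto <bb 4>;

   re-folding each touched statement; the definition of x_1 itself is
   left for DCE to remove, and the assert above checks that x_1 ends
   up with zero uses.  */
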
2039 /* Merge block B into block A. */
2040
2041 static void
2042 gimple_merge_blocks (basic_block a, basic_block b)
2043 {
2044 gimple_stmt_iterator last, gsi;
2045 gphi_iterator psi;
2046
2047 if (dump_file)
2048 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2049
2050 /* Remove all single-valued PHI nodes from block B of the form
2051 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2052 gsi = gsi_last_bb (a);
2053 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2054 {
2055 gimple *phi = gsi_stmt (psi);
2056 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2057 gimple *copy;
2058 bool may_replace_uses = (virtual_operand_p (def)
2059 || may_propagate_copy (def, use));
2060
2061 /* In case we maintain loop closed ssa form, do not propagate arguments
2062 of loop exit phi nodes. */
2063 if (current_loops
2064 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2065 && !virtual_operand_p (def)
2066 && TREE_CODE (use) == SSA_NAME
2067 && a->loop_father != b->loop_father)
2068 may_replace_uses = false;
2069
2070 if (!may_replace_uses)
2071 {
2072 gcc_assert (!virtual_operand_p (def));
2073
2074 /* Note that just emitting the copies is fine -- there is no problem
2075 with ordering of phi nodes. This is because A is the single
2076 predecessor of B, therefore results of the phi nodes cannot
2077 appear as arguments of the phi nodes. */
2078 copy = gimple_build_assign (def, use);
2079 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2080 remove_phi_node (&psi, false);
2081 }
2082 else
2083 {
2084 /* If we deal with a PHI for virtual operands, we can simply
2085 propagate these without fussing with folding or updating
2086 the stmt. */
2087 if (virtual_operand_p (def))
2088 {
2089 imm_use_iterator iter;
2090 use_operand_p use_p;
2091 gimple *stmt;
2092
2093 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2094 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2095 SET_USE (use_p, use);
2096
2097 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2098 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2099 }
2100 else
2101 replace_uses_by (def, use);
2102
2103 remove_phi_node (&psi, true);
2104 }
2105 }
2106
2107 /* Ensure that B follows A. */
2108 move_block_after (b, a);
2109
2110 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2111 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2112
2113 /* Remove labels from B and set gimple_bb to A for other statements. */
2114 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2115 {
2116 gimple *stmt = gsi_stmt (gsi);
2117 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2118 {
2119 tree label = gimple_label_label (label_stmt);
2120 int lp_nr;
2121
2122 gsi_remove (&gsi, false);
2123
2124 /* Now that we can thread computed gotos, we might have
2125 a situation where we have a forced label in block B.
2126 However, the label at the start of block B might still be
2127 used in other ways (think about the runtime checking for
2128 Fortran assigned gotos). So we cannot just delete the
2129 label. Instead we move the label to the start of block A. */
2130 if (FORCED_LABEL (label))
2131 {
2132 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2133 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2134 }
2135 /* Other user labels are kept around in the form of a debug stmt. */
2136 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2137 {
2138 gimple *dbg = gimple_build_debug_bind (label,
2139 integer_zero_node,
2140 stmt);
2141 gimple_debug_bind_reset_value (dbg);
2142 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2143 }
2144
2145 lp_nr = EH_LANDING_PAD_NR (label);
2146 if (lp_nr)
2147 {
2148 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2149 lp->post_landing_pad = NULL;
2150 }
2151 }
2152 else
2153 {
2154 gimple_set_bb (stmt, a);
2155 gsi_next (&gsi);
2156 }
2157 }
2158
2159 /* When merging two BBs, if their counts are different, the larger count
2160 is selected as the new bb count. This is to handle inconsistent
2161 profiles. */
2162 if (a->loop_father == b->loop_father)
2163 {
2164 a->count = a->count.merge (b->count);
2165 }
2166
2167 /* Merge the sequences. */
2168 last = gsi_last_bb (a);
2169 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2170 set_bb_seq (b, NULL);
2171
2172 if (cfgcleanup_altered_bbs)
2173 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2174 }
2175
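/* A rough sketch of the PHI handling above (names hypothetical): with
   A the single predecessor of B, a degenerate PHI in B such as

     # x_1 = PHI <x_2 (A)>

   is removed by propagating x_2 into all uses of x_1, or, when
   propagation is not allowed (e.g. under loop-closed SSA across a
   loop boundary), by appending the copy

     x_1 = x_2;

   to the end of A before B's statements are spliced in.  */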
2176
2177 /* Return the one of BB's two successors that is not reachable by a
2178 complex edge, if there is one. Else, return BB. We use
2179 this in optimizations that use post-dominators for their heuristics,
2180 to catch the cases in C++ where function calls are involved. */
2181
2182 basic_block
2183 single_noncomplex_succ (basic_block bb)
2184 {
2185 edge e0, e1;
2186 if (EDGE_COUNT (bb->succs) != 2)
2187 return bb;
2188
2189 e0 = EDGE_SUCC (bb, 0);
2190 e1 = EDGE_SUCC (bb, 1);
2191 if (e0->flags & EDGE_COMPLEX)
2192 return e1->dest;
2193 if (e1->flags & EDGE_COMPLEX)
2194 return e0->dest;
2195
2196 return bb;
2197 }
2198
2199 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2200
2201 void
2202 notice_special_calls (gcall *call)
2203 {
2204 int flags = gimple_call_flags (call);
2205
2206 if (flags & ECF_MAY_BE_ALLOCA)
2207 cfun->calls_alloca = true;
2208 if (flags & ECF_RETURNS_TWICE)
2209 cfun->calls_setjmp = true;
2210 }
2211
2212
2213 /* Clear flags set by notice_special_calls. Used by dead code removal
2214 to update the flags. */
2215
2216 void
2217 clear_special_calls (void)
2218 {
2219 cfun->calls_alloca = false;
2220 cfun->calls_setjmp = false;
2221 }
2222
2223 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2224
2225 static void
2226 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2227 {
2228 /* Since this block is no longer reachable, we can just delete all
2229 of its PHI nodes. */
2230 remove_phi_nodes (bb);
2231
2232 /* Remove edges to BB's successors. */
2233 while (EDGE_COUNT (bb->succs) > 0)
2234 remove_edge (EDGE_SUCC (bb, 0));
2235 }
2236
2237
2238 /* Remove statements of basic block BB. */
2239
2240 static void
2241 remove_bb (basic_block bb)
2242 {
2243 gimple_stmt_iterator i;
2244
2245 if (dump_file)
2246 {
2247 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2248 if (dump_flags & TDF_DETAILS)
2249 {
2250 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2251 fprintf (dump_file, "\n");
2252 }
2253 }
2254
2255 if (current_loops)
2256 {
2257 class loop *loop = bb->loop_father;
2258
2259 /* If a loop gets removed, clean up the information associated
2260 with it. */
2261 if (loop->latch == bb
2262 || loop->header == bb)
2263 free_numbers_of_iterations_estimates (loop);
2264 }
2265
2266 /* Remove all the instructions in the block. */
2267 if (bb_seq (bb) != NULL)
2268 {
2269 /* Walk backwards so as to get a chance to substitute all
2270 released DEFs into debug stmts. See
2271 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2272 details. */
2273 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2274 {
2275 gimple *stmt = gsi_stmt (i);
2276 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2277 if (label_stmt
2278 && (FORCED_LABEL (gimple_label_label (label_stmt))
2279 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2280 {
2281 basic_block new_bb;
2282 gimple_stmt_iterator new_gsi;
2283
2284 /* A non-reachable non-local label may still be referenced.
2285 But it no longer needs to carry the extra semantics of
2286 non-locality. */
2287 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2288 {
2289 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2290 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2291 }
2292
2293 new_bb = bb->prev_bb;
2294 /* Don't move any labels into ENTRY block. */
2295 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2296 {
2297 new_bb = single_succ (new_bb);
2298 gcc_assert (new_bb != bb);
2299 }
2300 new_gsi = gsi_after_labels (new_bb);
2301 gsi_remove (&i, false);
2302 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2303 }
2304 else
2305 {
2306 /* Release SSA definitions. */
2307 release_defs (stmt);
2308 gsi_remove (&i, true);
2309 }
2310
2311 if (gsi_end_p (i))
2312 i = gsi_last_bb (bb);
2313 else
2314 gsi_prev (&i);
2315 }
2316 }
2317
2318 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2319 bb->il.gimple.seq = NULL;
2320 bb->il.gimple.phi_nodes = NULL;
2321 }
2322
2323
2324 /* Given a basic block BB and a value VAL for use in the final statement
2325 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2326 the edge that will be taken out of the block.
2327 If VAL is NULL_TREE, then the current value of the final statement's
2328 predicate or index is used.
2329 If the value does not match a unique edge, NULL is returned. */
2330
2331 edge
2332 find_taken_edge (basic_block bb, tree val)
2333 {
2334 gimple *stmt;
2335
2336 stmt = last_stmt (bb);
2337
2338 /* Handle ENTRY and EXIT. */
2339 if (!stmt)
2340 return NULL;
2341
2342 if (gimple_code (stmt) == GIMPLE_COND)
2343 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2344
2345 if (gimple_code (stmt) == GIMPLE_SWITCH)
2346 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2347
2348 if (computed_goto_p (stmt))
2349 {
2350 /* Only optimize if the argument is a label; if the argument is
2351 not a label then we cannot construct a proper CFG.
2352
2353 It may be the case that we only need to allow the LABEL_REF to
2354 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2355 appear inside a LABEL_EXPR just to be safe. */
2356 if (val
2357 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2358 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2359 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2360 }
2361
2362 /* Otherwise we only know the taken successor edge if it's unique. */
2363 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2364 }
2365
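/* E.g. (an illustrative use; names hypothetical): for a block ending in

     if (x_1 > 4) goto <bb 3>; else goto <bb 4>;

   find_taken_edge (bb, integer_zero_node) returns the edge to <bb 4>,
   while with VAL == NULL_TREE the current predicate is used, which
   yields NULL here because it is not constant.  */
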
2366 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2367 statement, determine which of the outgoing edges will be taken out of the
2368 block. Return NULL if either edge may be taken. */
2369
2370 static edge
2371 find_taken_edge_computed_goto (basic_block bb, tree val)
2372 {
2373 basic_block dest;
2374 edge e = NULL;
2375
2376 dest = label_to_block (cfun, val);
2377 if (dest)
2378 e = find_edge (bb, dest);
2379
2380 /* It's possible for find_edge to return NULL here on invalid code
2381 that abuses the labels-as-values extension (e.g. code that attempts to
2382 jump *between* functions via stored labels-as-values; PR 84136).
2383 If so, then we simply return that NULL for the edge.
2384 We don't currently have a way of detecting such invalid code, so we
2385 can't assert that it was the case when a NULL edge occurs here. */
2386
2387 return e;
2388 }
2389
2390 /* Given COND_STMT and a constant value VAL for use as the predicate,
2391 determine which of the two edges will be taken out of
2392 the statement's block. Return NULL if either edge may be taken.
2393 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2394 is used. */
2395
2396 static edge
2397 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2398 {
2399 edge true_edge, false_edge;
2400
2401 if (val == NULL_TREE)
2402 {
2403 /* Use the current value of the predicate. */
2404 if (gimple_cond_true_p (cond_stmt))
2405 val = integer_one_node;
2406 else if (gimple_cond_false_p (cond_stmt))
2407 val = integer_zero_node;
2408 else
2409 return NULL;
2410 }
2411 else if (TREE_CODE (val) != INTEGER_CST)
2412 return NULL;
2413
2414 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2415 &true_edge, &false_edge);
2416
2417 return (integer_zerop (val) ? false_edge : true_edge);
2418 }
2419
2420 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2421 which edge will be taken out of the statement's block. Return NULL if any
2422 edge may be taken.
2423 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2424 is used. */
2425
2426 edge
2427 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2428 {
2429 basic_block dest_bb;
2430 edge e;
2431 tree taken_case;
2432
2433 if (gimple_switch_num_labels (switch_stmt) == 1)
2434 taken_case = gimple_switch_default_label (switch_stmt);
2435 else
2436 {
2437 if (val == NULL_TREE)
2438 val = gimple_switch_index (switch_stmt);
2439 if (TREE_CODE (val) != INTEGER_CST)
2440 return NULL;
2441 else
2442 taken_case = find_case_label_for_value (switch_stmt, val);
2443 }
2444 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2445
2446 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2447 gcc_assert (e);
2448 return e;
2449 }
2450
2451
2452 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2453 We can make optimal use here of the fact that the case labels are
2454 sorted: We can do a binary search for a case matching VAL. */
2455
2456 tree
2457 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2458 {
2459 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2460 tree default_case = gimple_switch_default_label (switch_stmt);
2461
2462 for (low = 0, high = n; high - low > 1; )
2463 {
2464 size_t i = (high + low) / 2;
2465 tree t = gimple_switch_label (switch_stmt, i);
2466 int cmp;
2467
2468 /* Cache the result of comparing CASE_LOW and val. */
2469 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2470
2471 if (cmp > 0)
2472 high = i;
2473 else
2474 low = i;
2475
2476 if (CASE_HIGH (t) == NULL)
2477 {
2478 /* A single-valued case label. */
2479 if (cmp == 0)
2480 return t;
2481 }
2482 else
2483 {
2484 /* A case range. We can only handle integer ranges. */
2485 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2486 return t;
2487 }
2488 }
2489
2490 return default_case;
2491 }
2492
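/* A worked example of the binary search above (labels hypothetical):
   given

     switch (x) <default: <D>, case 1: <L1>, case 5 ... 10: <L2>>

   a query with VAL == 7 narrows onto the "case 5 ... 10" label and
   returns it, since 5 <= 7 and CASE_HIGH (10) >= 7; a query with
   VAL == 3 matches no label and falls back to the default case.  */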
2493
2494 /* Dump a basic block on stderr. */
2495
2496 void
2497 gimple_debug_bb (basic_block bb)
2498 {
2499 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2500 }
2501
2502
2503 /* Dump basic block with index N on stderr. */
2504
2505 basic_block
2506 gimple_debug_bb_n (int n)
2507 {
2508 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2509 return BASIC_BLOCK_FOR_FN (cfun, n);
2510 }
2511
2512
2513 /* Dump the CFG on stderr.
2514
2515 FLAGS are the same used by the tree dumping functions
2516 (see TDF_* in dumpfile.h). */
2517
2518 void
2519 gimple_debug_cfg (dump_flags_t flags)
2520 {
2521 gimple_dump_cfg (stderr, flags);
2522 }
2523
2524
2525 /* Dump the program showing basic block boundaries on the given FILE.
2526
2527 FLAGS are the same used by the tree dumping functions (see TDF_* in
2528 tree.h). */
2529
2530 void
2531 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2532 {
2533 if (flags & TDF_DETAILS)
2534 {
2535 dump_function_header (file, current_function_decl, flags);
2536 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2537 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2538 last_basic_block_for_fn (cfun));
2539
2540 brief_dump_cfg (file, flags);
2541 fprintf (file, "\n");
2542 }
2543
2544 if (flags & TDF_STATS)
2545 dump_cfg_stats (file);
2546
2547 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2548 }
2549
2550
2551 /* Dump CFG statistics on FILE. */
2552
2553 void
2554 dump_cfg_stats (FILE *file)
2555 {
2556 static long max_num_merged_labels = 0;
2557 unsigned long size, total = 0;
2558 long num_edges;
2559 basic_block bb;
2560 const char * const fmt_str = "%-30s%-13s%12s\n";
2561 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2562 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2563 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2564 const char *funcname = current_function_name ();
2565
2566 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2567
2568 fprintf (file, "---------------------------------------------------------\n");
2569 fprintf (file, fmt_str, "", " Number of ", "Memory");
2570 fprintf (file, fmt_str, "", " instances ", "used ");
2571 fprintf (file, "---------------------------------------------------------\n");
2572
2573 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2574 total += size;
2575 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2576 SIZE_AMOUNT (size));
2577
2578 num_edges = 0;
2579 FOR_EACH_BB_FN (bb, cfun)
2580 num_edges += EDGE_COUNT (bb->succs);
2581 size = num_edges * sizeof (class edge_def);
2582 total += size;
2583 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2584
2585 fprintf (file, "---------------------------------------------------------\n");
2586 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2587 SIZE_AMOUNT (total));
2588 fprintf (file, "---------------------------------------------------------\n");
2589 fprintf (file, "\n");
2590
2591 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2592 max_num_merged_labels = cfg_stats.num_merged_labels;
2593
2594 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2595 cfg_stats.num_merged_labels, max_num_merged_labels);
2596
2597 fprintf (file, "\n");
2598 }
2599
2600
2601 /* Dump CFG statistics on stderr. Keep extern so that it's always
2602 linked in the final executable. */
2603
2604 DEBUG_FUNCTION void
2605 debug_cfg_stats (void)
2606 {
2607 dump_cfg_stats (stderr);
2608 }
2609
2610 /*---------------------------------------------------------------------------
2611 Miscellaneous helpers
2612 ---------------------------------------------------------------------------*/
2613
2614 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2615 flow. Transfers of control flow associated with EH are excluded. */
2616
2617 static bool
2618 call_can_make_abnormal_goto (gimple *t)
2619 {
2620 /* If the function has no non-local labels, then a call cannot make an
2621 abnormal transfer of control. */
2622 if (!cfun->has_nonlocal_label
2623 && !cfun->calls_setjmp)
2624 return false;
2625
2626 /* Likewise if the call has no side effects. */
2627 if (!gimple_has_side_effects (t))
2628 return false;
2629
2630 /* Likewise if the called function is leaf. */
2631 if (gimple_call_flags (t) & ECF_LEAF)
2632 return false;
2633
2634 return true;
2635 }
2636
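/* For instance (an illustrative sketch; foo and buf are hypothetical),
   in a function containing

     setjmp (buf);
     ...
     foo ();

   the call to foo () can make an abnormal transfer of control, since
   foo might longjmp back to the setjmp point -- unless foo is known
   to be leaf or free of side effects.  */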
2637
2638 /* Return true if T can make an abnormal transfer of control flow.
2639 Transfers of control flow associated with EH are excluded. */
2640
2641 bool
2642 stmt_can_make_abnormal_goto (gimple *t)
2643 {
2644 if (computed_goto_p (t))
2645 return true;
2646 if (is_gimple_call (t))
2647 return call_can_make_abnormal_goto (t);
2648 return false;
2649 }
2650
2651
2652 /* Return true if T represents a stmt that always transfers control. */
2653
2654 bool
2655 is_ctrl_stmt (gimple *t)
2656 {
2657 switch (gimple_code (t))
2658 {
2659 case GIMPLE_COND:
2660 case GIMPLE_SWITCH:
2661 case GIMPLE_GOTO:
2662 case GIMPLE_RETURN:
2663 case GIMPLE_RESX:
2664 return true;
2665 default:
2666 return false;
2667 }
2668 }
2669
2670
2671 /* Return true if T is a statement that may alter the flow of control
2672 (e.g., a call to a non-returning function). */
2673
2674 bool
2675 is_ctrl_altering_stmt (gimple *t)
2676 {
2677 gcc_assert (t);
2678
2679 switch (gimple_code (t))
2680 {
2681 case GIMPLE_CALL:
2682 /* Per stmt call flag indicates whether the call could alter
2683 control flow. */
2684 if (gimple_call_ctrl_altering_p (t))
2685 return true;
2686 break;
2687
2688 case GIMPLE_EH_DISPATCH:
2689 /* EH_DISPATCH branches to the individual catch handlers at
2690 this level of a try or allowed-exceptions region. It can
2691 fallthru to the next statement as well. */
2692 return true;
2693
2694 case GIMPLE_ASM:
2695 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2696 return true;
2697 break;
2698
2699 CASE_GIMPLE_OMP:
2700 /* OpenMP directives alter control flow. */
2701 return true;
2702
2703 case GIMPLE_TRANSACTION:
2704 /* A transaction start alters control flow. */
2705 return true;
2706
2707 default:
2708 break;
2709 }
2710
2711 /* If a statement can throw, it alters control flow. */
2712 return stmt_can_throw_internal (cfun, t);
2713 }
2714
2715
2716 /* Return true if T is a simple local goto. */
2717
2718 bool
2719 simple_goto_p (gimple *t)
2720 {
2721 return (gimple_code (t) == GIMPLE_GOTO
2722 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2723 }
2724
2725
2726 /* Return true if STMT should start a new basic block. PREV_STMT is
2727 the statement preceding STMT. It is used when STMT is a label or a
2728 case label. Labels should only start a new basic block if their
2729 previous statement wasn't a label. Otherwise, a sequence of labels
2730 would generate unnecessary basic blocks that only contain a single
2731 label. */
2732
2733 static inline bool
2734 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2735 {
2736 if (stmt == NULL)
2737 return false;
2738
2739 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2740 any nondebug stmts in the block. We don't want to start another
2741 block in this case: the debug stmt will already have started the
2742 one STMT would start if we weren't outputting debug stmts. */
2743 if (prev_stmt && is_gimple_debug (prev_stmt))
2744 return false;
2745
2746 /* Labels start a new basic block only if the preceding statement
2747 wasn't a label of the same type. This prevents the creation of
2748 consecutive blocks that have nothing but a single label. */
2749 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2750 {
2751 /* Nonlocal and computed GOTO targets always start a new block. */
2752 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2753 || FORCED_LABEL (gimple_label_label (label_stmt)))
2754 return true;
2755
2756 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2757 {
2758 if (DECL_NONLOCAL (gimple_label_label (plabel))
2759 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2760 return true;
2761
2762 cfg_stats.num_merged_labels++;
2763 return false;
2764 }
2765 else
2766 return true;
2767 }
2768 else if (gimple_code (stmt) == GIMPLE_CALL)
2769 {
2770 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2771 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2772 start a new block. */
2773 return true;
2774 if (gimple_call_internal_p (stmt, IFN_PHI)
2775 && prev_stmt
2776 && gimple_code (prev_stmt) != GIMPLE_LABEL
2777 && (gimple_code (prev_stmt) != GIMPLE_CALL
2778 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2779 /* PHI nodes start a new block unless preceded by a label
2780 or another PHI. */
2781 return true;
2782 }
2783
2784 return false;
2785 }
2786
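/* E.g. (a sketch; labels hypothetical): in the sequence

     <Lartificial>:
     L2:
       x_1 = 1;

   L2 does not start a new basic block, because the preceding statement
   is an artificial local label; such merges are counted in
   cfg_stats.num_merged_labels.  A forced or non-local L2 would still
   start its own block.  */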
2787
2788 /* Return true if T should end a basic block. */
2789
2790 bool
2791 stmt_ends_bb_p (gimple *t)
2792 {
2793 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2794 }
2795
2796 /* Remove block annotations and other data structures. */
2797
2798 void
2799 delete_tree_cfg_annotations (struct function *fn)
2800 {
2801 vec_free (label_to_block_map_for_fn (fn));
2802 }
2803
2804 /* Return the virtual phi in BB. */
2805
2806 gphi *
2807 get_virtual_phi (basic_block bb)
2808 {
2809 for (gphi_iterator gsi = gsi_start_phis (bb);
2810 !gsi_end_p (gsi);
2811 gsi_next (&gsi))
2812 {
2813 gphi *phi = gsi.phi ();
2814
2815 if (virtual_operand_p (PHI_RESULT (phi)))
2816 return phi;
2817 }
2818
2819 return NULL;
2820 }
2821
2822 /* Return the first statement in basic block BB. */
2823
2824 gimple *
2825 first_stmt (basic_block bb)
2826 {
2827 gimple_stmt_iterator i = gsi_start_bb (bb);
2828 gimple *stmt = NULL;
2829
2830 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2831 {
2832 gsi_next (&i);
2833 stmt = NULL;
2834 }
2835 return stmt;
2836 }
2837
2838 /* Return the first non-label statement in basic block BB. */
2839
2840 static gimple *
2841 first_non_label_stmt (basic_block bb)
2842 {
2843 gimple_stmt_iterator i = gsi_start_bb (bb);
2844 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2845 gsi_next (&i);
2846 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2847 }
2848
2849 /* Return the last statement in basic block BB. */
2850
2851 gimple *
2852 last_stmt (basic_block bb)
2853 {
2854 gimple_stmt_iterator i = gsi_last_bb (bb);
2855 gimple *stmt = NULL;
2856
2857 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2858 {
2859 gsi_prev (&i);
2860 stmt = NULL;
2861 }
2862 return stmt;
2863 }
2864
2865 /* Return the last statement of an otherwise empty block. Return NULL
2866 if the block is totally empty, or if it contains more than one
2867 statement. */
2868
2869 gimple *
2870 last_and_only_stmt (basic_block bb)
2871 {
2872 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2873 gimple *last, *prev;
2874
2875 if (gsi_end_p (i))
2876 return NULL;
2877
2878 last = gsi_stmt (i);
2879 gsi_prev_nondebug (&i);
2880 if (gsi_end_p (i))
2881 return last;
2882
2883 /* Empty statements should no longer appear in the instruction stream.
2884 Everything that might have appeared before should be deleted by
2885 remove_useless_stmts, and the optimizers should just gsi_remove
2886 instead of smashing with build_empty_stmt.
2887
2888 Thus the only thing that should appear here in a block containing
2889 one executable statement is a label. */
2890 prev = gsi_stmt (i);
2891 if (gimple_code (prev) == GIMPLE_LABEL)
2892 return last;
2893 else
2894 return NULL;
2895 }
2896
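/* E.g. (hypothetical contents): for a block holding only

     L1:
       return;

   last_and_only_stmt returns the GIMPLE_RETURN; for an empty block,
   or one with two executable statements, it returns NULL.  Debug
   statements are ignored throughout.  */
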
2897 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2898
2899 static void
2900 reinstall_phi_args (edge new_edge, edge old_edge)
2901 {
2902 edge_var_map *vm;
2903 int i;
2904 gphi_iterator phis;
2905
2906 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2907 if (!v)
2908 return;
2909
2910 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2911 v->iterate (i, &vm) && !gsi_end_p (phis);
2912 i++, gsi_next (&phis))
2913 {
2914 gphi *phi = phis.phi ();
2915 tree result = redirect_edge_var_map_result (vm);
2916 tree arg = redirect_edge_var_map_def (vm);
2917
2918 gcc_assert (result == gimple_phi_result (phi));
2919
2920 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2921 }
2922
2923 redirect_edge_var_map_clear (old_edge);
2924 }
2925
2926 /* Returns the basic block after which the new basic block created
2927 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2928 near its "logical" location. This is of most help to humans looking
2929 at debugging dumps. */
2930
2931 basic_block
2932 split_edge_bb_loc (edge edge_in)
2933 {
2934 basic_block dest = edge_in->dest;
2935 basic_block dest_prev = dest->prev_bb;
2936
2937 if (dest_prev)
2938 {
2939 edge e = find_edge (dest_prev, dest);
2940 if (e && !(e->flags & EDGE_COMPLEX))
2941 return edge_in->src;
2942 }
2943 return dest_prev;
2944 }
2945
2946 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2947 Abort on abnormal edges. */
2948
2949 static basic_block
2950 gimple_split_edge (edge edge_in)
2951 {
2952 basic_block new_bb, after_bb, dest;
2953 edge new_edge, e;
2954
2955 /* Abnormal edges cannot be split. */
2956 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2957
2958 dest = edge_in->dest;
2959
2960 after_bb = split_edge_bb_loc (edge_in);
2961
2962 new_bb = create_empty_bb (after_bb);
2963 new_bb->count = edge_in->count ();
2964
2965 e = redirect_edge_and_branch (edge_in, new_bb);
2966 gcc_assert (e == edge_in);
2967
2968 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2969 reinstall_phi_args (new_edge, e);
2970
2971 return new_bb;
2972 }
2973
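/* A sketch of the effect (block numbers hypothetical): splitting the
   critical edge bb2->bb4 in

     bb2 -> bb3, bb2 -> bb4          bb2 -> bb3, bb2 -> bb5 -> bb4
     bb3 -> bb4                =>    bb3 -> bb4

   creates an empty bb5 carrying the edge's count, redirects bb2's
   branch to it, adds a fallthru edge bb5->bb4, and re-adds the PHI
   arguments queued by the redirection onto that new edge.  */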
2974
2975 /* Verify properties of the address expression T whose base should be
2976 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2977
2978 static bool
2979 verify_address (tree t, bool verify_addressable)
2980 {
2981 bool old_constant;
2982 bool old_side_effects;
2983 bool new_constant;
2984 bool new_side_effects;
2985
2986 old_constant = TREE_CONSTANT (t);
2987 old_side_effects = TREE_SIDE_EFFECTS (t);
2988
2989 recompute_tree_invariant_for_addr_expr (t);
2990 new_side_effects = TREE_SIDE_EFFECTS (t);
2991 new_constant = TREE_CONSTANT (t);
2992
2993 if (old_constant != new_constant)
2994 {
2995 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2996 return true;
2997 }
2998 if (old_side_effects != new_side_effects)
2999 {
3000 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3001 return true;
3002 }
3003
3004 tree base = TREE_OPERAND (t, 0);
3005 while (handled_component_p (base))
3006 base = TREE_OPERAND (base, 0);
3007
3008 if (!(VAR_P (base)
3009 || TREE_CODE (base) == PARM_DECL
3010 || TREE_CODE (base) == RESULT_DECL))
3011 return false;
3012
3013 if (verify_addressable && !TREE_ADDRESSABLE (base))
3014 {
3015 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3016 return true;
3017 }
3018
3019 return false;
3020 }
3021
3022
3023 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3024 Returns true if there is an error, otherwise false. */
3025
3026 static bool
3027 verify_types_in_gimple_min_lval (tree expr)
3028 {
3029 tree op;
3030
3031 if (is_gimple_id (expr))
3032 return false;
3033
3034 if (TREE_CODE (expr) != TARGET_MEM_REF
3035 && TREE_CODE (expr) != MEM_REF)
3036 {
3037 error ("invalid expression for min lvalue");
3038 return true;
3039 }
3040
3041 /* TARGET_MEM_REFs are strange beasts. */
3042 if (TREE_CODE (expr) == TARGET_MEM_REF)
3043 return false;
3044
3045 op = TREE_OPERAND (expr, 0);
3046 if (!is_gimple_val (op))
3047 {
3048 error ("invalid operand in indirect reference");
3049 debug_generic_stmt (op);
3050 return true;
3051 }
3052 /* Memory references now generally can involve a value conversion. */
3053
3054 return false;
3055 }
3056
3057 /* Verify if EXPR is a valid GIMPLE reference expression. If
3058 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3059 if there is an error, otherwise false. */
3060
3061 static bool
3062 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3063 {
3064 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3065
3066 if (TREE_CODE (expr) == REALPART_EXPR
3067 || TREE_CODE (expr) == IMAGPART_EXPR
3068 || TREE_CODE (expr) == BIT_FIELD_REF)
3069 {
3070 tree op = TREE_OPERAND (expr, 0);
3071 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3072 {
3073 error ("non-scalar %qs", code_name);
3074 return true;
3075 }
3076
3077 if (TREE_CODE (expr) == BIT_FIELD_REF)
3078 {
3079 tree t1 = TREE_OPERAND (expr, 1);
3080 tree t2 = TREE_OPERAND (expr, 2);
3081 poly_uint64 size, bitpos;
3082 if (!poly_int_tree_p (t1, &size)
3083 || !poly_int_tree_p (t2, &bitpos)
3084 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3085 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3086 {
3087 error ("invalid position or size operand to %qs", code_name);
3088 return true;
3089 }
3090 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3091 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3092 {
3093 error ("integral result type precision does not match "
3094 "field size of %qs", code_name);
3095 return true;
3096 }
3097 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3098 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3099 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3100 size))
3101 {
3102 error ("mode size of non-integral result does not "
3103 "match field size of %qs",
3104 code_name);
3105 return true;
3106 }
3107 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3108 && !type_has_mode_precision_p (TREE_TYPE (op)))
3109 {
3110 error ("%qs of non-mode-precision operand", code_name);
3111 return true;
3112 }
3113 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3114 && maybe_gt (size + bitpos,
3115 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3116 {
3117 error ("position plus size exceeds size of referenced object in "
3118 "%qs", code_name);
3119 return true;
3120 }
3121 }
3122
3123 if ((TREE_CODE (expr) == REALPART_EXPR
3124 || TREE_CODE (expr) == IMAGPART_EXPR)
3125 && !useless_type_conversion_p (TREE_TYPE (expr),
3126 TREE_TYPE (TREE_TYPE (op))))
3127 {
3128 error ("type mismatch in %qs reference", code_name);
3129 debug_generic_stmt (TREE_TYPE (expr));
3130 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3131 return true;
3132 }
3133 expr = op;
3134 }
3135
3136 while (handled_component_p (expr))
3137 {
3138 code_name = get_tree_code_name (TREE_CODE (expr));
3139
3140 if (TREE_CODE (expr) == REALPART_EXPR
3141 || TREE_CODE (expr) == IMAGPART_EXPR
3142 || TREE_CODE (expr) == BIT_FIELD_REF)
3143 {
3144 error ("non-top-level %qs", code_name);
3145 return true;
3146 }
3147
3148 tree op = TREE_OPERAND (expr, 0);
3149
3150 if (TREE_CODE (expr) == ARRAY_REF
3151 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3152 {
3153 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3154 || (TREE_OPERAND (expr, 2)
3155 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3156 || (TREE_OPERAND (expr, 3)
3157 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3158 {
3159 error ("invalid operands to %qs", code_name);
3160 debug_generic_stmt (expr);
3161 return true;
3162 }
3163 }
3164
3165 /* Verify if the reference array element types are compatible. */
3166 if (TREE_CODE (expr) == ARRAY_REF
3167 && !useless_type_conversion_p (TREE_TYPE (expr),
3168 TREE_TYPE (TREE_TYPE (op))))
3169 {
3170 error ("type mismatch in %qs", code_name);
3171 debug_generic_stmt (TREE_TYPE (expr));
3172 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3173 return true;
3174 }
3175 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3176 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3177 TREE_TYPE (TREE_TYPE (op))))
3178 {
3179 error ("type mismatch in %qs", code_name);
3180 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3181 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3182 return true;
3183 }
3184
3185 if (TREE_CODE (expr) == COMPONENT_REF)
3186 {
3187 if (TREE_OPERAND (expr, 2)
3188 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3189 {
3190 error ("invalid %qs offset operator", code_name);
3191 return true;
3192 }
3193 if (!useless_type_conversion_p (TREE_TYPE (expr),
3194 TREE_TYPE (TREE_OPERAND (expr, 1))))
3195 {
3196 error ("type mismatch in %qs", code_name);
3197 debug_generic_stmt (TREE_TYPE (expr));
3198 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3199 return true;
3200 }
3201 }
3202
3203 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3204 {
3205 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3206 that their operand is not an SSA name or an invariant when
3207 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3208 bug). Otherwise there is nothing to verify, gross mismatches at
3209 most invoke undefined behavior. */
3210 if (require_lvalue
3211 && (TREE_CODE (op) == SSA_NAME
3212 || is_gimple_min_invariant (op)))
3213 {
3214 error ("conversion of %qs on the left hand side of %qs",
3215 get_tree_code_name (TREE_CODE (op)), code_name);
3216 debug_generic_stmt (expr);
3217 return true;
3218 }
3219 else if (TREE_CODE (op) == SSA_NAME
3220 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3221 {
3222 error ("conversion of register to a different size in %qs",
3223 code_name);
3224 debug_generic_stmt (expr);
3225 return true;
3226 }
3227 else if (!handled_component_p (op))
3228 return false;
3229 }
3230
3231 expr = op;
3232 }
3233
3234 code_name = get_tree_code_name (TREE_CODE (expr));
3235
3236 if (TREE_CODE (expr) == MEM_REF)
3237 {
3238 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3239 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3240 && verify_address (TREE_OPERAND (expr, 0), false)))
3241 {
3242 error ("invalid address operand in %qs", code_name);
3243 debug_generic_stmt (expr);
3244 return true;
3245 }
3246 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3247 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3248 {
3249 error ("invalid offset operand in %qs", code_name);
3250 debug_generic_stmt (expr);
3251 return true;
3252 }
3253 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3254 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3255 {
3256 error ("invalid clique in %qs", code_name);
3257 debug_generic_stmt (expr);
3258 return true;
3259 }
3260 }
3261 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3262 {
3263 if (!TMR_BASE (expr)
3264 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3265 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3266 && verify_address (TMR_BASE (expr), false)))
3267 {
3268 error ("invalid address operand in %qs", code_name);
3269 return true;
3270 }
3271 if (!TMR_OFFSET (expr)
3272 || !poly_int_tree_p (TMR_OFFSET (expr))
3273 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3274 {
3275 error ("invalid offset operand in %qs", code_name);
3276 debug_generic_stmt (expr);
3277 return true;
3278 }
3279 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3280 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3281 {
3282 error ("invalid clique in %qs", code_name);
3283 debug_generic_stmt (expr);
3284 return true;
3285 }
3286 }
3287 else if (TREE_CODE (expr) == INDIRECT_REF)
3288 {
3289 error ("%qs in gimple IL", code_name);
3290 debug_generic_stmt (expr);
3291 return true;
3292 }
3293
3294 return ((require_lvalue || !is_gimple_min_invariant (expr))
3295 && verify_types_in_gimple_min_lval (expr));
3296 }
3297
3298 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3299 list of pointer-to types that is trivially convertible to DEST. */
3300
3301 static bool
3302 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3303 {
3304 tree src;
3305
3306 if (!TYPE_POINTER_TO (src_obj))
3307 return true;
3308
3309 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3310 if (useless_type_conversion_p (dest, src))
3311 return true;
3312
3313 return false;
3314 }
3315
3316 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3317 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3318
3319 static bool
3320 valid_fixed_convert_types_p (tree type1, tree type2)
3321 {
3322 return (FIXED_POINT_TYPE_P (type1)
3323 && (INTEGRAL_TYPE_P (type2)
3324 || SCALAR_FLOAT_TYPE_P (type2)
3325 || FIXED_POINT_TYPE_P (type2)));
3326 }
3327
3328 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3329 is a problem, otherwise false. */
3330
3331 static bool
3332 verify_gimple_call (gcall *stmt)
3333 {
3334 tree fn = gimple_call_fn (stmt);
3335 tree fntype, fndecl;
3336 unsigned i;
3337
3338 if (gimple_call_internal_p (stmt))
3339 {
3340 if (fn)
3341 {
3342 error ("gimple call has two targets");
3343 debug_generic_stmt (fn);
3344 return true;
3345 }
3346 }
3347 else
3348 {
3349 if (!fn)
3350 {
3351 error ("gimple call has no target");
3352 return true;
3353 }
3354 }
3355
3356 if (fn && !is_gimple_call_addr (fn))
3357 {
3358 error ("invalid function in gimple call");
3359 debug_generic_stmt (fn);
3360 return true;
3361 }
3362
3363 if (fn
3364 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3365 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3366 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3367 {
3368 error ("non-function in gimple call");
3369 return true;
3370 }
3371
3372 fndecl = gimple_call_fndecl (stmt);
3373 if (fndecl
3374 && TREE_CODE (fndecl) == FUNCTION_DECL
3375 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3376 && !DECL_PURE_P (fndecl)
3377 && !TREE_READONLY (fndecl))
3378 {
3379 error ("invalid pure const state for function");
3380 return true;
3381 }
3382
3383 tree lhs = gimple_call_lhs (stmt);
3384 if (lhs
3385 && (!is_gimple_lvalue (lhs)
3386 || verify_types_in_gimple_reference (lhs, true)))
3387 {
3388 error ("invalid LHS in gimple call");
3389 return true;
3390 }
3391
3392 if (gimple_call_ctrl_altering_p (stmt)
3393 && gimple_call_noreturn_p (stmt)
3394 && should_remove_lhs_p (lhs))
3395 {
3396 error ("LHS in %<noreturn%> call");
3397 return true;
3398 }
3399
3400 fntype = gimple_call_fntype (stmt);
3401 if (fntype
3402 && lhs
3403 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3404 /* ??? At least C++ misses conversions at assignments from
3405 void * call results.
3406 For now simply allow arbitrary pointer type conversions. */
3407 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3408 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3409 {
3410 error ("invalid conversion in gimple call");
3411 debug_generic_stmt (TREE_TYPE (lhs));
3412 debug_generic_stmt (TREE_TYPE (fntype));
3413 return true;
3414 }
3415
3416 if (gimple_call_chain (stmt)
3417 && !is_gimple_val (gimple_call_chain (stmt)))
3418 {
3419 error ("invalid static chain in gimple call");
3420 debug_generic_stmt (gimple_call_chain (stmt));
3421 return true;
3422 }
3423
3424 /* If there is a static chain argument, the call should either be
3425 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3426 if (gimple_call_chain (stmt)
3427 && fndecl
3428 && !DECL_STATIC_CHAIN (fndecl))
3429 {
3430 error ("static chain with function that doesn%'t use one");
3431 return true;
3432 }
3433
3434 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3435 {
3436 switch (DECL_FUNCTION_CODE (fndecl))
3437 {
3438 case BUILT_IN_UNREACHABLE:
3439 case BUILT_IN_TRAP:
3440 if (gimple_call_num_args (stmt) > 0)
3441 {
3442 /* Built-in unreachable with parameters might not be caught by
3443 the undefined behavior sanitizer. Front ends check that users do
3444 not call them that way, but we also produce calls to
3445 __builtin_unreachable internally, for example when IPA figures
3446 out that a call cannot happen in a valid program. In such cases,
3447 we must make sure the arguments have been stripped off. */
3448 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3449 "with arguments");
3450 return true;
3451 }
3452 break;
3453 default:
3454 break;
3455 }
3456 }
3457
3458 /* ??? The C frontend passes unpromoted arguments in case it
3459 didn't see a function declaration before the call. So for now
3460 leave the call arguments mostly unverified. Once we gimplify
3461 unit-at-a-time we have a chance to fix this. */
3462
3463 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3464 {
3465 tree arg = gimple_call_arg (stmt, i);
3466 if ((is_gimple_reg_type (TREE_TYPE (arg))
3467 && !is_gimple_val (arg))
3468 || (!is_gimple_reg_type (TREE_TYPE (arg))
3469 && !is_gimple_lvalue (arg)))
3470 {
3471 error ("invalid argument to gimple call");
3472 debug_generic_expr (arg);
3473 return true;
3474 }
3475 }
3476
3477 return false;
3478 }
3479
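/* E.g. (illustrative shapes only): the checks above would reject a
   call such as

     __builtin_unreachable (x_1);

   because __builtin_unreachable and __builtin_trap must carry no
   arguments, and likewise a ctrl-altering noreturn call that still
   has an SSA-name LHS.  */
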
3480 /* Verifies the gimple comparison with the result type TYPE and
3481 the operands OP0 and OP1; the comparison code is CODE. */
3482
3483 static bool
3484 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3485 {
3486 tree op0_type = TREE_TYPE (op0);
3487 tree op1_type = TREE_TYPE (op1);
3488
3489 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3490 {
3491 error ("invalid operands in gimple comparison");
3492 return true;
3493 }
3494
3495 /* For comparisons we do not have the operation's type as the
3496 effective type the comparison is carried out in. Instead
3497 we require that either the first operand is trivially
3498 convertible into the second, or the other way around.
3499 Because we special-case pointers to void we allow
3500 comparisons of pointers with the same mode as well. */
3501 if (!useless_type_conversion_p (op0_type, op1_type)
3502 && !useless_type_conversion_p (op1_type, op0_type)
3503 && (!POINTER_TYPE_P (op0_type)
3504 || !POINTER_TYPE_P (op1_type)
3505 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3506 {
3507 error ("mismatching comparison operand types");
3508 debug_generic_expr (op0_type);
3509 debug_generic_expr (op1_type);
3510 return true;
3511 }
3512
3513 /* The resulting type of a comparison may be an effective boolean type. */
3514 if (INTEGRAL_TYPE_P (type)
3515 && (TREE_CODE (type) == BOOLEAN_TYPE
3516 || TYPE_PRECISION (type) == 1))
3517 {
3518 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3519 || TREE_CODE (op1_type) == VECTOR_TYPE)
3520 && code != EQ_EXPR && code != NE_EXPR
3521 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3522 && !VECTOR_INTEGER_TYPE_P (op0_type))
3523 {
3524 error ("unsupported operation or type for vector comparison"
3525 " returning a boolean");
3526 debug_generic_expr (op0_type);
3527 debug_generic_expr (op1_type);
3528 return true;
3529 }
3530 }
3531 /* Or a boolean vector type with the same element count
3532 as the comparison operand types. */
3533 else if (TREE_CODE (type) == VECTOR_TYPE
3534 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3535 {
3536 if (TREE_CODE (op0_type) != VECTOR_TYPE
3537 || TREE_CODE (op1_type) != VECTOR_TYPE)
3538 {
3539 error ("non-vector operands in vector comparison");
3540 debug_generic_expr (op0_type);
3541 debug_generic_expr (op1_type);
3542 return true;
3543 }
3544
3545 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3546 TYPE_VECTOR_SUBPARTS (op0_type)))
3547 {
3548 error ("invalid vector comparison resulting type");
3549 debug_generic_expr (type);
3550 return true;
3551 }
3552 }
3553 else
3554 {
3555 error ("bogus comparison result type");
3556 debug_generic_expr (type);
3557 return true;
3558 }
3559
3560 return false;
3561 }
3562
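/* E.g. (illustrative shapes only): for scalar int operands,

     _3 = x_1 < y_2;

   must give _3 a boolean-like type (BOOLEAN_TYPE or a 1-bit integral
   type).  A less-than comparison of two float vectors with a scalar
   boolean result is rejected, and a vector result must be a boolean
   vector with the same number of elements as the operands.  */
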
3563 /* Verify a gimple assignment statement STMT with a unary rhs.
3564 Returns true if anything is wrong. */
3565
3566 static bool
3567 verify_gimple_assign_unary (gassign *stmt)
3568 {
3569 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3570 tree lhs = gimple_assign_lhs (stmt);
3571 tree lhs_type = TREE_TYPE (lhs);
3572 tree rhs1 = gimple_assign_rhs1 (stmt);
3573 tree rhs1_type = TREE_TYPE (rhs1);
3574
3575 if (!is_gimple_reg (lhs))
3576 {
3577 error ("non-register as LHS of unary operation");
3578 return true;
3579 }
3580
3581 if (!is_gimple_val (rhs1))
3582 {
3583 error ("invalid operand in unary operation");
3584 return true;
3585 }
3586
3587 const char* const code_name = get_tree_code_name (rhs_code);
3588
3589 /* First handle conversions. */
3590 switch (rhs_code)
3591 {
3592 CASE_CONVERT:
3593 {
3594 /* Allow conversions between vectors with the same number of elements,
3595 provided that the conversion is OK for the element types too. */
3596 if (VECTOR_TYPE_P (lhs_type)
3597 && VECTOR_TYPE_P (rhs1_type)
3598 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3599 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3600 {
3601 lhs_type = TREE_TYPE (lhs_type);
3602 rhs1_type = TREE_TYPE (rhs1_type);
3603 }
3604 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3605 {
3606 error ("invalid vector types in nop conversion");
3607 debug_generic_expr (lhs_type);
3608 debug_generic_expr (rhs1_type);
3609 return true;
3610 }
3611
3612 /* Allow conversions from pointer type to integral type only if
3613 there is no sign or zero extension involved.
3614 For targets where the precision of ptrofftype doesn't match that
3615 of pointers we allow conversions to types where
3616 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3617 if ((POINTER_TYPE_P (lhs_type)
3618 && INTEGRAL_TYPE_P (rhs1_type))
3619 || (POINTER_TYPE_P (rhs1_type)
3620 && INTEGRAL_TYPE_P (lhs_type)
3621 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3622 #if defined(POINTERS_EXTEND_UNSIGNED)
3623 || (TYPE_MODE (rhs1_type) == ptr_mode
3624 && (TYPE_PRECISION (lhs_type)
3625 == BITS_PER_WORD /* word_mode */
3626 || (TYPE_PRECISION (lhs_type)
3627 == GET_MODE_PRECISION (Pmode))))
3628 #endif
3629 )))
3630 return false;
3631
3632 /* Allow conversion from integral to offset type and vice versa. */
3633 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3634 && INTEGRAL_TYPE_P (rhs1_type))
3635 || (INTEGRAL_TYPE_P (lhs_type)
3636 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3637 return false;
3638
3639 /* Otherwise assert we are converting between types of the
3640 same kind. */
3641 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3642 {
3643 error ("invalid types in nop conversion");
3644 debug_generic_expr (lhs_type);
3645 debug_generic_expr (rhs1_type);
3646 return true;
3647 }
3648
3649 return false;
3650 }
3651
3652 case ADDR_SPACE_CONVERT_EXPR:
3653 {
3654 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3655 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3656 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3657 {
3658 error ("invalid types in address space conversion");
3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 return true;
3662 }
3663
3664 return false;
3665 }
3666
3667 case FIXED_CONVERT_EXPR:
3668 {
3669 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3670 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3671 {
3672 error ("invalid types in fixed-point conversion");
3673 debug_generic_expr (lhs_type);
3674 debug_generic_expr (rhs1_type);
3675 return true;
3676 }
3677
3678 return false;
3679 }
3680
3681 case FLOAT_EXPR:
3682 {
3683 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3684 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3685 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3686 {
3687 error ("invalid types in conversion to floating-point");
3688 debug_generic_expr (lhs_type);
3689 debug_generic_expr (rhs1_type);
3690 return true;
3691 }
3692
3693 return false;
3694 }
3695
3696 case FIX_TRUNC_EXPR:
3697 {
3698 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3699 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3700 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3701 {
3702 error ("invalid types in conversion to integer");
3703 debug_generic_expr (lhs_type);
3704 debug_generic_expr (rhs1_type);
3705 return true;
3706 }
3707
3708 return false;
3709 }
3710
3711 case VEC_UNPACK_HI_EXPR:
3712 case VEC_UNPACK_LO_EXPR:
3713 case VEC_UNPACK_FLOAT_HI_EXPR:
3714 case VEC_UNPACK_FLOAT_LO_EXPR:
3715 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3716 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3717 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3718 || TREE_CODE (lhs_type) != VECTOR_TYPE
3719 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3720 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3721 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3722 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3723 || ((rhs_code == VEC_UNPACK_HI_EXPR
3724 || rhs_code == VEC_UNPACK_LO_EXPR)
3725 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3726 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3727 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3728 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3729 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3730 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3731 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3732 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3733 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3734 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3735 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3736 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3737 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3738 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3739 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3740 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3741 {
3742 error ("type mismatch in %qs expression", code_name);
3743 debug_generic_expr (lhs_type);
3744 debug_generic_expr (rhs1_type);
3745 return true;
3746 }
3747
3748 return false;
3749
3750 case NEGATE_EXPR:
3751 case ABS_EXPR:
3752 case BIT_NOT_EXPR:
3753 case PAREN_EXPR:
3754 case CONJ_EXPR:
3755 break;
3756
3757 case ABSU_EXPR:
3758 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3759 || !TYPE_UNSIGNED (lhs_type)
3760 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3761 || TYPE_UNSIGNED (rhs1_type)
3762 || element_precision (lhs_type) != element_precision (rhs1_type))
3763 {
3764 error ("invalid types for %qs", code_name);
3765 debug_generic_expr (lhs_type);
3766 debug_generic_expr (rhs1_type);
3767 return true;
3768 }
3769 return false;
3770
3771 case VEC_DUPLICATE_EXPR:
3772 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3773 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3774 {
3775 error ("%qs should be from a scalar to a like vector", code_name);
3776 debug_generic_expr (lhs_type);
3777 debug_generic_expr (rhs1_type);
3778 return true;
3779 }
3780 return false;
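
 /* E.g. replicating a scalar s_1 into every element of a vector:
      v_2 = VEC_DUPLICATE_EXPR <s_1>;
    where the element type of v_2 matches the type of s_1.  */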
3781
3782 default:
3783 gcc_unreachable ();
3784 }
3785
3786 /* For the remaining codes assert there is no conversion involved. */
3787 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3788 {
3789 error ("non-trivial conversion in unary operation");
3790 debug_generic_expr (lhs_type);
3791 debug_generic_expr (rhs1_type);
3792 return true;
3793 }
3794
3795 return false;
3796 }
3797
3798 /* Verify a gimple assignment statement STMT with a binary rhs.
3799 Returns true if anything is wrong. */
3800
3801 static bool
3802 verify_gimple_assign_binary (gassign *stmt)
3803 {
3804 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3805 tree lhs = gimple_assign_lhs (stmt);
3806 tree lhs_type = TREE_TYPE (lhs);
3807 tree rhs1 = gimple_assign_rhs1 (stmt);
3808 tree rhs1_type = TREE_TYPE (rhs1);
3809 tree rhs2 = gimple_assign_rhs2 (stmt);
3810 tree rhs2_type = TREE_TYPE (rhs2);
3811
3812 if (!is_gimple_reg (lhs))
3813 {
3814 error ("non-register as LHS of binary operation");
3815 return true;
3816 }
3817
3818 if (!is_gimple_val (rhs1)
3819 || !is_gimple_val (rhs2))
3820 {
3821 error ("invalid operands in binary operation");
3822 return true;
3823 }
3824
3825 const char* const code_name = get_tree_code_name (rhs_code);
3826
3827 /* First handle operations that involve different types. */
3828 switch (rhs_code)
3829 {
3830 case COMPLEX_EXPR:
3831 {
3832 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3833 || !(INTEGRAL_TYPE_P (rhs1_type)
3834 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3835 || !(INTEGRAL_TYPE_P (rhs2_type)
3836 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3837 {
3838 error ("type mismatch in %qs", code_name);
3839 debug_generic_expr (lhs_type);
3840 debug_generic_expr (rhs1_type);
3841 debug_generic_expr (rhs2_type);
3842 return true;
3843 }
3844
3845 return false;
3846 }
3847
3848 case LSHIFT_EXPR:
3849 case RSHIFT_EXPR:
3850 case LROTATE_EXPR:
3851 case RROTATE_EXPR:
3852 {
3853 /* Shifts and rotates are ok on integral types, fixed point
3854 types and integer vector types. */
3855 if ((!INTEGRAL_TYPE_P (rhs1_type)
3856 && !FIXED_POINT_TYPE_P (rhs1_type)
3857 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3858 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3859 || (!INTEGRAL_TYPE_P (rhs2_type)
3860 /* Vector shifts of vectors are also ok. */
3861 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3862 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3863 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3864 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3865 || !useless_type_conversion_p (lhs_type, rhs1_type))
3866 {
3867 error ("type mismatch in %qs", code_name);
3868 debug_generic_expr (lhs_type);
3869 debug_generic_expr (rhs1_type);
3870 debug_generic_expr (rhs2_type);
3871 return true;
3872 }
3873
3874 return false;
3875 }
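
 /* Both shift forms are accepted here, e.g. for a vector(4) int v:
      w_2 = v_1 << 3;     <-- whole vector shifted by a scalar amount
      w_3 = v_1 << m_4;   <-- element-wise shift by another vector  */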
3876
3877 case WIDEN_LSHIFT_EXPR:
3878 {
3879 if (!INTEGRAL_TYPE_P (lhs_type)
3880 || !INTEGRAL_TYPE_P (rhs1_type)
3881 || TREE_CODE (rhs2) != INTEGER_CST
3882 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3883 {
3884 error ("type mismatch in %qs", code_name);
3885 debug_generic_expr (lhs_type);
3886 debug_generic_expr (rhs1_type);
3887 debug_generic_expr (rhs2_type);
3888 return true;
3889 }
3890
3891 return false;
3892 }
3893
3894 case VEC_WIDEN_LSHIFT_HI_EXPR:
3895 case VEC_WIDEN_LSHIFT_LO_EXPR:
3896 {
3897 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3898 || TREE_CODE (lhs_type) != VECTOR_TYPE
3899 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3900 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3901 || TREE_CODE (rhs2) != INTEGER_CST
3902 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3903 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3904 {
3905 error ("type mismatch in %qs", code_name);
3906 debug_generic_expr (lhs_type);
3907 debug_generic_expr (rhs1_type);
3908 debug_generic_expr (rhs2_type);
3909 return true;
3910 }
3911
3912 return false;
3913 }
3914
3915 case PLUS_EXPR:
3916 case MINUS_EXPR:
3917 {
3918 tree lhs_etype = lhs_type;
3919 tree rhs1_etype = rhs1_type;
3920 tree rhs2_etype = rhs2_type;
3921 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3922 {
3923 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3924 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3925 {
3926 error ("invalid non-vector operands to %qs", code_name);
3927 return true;
3928 }
3929 lhs_etype = TREE_TYPE (lhs_type);
3930 rhs1_etype = TREE_TYPE (rhs1_type);
3931 rhs2_etype = TREE_TYPE (rhs2_type);
3932 }
3933 if (POINTER_TYPE_P (lhs_etype)
3934 || POINTER_TYPE_P (rhs1_etype)
3935 || POINTER_TYPE_P (rhs2_etype))
3936 {
3937 error ("invalid (pointer) operands %qs", code_name);
3938 return true;
3939 }
3940
3941 /* Continue with generic binary expression handling. */
3942 break;
3943 }
3944
3945 case POINTER_PLUS_EXPR:
3946 {
3947 if (!POINTER_TYPE_P (rhs1_type)
3948 || !useless_type_conversion_p (lhs_type, rhs1_type)
3949 || !ptrofftype_p (rhs2_type))
3950 {
3951 error ("type mismatch in %qs", code_name);
3952 debug_generic_stmt (lhs_type);
3953 debug_generic_stmt (rhs1_type);
3954 debug_generic_stmt (rhs2_type);
3955 return true;
3956 }
3957
3958 return false;
3959 }
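
 /* E.g. for char *p, the GIMPLE for p + 4 is
      q_2 = p_1 + 4;
    where the offset operand has ptrofftype (sizetype) and the result
    type matches the pointer operand.  */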
3960
3961 case POINTER_DIFF_EXPR:
3962 {
3963 if (!POINTER_TYPE_P (rhs1_type)
3964 || !POINTER_TYPE_P (rhs2_type)
3965 /* Because we special-case pointers to void we allow difference
3966 of arbitrary pointers with the same mode. */
3967 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3968 || TREE_CODE (lhs_type) != INTEGER_TYPE
3969 || TYPE_UNSIGNED (lhs_type)
3970 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3971 {
3972 error ("type mismatch in %qs", code_name);
3973 debug_generic_stmt (lhs_type);
3974 debug_generic_stmt (rhs1_type);
3975 debug_generic_stmt (rhs2_type);
3976 return true;
3977 }
3978
3979 return false;
3980 }
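
 /* E.g. for int *p, *q the C expression p - q is lowered to
      d_3 = p_1 - q_2;     <-- POINTER_DIFF_EXPR, signed result
      e_4 = d_3 /[ex] 4;   <-- divide by the element size
    where d_3 has a signed integer type of pointer precision.  */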
3981
3982 case TRUTH_ANDIF_EXPR:
3983 case TRUTH_ORIF_EXPR:
3984 case TRUTH_AND_EXPR:
3985 case TRUTH_OR_EXPR:
3986 case TRUTH_XOR_EXPR:
3987
3988 gcc_unreachable ();
3989
3990 case LT_EXPR:
3991 case LE_EXPR:
3992 case GT_EXPR:
3993 case GE_EXPR:
3994 case EQ_EXPR:
3995 case NE_EXPR:
3996 case UNORDERED_EXPR:
3997 case ORDERED_EXPR:
3998 case UNLT_EXPR:
3999 case UNLE_EXPR:
4000 case UNGT_EXPR:
4001 case UNGE_EXPR:
4002 case UNEQ_EXPR:
4003 case LTGT_EXPR:
4004 /* Comparisons are also binary, but the result type is not
4005 connected to the operand types. */
4006 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4007
4008 case WIDEN_MULT_EXPR:
4009 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4010 return true;
4011 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4012 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
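
 /* E.g. a 32x32->64 widening multiply
      w_3 = WIDEN_MULT_EXPR <a_1, b_2>;
    with long long w and int a, b satisfies these checks.  */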
4013
4014 case WIDEN_SUM_EXPR:
4015 {
4016 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4017 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4018 && ((!INTEGRAL_TYPE_P (rhs1_type)
4019 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4020 || (!INTEGRAL_TYPE_P (lhs_type)
4021 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4022 || !useless_type_conversion_p (lhs_type, rhs2_type)
4023 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4024 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4025 {
4026 error ("type mismatch in %qs", code_name);
4027 debug_generic_expr (lhs_type);
4028 debug_generic_expr (rhs1_type);
4029 debug_generic_expr (rhs2_type);
4030 return true;
4031 }
4032 return false;
4033 }
4034
4035 case VEC_WIDEN_MULT_HI_EXPR:
4036 case VEC_WIDEN_MULT_LO_EXPR:
4037 case VEC_WIDEN_MULT_EVEN_EXPR:
4038 case VEC_WIDEN_MULT_ODD_EXPR:
4039 {
4040 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4041 || TREE_CODE (lhs_type) != VECTOR_TYPE
4042 || !types_compatible_p (rhs1_type, rhs2_type)
4043 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4044 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4045 {
4046 error ("type mismatch in %qs", code_name);
4047 debug_generic_expr (lhs_type);
4048 debug_generic_expr (rhs1_type);
4049 debug_generic_expr (rhs2_type);
4050 return true;
4051 }
4052 return false;
4053 }
4054
4055 case VEC_PACK_TRUNC_EXPR:
4056 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4057 vector boolean types. */
4058 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4059 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4060 && types_compatible_p (rhs1_type, rhs2_type)
4061 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4062 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4063 return false;
4064
4065 /* Fallthru. */
4066 case VEC_PACK_SAT_EXPR:
4067 case VEC_PACK_FIX_TRUNC_EXPR:
4068 {
4069 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4070 || TREE_CODE (lhs_type) != VECTOR_TYPE
4071 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4072 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4073 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4074 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4075 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4076 || !types_compatible_p (rhs1_type, rhs2_type)
4077 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4078 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4079 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4080 TYPE_VECTOR_SUBPARTS (lhs_type)))
4081 {
4082 error ("type mismatch in %qs", code_name);
4083 debug_generic_expr (lhs_type);
4084 debug_generic_expr (rhs1_type);
4085 debug_generic_expr (rhs2_type);
4086 return true;
4087 }
4088
4089 return false;
4090 }
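
 /* E.g. packing two V4SI operands into one V8HI result:
      v8hi_3 = VEC_PACK_TRUNC_EXPR <v4si_1, v4si_2>;
    the result elements are half as wide and the result has twice as
    many elements as each operand.  */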
4091
4092 case VEC_PACK_FLOAT_EXPR:
4093 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4094 || TREE_CODE (lhs_type) != VECTOR_TYPE
4095 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4096 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4097 || !types_compatible_p (rhs1_type, rhs2_type)
4098 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4099 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4100 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4101 TYPE_VECTOR_SUBPARTS (lhs_type)))
4102 {
4103 error ("type mismatch in %qs", code_name);
4104 debug_generic_expr (lhs_type);
4105 debug_generic_expr (rhs1_type);
4106 debug_generic_expr (rhs2_type);
4107 return true;
4108 }
4109
4110 return false;
4111
4112 case MULT_EXPR:
4113 case MULT_HIGHPART_EXPR:
4114 case TRUNC_DIV_EXPR:
4115 case CEIL_DIV_EXPR:
4116 case FLOOR_DIV_EXPR:
4117 case ROUND_DIV_EXPR:
4118 case TRUNC_MOD_EXPR:
4119 case CEIL_MOD_EXPR:
4120 case FLOOR_MOD_EXPR:
4121 case ROUND_MOD_EXPR:
4122 case RDIV_EXPR:
4123 case EXACT_DIV_EXPR:
4124 case MIN_EXPR:
4125 case MAX_EXPR:
4126 case BIT_IOR_EXPR:
4127 case BIT_XOR_EXPR:
4128 case BIT_AND_EXPR:
4129 /* Continue with generic binary expression handling. */
4130 break;
4131
4132 case VEC_SERIES_EXPR:
4133 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4134 {
4135 error ("type mismatch in %qs", code_name);
4136 debug_generic_expr (rhs1_type);
4137 debug_generic_expr (rhs2_type);
4138 return true;
4139 }
4140 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4141 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4142 {
4143 error ("vector type expected in %qs", code_name);
4144 debug_generic_expr (lhs_type);
4145 return true;
4146 }
4147 return false;
4148
4149 default:
4150 gcc_unreachable ();
4151 }
4152
4153 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4154 || !useless_type_conversion_p (lhs_type, rhs2_type))
4155 {
4156 error ("type mismatch in binary expression");
4157 debug_generic_stmt (lhs_type);
4158 debug_generic_stmt (rhs1_type);
4159 debug_generic_stmt (rhs2_type);
4160 return true;
4161 }
4162
4163 return false;
4164 }
4165
4166 /* Verify a gimple assignment statement STMT with a ternary rhs.
4167 Returns true if anything is wrong. */
4168
4169 static bool
4170 verify_gimple_assign_ternary (gassign *stmt)
4171 {
4172 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4173 tree lhs = gimple_assign_lhs (stmt);
4174 tree lhs_type = TREE_TYPE (lhs);
4175 tree rhs1 = gimple_assign_rhs1 (stmt);
4176 tree rhs1_type = TREE_TYPE (rhs1);
4177 tree rhs2 = gimple_assign_rhs2 (stmt);
4178 tree rhs2_type = TREE_TYPE (rhs2);
4179 tree rhs3 = gimple_assign_rhs3 (stmt);
4180 tree rhs3_type = TREE_TYPE (rhs3);
4181
4182 if (!is_gimple_reg (lhs))
4183 {
4184 error ("non-register as LHS of ternary operation");
4185 return true;
4186 }
4187
4188 if ((rhs_code == COND_EXPR
4189 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4190 || !is_gimple_val (rhs2)
4191 || !is_gimple_val (rhs3))
4192 {
4193 error ("invalid operands in ternary operation");
4194 return true;
4195 }
4196
4197 const char* const code_name = get_tree_code_name (rhs_code);
4198
4199 /* First handle operations that involve different types. */
4200 switch (rhs_code)
4201 {
4202 case WIDEN_MULT_PLUS_EXPR:
4203 case WIDEN_MULT_MINUS_EXPR:
4204 if ((!INTEGRAL_TYPE_P (rhs1_type)
4205 && !FIXED_POINT_TYPE_P (rhs1_type))
4206 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4207 || !useless_type_conversion_p (lhs_type, rhs3_type)
4208 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4209 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4210 {
4211 error ("type mismatch in %qs", code_name);
4212 debug_generic_expr (lhs_type);
4213 debug_generic_expr (rhs1_type);
4214 debug_generic_expr (rhs2_type);
4215 debug_generic_expr (rhs3_type);
4216 return true;
4217 }
4218 break;
4219
4220 case VEC_COND_EXPR:
4221 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4222 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4223 TYPE_VECTOR_SUBPARTS (lhs_type)))
4224 {
4225 error ("the first argument of a %qs must be of a "
4226 "boolean vector type of the same number of elements "
4227 "as the result", code_name);
4228 debug_generic_expr (lhs_type);
4229 debug_generic_expr (rhs1_type);
4230 return true;
4231 }
4232 /* Fallthrough. */
4233 case COND_EXPR:
4234 if (!is_gimple_val (rhs1)
4235 && verify_gimple_comparison (TREE_TYPE (rhs1),
4236 TREE_OPERAND (rhs1, 0),
4237 TREE_OPERAND (rhs1, 1),
4238 TREE_CODE (rhs1)))
4239 return true;
4240 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4241 || !useless_type_conversion_p (lhs_type, rhs3_type))
4242 {
4243 error ("type mismatch in %qs", code_name);
4244 debug_generic_expr (lhs_type);
4245 debug_generic_expr (rhs2_type);
4246 debug_generic_expr (rhs3_type);
4247 return true;
4248 }
4249 break;
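
 /* E.g. x_4 = c_1 ? a_2 : b_3; is a COND_EXPR whose condition is a
    gimple value or a comparison; for a VEC_COND_EXPR the condition is
    a boolean vector selecting each result element.  */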
4250
4251 case VEC_PERM_EXPR:
4252 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4253 || !useless_type_conversion_p (lhs_type, rhs2_type))
4254 {
4255 error ("type mismatch in %qs", code_name);
4256 debug_generic_expr (lhs_type);
4257 debug_generic_expr (rhs1_type);
4258 debug_generic_expr (rhs2_type);
4259 debug_generic_expr (rhs3_type);
4260 return true;
4261 }
4262
4263 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4264 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4265 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4266 {
4267 error ("vector types expected in %qs", code_name);
4268 debug_generic_expr (lhs_type);
4269 debug_generic_expr (rhs1_type);
4270 debug_generic_expr (rhs2_type);
4271 debug_generic_expr (rhs3_type);
4272 return true;
4273 }
4274
4275 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4276 TYPE_VECTOR_SUBPARTS (rhs2_type))
4277 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4278 TYPE_VECTOR_SUBPARTS (rhs3_type))
4279 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4280 TYPE_VECTOR_SUBPARTS (lhs_type)))
4281 {
4282 error ("vectors with different element number found in %qs",
4283 code_name);
4284 debug_generic_expr (lhs_type);
4285 debug_generic_expr (rhs1_type);
4286 debug_generic_expr (rhs2_type);
4287 debug_generic_expr (rhs3_type);
4288 return true;
4289 }
4290
4291 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4292 || (TREE_CODE (rhs3) != VECTOR_CST
4293 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4294 (TREE_TYPE (rhs3_type)))
4295 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4296 (TREE_TYPE (rhs1_type))))))
4297 {
4298 error ("invalid mask type in %qs", code_name);
4299 debug_generic_expr (lhs_type);
4300 debug_generic_expr (rhs1_type);
4301 debug_generic_expr (rhs2_type);
4302 debug_generic_expr (rhs3_type);
4303 return true;
4304 }
4305
4306 return false;
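
 /* E.g. interleaving the low halves of two V4SI vectors:
      v_3 = VEC_PERM_EXPR <a_1, b_2, { 0, 4, 1, 5 }>;
    mask elements index into the concatenation of the two inputs, so
    4 here selects element 0 of b_2.  */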
4307
4308 case SAD_EXPR:
4309 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4310 || !useless_type_conversion_p (lhs_type, rhs3_type)
4311 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4312 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4313 {
4314 error ("type mismatch in %qs", code_name);
4315 debug_generic_expr (lhs_type);
4316 debug_generic_expr (rhs1_type);
4317 debug_generic_expr (rhs2_type);
4318 debug_generic_expr (rhs3_type);
4319 return true;
4320 }
4321
4322 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4323 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4324 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4325 {
4326 error ("vector types expected in %qs", code_name);
4327 debug_generic_expr (lhs_type);
4328 debug_generic_expr (rhs1_type);
4329 debug_generic_expr (rhs2_type);
4330 debug_generic_expr (rhs3_type);
4331 return true;
4332 }
4333
4334 return false;
4335
4336 case BIT_INSERT_EXPR:
4337 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4338 {
4339 error ("type mismatch in %qs", code_name);
4340 debug_generic_expr (lhs_type);
4341 debug_generic_expr (rhs1_type);
4342 return true;
4343 }
4344 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4345 && INTEGRAL_TYPE_P (rhs2_type))
4346 /* Vector element insert. */
4347 || (VECTOR_TYPE_P (rhs1_type)
4348 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4349 /* Aligned sub-vector insert. */
4350 || (VECTOR_TYPE_P (rhs1_type)
4351 && VECTOR_TYPE_P (rhs2_type)
4352 && types_compatible_p (TREE_TYPE (rhs1_type),
4353 TREE_TYPE (rhs2_type))
4354 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4355 TYPE_VECTOR_SUBPARTS (rhs2_type))
4356 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4357 {
4358 error ("not allowed type combination in %qs", code_name);
4359 debug_generic_expr (rhs1_type);
4360 debug_generic_expr (rhs2_type);
4361 return true;
4362 }
4363 if (! tree_fits_uhwi_p (rhs3)
4364 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4365 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4366 {
4367 error ("invalid position or size in %qs", code_name);
4368 return true;
4369 }
4370 if (INTEGRAL_TYPE_P (rhs1_type)
4371 && !type_has_mode_precision_p (rhs1_type))
4372 {
4373 error ("%qs into non-mode-precision operand", code_name);
4374 return true;
4375 }
4376 if (INTEGRAL_TYPE_P (rhs1_type))
4377 {
4378 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4379 if (bitpos >= TYPE_PRECISION (rhs1_type)
4380 || (bitpos + TYPE_PRECISION (rhs2_type)
4381 > TYPE_PRECISION (rhs1_type)))
4382 {
4383 error ("insertion out of range in %qs", code_name);
4384 return true;
4385 }
4386 }
4387 else if (VECTOR_TYPE_P (rhs1_type))
4388 {
4389 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4390 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4391 if (bitpos % bitsize != 0)
4392 {
4393 error ("%qs not at element boundary", code_name);
4394 return true;
4395 }
4396 }
4397 return false;
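
 /* E.g. replacing element 2 of a V4SI vector (32-bit elements, hence
    bit position 64):
      v_3 = BIT_INSERT_EXPR <v_1, s_2, 64>;
    the position must fall on an element boundary as checked above.  */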
4398
4399 case DOT_PROD_EXPR:
4400 {
4401 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4402 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4403 && ((!INTEGRAL_TYPE_P (rhs1_type)
4404 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4405 || (!INTEGRAL_TYPE_P (lhs_type)
4406 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4407 || !types_compatible_p (rhs1_type, rhs2_type)
4408 || !useless_type_conversion_p (lhs_type, rhs3_type)
4409 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4410 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4411 {
4412 error ("type mismatch in %qs", code_name);
4413 debug_generic_expr (lhs_type);
4414 debug_generic_expr (rhs1_type);
4415 debug_generic_expr (rhs2_type);
4416 return true;
4417 }
4418 return false;
4419 }
4420
4421 case REALIGN_LOAD_EXPR:
4422 /* FIXME. */
4423 return false;
4424
4425 default:
4426 gcc_unreachable ();
4427 }
4428 return false;
4429 }
4430
4431 /* Verify a gimple assignment statement STMT with a single rhs.
4432 Returns true if anything is wrong. */
4433
4434 static bool
4435 verify_gimple_assign_single (gassign *stmt)
4436 {
4437 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4438 tree lhs = gimple_assign_lhs (stmt);
4439 tree lhs_type = TREE_TYPE (lhs);
4440 tree rhs1 = gimple_assign_rhs1 (stmt);
4441 tree rhs1_type = TREE_TYPE (rhs1);
4442 bool res = false;
4443
4444 const char* const code_name = get_tree_code_name (rhs_code);
4445
4446 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4447 {
4448 error ("non-trivial conversion in %qs", code_name);
4449 debug_generic_expr (lhs_type);
4450 debug_generic_expr (rhs1_type);
4451 return true;
4452 }
4453
4454 if (gimple_clobber_p (stmt)
4455 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4456 {
4457 error ("%qs LHS in clobber statement",
4458 get_tree_code_name (TREE_CODE (lhs)));
4459 debug_generic_expr (lhs);
4460 return true;
4461 }
4462
4463 if (handled_component_p (lhs)
4464 || TREE_CODE (lhs) == MEM_REF
4465 || TREE_CODE (lhs) == TARGET_MEM_REF)
4466 res |= verify_types_in_gimple_reference (lhs, true);
4467
4468 /* Special codes we cannot handle via their class. */
4469 switch (rhs_code)
4470 {
4471 case ADDR_EXPR:
4472 {
4473 tree op = TREE_OPERAND (rhs1, 0);
4474 if (!is_gimple_addressable (op))
4475 {
4476 error ("invalid operand in %qs", code_name);
4477 return true;
4478 }
4479
4480 /* Technically there is no longer a need for matching types, but
4481 gimple hygiene asks for this check. In LTO we can end up
4482 combining incompatible units and thus end up with addresses
4483 of globals that change their type to a common one. */
4484 if (!in_lto_p
4485 && !types_compatible_p (TREE_TYPE (op),
4486 TREE_TYPE (TREE_TYPE (rhs1)))
4487 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4488 TREE_TYPE (op)))
4489 {
4490 error ("type mismatch in %qs", code_name);
4491 debug_generic_stmt (TREE_TYPE (rhs1));
4492 debug_generic_stmt (TREE_TYPE (op));
4493 return true;
4494 }
4495
4496 return (verify_address (rhs1, true)
4497 || verify_types_in_gimple_reference (op, true));
4498 }
4499
4500 /* tcc_reference */
4501 case INDIRECT_REF:
4502 error ("%qs in gimple IL", code_name);
4503 return true;
4504
4505 case COMPONENT_REF:
4506 case BIT_FIELD_REF:
4507 case ARRAY_REF:
4508 case ARRAY_RANGE_REF:
4509 case VIEW_CONVERT_EXPR:
4510 case REALPART_EXPR:
4511 case IMAGPART_EXPR:
4512 case TARGET_MEM_REF:
4513 case MEM_REF:
4514 if (!is_gimple_reg (lhs)
4515 && is_gimple_reg_type (TREE_TYPE (lhs)))
4516 {
4517 error ("invalid RHS for gimple memory store: %qs", code_name);
4518 debug_generic_stmt (lhs);
4519 debug_generic_stmt (rhs1);
4520 return true;
4521 }
4522 return res || verify_types_in_gimple_reference (rhs1, false);
4523
4524 /* tcc_constant */
4525 case SSA_NAME:
4526 case INTEGER_CST:
4527 case REAL_CST:
4528 case FIXED_CST:
4529 case COMPLEX_CST:
4530 case VECTOR_CST:
4531 case STRING_CST:
4532 return res;
4533
4534 /* tcc_declaration */
4535 case CONST_DECL:
4536 return res;
4537 case VAR_DECL:
4538 case PARM_DECL:
4539 if (!is_gimple_reg (lhs)
4540 && !is_gimple_reg (rhs1)
4541 && is_gimple_reg_type (TREE_TYPE (lhs)))
4542 {
4543 error ("invalid RHS for gimple memory store: %qs", code_name);
4544 debug_generic_stmt (lhs);
4545 debug_generic_stmt (rhs1);
4546 return true;
4547 }
4548 return res;
4549
4550 case CONSTRUCTOR:
4551 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4552 {
4553 unsigned int i;
4554 tree elt_i, elt_v, elt_t = NULL_TREE;
4555
4556 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4557 return res;
4558 /* For vector CONSTRUCTORs we require that either it is an empty
4559 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4560 (then the element count must be correct to cover the whole
4561 outer vector and the index must be NULL on all elements), or it is
4562 a CONSTRUCTOR of scalar elements, where we as an exception allow
4563 a smaller number of elements (assuming zero filling) and
4564 consecutive indexes as compared to NULL indexes (such
4565 CONSTRUCTORs can appear in the IL from FEs). */
4566 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4567 {
4568 if (elt_t == NULL_TREE)
4569 {
4570 elt_t = TREE_TYPE (elt_v);
4571 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4572 {
4573 tree elt_t = TREE_TYPE (elt_v);
4574 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4575 TREE_TYPE (elt_t)))
4576 {
4577 error ("incorrect type of vector %qs elements",
4578 code_name);
4579 debug_generic_stmt (rhs1);
4580 return true;
4581 }
4582 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4583 * TYPE_VECTOR_SUBPARTS (elt_t),
4584 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4585 {
4586 error ("incorrect number of vector %qs elements",
4587 code_name);
4588 debug_generic_stmt (rhs1);
4589 return true;
4590 }
4591 }
4592 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4593 elt_t))
4594 {
4595 error ("incorrect type of vector %qs elements",
4596 code_name);
4597 debug_generic_stmt (rhs1);
4598 return true;
4599 }
4600 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4601 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4602 {
4603 error ("incorrect number of vector %qs elements",
4604 code_name);
4605 debug_generic_stmt (rhs1);
4606 return true;
4607 }
4608 }
4609 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4610 {
4611 error ("incorrect type of vector CONSTRUCTOR elements");
4612 debug_generic_stmt (rhs1);
4613 return true;
4614 }
4615 if (elt_i != NULL_TREE
4616 && (TREE_CODE (elt_t) == VECTOR_TYPE
4617 || TREE_CODE (elt_i) != INTEGER_CST
4618 || compare_tree_int (elt_i, i) != 0))
4619 {
4620 error ("vector %qs with non-NULL element index",
4621 code_name);
4622 debug_generic_stmt (rhs1);
4623 return true;
4624 }
4625 if (!is_gimple_val (elt_v))
4626 {
4627 error ("vector %qs element is not a GIMPLE value",
4628 code_name);
4629 debug_generic_stmt (rhs1);
4630 return true;
4631 }
4632 }
4633 }
4634 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4635 {
4636 error ("non-vector %qs with elements", code_name);
4637 debug_generic_stmt (rhs1);
4638 return true;
4639 }
4640 return res;
4641
4642 case ASSERT_EXPR:
4643 /* FIXME. */
4644 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4645 if (rhs1 == boolean_false_node)
4646 {
4647 error ("%qs with an always-false condition", code_name);
4648 debug_generic_stmt (rhs1);
4649 return true;
4650 }
4651 break;
4652
4653 case OBJ_TYPE_REF:
4654 case WITH_SIZE_EXPR:
4655 /* FIXME. */
4656 return res;
4657
4658 default:;
4659 }
4660
4661 return res;
4662 }
4663
4664 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4665 is a problem, otherwise false. */
4666
4667 static bool
4668 verify_gimple_assign (gassign *stmt)
4669 {
4670 switch (gimple_assign_rhs_class (stmt))
4671 {
4672 case GIMPLE_SINGLE_RHS:
4673 return verify_gimple_assign_single (stmt);
4674
4675 case GIMPLE_UNARY_RHS:
4676 return verify_gimple_assign_unary (stmt);
4677
4678 case GIMPLE_BINARY_RHS:
4679 return verify_gimple_assign_binary (stmt);
4680
4681 case GIMPLE_TERNARY_RHS:
4682 return verify_gimple_assign_ternary (stmt);
4683
4684 default:
4685 gcc_unreachable ();
4686 }
4687 }
4688
4689 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4690 is a problem, otherwise false. */
4691
4692 static bool
4693 verify_gimple_return (greturn *stmt)
4694 {
4695 tree op = gimple_return_retval (stmt);
4696 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4697
4698 /* We cannot test for present return values as we do not fix up missing
4699 return values from the original source. */
4700 if (op == NULL)
4701 return false;
4702
4703 if (!is_gimple_val (op)
4704 && TREE_CODE (op) != RESULT_DECL)
4705 {
4706 error ("invalid operand in return statement");
4707 debug_generic_stmt (op);
4708 return true;
4709 }
4710
4711 if ((TREE_CODE (op) == RESULT_DECL
4712 && DECL_BY_REFERENCE (op))
4713 || (TREE_CODE (op) == SSA_NAME
4714 && SSA_NAME_VAR (op)
4715 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4716 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4717 op = TREE_TYPE (op);
4718
4719 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4720 {
4721 error ("invalid conversion in return statement");
4722 debug_generic_stmt (restype);
4723 debug_generic_stmt (TREE_TYPE (op));
4724 return true;
4725 }
4726
4727 return false;
4728 }
4729
4730
4731 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4732 is a problem, otherwise false. */
4733
4734 static bool
4735 verify_gimple_goto (ggoto *stmt)
4736 {
4737 tree dest = gimple_goto_dest (stmt);
4738
4739 /* ??? We have two canonical forms of direct goto destinations, a
4740 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4741 if (TREE_CODE (dest) != LABEL_DECL
4742 && (!is_gimple_val (dest)
4743 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4744 {
4745 error ("goto destination is neither a label nor a pointer");
4746 return true;
4747 }
4748
4749 return false;
4750 }
4751
4752 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4753 is a problem, otherwise false. */
4754
4755 static bool
4756 verify_gimple_switch (gswitch *stmt)
4757 {
4758 unsigned int i, n;
4759 tree elt, prev_upper_bound = NULL_TREE;
4760 tree index_type, elt_type = NULL_TREE;
4761
4762 if (!is_gimple_val (gimple_switch_index (stmt)))
4763 {
4764 error ("invalid operand to switch statement");
4765 debug_generic_stmt (gimple_switch_index (stmt));
4766 return true;
4767 }
4768
4769 index_type = TREE_TYPE (gimple_switch_index (stmt));
4770 if (! INTEGRAL_TYPE_P (index_type))
4771 {
4772 error ("non-integral type switch statement");
4773 debug_generic_expr (index_type);
4774 return true;
4775 }
4776
4777 elt = gimple_switch_label (stmt, 0);
4778 if (CASE_LOW (elt) != NULL_TREE
4779 || CASE_HIGH (elt) != NULL_TREE
4780 || CASE_CHAIN (elt) != NULL_TREE)
4781 {
4782 error ("invalid default case label in switch statement");
4783 debug_generic_expr (elt);
4784 return true;
4785 }
4786
4787 n = gimple_switch_num_labels (stmt);
4788 for (i = 1; i < n; i++)
4789 {
4790 elt = gimple_switch_label (stmt, i);
4791
4792 if (CASE_CHAIN (elt))
4793 {
4794 error ("invalid %<CASE_CHAIN%>");
4795 debug_generic_expr (elt);
4796 return true;
4797 }
4798 if (! CASE_LOW (elt))
4799 {
4800 error ("invalid case label in switch statement");
4801 debug_generic_expr (elt);
4802 return true;
4803 }
4804 if (CASE_HIGH (elt)
4805 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4806 {
4807 error ("invalid case range in switch statement");
4808 debug_generic_expr (elt);
4809 return true;
4810 }
4811
4812 if (elt_type)
4813 {
4814 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4815 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4816 {
4817 error ("type mismatch for case label in switch statement");
4818 debug_generic_expr (elt);
4819 return true;
4820 }
4821 }
4822 else
4823 {
4824 elt_type = TREE_TYPE (CASE_LOW (elt));
4825 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4826 {
4827 error ("type precision mismatch in switch statement");
4828 return true;
4829 }
4830 }
4831
4832 if (prev_upper_bound)
4833 {
4834 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4835 {
4836 error ("case labels not sorted in switch statement");
4837 return true;
4838 }
4839 }
4840
4841 prev_upper_bound = CASE_HIGH (elt);
4842 if (! prev_upper_bound)
4843 prev_upper_bound = CASE_LOW (elt);
4844 }
4845
4846 return false;
4847 }
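
 /* A valid GIMPLE_SWITCH thus has a label vector of the shape
      switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2>
    with the default first (NULL CASE_LOW) and the remaining case
    labels sorted and non-overlapping.  */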
4848
4849 /* Verify a gimple debug statement STMT.
4850 Returns true if anything is wrong. */
4851
4852 static bool
4853 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4854 {
4855 /* There isn't much that could be wrong in a gimple debug stmt. A
4856 gimple debug bind stmt, for example, maps a tree (usually a
4857 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4858 or member of an aggregate type) to another tree, which can be an
4859 arbitrary expression. These stmts expand into debug insns, and
4860 are converted to debug notes by var-tracking.c. */
4861 return false;
4862 }
4863
4864 /* Verify a gimple label statement STMT.
4865 Returns true if anything is wrong. */
4866
4867 static bool
4868 verify_gimple_label (glabel *stmt)
4869 {
4870 tree decl = gimple_label_label (stmt);
4871 int uid;
4872 bool err = false;
4873
4874 if (TREE_CODE (decl) != LABEL_DECL)
4875 return true;
4876 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4877 && DECL_CONTEXT (decl) != current_function_decl)
4878 {
4879 error ("label context is not the current function declaration");
4880 err |= true;
4881 }
4882
4883 uid = LABEL_DECL_UID (decl);
4884 if (cfun->cfg
4885 && (uid == -1
4886 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4887 {
4888 error ("incorrect entry in %<label_to_block_map%>");
4889 err |= true;
4890 }
4891
4892 uid = EH_LANDING_PAD_NR (decl);
4893 if (uid)
4894 {
4895 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4896 if (decl != lp->post_landing_pad)
4897 {
4898 error ("incorrect setting of landing pad number");
4899 err |= true;
4900 }
4901 }
4902
4903 return err;
4904 }
4905
4906 /* Verify a gimple cond statement STMT.
4907 Returns true if anything is wrong. */
4908
4909 static bool
4910 verify_gimple_cond (gcond *stmt)
4911 {
4912 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4913 {
4914 error ("invalid comparison code in gimple cond");
4915 return true;
4916 }
4917 if (!(!gimple_cond_true_label (stmt)
4918 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4919 || !(!gimple_cond_false_label (stmt)
4920 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4921 {
4922 error ("invalid labels in gimple cond");
4923 return true;
4924 }
4925
4926 return verify_gimple_comparison (boolean_type_node,
4927 gimple_cond_lhs (stmt),
4928 gimple_cond_rhs (stmt),
4929 gimple_cond_code (stmt));
4930 }
4931
4932 /* Verify the GIMPLE statement STMT. Returns true if there is an
4933 error, otherwise false. */
4934
4935 static bool
4936 verify_gimple_stmt (gimple *stmt)
4937 {
4938 switch (gimple_code (stmt))
4939 {
4940 case GIMPLE_ASSIGN:
4941 return verify_gimple_assign (as_a <gassign *> (stmt));
4942
4943 case GIMPLE_LABEL:
4944 return verify_gimple_label (as_a <glabel *> (stmt));
4945
4946 case GIMPLE_CALL:
4947 return verify_gimple_call (as_a <gcall *> (stmt));
4948
4949 case GIMPLE_COND:
4950 return verify_gimple_cond (as_a <gcond *> (stmt));
4951
4952 case GIMPLE_GOTO:
4953 return verify_gimple_goto (as_a <ggoto *> (stmt));
4954
4955 case GIMPLE_SWITCH:
4956 return verify_gimple_switch (as_a <gswitch *> (stmt));
4957
4958 case GIMPLE_RETURN:
4959 return verify_gimple_return (as_a <greturn *> (stmt));
4960
4961 case GIMPLE_ASM:
4962 return false;
4963
4964 case GIMPLE_TRANSACTION:
4965 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4966
4967 /* Tuples that do not have tree operands. */
4968 case GIMPLE_NOP:
4969 case GIMPLE_PREDICT:
4970 case GIMPLE_RESX:
4971 case GIMPLE_EH_DISPATCH:
4972 case GIMPLE_EH_MUST_NOT_THROW:
4973 return false;
4974
4975 CASE_GIMPLE_OMP:
4976 /* OpenMP directives are validated by the FE and never operated
4977 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4978 non-gimple expressions when the main index variable has had
4979 its address taken. This does not affect the loop itself
4980 because the header of a GIMPLE_OMP_FOR is merely used to determine
4981 how to set up the parallel iteration. */
4982 return false;
4983
4984 case GIMPLE_DEBUG:
4985 return verify_gimple_debug (stmt);
4986
4987 default:
4988 gcc_unreachable ();
4989 }
4990 }
4991
4992 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4993 and false otherwise. */
4994
4995 static bool
4996 verify_gimple_phi (gphi *phi)
4997 {
4998 bool err = false;
4999 unsigned i;
5000 tree phi_result = gimple_phi_result (phi);
5001 bool virtual_p;
5002
5003 if (!phi_result)
5004 {
5005 error ("invalid %<PHI%> result");
5006 return true;
5007 }
5008
5009 virtual_p = virtual_operand_p (phi_result);
5010 if (TREE_CODE (phi_result) != SSA_NAME
5011 || (virtual_p
5012 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5013 {
5014 error ("invalid %<PHI%> result");
5015 err = true;
5016 }
5017
5018 for (i = 0; i < gimple_phi_num_args (phi); i++)
5019 {
5020 tree t = gimple_phi_arg_def (phi, i);
5021
5022 if (!t)
5023 {
5024 error ("missing %<PHI%> def");
5025 err |= true;
5026 continue;
5027 }
5028 /* Addressable variables do have SSA_NAMEs but they
5029 are not considered gimple values. */
5030 else if ((TREE_CODE (t) == SSA_NAME
5031 && virtual_p != virtual_operand_p (t))
5032 || (virtual_p
5033 && (TREE_CODE (t) != SSA_NAME
5034 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5035 || (!virtual_p
5036 && !is_gimple_val (t)))
5037 {
5038 error ("invalid %<PHI%> argument");
5039 debug_generic_expr (t);
5040 err |= true;
5041 }
5042 #ifdef ENABLE_TYPES_CHECKING
5043 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5044 {
5045 error ("incompatible types in %<PHI%> argument %u", i);
5046 debug_generic_stmt (TREE_TYPE (phi_result));
5047 debug_generic_stmt (TREE_TYPE (t));
5048 err |= true;
5049 }
5050 #endif
5051 }
5052
5053 return err;
5054 }
5055
5056 /* Verify the GIMPLE statements inside the sequence STMTS. */
5057
5058 static bool
5059 verify_gimple_in_seq_2 (gimple_seq stmts)
5060 {
5061 gimple_stmt_iterator ittr;
5062 bool err = false;
5063
5064 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5065 {
5066 gimple *stmt = gsi_stmt (ittr);
5067
5068 switch (gimple_code (stmt))
5069 {
5070 case GIMPLE_BIND:
5071 err |= verify_gimple_in_seq_2 (
5072 gimple_bind_body (as_a <gbind *> (stmt)));
5073 break;
5074
5075 case GIMPLE_TRY:
5076 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5077 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5078 break;
5079
5080 case GIMPLE_EH_FILTER:
5081 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5082 break;
5083
5084 case GIMPLE_EH_ELSE:
5085 {
5086 geh_else *eh_else = as_a <geh_else *> (stmt);
5087 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5088 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5089 }
5090 break;
5091
5092 case GIMPLE_CATCH:
5093 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5094 as_a <gcatch *> (stmt)));
5095 break;
5096
5097 case GIMPLE_TRANSACTION:
5098 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5099 break;
5100
5101 default:
5102 {
5103 bool err2 = verify_gimple_stmt (stmt);
5104 if (err2)
5105 debug_gimple_stmt (stmt);
5106 err |= err2;
5107 }
5108 }
5109 }
5110
5111 return err;
5112 }
5113
5114 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5115 is a problem, otherwise false. */
5116
5117 static bool
5118 verify_gimple_transaction (gtransaction *stmt)
5119 {
5120 tree lab;
5121
5122 lab = gimple_transaction_label_norm (stmt);
5123 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5124 return true;
5125 lab = gimple_transaction_label_uninst (stmt);
5126 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5127 return true;
5128 lab = gimple_transaction_label_over (stmt);
5129 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5130 return true;
5131
5132 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5133 }
5134
5135
5136 /* Verify the GIMPLE statements inside the statement list STMTS. */
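 /* As a DEBUG_FUNCTION this is convenient to invoke by hand from a
    debugger, e.g. "call verify_gimple_in_seq (seq)" on a sequence
    under suspicion (SEQ standing for any gimple_seq in scope).  */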
5137
5138 DEBUG_FUNCTION void
5139 verify_gimple_in_seq (gimple_seq stmts)
5140 {
5141 timevar_push (TV_TREE_STMT_VERIFY);
5142 if (verify_gimple_in_seq_2 (stmts))
5143 internal_error ("%<verify_gimple%> failed");
5144 timevar_pop (TV_TREE_STMT_VERIFY);
5145 }
5146
5147 /* Return true when T can be shared. */
5148
5149 static bool
5150 tree_node_can_be_shared (tree t)
5151 {
5152 if (IS_TYPE_OR_DECL_P (t)
5153 || TREE_CODE (t) == SSA_NAME
5154 || TREE_CODE (t) == IDENTIFIER_NODE
5155 || TREE_CODE (t) == CASE_LABEL_EXPR
5156 || is_gimple_min_invariant (t))
5157 return true;
5158
5159 if (t == error_mark_node)
5160 return true;
5161
5162 return false;
5163 }
5164
5165 /* Called via walk_tree. Verify tree sharing. */
5166
5167 static tree
5168 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5169 {
5170 hash_set<void *> *visited = (hash_set<void *> *) data;
5171
5172 if (tree_node_can_be_shared (*tp))
5173 {
5174 *walk_subtrees = false;
5175 return NULL;
5176 }
5177
5178 if (visited->add (*tp))
5179 return *tp;
5180
5181 return NULL;
5182 }
5183
5184 /* Called via walk_gimple_stmt. Verify tree sharing. */
5185
5186 static tree
5187 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5188 {
5189 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5190 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5191 }
5192
5193 static bool eh_error_found;
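
 /* Callback for walking the EH throw statement table: report any
    statement recorded in the table that was not visited (i.e. is no
    longer in the IL).  */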
5194 bool
5195 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5196 hash_set<gimple *> *visited)
5197 {
5198 if (!visited->contains (stmt))
5199 {
5200 error ("dead statement in EH table");
5201 debug_gimple_stmt (stmt);
5202 eh_error_found = true;
5203 }
5204 return true;
5205 }
5206
5207 /* Verify that the block of location LOC is in BLOCKS. */
5208
5209 static bool
5210 verify_location (hash_set<tree> *blocks, location_t loc)
5211 {
5212 tree block = LOCATION_BLOCK (loc);
5213 if (block != NULL_TREE
5214 && !blocks->contains (block))
5215 {
5216 error ("location references block not in block tree");
5217 return true;
5218 }
5219 if (block != NULL_TREE)
5220 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5221 return false;
5222 }
5223
5224 /* Called via walk_tree. Verify that expressions have no blocks. */
5225
5226 static tree
5227 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5228 {
5229 if (!EXPR_P (*tp))
5230 {
5231 *walk_subtrees = false;
5232 return NULL;
5233 }
5234
5235 location_t loc = EXPR_LOCATION (*tp);
5236 if (LOCATION_BLOCK (loc) != NULL)
5237 return *tp;
5238
5239 return NULL;
5240 }
5241
5242 /* Called via walk_tree. Verify locations of expressions. */
5243
5244 static tree
5245 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5246 {
5247 hash_set<tree> *blocks = (hash_set<tree> *) data;
5248 tree t = *tp;
5249
5250 /* ??? This doesn't really belong here but there's no good place to
5251 stick this remainder of old verify_expr. */
5252 /* ??? This barfs on debug stmts which contain binds to vars with
5253 different function context. */
5254 #if 0
5255 if (VAR_P (t)
5256 || TREE_CODE (t) == PARM_DECL
5257 || TREE_CODE (t) == RESULT_DECL)
5258 {
5259 tree context = decl_function_context (t);
5260 if (context != cfun->decl
5261 && !SCOPE_FILE_SCOPE_P (context)
5262 && !TREE_STATIC (t)
5263 && !DECL_EXTERNAL (t))
5264 {
5265 error ("local declaration from a different function");
5266 return t;
5267 }
5268 }
5269 #endif
5270
5271 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5272 {
5273 tree x = DECL_DEBUG_EXPR (t);
5274 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5275 if (addr)
5276 return addr;
5277 }
5278 if ((VAR_P (t)
5279 || TREE_CODE (t) == PARM_DECL
5280 || TREE_CODE (t) == RESULT_DECL)
5281 && DECL_HAS_VALUE_EXPR_P (t))
5282 {
5283 tree x = DECL_VALUE_EXPR (t);
5284 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5285 if (addr)
5286 return addr;
5287 }
5288
5289 if (!EXPR_P (t))
5290 {
5291 *walk_subtrees = false;
5292 return NULL;
5293 }
5294
5295 location_t loc = EXPR_LOCATION (t);
5296 if (verify_location (blocks, loc))
5297 return t;
5298
5299 return NULL;
5300 }
5301
5302 /* Called via walk_gimple_op. Verify locations of expressions. */
5303
5304 static tree
5305 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5306 {
5307 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5308 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5309 }
5310
5311 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5312
5313 static void
5314 collect_subblocks (hash_set<tree> *blocks, tree block)
5315 {
5316 tree t;
5317 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5318 {
5319 blocks->add (t);
5320 collect_subblocks (blocks, t);
5321 }
5322 }
5323
5324 /* Disable warnings about missing quoting in GCC diagnostics for
5325 the verification errors. Their format strings don't follow
5326 GCC diagnostic conventions and trigger an ICE in the end. */
5327 #if __GNUC__ >= 10
5328 # pragma GCC diagnostic push
5329 # pragma GCC diagnostic ignored "-Wformat-diag"
5330 #endif
5331
5332 /* Verify the GIMPLE statements in the CFG of FN. */
5333
5334 DEBUG_FUNCTION void
5335 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5336 {
5337 basic_block bb;
5338 bool err = false;
5339
5340 timevar_push (TV_TREE_STMT_VERIFY);
5341 hash_set<void *> visited;
5342 hash_set<gimple *> visited_throwing_stmts;
5343
5344 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5345 hash_set<tree> blocks;
5346 if (DECL_INITIAL (fn->decl))
5347 {
5348 blocks.add (DECL_INITIAL (fn->decl));
5349 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5350 }
5351
5352 FOR_EACH_BB_FN (bb, fn)
5353 {
5354 gimple_stmt_iterator gsi;
5355 edge_iterator ei;
5356 edge e;
5357
5358 for (gphi_iterator gpi = gsi_start_phis (bb);
5359 !gsi_end_p (gpi);
5360 gsi_next (&gpi))
5361 {
5362 gphi *phi = gpi.phi ();
5363 bool err2 = false;
5364 unsigned i;
5365
5366 if (gimple_bb (phi) != bb)
5367 {
5368 error ("gimple_bb (phi) is set to a wrong basic block");
5369 err2 = true;
5370 }
5371
5372 err2 |= verify_gimple_phi (phi);
5373
5374 /* Only PHI arguments have locations. */
5375 if (gimple_location (phi) != UNKNOWN_LOCATION)
5376 {
5377 error ("PHI node with location");
5378 err2 = true;
5379 }
5380
5381 for (i = 0; i < gimple_phi_num_args (phi); i++)
5382 {
5383 tree arg = gimple_phi_arg_def (phi, i);
5384 tree addr = walk_tree (&arg, verify_node_sharing_1,
5385 &visited, NULL);
5386 if (addr)
5387 {
5388 error ("incorrect sharing of tree nodes");
5389 debug_generic_expr (addr);
5390 err2 |= true;
5391 }
5392 location_t loc = gimple_phi_arg_location (phi, i);
5393 if (virtual_operand_p (gimple_phi_result (phi))
5394 && loc != UNKNOWN_LOCATION)
5395 {
5396 error ("virtual PHI with argument locations");
5397 err2 = true;
5398 }
5399 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5400 if (addr)
5401 {
5402 debug_generic_expr (addr);
5403 err2 = true;
5404 }
5405 err2 |= verify_location (&blocks, loc);
5406 }
5407
5408 if (err2)
5409 debug_gimple_stmt (phi);
5410 err |= err2;
5411 }
5412
5413 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5414 {
5415 gimple *stmt = gsi_stmt (gsi);
5416 bool err2 = false;
5417 struct walk_stmt_info wi;
5418 tree addr;
5419 int lp_nr;
5420
5421 if (gimple_bb (stmt) != bb)
5422 {
5423 error ("gimple_bb (stmt) is set to a wrong basic block");
5424 err2 = true;
5425 }
5426
5427 err2 |= verify_gimple_stmt (stmt);
5428 err2 |= verify_location (&blocks, gimple_location (stmt));
5429
5430 memset (&wi, 0, sizeof (wi));
5431 wi.info = (void *) &visited;
5432 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5433 if (addr)
5434 {
5435 error ("incorrect sharing of tree nodes");
5436 debug_generic_expr (addr);
5437 err2 |= true;
5438 }
5439
5440 memset (&wi, 0, sizeof (wi));
5441 wi.info = (void *) &blocks;
5442 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5443 if (addr)
5444 {
5445 debug_generic_expr (addr);
5446 err2 |= true;
5447 }
5448
5449 /* If the statement is marked as part of an EH region, then it is
5450 expected that the statement could throw. Verify that when
5451 optimizations simplify a statement such that we can prove it
5452 cannot throw, we also update the other data structures
5453 to match. */
5454 lp_nr = lookup_stmt_eh_lp (stmt);
5455 if (lp_nr != 0)
5456 visited_throwing_stmts.add (stmt);
5457 if (lp_nr > 0)
5458 {
5459 if (!stmt_could_throw_p (cfun, stmt))
5460 {
5461 if (verify_nothrow)
5462 {
5463 error ("statement marked for throw, but doesn%'t");
5464 err2 |= true;
5465 }
5466 }
5467 else if (!gsi_one_before_end_p (gsi))
5468 {
5469 error ("statement marked for throw in middle of block");
5470 err2 |= true;
5471 }
5472 }
5473
5474 if (err2)
5475 debug_gimple_stmt (stmt);
5476 err |= err2;
5477 }
5478
5479 FOR_EACH_EDGE (e, ei, bb->succs)
5480 if (e->goto_locus != UNKNOWN_LOCATION)
5481 err |= verify_location (&blocks, e->goto_locus);
5482 }
5483
5484 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5485 eh_error_found = false;
5486 if (eh_table)
5487 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5488 (&visited_throwing_stmts);
5489
5490 if (err || eh_error_found)
5491 internal_error ("verify_gimple failed");
5492
5493 verify_histograms ();
5494 timevar_pop (TV_TREE_STMT_VERIFY);
5495 }
5496
5497
5498 /* Verifies that the flow information is OK. */
5499
5500 static int
5501 gimple_verify_flow_info (void)
5502 {
5503 int err = 0;
5504 basic_block bb;
5505 gimple_stmt_iterator gsi;
5506 gimple *stmt;
5507 edge e;
5508 edge_iterator ei;
5509
5510 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5511 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5512 {
5513 error ("ENTRY_BLOCK has IL associated with it");
5514 err = 1;
5515 }
5516
5517 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5518 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5519 {
5520 error ("EXIT_BLOCK has IL associated with it");
5521 err = 1;
5522 }
5523
5524 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5525 if (e->flags & EDGE_FALLTHRU)
5526 {
5527 error ("fallthru to exit from bb %d", e->src->index);
5528 err = 1;
5529 }
5530
5531 FOR_EACH_BB_FN (bb, cfun)
5532 {
5533 bool found_ctrl_stmt = false;
5534
5535 stmt = NULL;
5536
5537 /* Skip labels at the start of the basic block. */
5538 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5539 {
5540 tree label;
5541 gimple *prev_stmt = stmt;
5542
5543 stmt = gsi_stmt (gsi);
5544
5545 if (gimple_code (stmt) != GIMPLE_LABEL)
5546 break;
5547
5548 label = gimple_label_label (as_a <glabel *> (stmt));
5549 if (prev_stmt && DECL_NONLOCAL (label))
5550 {
5551 error ("nonlocal label ");
5552 print_generic_expr (stderr, label);
5553 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5554 bb->index);
5555 err = 1;
5556 }
5557
5558 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5559 {
5560 error ("EH landing pad label ");
5561 print_generic_expr (stderr, label);
5562 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5563 bb->index);
5564 err = 1;
5565 }
5566
5567 if (label_to_block (cfun, label) != bb)
5568 {
5569 error ("label ");
5570 print_generic_expr (stderr, label);
5571 fprintf (stderr, " to block does not match in bb %d",
5572 bb->index);
5573 err = 1;
5574 }
5575
5576 if (decl_function_context (label) != current_function_decl)
5577 {
5578 error ("label ");
5579 print_generic_expr (stderr, label);
5580 fprintf (stderr, " has incorrect context in bb %d",
5581 bb->index);
5582 err = 1;
5583 }
5584 }
5585
5586 /* Verify that the body of basic block BB is free of control flow. */
5587 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5588 {
5589 gimple *stmt = gsi_stmt (gsi);
5590
5591 if (found_ctrl_stmt)
5592 {
5593 error ("control flow in the middle of basic block %d",
5594 bb->index);
5595 err = 1;
5596 }
5597
5598 if (stmt_ends_bb_p (stmt))
5599 found_ctrl_stmt = true;
5600
5601 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5602 {
5603 error ("label ");
5604 print_generic_expr (stderr, gimple_label_label (label_stmt));
5605 fprintf (stderr, " in the middle of basic block %d", bb->index);
5606 err = 1;
5607 }
5608 }
5609
5610 gsi = gsi_last_nondebug_bb (bb);
5611 if (gsi_end_p (gsi))
5612 continue;
5613
5614 stmt = gsi_stmt (gsi);
5615
5616 if (gimple_code (stmt) == GIMPLE_LABEL)
5617 continue;
5618
5619 err |= verify_eh_edges (stmt);
5620
5621 if (is_ctrl_stmt (stmt))
5622 {
5623 FOR_EACH_EDGE (e, ei, bb->succs)
5624 if (e->flags & EDGE_FALLTHRU)
5625 {
5626 error ("fallthru edge after a control statement in bb %d",
5627 bb->index);
5628 err = 1;
5629 }
5630 }
5631
5632 if (gimple_code (stmt) != GIMPLE_COND)
5633 {
5634 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5635 after anything other than a GIMPLE_COND. */
5636 FOR_EACH_EDGE (e, ei, bb->succs)
5637 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5638 {
5639 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5640 bb->index);
5641 err = 1;
5642 }
5643 }
5644
5645 switch (gimple_code (stmt))
5646 {
5647 case GIMPLE_COND:
5648 {
5649 edge true_edge;
5650 edge false_edge;
5651
5652 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5653
5654 if (!true_edge
5655 || !false_edge
5656 || !(true_edge->flags & EDGE_TRUE_VALUE)
5657 || !(false_edge->flags & EDGE_FALSE_VALUE)
5658 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5659 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5660 || EDGE_COUNT (bb->succs) >= 3)
5661 {
5662 error ("wrong outgoing edge flags at end of bb %d",
5663 bb->index);
5664 err = 1;
5665 }
5666 }
5667 break;
5668
5669 case GIMPLE_GOTO:
5670 if (simple_goto_p (stmt))
5671 {
5672 error ("explicit goto at end of bb %d", bb->index);
5673 err = 1;
5674 }
5675 else
5676 {
5677 /* FIXME. We should double check that the labels in the
5678 destination blocks have their address taken. */
5679 FOR_EACH_EDGE (e, ei, bb->succs)
5680 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5681 | EDGE_FALSE_VALUE))
5682 || !(e->flags & EDGE_ABNORMAL))
5683 {
5684 error ("wrong outgoing edge flags at end of bb %d",
5685 bb->index);
5686 err = 1;
5687 }
5688 }
5689 break;
5690
5691 case GIMPLE_CALL:
5692 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5693 break;
5694 /* fallthru */
5695 case GIMPLE_RETURN:
5696 if (!single_succ_p (bb)
5697 || (single_succ_edge (bb)->flags
5698 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5699 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5700 {
5701 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5702 err = 1;
5703 }
5704 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5705 {
5706 error ("return edge does not point to exit in bb %d",
5707 bb->index);
5708 err = 1;
5709 }
5710 break;
5711
5712 case GIMPLE_SWITCH:
5713 {
5714 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5715 tree prev;
5716 edge e;
5717 size_t i, n;
5718
5719 n = gimple_switch_num_labels (switch_stmt);
5720
5721 /* Mark all the destination basic blocks. */
5722 for (i = 0; i < n; ++i)
5723 {
5724 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5725 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5726 label_bb->aux = (void *)1;
5727 }
5728
5729 /* Verify that the case labels are sorted. */
5730 prev = gimple_switch_label (switch_stmt, 0);
5731 for (i = 1; i < n; ++i)
5732 {
5733 tree c = gimple_switch_label (switch_stmt, i);
5734 if (!CASE_LOW (c))
5735 {
5736 error ("found default case not at the start of "
5737 "case vector");
5738 err = 1;
5739 continue;
5740 }
5741 if (CASE_LOW (prev)
5742 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5743 {
5744 error ("case labels not sorted: ");
5745 print_generic_expr (stderr, prev);
5746 fprintf (stderr, " is greater than ");
5747 print_generic_expr (stderr, c);
5748 fprintf (stderr, " but comes before it.\n");
5749 err = 1;
5750 }
5751 prev = c;
5752 }
5753 /* VRP will remove the default case if it can prove it will
5754 never be executed. So do not verify there always exists
5755 a default case here. */
5756
5757 FOR_EACH_EDGE (e, ei, bb->succs)
5758 {
5759 if (!e->dest->aux)
5760 {
5761 error ("extra outgoing edge %d->%d",
5762 bb->index, e->dest->index);
5763 err = 1;
5764 }
5765
5766 e->dest->aux = (void *)2;
5767 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5768 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5769 {
5770 error ("wrong outgoing edge flags at end of bb %d",
5771 bb->index);
5772 err = 1;
5773 }
5774 }
5775
5776 /* Check that we have all of them. */
5777 for (i = 0; i < n; ++i)
5778 {
5779 basic_block label_bb = gimple_switch_label_bb (cfun,
5780 switch_stmt, i);
5781
5782 if (label_bb->aux != (void *)2)
5783 {
5784 error ("missing edge %i->%i", bb->index, label_bb->index);
5785 err = 1;
5786 }
5787 }
5788
5789 FOR_EACH_EDGE (e, ei, bb->succs)
5790 e->dest->aux = (void *)0;
5791 }
5792 break;
5793
5794 case GIMPLE_EH_DISPATCH:
5795 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5796 break;
5797
5798 default:
5799 break;
5800 }
5801 }
5802
5803 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5804 verify_dominators (CDI_DOMINATORS);
5805
5806 return err;
5807 }
5808
5809 #if __GNUC__ >= 10
5810 # pragma GCC diagnostic pop
5811 #endif
5812
5813 /* Updates phi nodes after creating a forwarder block joined
5814 by edge FALLTHRU. */
5815
5816 static void
5817 gimple_make_forwarder_block (edge fallthru)
5818 {
5819 edge e;
5820 edge_iterator ei;
5821 basic_block dummy, bb;
5822 tree var;
5823 gphi_iterator gsi;
5824 bool forward_location_p;
5825
5826 dummy = fallthru->src;
5827 bb = fallthru->dest;
5828
5829 if (single_pred_p (bb))
5830 return;
5831
5832 /* We can forward location info if we have only one predecessor. */
5833 forward_location_p = single_pred_p (dummy);
5834
5835 /* If we redirected a branch we must create new PHI nodes at the
5836 start of BB. */
5837 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5838 {
5839 gphi *phi, *new_phi;
5840
5841 phi = gsi.phi ();
5842 var = gimple_phi_result (phi);
5843 new_phi = create_phi_node (var, bb);
5844 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5845 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5846 forward_location_p
5847 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5848 }
5849
5850 /* Add the arguments we have stored on edges. */
5851 FOR_EACH_EDGE (e, ei, bb->preds)
5852 {
5853 if (e == fallthru)
5854 continue;
5855
5856 flush_pending_stmts (e);
5857 }
5858 }
5859
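/* For illustration (a rough sketch, not taken from the sources below):
   if the original block had predecessors P1 and P2 with the PHI node
   x_3 = PHI <x_1(P1), x_2(P2)>, and P2 is redirected to BB while P1
   still enters through the forwarder DUMMY, the result is,
   schematically,

     DUMMY: x_4 = PHI <x_1(P1)>
     BB:    x_3 = PHI <x_4(DUMMY), x_2(P2)>

   i.e. the old PHI keeps merging the arguments that still arrive via
   DUMMY under a fresh name, while the new PHI in BB combines that name
   with the arguments flushed from the redirected edges.  */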
5860
5861 /* Return a non-special label in the head of basic block BB.
5862 Create one if it doesn't exist. */
5863
5864 tree
5865 gimple_block_label (basic_block bb)
5866 {
5867 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5868 bool first = true;
5869 tree label;
5870 glabel *stmt;
5871
5872 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5873 {
5874 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5875 if (!stmt)
5876 break;
5877 label = gimple_label_label (stmt);
5878 if (!DECL_NONLOCAL (label))
5879 {
5880 if (!first)
5881 gsi_move_before (&i, &s);
5882 return label;
5883 }
5884 }
5885
5886 label = create_artificial_label (UNKNOWN_LOCATION);
5887 stmt = gimple_build_label (label);
5888 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5889 return label;
5890 }
5891
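/* A typical use, as in gimple_redirect_edge_and_branch below: to make
   a switch case or asm goto target basic block DEST, fetch (or create)
   DEST's label and store it into the operand, e.g.

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   Note the side effect above: a reusable non-special label that is not
   first in the block is moved to the block start before returning.  */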
5892
5893 /* Attempt to perform edge redirection by replacing a possibly complex
5894 jump instruction by a goto or by removing the jump completely.
5895 This can apply only if all edges now point to the same block. The
5896 parameters and return values are equivalent to
5897 redirect_edge_and_branch. */
5898
5899 static edge
5900 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5901 {
5902 basic_block src = e->src;
5903 gimple_stmt_iterator i;
5904 gimple *stmt;
5905
5906 /* We can replace or remove a complex jump only when we have exactly
5907 two edges. */
5908 if (EDGE_COUNT (src->succs) != 2
5909 /* Verify that all targets will be TARGET. Specifically, the
5910 edge that is not E must also go to TARGET. */
5911 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5912 return NULL;
5913
5914 i = gsi_last_bb (src);
5915 if (gsi_end_p (i))
5916 return NULL;
5917
5918 stmt = gsi_stmt (i);
5919
5920 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5921 {
5922 gsi_remove (&i, true);
5923 e = ssa_redirect_edge (e, target);
5924 e->flags = EDGE_FALLTHRU;
5925 return e;
5926 }
5927
5928 return NULL;
5929 }
5930
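/* Schematically, the case handled above is the degenerate control flow

     if (cond) goto BB2; else goto BB2;

   where SRC ends in a GIMPLE_COND or GIMPLE_SWITCH whose two successor
   edges both reach TARGET: the statement is removed and E is turned
   into a plain fallthru edge to TARGET.  */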
5931
5932 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5933 edge representing the redirected branch. */
5934
5935 static edge
5936 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5937 {
5938 basic_block bb = e->src;
5939 gimple_stmt_iterator gsi;
5940 edge ret;
5941 gimple *stmt;
5942
5943 if (e->flags & EDGE_ABNORMAL)
5944 return NULL;
5945
5946 if (e->dest == dest)
5947 return NULL;
5948
5949 if (e->flags & EDGE_EH)
5950 return redirect_eh_edge (e, dest);
5951
5952 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5953 {
5954 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5955 if (ret)
5956 return ret;
5957 }
5958
5959 gsi = gsi_last_nondebug_bb (bb);
5960 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5961
5962 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5963 {
5964 case GIMPLE_COND:
5965 /* For COND_EXPR, we only need to redirect the edge. */
5966 break;
5967
5968 case GIMPLE_GOTO:
5969 /* No non-abnormal edges should lead from a non-simple goto, and
5970 simple ones should be represented implicitly. */
5971 gcc_unreachable ();
5972
5973 case GIMPLE_SWITCH:
5974 {
5975 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5976 tree label = gimple_block_label (dest);
5977 tree cases = get_cases_for_edge (e, switch_stmt);
5978
5979 /* If we have a list of cases associated with E, then use it
5980 as it's a lot faster than walking the entire case vector. */
5981 if (cases)
5982 {
5983 edge e2 = find_edge (e->src, dest);
5984 tree last, first;
5985
5986 first = cases;
5987 while (cases)
5988 {
5989 last = cases;
5990 CASE_LABEL (cases) = label;
5991 cases = CASE_CHAIN (cases);
5992 }
5993
5994 /* If there was already an edge in the CFG, then we need
5995 to move all the cases associated with E to E2. */
5996 if (e2)
5997 {
5998 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5999
6000 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6001 CASE_CHAIN (cases2) = first;
6002 }
6003 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6004 }
6005 else
6006 {
6007 size_t i, n = gimple_switch_num_labels (switch_stmt);
6008
6009 for (i = 0; i < n; i++)
6010 {
6011 tree elt = gimple_switch_label (switch_stmt, i);
6012 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6013 CASE_LABEL (elt) = label;
6014 }
6015 }
6016 }
6017 break;
6018
6019 case GIMPLE_ASM:
6020 {
6021 gasm *asm_stmt = as_a <gasm *> (stmt);
6022 int i, n = gimple_asm_nlabels (asm_stmt);
6023 tree label = NULL;
6024
6025 for (i = 0; i < n; ++i)
6026 {
6027 tree cons = gimple_asm_label_op (asm_stmt, i);
6028 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6029 {
6030 if (!label)
6031 label = gimple_block_label (dest);
6032 TREE_VALUE (cons) = label;
6033 }
6034 }
6035
6036 /* If we didn't find any label matching the former edge in the
6037 asm labels, we must be redirecting the fallthrough
6038 edge. */
6039 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6040 }
6041 break;
6042
6043 case GIMPLE_RETURN:
6044 gsi_remove (&gsi, true);
6045 e->flags |= EDGE_FALLTHRU;
6046 break;
6047
6048 case GIMPLE_OMP_RETURN:
6049 case GIMPLE_OMP_CONTINUE:
6050 case GIMPLE_OMP_SECTIONS_SWITCH:
6051 case GIMPLE_OMP_FOR:
6052 /* The edges from OMP constructs can be simply redirected. */
6053 break;
6054
6055 case GIMPLE_EH_DISPATCH:
6056 if (!(e->flags & EDGE_FALLTHRU))
6057 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6058 break;
6059
6060 case GIMPLE_TRANSACTION:
6061 if (e->flags & EDGE_TM_ABORT)
6062 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6063 gimple_block_label (dest));
6064 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6065 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6066 gimple_block_label (dest));
6067 else
6068 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6069 gimple_block_label (dest));
6070 break;
6071
6072 default:
6073 /* Otherwise it must be a fallthru edge, and we don't need to
6074 do anything besides redirecting it. */
6075 gcc_assert (e->flags & EDGE_FALLTHRU);
6076 break;
6077 }
6078
6079 /* Update/insert PHI nodes as necessary. */
6080
6081 /* Now update the edges in the CFG. */
6082 e = ssa_redirect_edge (e, dest);
6083
6084 return e;
6085 }
6086
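/* As a sketch of the GIMPLE_SWITCH case above: redirecting the edge
   for "case 1" from BB3 to BB4 rewrites the case vector in place,

     switch (x) <default: L0, case 1: L3>
       becomes
     switch (x) <default: L0, case 1: L4>

   with L4 = gimple_block_label (BB4).  When get_cases_for_edge has a
   recorded chain of cases for E, only those cases are touched;
   otherwise the whole case vector is scanned.  */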
6087 /* Returns true if it is possible to remove edge E by redirecting
6088 it to the destination of the other edge from E->src. */
6089
6090 static bool
6091 gimple_can_remove_branch_p (const_edge e)
6092 {
6093 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6094 return false;
6095
6096 return true;
6097 }
6098
6099 /* Simple wrapper, as we can always redirect fallthru edges. */
6100
6101 static basic_block
6102 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6103 {
6104 e = gimple_redirect_edge_and_branch (e, dest);
6105 gcc_assert (e);
6106
6107 return NULL;
6108 }
6109
6110
6111 /* Splits basic block BB after statement STMT (but at least after the
6112 labels). If STMT is NULL, BB is split just after the labels. */
6113
6114 static basic_block
6115 gimple_split_block (basic_block bb, void *stmt)
6116 {
6117 gimple_stmt_iterator gsi;
6118 gimple_stmt_iterator gsi_tgt;
6119 gimple_seq list;
6120 basic_block new_bb;
6121 edge e;
6122 edge_iterator ei;
6123
6124 new_bb = create_empty_bb (bb);
6125
6126 /* Redirect the outgoing edges. */
6127 new_bb->succs = bb->succs;
6128 bb->succs = NULL;
6129 FOR_EACH_EDGE (e, ei, new_bb->succs)
6130 e->src = new_bb;
6131
6132 /* Get a stmt iterator pointing to the first stmt to move. */
6133 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6134 gsi = gsi_after_labels (bb);
6135 else
6136 {
6137 gsi = gsi_for_stmt ((gimple *) stmt);
6138 gsi_next (&gsi);
6139 }
6140
6141 /* Move everything from GSI to the new basic block. */
6142 if (gsi_end_p (gsi))
6143 return new_bb;
6144
6145 /* Split the statement list - avoid re-creating new containers as this
6146 brings ugly quadratic memory consumption in the inliner.
6147 (We are still quadratic since we need to update stmt BB pointers,
6148 sadly.) */
6149 gsi_split_seq_before (&gsi, &list);
6150 set_bb_seq (new_bb, list);
6151 for (gsi_tgt = gsi_start (list);
6152 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6153 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6154
6155 return new_bb;
6156 }
6157
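/* For example (an illustrative sketch), splitting

     BB: a_1 = b_2 + 1; c_3 = a_1 * 2; if (c_3 > 0) ...

   after "a_1 = b_2 + 1" leaves that statement in BB and moves the
   rest, together with all of BB's outgoing edges, into the returned
   NEW_BB.  The fallthru edge from BB to NEW_BB is created by the
   split_block wrapper in cfghooks, not here.  */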
6158
6159 /* Moves basic block BB after block AFTER. */
6160
6161 static bool
6162 gimple_move_block_after (basic_block bb, basic_block after)
6163 {
6164 if (bb->prev_bb == after)
6165 return true;
6166
6167 unlink_block (bb);
6168 link_block (bb, after);
6169
6170 return true;
6171 }
6172
6173
6174 /* Return TRUE if block BB has no executable statements, otherwise return
6175 FALSE. */
6176
6177 static bool
6178 gimple_empty_block_p (basic_block bb)
6179 {
6180 /* BB must have no executable statements. */
6181 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6182 if (phi_nodes (bb))
6183 return false;
6184 while (!gsi_end_p (gsi))
6185 {
6186 gimple *stmt = gsi_stmt (gsi);
6187 if (is_gimple_debug (stmt))
6188 ;
6189 else if (gimple_code (stmt) == GIMPLE_NOP
6190 || gimple_code (stmt) == GIMPLE_PREDICT)
6191 ;
6192 else
6193 return false;
6194 gsi_next (&gsi);
6195 }
6196 return true;
6197 }
6198
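/* Thus a block containing only, schematically,

     <L1>:               (a label)
     # DEBUG x => 0      (a debug bind)
     GIMPLE_PREDICT      (a branch prediction hint)

   still counts as empty, whereas a block with a PHI node or any other
   statement does not.  */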
6199
6200 /* Split a basic block if it ends with a conditional branch and if the
6201 other part of the block is not empty. */
6202
6203 static basic_block
6204 gimple_split_block_before_cond_jump (basic_block bb)
6205 {
6206 gimple *last, *split_point;
6207 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6208 if (gsi_end_p (gsi))
6209 return NULL;
6210 last = gsi_stmt (gsi);
6211 if (gimple_code (last) != GIMPLE_COND
6212 && gimple_code (last) != GIMPLE_SWITCH)
6213 return NULL;
6214 gsi_prev (&gsi);
6215 split_point = gsi_stmt (gsi);
6216 return split_block (bb, split_point)->dest;
6217 }
6218
6219
6220 /* Return true if basic_block can be duplicated. */
6221
6222 static bool
6223 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6224 {
6225 return true;
6226 }
6227
6228 /* Create a duplicate of the basic block BB. NOTE: This does not
6229 preserve SSA form. */
6230
6231 static basic_block
6232 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6233 {
6234 basic_block new_bb;
6235 gimple_stmt_iterator gsi_tgt;
6236
6237 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6238
6239 /* Copy the PHI nodes. We ignore PHI node arguments here because
6240 the incoming edges have not been setup yet. */
6241 for (gphi_iterator gpi = gsi_start_phis (bb);
6242 !gsi_end_p (gpi);
6243 gsi_next (&gpi))
6244 {
6245 gphi *phi, *copy;
6246 phi = gpi.phi ();
6247 copy = create_phi_node (NULL_TREE, new_bb);
6248 create_new_def_for (gimple_phi_result (phi), copy,
6249 gimple_phi_result_ptr (copy));
6250 gimple_set_uid (copy, gimple_uid (phi));
6251 }
6252
6253 gsi_tgt = gsi_start_bb (new_bb);
6254 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6255 !gsi_end_p (gsi);
6256 gsi_next (&gsi))
6257 {
6258 def_operand_p def_p;
6259 ssa_op_iter op_iter;
6260 tree lhs;
6261 gimple *stmt, *copy;
6262
6263 stmt = gsi_stmt (gsi);
6264 if (gimple_code (stmt) == GIMPLE_LABEL)
6265 continue;
6266
6267 /* Don't duplicate label debug stmts. */
6268 if (gimple_debug_bind_p (stmt)
6269 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6270 == LABEL_DECL)
6271 continue;
6272
6273 /* Create a new copy of STMT and duplicate STMT's virtual
6274 operands. */
6275 copy = gimple_copy (stmt);
6276 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6277
6278 maybe_duplicate_eh_stmt (copy, stmt);
6279 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6280
6281 /* When copying around a stmt writing into a local non-user
6282 	 aggregate, make sure it won't share a stack slot with other
6283 vars. */
6284 lhs = gimple_get_lhs (stmt);
6285 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6286 {
6287 tree base = get_base_address (lhs);
6288 if (base
6289 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6290 && DECL_IGNORED_P (base)
6291 && !TREE_STATIC (base)
6292 && !DECL_EXTERNAL (base)
6293 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6294 DECL_NONSHAREABLE (base) = 1;
6295 }
6296
6297 /* If requested remap dependence info of cliques brought in
6298 via inlining. */
6299 if (id)
6300 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6301 {
6302 tree op = gimple_op (copy, i);
6303 if (!op)
6304 continue;
6305 if (TREE_CODE (op) == ADDR_EXPR
6306 || TREE_CODE (op) == WITH_SIZE_EXPR)
6307 op = TREE_OPERAND (op, 0);
6308 while (handled_component_p (op))
6309 op = TREE_OPERAND (op, 0);
6310 if ((TREE_CODE (op) == MEM_REF
6311 || TREE_CODE (op) == TARGET_MEM_REF)
6312 && MR_DEPENDENCE_CLIQUE (op) > 1
6313 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6314 {
6315 if (!id->dependence_map)
6316 id->dependence_map = new hash_map<dependence_hash,
6317 unsigned short>;
6318 bool existed;
6319 unsigned short &newc = id->dependence_map->get_or_insert
6320 (MR_DEPENDENCE_CLIQUE (op), &existed);
6321 if (!existed)
6322 {
6323 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6324 newc = ++cfun->last_clique;
6325 }
6326 MR_DEPENDENCE_CLIQUE (op) = newc;
6327 }
6328 }
6329
6330 /* Create new names for all the definitions created by COPY and
6331 add replacement mappings for each new name. */
6332 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6333 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6334 }
6335
6336 return new_bb;
6337 }
6338
6339 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6340
6341 static void
6342 add_phi_args_after_copy_edge (edge e_copy)
6343 {
6344 basic_block bb, bb_copy = e_copy->src, dest;
6345 edge e;
6346 edge_iterator ei;
6347 gphi *phi, *phi_copy;
6348 tree def;
6349 gphi_iterator psi, psi_copy;
6350
6351 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6352 return;
6353
6354 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6355
6356 if (e_copy->dest->flags & BB_DUPLICATED)
6357 dest = get_bb_original (e_copy->dest);
6358 else
6359 dest = e_copy->dest;
6360
6361 e = find_edge (bb, dest);
6362 if (!e)
6363 {
6364 /* During loop unrolling the target of the latch edge is copied.
6365 	 In this case we are not looking for the edge to DEST, but for
6366 	 the edge to the duplicated block whose original was DEST.  */
6367 FOR_EACH_EDGE (e, ei, bb->succs)
6368 {
6369 if ((e->dest->flags & BB_DUPLICATED)
6370 && get_bb_original (e->dest) == dest)
6371 break;
6372 }
6373
6374 gcc_assert (e != NULL);
6375 }
6376
6377 for (psi = gsi_start_phis (e->dest),
6378 psi_copy = gsi_start_phis (e_copy->dest);
6379 !gsi_end_p (psi);
6380 gsi_next (&psi), gsi_next (&psi_copy))
6381 {
6382 phi = psi.phi ();
6383 phi_copy = psi_copy.phi ();
6384 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6385 add_phi_arg (phi_copy, def, e_copy,
6386 gimple_phi_arg_location_from_edge (phi, e));
6387 }
6388 }
6389
6390
6391 /* Basic block BB_COPY was created by code duplication. Add phi node
6392 arguments for edges going out of BB_COPY. The blocks that were
6393 duplicated have BB_DUPLICATED set. */
6394
6395 void
6396 add_phi_args_after_copy_bb (basic_block bb_copy)
6397 {
6398 edge e_copy;
6399 edge_iterator ei;
6400
6401 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6402 {
6403 add_phi_args_after_copy_edge (e_copy);
6404 }
6405 }
6406
6407 /* Blocks in REGION_COPY array of length N_REGION were created by
6408 duplication of basic blocks. Add phi node arguments for edges
6409 going from these blocks. If E_COPY is not NULL, also add
6410    phi node arguments for its destination.  */
6411
6412 void
6413 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6414 edge e_copy)
6415 {
6416 unsigned i;
6417
6418 for (i = 0; i < n_region; i++)
6419 region_copy[i]->flags |= BB_DUPLICATED;
6420
6421 for (i = 0; i < n_region; i++)
6422 add_phi_args_after_copy_bb (region_copy[i]);
6423 if (e_copy)
6424 add_phi_args_after_copy_edge (e_copy);
6425
6426 for (i = 0; i < n_region; i++)
6427 region_copy[i]->flags &= ~BB_DUPLICATED;
6428 }
6429
6430 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6431 important exit edge EXIT. By important we mean that no SSA name defined
6432 inside region is live over the other exit edges of the region. All entry
6433 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6434 to the duplicate of the region. Dominance and loop information is
6435 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6436 UPDATE_DOMINANCE is false then we assume that the caller will update the
6437 dominance information after calling this function. The new basic
6438    blocks are stored to REGION_COPY in the same order as they appear in
6439    REGION, provided that REGION_COPY is not NULL.
6440 The function returns false if it is unable to copy the region,
6441 true otherwise. */
6442
6443 bool
6444 gimple_duplicate_sese_region (edge entry, edge exit,
6445 basic_block *region, unsigned n_region,
6446 basic_block *region_copy,
6447 bool update_dominance)
6448 {
6449 unsigned i;
6450 bool free_region_copy = false, copying_header = false;
6451 class loop *loop = entry->dest->loop_father;
6452 edge exit_copy;
6453 vec<basic_block> doms = vNULL;
6454 edge redirected;
6455 profile_count total_count = profile_count::uninitialized ();
6456 profile_count entry_count = profile_count::uninitialized ();
6457
6458 if (!can_copy_bbs_p (region, n_region))
6459 return false;
6460
6461 /* Some sanity checking. Note that we do not check for all possible
6462      misuses of the functions.  I.e. if you ask to copy something weird,
6463 it will work, but the state of structures probably will not be
6464 correct. */
6465 for (i = 0; i < n_region; i++)
6466 {
6467 /* We do not handle subloops, i.e. all the blocks must belong to the
6468 same loop. */
6469 if (region[i]->loop_father != loop)
6470 return false;
6471
6472 if (region[i] != entry->dest
6473 && region[i] == loop->header)
6474 return false;
6475 }
6476
6477 /* In case the function is used for loop header copying (which is the primary
6478      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6479 if (loop->header == entry->dest)
6480 {
6481 copying_header = true;
6482
6483 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6484 return false;
6485
6486 for (i = 0; i < n_region; i++)
6487 if (region[i] != exit->src
6488 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6489 return false;
6490 }
6491
6492 initialize_original_copy_tables ();
6493
6494 if (copying_header)
6495 set_loop_copy (loop, loop_outer (loop));
6496 else
6497 set_loop_copy (loop, loop);
6498
6499 if (!region_copy)
6500 {
6501 region_copy = XNEWVEC (basic_block, n_region);
6502 free_region_copy = true;
6503 }
6504
6505 /* Record blocks outside the region that are dominated by something
6506 inside. */
6507 if (update_dominance)
6508 {
6509 doms.create (0);
6510 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6511 }
6512
6513 if (entry->dest->count.initialized_p ())
6514 {
6515 total_count = entry->dest->count;
6516 entry_count = entry->count ();
6517 /* Fix up corner cases, to avoid division by zero or creation of negative
6518 frequencies. */
6519 if (entry_count > total_count)
6520 entry_count = total_count;
6521 }
6522
6523 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6524 split_edge_bb_loc (entry), update_dominance);
6525 if (total_count.initialized_p () && entry_count.initialized_p ())
6526 {
6527 scale_bbs_frequencies_profile_count (region, n_region,
6528 total_count - entry_count,
6529 total_count);
6530 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6531 total_count);
6532 }
6533
6534 if (copying_header)
6535 {
6536 loop->header = exit->dest;
6537 loop->latch = exit->src;
6538 }
6539
6540 /* Redirect the entry and add the phi node arguments. */
6541 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6542 gcc_assert (redirected != NULL);
6543 flush_pending_stmts (entry);
6544
6545 /* Concerning updating of dominators: We must recount dominators
6546 for entry block and its copy. Anything that is outside of the
6547 region, but was dominated by something inside needs recounting as
6548 well. */
6549 if (update_dominance)
6550 {
6551 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6552 doms.safe_push (get_bb_original (entry->dest));
6553 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6554 doms.release ();
6555 }
6556
6557 /* Add the other PHI node arguments. */
6558 add_phi_args_after_copy (region_copy, n_region, NULL);
6559
6560 if (free_region_copy)
6561 free (region_copy);
6562
6563 free_original_copy_tables ();
6564 return true;
6565 }
6566
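/* In its primary use, loop header copying, ENTRY is the edge into the
   loop header and EXIT the exit tested in the header.  Copying the
   region and redirecting ENTRY to the copy roughly turns

     while (cond) { body; }

   into

     if (cond) { do { body; } while (cond); }

   so that the original header's test becomes the latch test.  */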
6567 /* Checks if BB is part of the region defined by N_REGION BBS. */
6568 static bool
6569 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6570 {
6571 unsigned int n;
6572
6573 for (n = 0; n < n_region; n++)
6574 {
6575 if (bb == bbs[n])
6576 return true;
6577 }
6578 return false;
6579 }
6580
6581 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6582    are stored to REGION_COPY in the same order in which they appear
6583 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6584 the region, EXIT an exit from it. The condition guarding EXIT
6585 is moved to ENTRY. Returns true if duplication succeeds, false
6586 otherwise.
6587
6588 For example,
6589
6590 some_code;
6591 if (cond)
6592 A;
6593 else
6594 B;
6595
6596 is transformed to
6597
6598 if (cond)
6599 {
6600 some_code;
6601 A;
6602 }
6603 else
6604 {
6605 some_code;
6606 B;
6607 }
6608 */
6609
6610 bool
6611 gimple_duplicate_sese_tail (edge entry, edge exit,
6612 basic_block *region, unsigned n_region,
6613 basic_block *region_copy)
6614 {
6615 unsigned i;
6616 bool free_region_copy = false;
6617 class loop *loop = exit->dest->loop_father;
6618 class loop *orig_loop = entry->dest->loop_father;
6619 basic_block switch_bb, entry_bb, nentry_bb;
6620 vec<basic_block> doms;
6621 profile_count total_count = profile_count::uninitialized (),
6622 exit_count = profile_count::uninitialized ();
6623 edge exits[2], nexits[2], e;
6624 gimple_stmt_iterator gsi;
6625 gimple *cond_stmt;
6626 edge sorig, snew;
6627 basic_block exit_bb;
6628 gphi_iterator psi;
6629 gphi *phi;
6630 tree def;
6631 class loop *target, *aloop, *cloop;
6632
6633 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6634 exits[0] = exit;
6635 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6636
6637 if (!can_copy_bbs_p (region, n_region))
6638 return false;
6639
6640 initialize_original_copy_tables ();
6641 set_loop_copy (orig_loop, loop);
6642
6643   target = loop;
6644 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6645 {
6646 if (bb_part_of_region_p (aloop->header, region, n_region))
6647 {
6648 cloop = duplicate_loop (aloop, target);
6649 duplicate_subloops (aloop, cloop);
6650 }
6651 }
6652
6653 if (!region_copy)
6654 {
6655 region_copy = XNEWVEC (basic_block, n_region);
6656 free_region_copy = true;
6657 }
6658
6659 gcc_assert (!need_ssa_update_p (cfun));
6660
6661 /* Record blocks outside the region that are dominated by something
6662 inside. */
6663 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6664
6665 total_count = exit->src->count;
6666 exit_count = exit->count ();
6667 /* Fix up corner cases, to avoid division by zero or creation of negative
6668 frequencies. */
6669 if (exit_count > total_count)
6670 exit_count = total_count;
6671
6672 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6673 split_edge_bb_loc (exit), true);
6674 if (total_count.initialized_p () && exit_count.initialized_p ())
6675 {
6676 scale_bbs_frequencies_profile_count (region, n_region,
6677 total_count - exit_count,
6678 total_count);
6679 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6680 total_count);
6681 }
6682
6683 /* Create the switch block, and put the exit condition to it. */
6684 entry_bb = entry->dest;
6685 nentry_bb = get_bb_copy (entry_bb);
6686 if (!last_stmt (entry->src)
6687 || !stmt_ends_bb_p (last_stmt (entry->src)))
6688 switch_bb = entry->src;
6689 else
6690 switch_bb = split_edge (entry);
6691 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6692
6693 gsi = gsi_last_bb (switch_bb);
6694 cond_stmt = last_stmt (exit->src);
6695 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6696 cond_stmt = gimple_copy (cond_stmt);
6697
6698 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6699
6700 sorig = single_succ_edge (switch_bb);
6701 sorig->flags = exits[1]->flags;
6702 sorig->probability = exits[1]->probability;
6703 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6704 snew->probability = exits[0]->probability;
6705
6706
6707 /* Register the new edge from SWITCH_BB in loop exit lists. */
6708 rescan_loop_exit (snew, true, false);
6709
6710 /* Add the PHI node arguments. */
6711 add_phi_args_after_copy (region_copy, n_region, snew);
6712
6713 /* Get rid of now superfluous conditions and associated edges (and phi node
6714 arguments). */
6715 exit_bb = exit->dest;
6716
6717 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6718 PENDING_STMT (e) = NULL;
6719
6720 /* The latch of ORIG_LOOP was copied, and so was the backedge
6721 to the original header. We redirect this backedge to EXIT_BB. */
6722 for (i = 0; i < n_region; i++)
6723 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6724 {
6725 gcc_assert (single_succ_edge (region_copy[i]));
6726 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6727 PENDING_STMT (e) = NULL;
6728 for (psi = gsi_start_phis (exit_bb);
6729 !gsi_end_p (psi);
6730 gsi_next (&psi))
6731 {
6732 phi = psi.phi ();
6733 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6734 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6735 }
6736 }
6737 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6738 PENDING_STMT (e) = NULL;
6739
6740 /* Anything that is outside of the region, but was dominated by something
6741 inside needs to update dominance info. */
6742 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6743 doms.release ();
6744 /* Update the SSA web. */
6745 update_ssa (TODO_update_ssa);
6746
6747 if (free_region_copy)
6748 free (region_copy);
6749
6750 free_original_copy_tables ();
6751 return true;
6752 }
6753
6754 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6755 adding blocks when the dominator traversal reaches EXIT. This
6756 function silently assumes that ENTRY strictly dominates EXIT. */
6757
6758 void
6759 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6760 vec<basic_block> *bbs_p)
6761 {
6762 basic_block son;
6763
6764 for (son = first_dom_son (CDI_DOMINATORS, entry);
6765 son;
6766 son = next_dom_son (CDI_DOMINATORS, son))
6767 {
6768 bbs_p->safe_push (son);
6769 if (son != exit)
6770 gather_blocks_in_sese_region (son, exit, bbs_p);
6771 }
6772 }
6773
6774 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6775 The duplicates are recorded in VARS_MAP. */
6776
6777 static void
6778 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6779 tree to_context)
6780 {
6781 tree t = *tp, new_t;
6782 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6783
6784 if (DECL_CONTEXT (t) == to_context)
6785 return;
6786
6787 bool existed;
6788 tree &loc = vars_map->get_or_insert (t, &existed);
6789
6790 if (!existed)
6791 {
6792 if (SSA_VAR_P (t))
6793 {
6794 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6795 add_local_decl (f, new_t);
6796 }
6797 else
6798 {
6799 gcc_assert (TREE_CODE (t) == CONST_DECL);
6800 new_t = copy_node (t);
6801 }
6802 DECL_CONTEXT (new_t) = to_context;
6803
6804 loc = new_t;
6805 }
6806 else
6807 new_t = loc;
6808
6809 *tp = new_t;
6810 }
6811
6812
6813 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6814 VARS_MAP maps old ssa names and var_decls to the new ones. */
6815
6816 static tree
6817 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6818 tree to_context)
6819 {
6820 tree new_name;
6821
6822 gcc_assert (!virtual_operand_p (name));
6823
6824 tree *loc = vars_map->get (name);
6825
6826 if (!loc)
6827 {
6828 tree decl = SSA_NAME_VAR (name);
6829 if (decl)
6830 {
6831 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6832 replace_by_duplicate_decl (&decl, vars_map, to_context);
6833 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6834 decl, SSA_NAME_DEF_STMT (name));
6835 }
6836 else
6837 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6838 name, SSA_NAME_DEF_STMT (name));
6839
6840 /* Now that we've used the def stmt to define new_name, make sure it
6841 doesn't define name anymore. */
6842 SSA_NAME_DEF_STMT (name) = NULL;
6843
6844 vars_map->put (name, new_name);
6845 }
6846 else
6847 new_name = *loc;
6848
6849 return new_name;
6850 }
6851
6852 struct move_stmt_d
6853 {
6854 tree orig_block;
6855 tree new_block;
6856 tree from_context;
6857 tree to_context;
6858 hash_map<tree, tree> *vars_map;
6859 htab_t new_label_map;
6860 hash_map<void *, void *> *eh_map;
6861 bool remap_decls_p;
6862 };
6863
6864 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6865    contained in *TP if it was previously ORIG_BLOCK and change the
6866 DECL_CONTEXT of every local variable referenced in *TP. */
6867
6868 static tree
6869 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6870 {
6871 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6872 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6873 tree t = *tp;
6874
6875 if (EXPR_P (t))
6876 {
6877 tree block = TREE_BLOCK (t);
6878 if (block == NULL_TREE)
6879 ;
6880 else if (block == p->orig_block
6881 || p->orig_block == NULL_TREE)
6882 {
6883 /* tree_node_can_be_shared says we can share invariant
6884 	     addresses but unshare_expr copies them anyway.  Make sure
6885 to unshare before adjusting the block in place - we do not
6886 always see a copy here. */
6887 if (TREE_CODE (t) == ADDR_EXPR
6888 && is_gimple_min_invariant (t))
6889 *tp = t = unshare_expr (t);
6890 TREE_SET_BLOCK (t, p->new_block);
6891 }
6892 else if (flag_checking)
6893 {
6894 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6895 block = BLOCK_SUPERCONTEXT (block);
6896 gcc_assert (block == p->orig_block);
6897 }
6898 }
6899 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6900 {
6901 if (TREE_CODE (t) == SSA_NAME)
6902 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6903 else if (TREE_CODE (t) == PARM_DECL
6904 && gimple_in_ssa_p (cfun))
6905 *tp = *(p->vars_map->get (t));
6906 else if (TREE_CODE (t) == LABEL_DECL)
6907 {
6908 if (p->new_label_map)
6909 {
6910 struct tree_map in, *out;
6911 in.base.from = t;
6912 out = (struct tree_map *)
6913 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6914 if (out)
6915 *tp = t = out->to;
6916 }
6917
6918 /* For FORCED_LABELs we can end up with references from other
6919 functions if some SESE regions are outlined. It is UB to
6920 jump in between them, but they could be used just for printing
6921 addresses etc. In that case, DECL_CONTEXT on the label should
6922 be the function containing the glabel stmt with that LABEL_DECL,
6923 	     rather than whichever function a reference to the label was
6924 	     last seen in.  */
6925 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6926 DECL_CONTEXT (t) = p->to_context;
6927 }
6928 else if (p->remap_decls_p)
6929 {
6930 /* Replace T with its duplicate. T should no longer appear in the
6931 parent function, so this looks wasteful; however, it may appear
6932 in referenced_vars, and more importantly, as virtual operands of
6933 statements, and in alias lists of other variables. It would be
6934 quite difficult to expunge it from all those places. ??? It might
6935 suffice to do this for addressable variables. */
6936 if ((VAR_P (t) && !is_global_var (t))
6937 || TREE_CODE (t) == CONST_DECL)
6938 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6939 }
6940 *walk_subtrees = 0;
6941 }
6942 else if (TYPE_P (t))
6943 *walk_subtrees = 0;
6944
6945 return NULL_TREE;
6946 }
6947
6948 /* Helper for move_stmt_r. Given an EH region number for the source
6949    function, map that to the duplicate EH region number in the dest. */
6950
6951 static int
6952 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6953 {
6954 eh_region old_r, new_r;
6955
6956 old_r = get_eh_region_from_number (old_nr);
6957 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6958
6959 return new_r->index;
6960 }
6961
6962 /* Similar, but operate on INTEGER_CSTs. */
6963
6964 static tree
6965 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6966 {
6967 int old_nr, new_nr;
6968
6969 old_nr = tree_to_shwi (old_t_nr);
6970 new_nr = move_stmt_eh_region_nr (old_nr, p);
6971
6972 return build_int_cst (integer_type_node, new_nr);
6973 }
6974
6975 /* Like move_stmt_op, but for gimple statements.
6976
6977 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6978 contained in the current statement in *GSI_P and change the
6979 DECL_CONTEXT of every local variable referenced in the current
6980 statement. */
6981
6982 static tree
6983 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6984 struct walk_stmt_info *wi)
6985 {
6986 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6987 gimple *stmt = gsi_stmt (*gsi_p);
6988 tree block = gimple_block (stmt);
6989
6990 if (block == p->orig_block
6991 || (p->orig_block == NULL_TREE
6992 && block != NULL_TREE))
6993 gimple_set_block (stmt, p->new_block);
6994
6995 switch (gimple_code (stmt))
6996 {
6997 case GIMPLE_CALL:
6998 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6999 {
7000 tree r, fndecl = gimple_call_fndecl (stmt);
7001 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7002 switch (DECL_FUNCTION_CODE (fndecl))
7003 {
7004 case BUILT_IN_EH_COPY_VALUES:
7005 r = gimple_call_arg (stmt, 1);
7006 r = move_stmt_eh_region_tree_nr (r, p);
7007 gimple_call_set_arg (stmt, 1, r);
7008 /* FALLTHRU */
7009
7010 case BUILT_IN_EH_POINTER:
7011 case BUILT_IN_EH_FILTER:
7012 r = gimple_call_arg (stmt, 0);
7013 r = move_stmt_eh_region_tree_nr (r, p);
7014 gimple_call_set_arg (stmt, 0, r);
7015 break;
7016
7017 default:
7018 break;
7019 }
7020 }
7021 break;
7022
7023 case GIMPLE_RESX:
7024 {
7025 gresx *resx_stmt = as_a <gresx *> (stmt);
7026 int r = gimple_resx_region (resx_stmt);
7027 r = move_stmt_eh_region_nr (r, p);
7028 gimple_resx_set_region (resx_stmt, r);
7029 }
7030 break;
7031
7032 case GIMPLE_EH_DISPATCH:
7033 {
7034 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7035 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7036 r = move_stmt_eh_region_nr (r, p);
7037 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7038 }
7039 break;
7040
7041 case GIMPLE_OMP_RETURN:
7042 case GIMPLE_OMP_CONTINUE:
7043 break;
7044
7045 case GIMPLE_LABEL:
7046 {
7047 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7048 so that such labels can be referenced from other regions.
7049 	 Make sure to update it when seeing a GIMPLE_LABEL though,
7050 	 since that is the owner of the label.  */
7051 walk_gimple_op (stmt, move_stmt_op, wi);
7052 *handled_ops_p = true;
7053 tree label = gimple_label_label (as_a <glabel *> (stmt));
7054 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7055 DECL_CONTEXT (label) = p->to_context;
7056 }
7057 break;
7058
7059 default:
7060 if (is_gimple_omp (stmt))
7061 {
7062 /* Do not remap variables inside OMP directives. Variables
7063 referenced in clauses and directive header belong to the
7064 parent function and should not be moved into the child
7065 function. */
7066 bool save_remap_decls_p = p->remap_decls_p;
7067 p->remap_decls_p = false;
7068 *handled_ops_p = true;
7069
7070 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7071 move_stmt_op, wi);
7072
7073 p->remap_decls_p = save_remap_decls_p;
7074 }
7075 break;
7076 }
7077
7078 return NULL_TREE;
7079 }
7080
7081 /* Move basic block BB from function CFUN to function DEST_FN. The
7082 block is moved out of the original linked list and placed after
7083 block AFTER in the new list. Also, the block is removed from the
7084 original array of blocks and placed in DEST_FN's array of blocks.
7085    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7086 updated to reflect the moved edges.
7087
7088 The local variables are remapped to new instances, VARS_MAP is used
7089 to record the mapping. */
7090
7091 static void
7092 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7093 basic_block after, bool update_edge_count_p,
7094 struct move_stmt_d *d)
7095 {
7096 struct control_flow_graph *cfg;
7097 edge_iterator ei;
7098 edge e;
7099 gimple_stmt_iterator si;
7100 unsigned old_len, new_len;
7101
7102 /* Remove BB from dominance structures. */
7103 delete_from_dominance_info (CDI_DOMINATORS, bb);
7104
7105 /* Move BB from its current loop to the copy in the new function. */
7106 if (current_loops)
7107 {
7108 class loop *new_loop = (class loop *)bb->loop_father->aux;
7109 if (new_loop)
7110 bb->loop_father = new_loop;
7111 }
7112
7113 /* Link BB to the new linked list. */
7114 move_block_after (bb, after);
7115
7116 /* Update the edge count in the corresponding flowgraphs. */
7117 if (update_edge_count_p)
7118 FOR_EACH_EDGE (e, ei, bb->succs)
7119 {
7120 cfun->cfg->x_n_edges--;
7121 dest_cfun->cfg->x_n_edges++;
7122 }
7123
7124 /* Remove BB from the original basic block array. */
7125 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7126 cfun->cfg->x_n_basic_blocks--;
7127
7128 /* Grow DEST_CFUN's basic block array if needed. */
7129 cfg = dest_cfun->cfg;
7130 cfg->x_n_basic_blocks++;
7131 if (bb->index >= cfg->x_last_basic_block)
7132 cfg->x_last_basic_block = bb->index + 1;
7133
7134 old_len = vec_safe_length (cfg->x_basic_block_info);
7135 if ((unsigned) cfg->x_last_basic_block >= old_len)
7136 {
7137 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7138 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7139 }
7140
7141 (*cfg->x_basic_block_info)[bb->index] = bb;
7142
7143 /* Remap the variables in phi nodes. */
7144 for (gphi_iterator psi = gsi_start_phis (bb);
7145 !gsi_end_p (psi); )
7146 {
7147 gphi *phi = psi.phi ();
7148 use_operand_p use;
7149 tree op = PHI_RESULT (phi);
7150 ssa_op_iter oi;
7151 unsigned i;
7152
7153 if (virtual_operand_p (op))
7154 {
7155 /* Remove the phi nodes for virtual operands (alias analysis will be
7156 run for the new function, anyway). But replace all uses that
7157 might be outside of the region we move. */
7158 use_operand_p use_p;
7159 imm_use_iterator iter;
7160 gimple *use_stmt;
7161 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7162 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7163 SET_USE (use_p, SSA_NAME_VAR (op));
7164 remove_phi_node (&psi, true);
7165 continue;
7166 }
7167
7168 SET_PHI_RESULT (phi,
7169 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7170 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7171 {
7172 op = USE_FROM_PTR (use);
7173 if (TREE_CODE (op) == SSA_NAME)
7174 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7175 }
7176
7177 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7178 {
7179 location_t locus = gimple_phi_arg_location (phi, i);
7180 tree block = LOCATION_BLOCK (locus);
7181
7182 if (locus == UNKNOWN_LOCATION)
7183 continue;
7184 if (d->orig_block == NULL_TREE || block == d->orig_block)
7185 {
7186 locus = set_block (locus, d->new_block);
7187 gimple_phi_arg_set_location (phi, i, locus);
7188 }
7189 }
7190
7191 gsi_next (&psi);
7192 }
7193
7194 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7195 {
7196 gimple *stmt = gsi_stmt (si);
7197 struct walk_stmt_info wi;
7198
7199 memset (&wi, 0, sizeof (wi));
7200 wi.info = d;
7201 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7202
7203 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7204 {
7205 tree label = gimple_label_label (label_stmt);
7206 int uid = LABEL_DECL_UID (label);
7207
7208 gcc_assert (uid > -1);
7209
7210 old_len = vec_safe_length (cfg->x_label_to_block_map);
7211 if (old_len <= (unsigned) uid)
7212 {
7213 new_len = 3 * uid / 2 + 1;
7214 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7215 }
7216
7217 (*cfg->x_label_to_block_map)[uid] = bb;
7218 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7219
7220 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7221
7222 if (uid >= dest_cfun->cfg->last_label_uid)
7223 dest_cfun->cfg->last_label_uid = uid + 1;
7224 }
7225
7226 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7227 remove_stmt_from_eh_lp_fn (cfun, stmt);
7228
7229 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7230 gimple_remove_stmt_histograms (cfun, stmt);
7231
7232 /* We cannot leave any operands allocated from the operand caches of
7233 the current function. */
7234 free_stmt_operands (cfun, stmt);
7235 push_cfun (dest_cfun);
7236 update_stmt (stmt);
7237 pop_cfun ();
7238 }
7239
7240 FOR_EACH_EDGE (e, ei, bb->succs)
7241 if (e->goto_locus != UNKNOWN_LOCATION)
7242 {
7243 tree block = LOCATION_BLOCK (e->goto_locus);
7244 if (d->orig_block == NULL_TREE
7245 || block == d->orig_block)
7246 e->goto_locus = set_block (e->goto_locus, d->new_block);
7247 }
7248 }
7249
7250 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7251 the outermost EH region. Use REGION as the incoming base EH region.
7252 If there is no single outermost region, return NULL and set *ALL to
7253 true. */
7254
7255 static eh_region
7256 find_outermost_region_in_block (struct function *src_cfun,
7257 basic_block bb, eh_region region,
7258 bool *all)
7259 {
7260 gimple_stmt_iterator si;
7261
7262 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7263 {
7264 gimple *stmt = gsi_stmt (si);
7265 eh_region stmt_region;
7266 int lp_nr;
7267
7268 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7269 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7270 if (stmt_region)
7271 {
7272 if (region == NULL)
7273 region = stmt_region;
7274 else if (stmt_region != region)
7275 {
7276 region = eh_region_outermost (src_cfun, stmt_region, region);
7277 if (region == NULL)
7278 {
7279 *all = true;
7280 return NULL;
7281 }
7282 }
7283 }
7284 }
7285
7286 return region;
7287 }
7288
7289 static tree
7290 new_label_mapper (tree decl, void *data)
7291 {
7292 htab_t hash = (htab_t) data;
7293 struct tree_map *m;
7294 void **slot;
7295
7296 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7297
7298 m = XNEW (struct tree_map);
7299 m->hash = DECL_UID (decl);
7300 m->base.from = decl;
7301 m->to = create_artificial_label (UNKNOWN_LOCATION);
7302 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7303 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7304 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7305
7306 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7307 gcc_assert (*slot == NULL);
7308
7309 *slot = m;
7310
7311 return m->to;
7312 }
7313
7314 /* Tree walker to replace the decls used inside value expressions by
7315 duplicates. */
7316
7317 static tree
7318 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7319 {
7320 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7321
7322 switch (TREE_CODE (*tp))
7323 {
7324 case VAR_DECL:
7325 case PARM_DECL:
7326 case RESULT_DECL:
7327 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7328 break;
7329 default:
7330 break;
7331 }
7332
7333 if (IS_TYPE_OR_DECL_P (*tp))
7334 *walk_subtrees = false;
7335
7336 return NULL;
7337 }
7338
7339 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7340 subblocks. */
7341
7342 static void
7343 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7344 tree to_context)
7345 {
7346 tree *tp, t;
7347
7348 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7349 {
7350 t = *tp;
7351 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7352 continue;
7353 replace_by_duplicate_decl (&t, vars_map, to_context);
7354 if (t != *tp)
7355 {
7356 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7357 {
7358 tree x = DECL_VALUE_EXPR (*tp);
7359 struct replace_decls_d rd = { vars_map, to_context };
7360 	      x = unshare_expr (x);
7361 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7362 SET_DECL_VALUE_EXPR (t, x);
7363 DECL_HAS_VALUE_EXPR_P (t) = 1;
7364 }
7365 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7366 *tp = t;
7367 }
7368 }
7369
7370 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7371 replace_block_vars_by_duplicates (block, vars_map, to_context);
7372 }
7373
7374 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7375 from FN1 to FN2. */
7376
7377 static void
7378 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7379 class loop *loop)
7380 {
7381 /* Discard it from the old loop array. */
7382 (*get_loops (fn1))[loop->num] = NULL;
7383
7384 /* Place it in the new loop array, assigning it a new number. */
7385 loop->num = number_of_loops (fn2);
7386 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7387
7388 /* Recurse to children. */
7389 for (loop = loop->inner; loop; loop = loop->next)
7390 fixup_loop_arrays_after_move (fn1, fn2, loop);
7391 }
7392
7393 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7394 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7395
7396 DEBUG_FUNCTION void
7397 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7398 {
7399 basic_block bb;
7400 edge_iterator ei;
7401 edge e;
7402 bitmap bbs = BITMAP_ALLOC (NULL);
7403 int i;
7404
7405 gcc_assert (entry != NULL);
7406 gcc_assert (entry != exit);
7407 gcc_assert (bbs_p != NULL);
7408
7409 gcc_assert (bbs_p->length () > 0);
7410
7411 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7412 bitmap_set_bit (bbs, bb->index);
7413
7414 gcc_assert (bitmap_bit_p (bbs, entry->index));
7415 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7416
7417 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7418 {
7419 if (bb == entry)
7420 {
7421 gcc_assert (single_pred_p (entry));
7422 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7423 }
7424 else
7425 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7426 {
7427 e = ei_edge (ei);
7428 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7429 }
7430
7431 if (bb == exit)
7432 {
7433 gcc_assert (single_succ_p (exit));
7434 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7435 }
7436 else
7437 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7438 {
7439 e = ei_edge (ei);
7440 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7441 }
7442 }
7443
7444 BITMAP_FREE (bbs);
7445 }
7446
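/* In other words, a well-formed region has the shape

     pred -> ENTRY -> ... -> EXIT -> succ

   where ENTRY's single predecessor and EXIT's single successor lie
   outside BBS_P, and every other edge stays within the region.  */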
7447 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7448
7449 bool
7450 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7451 {
7452 bitmap release_names = (bitmap)data;
7453
7454 if (TREE_CODE (from) != SSA_NAME)
7455 return true;
7456
7457 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7458 return true;
7459 }
7460
7461 /* Return LOOP_DIST_ALIAS call if present in BB. */
7462
7463 static gimple *
7464 find_loop_dist_alias (basic_block bb)
7465 {
7466 gimple *g = last_stmt (bb);
7467 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7468 return NULL;
7469
7470 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7471 gsi_prev (&gsi);
7472 if (gsi_end_p (gsi))
7473 return NULL;
7474
7475 g = gsi_stmt (gsi);
7476 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7477 return g;
7478 return NULL;
7479 }
7480
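/* Schematically (with illustrative operand names), the shape matched
   above is the guard emitted by loop distribution:

     x_1 = .LOOP_DIST_ALIAS (orig_loop_num, runtime_check_value);
     if (x_1 != 0)
       goto <distributed copy>;
     else
       goto <original loop>;

   i.e. the internal call immediately preceding the block-ending
   GIMPLE_COND.  */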
7481 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7482    to VALUE and update any immediate uses of its LHS.  */
7483
7484 void
7485 fold_loop_internal_call (gimple *g, tree value)
7486 {
7487 tree lhs = gimple_call_lhs (g);
7488 use_operand_p use_p;
7489 imm_use_iterator iter;
7490 gimple *use_stmt;
7491 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7492
7493 update_call_from_tree (&gsi, value);
7494 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7495 {
7496 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7497 SET_USE (use_p, value);
7498 update_stmt (use_stmt);
7499 }
7500 }
7501
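/* For example (a rough sketch), once a decision is made for a
   versioned loop guarded by

     x_1 = .LOOP_VECTORIZED (1, 2);
     if (x_1 != 0) ...

   calling fold_loop_internal_call (g, boolean_false_node) replaces the
   call by the assignment "x_1 = 0" and rewrites every immediate use of
   x_1 to 0, making the guarding condition trivially foldable.  */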
7502 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7503 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7504 single basic block in the original CFG and the new basic block is
7505 returned. DEST_CFUN must not have a CFG yet.
7506
7507 Note that the region need not be a pure SESE region. Blocks inside
7508 the region may contain calls to abort/exit. The only restriction
7509 is that ENTRY_BB should be the only entry point and it must
7510 dominate EXIT_BB.
7511
7512 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7513    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7514 to the new function.
7515
7516 All local variables referenced in the region are assumed to be in
7517 the corresponding BLOCK_VARS and unexpanded variable lists
7518 associated with DEST_CFUN.
7519
7520 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7521 reimplement move_sese_region_to_fn by duplicating the region rather than
7522 moving it. */
7523
7524 basic_block
7525 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7526 basic_block exit_bb, tree orig_block)
7527 {
7528 vec<basic_block> bbs, dom_bbs;
7529 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7530 basic_block after, bb, *entry_pred, *exit_succ, abb;
7531 struct function *saved_cfun = cfun;
7532 int *entry_flag, *exit_flag;
7533 profile_probability *entry_prob, *exit_prob;
7534 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7535 edge e;
7536 edge_iterator ei;
7537 htab_t new_label_map;
7538 hash_map<void *, void *> *eh_map;
7539 class loop *loop = entry_bb->loop_father;
7540 class loop *loop0 = get_loop (saved_cfun, 0);
7541 struct move_stmt_d d;
7542
7543 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7544 region. */
7545 gcc_assert (entry_bb != exit_bb
7546 && (!exit_bb
7547 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7548
7549 /* Collect all the blocks in the region. Manually add ENTRY_BB
7550 because it won't be added by dfs_enumerate_from. */
7551 bbs.create (0);
7552 bbs.safe_push (entry_bb);
7553 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7554
7555 if (flag_checking)
7556 verify_sese (entry_bb, exit_bb, &bbs);
7557
7558 /* The blocks that used to be dominated by something in BBS will now be
7559 dominated by the new block. */
7560 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7561 bbs.address (),
7562 bbs.length ());
7563
7564 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7565 the predecessor edges to ENTRY_BB and the successor edges to
7566 EXIT_BB so that we can re-attach them to the new basic block that
7567 will replace the region. */
7568 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7569 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7570 entry_flag = XNEWVEC (int, num_entry_edges);
7571 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7572 i = 0;
7573 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7574 {
7575 entry_prob[i] = e->probability;
7576 entry_flag[i] = e->flags;
7577 entry_pred[i++] = e->src;
7578 remove_edge (e);
7579 }
7580
7581 if (exit_bb)
7582 {
7583 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7584 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7585 exit_flag = XNEWVEC (int, num_exit_edges);
7586 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7587 i = 0;
7588 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7589 {
7590 exit_prob[i] = e->probability;
7591 exit_flag[i] = e->flags;
7592 exit_succ[i++] = e->dest;
7593 remove_edge (e);
7594 }
7595 }
7596 else
7597 {
7598 num_exit_edges = 0;
7599 exit_succ = NULL;
7600 exit_flag = NULL;
7601 exit_prob = NULL;
7602 }
7603
7604 /* Switch context to the child function to initialize DEST_FN's CFG. */
7605 gcc_assert (dest_cfun->cfg == NULL);
7606 push_cfun (dest_cfun);
7607
7608 init_empty_tree_cfg ();
7609
7610 /* Initialize EH information for the new function. */
7611 eh_map = NULL;
7612 new_label_map = NULL;
7613 if (saved_cfun->eh)
7614 {
7615 eh_region region = NULL;
7616 bool all = false;
7617
7618 FOR_EACH_VEC_ELT (bbs, i, bb)
7619 {
7620 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7621 if (all)
7622 break;
7623 }
7624
7625 init_eh_for_function ();
7626 if (region != NULL || all)
7627 {
7628 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7629 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7630 new_label_mapper, new_label_map);
7631 }
7632 }
7633
7634 /* Initialize an empty loop tree. */
7635 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7636 init_loops_structure (dest_cfun, loops, 1);
7637 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7638 set_loops_for_fn (dest_cfun, loops);
7639
7640 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7641
7642 /* Move the outlined loop tree part. */
7643 num_nodes = bbs.length ();
7644 FOR_EACH_VEC_ELT (bbs, i, bb)
7645 {
7646 if (bb->loop_father->header == bb)
7647 {
7648 class loop *this_loop = bb->loop_father;
7649 class loop *outer = loop_outer (this_loop);
7650 if (outer == loop
7651 /* If the SESE region contains some bbs ending with
7652 a noreturn call, those are considered to belong
7653 to the outermost loop in saved_cfun, rather than
7654 the entry_bb's loop_father. */
7655 || outer == loop0)
7656 {
7657 if (outer != loop)
7658 num_nodes -= this_loop->num_nodes;
7659 flow_loop_tree_node_remove (bb->loop_father);
7660 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7661 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7662 }
7663 }
7664 else if (bb->loop_father == loop0 && loop0 != loop)
7665 num_nodes--;
7666
7667 /* Remove loop exits from the outlined region. */
7668 if (loops_for_fn (saved_cfun)->exits)
7669 FOR_EACH_EDGE (e, ei, bb->succs)
7670 {
7671 struct loops *l = loops_for_fn (saved_cfun);
7672 loop_exit **slot
7673 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7674 NO_INSERT);
7675 if (slot)
7676 l->exits->clear_slot (slot);
7677 }
7678 }
7679
7680 /* Adjust the number of blocks in the tree root of the outlined part. */
7681 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7682
7683 /* Setup a mapping to be used by move_block_to_fn. */
7684 loop->aux = current_loops->tree_root;
7685 loop0->aux = current_loops->tree_root;
7686
7687   /* Fix up orig_loop_num.  If the loop referenced in it has been moved
7688 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7689 class loop *dloop;
7690 signed char *moved_orig_loop_num = NULL;
7691 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7692 if (dloop->orig_loop_num)
7693 {
7694 if (moved_orig_loop_num == NULL)
7695 moved_orig_loop_num
7696 = XCNEWVEC (signed char, vec_safe_length (larray));
7697 if ((*larray)[dloop->orig_loop_num] != NULL
7698 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7699 {
7700 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7701 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7702 moved_orig_loop_num[dloop->orig_loop_num]++;
7703 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7704 }
7705 else
7706 {
7707 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7708 dloop->orig_loop_num = 0;
7709 }
7710 }
7711 pop_cfun ();
7712
7713 if (moved_orig_loop_num)
7714 {
7715 FOR_EACH_VEC_ELT (bbs, i, bb)
7716 {
7717 gimple *g = find_loop_dist_alias (bb);
7718 if (g == NULL)
7719 continue;
7720
7721 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7722 gcc_assert (orig_loop_num
7723 && (unsigned) orig_loop_num < vec_safe_length (larray));
7724 if (moved_orig_loop_num[orig_loop_num] == 2)
7725 {
7726 /* If we have moved both loops with this orig_loop_num into
7727 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7728 too, update the first argument. */
7729 gcc_assert ((*larray)[orig_loop_num] != NULL
7730 && (get_loop (saved_cfun, orig_loop_num)
7731 == NULL));
7732 tree t = build_int_cst (integer_type_node,
7733 (*larray)[orig_loop_num]->num);
7734 gimple_call_set_arg (g, 0, t);
7735 update_stmt (g);
7736 /* Make sure the following loop will not update it. */
7737 moved_orig_loop_num[orig_loop_num] = 0;
7738 }
7739 else
7740 /* Otherwise at least one of the loops stayed in saved_cfun.
7741 Remove the LOOP_DIST_ALIAS call. */
7742 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7743 }
7744 FOR_EACH_BB_FN (bb, saved_cfun)
7745 {
7746 gimple *g = find_loop_dist_alias (bb);
7747 if (g == NULL)
7748 continue;
7749 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7750 gcc_assert (orig_loop_num
7751 && (unsigned) orig_loop_num < vec_safe_length (larray));
7752 if (moved_orig_loop_num[orig_loop_num])
7753 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7754 of the corresponding loops was moved, remove it. */
7755 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7756 }
7757 XDELETEVEC (moved_orig_loop_num);
7758 }
7759 ggc_free (larray);
7760
7761 /* Move blocks from BBS into DEST_CFUN. */
7762 gcc_assert (bbs.length () >= 2);
7763 after = dest_cfun->cfg->x_entry_block_ptr;
7764 hash_map<tree, tree> vars_map;
7765
7766 memset (&d, 0, sizeof (d));
7767 d.orig_block = orig_block;
7768 d.new_block = DECL_INITIAL (dest_cfun->decl);
7769 d.from_context = cfun->decl;
7770 d.to_context = dest_cfun->decl;
7771 d.vars_map = &vars_map;
7772 d.new_label_map = new_label_map;
7773 d.eh_map = eh_map;
7774 d.remap_decls_p = true;
7775
7776 if (gimple_in_ssa_p (cfun))
7777 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7778 {
7779 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7780 set_ssa_default_def (dest_cfun, arg, narg);
7781 vars_map.put (arg, narg);
7782 }
7783
7784 FOR_EACH_VEC_ELT (bbs, i, bb)
7785 {
7786 /* No need to update edge counts on the last block. It has
7787 already been updated earlier when we detached the region from
7788 the original CFG. */
7789 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7790 after = bb;
7791 }
7792
7793 /* Adjust the maximum clique used. */
7794 dest_cfun->last_clique = saved_cfun->last_clique;
7795
7796 loop->aux = NULL;
7797 loop0->aux = NULL;
7798 /* Loop sizes are no longer correct; fix them up. */
7799 loop->num_nodes -= num_nodes;
7800 for (class loop *outer = loop_outer (loop);
7801 outer; outer = loop_outer (outer))
7802 outer->num_nodes -= num_nodes;
7803 loop0->num_nodes -= bbs.length () - num_nodes;
7804
7805 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7806 {
7807 class loop *aloop;
7808 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7809 if (aloop != NULL)
7810 {
7811 if (aloop->simduid)
7812 {
7813 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7814 d.to_context);
7815 dest_cfun->has_simduid_loops = true;
7816 }
7817 if (aloop->force_vectorize)
7818 dest_cfun->has_force_vectorize_loops = true;
7819 }
7820 }
7821
7822 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7823 if (orig_block)
7824 {
7825 tree block;
7826 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7827 == NULL_TREE);
7828 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7829 = BLOCK_SUBBLOCKS (orig_block);
7830 for (block = BLOCK_SUBBLOCKS (orig_block);
7831 block; block = BLOCK_CHAIN (block))
7832 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7833 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7834 }
7835
7836 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7837 &vars_map, dest_cfun->decl);
7838
7839 if (new_label_map)
7840 htab_delete (new_label_map);
7841 if (eh_map)
7842 delete eh_map;
7843
7844 if (gimple_in_ssa_p (cfun))
7845 {
7846 /* We need to release ssa-names in a defined order, so first find them,
7847 and then iterate in ascending version order. */
7848 bitmap release_names = BITMAP_ALLOC (NULL);
7849 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7850 bitmap_iterator bi;
7851 unsigned i;
7852 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7853 release_ssa_name (ssa_name (i));
7854 BITMAP_FREE (release_names);
7855 }
7856
7857 /* Rewire the entry and exit blocks. The successor to the entry
7858 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7859 the child function. Similarly, the predecessor of DEST_FN's
7860 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7861 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7862 various CFG manipulation functions get to the right CFG.
7863
7864 FIXME, this is silly. The CFG ought to become a parameter to
7865 these helpers. */
7866 push_cfun (dest_cfun);
7867 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7868 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7869 if (exit_bb)
7870 {
7871 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7872 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7873 }
7874 else
7875 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7876 pop_cfun ();
7877
7878 /* Back in the original function, the SESE region has disappeared;
7879 create a new basic block in its place. */
7880 bb = create_empty_bb (entry_pred[0]);
7881 if (current_loops)
7882 add_bb_to_loop (bb, loop);
7883 for (i = 0; i < num_entry_edges; i++)
7884 {
7885 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7886 e->probability = entry_prob[i];
7887 }
7888
7889 for (i = 0; i < num_exit_edges; i++)
7890 {
7891 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7892 e->probability = exit_prob[i];
7893 }
7894
7895 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7896 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7897 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7898 dom_bbs.release ();
7899
7900 if (exit_bb)
7901 {
7902 free (exit_prob);
7903 free (exit_flag);
7904 free (exit_succ);
7905 }
7906 free (entry_prob);
7907 free (entry_flag);
7908 free (entry_pred);
7909 bbs.release ();
7910
7911 return bb;
7912 }
7913
7914 /* Dump default def DEF to file FILE using FLAGS and indentation
7915 SPC. */
7916
7917 static void
7918 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7919 {
7920 for (int i = 0; i < spc; ++i)
7921 fprintf (file, " ");
7922 dump_ssaname_info_to_file (file, def, spc);
7923
7924 print_generic_expr (file, TREE_TYPE (def), flags);
7925 fprintf (file, " ");
7926 print_generic_expr (file, def, flags);
7927 fprintf (file, " = ");
7928 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7929 fprintf (file, ";\n");
7930 }
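
/* For example (illustrative, not from the sources): for a parameter
   "int i" whose default definition is the SSA name i_1(D), the line
   printed by the function above would look roughly like

     int i_1(D) = i;

   with any SSA range or points-to info emitted first by
   dump_ssaname_info_to_file. */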
7931
7932 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7933
7934 static void
7935 print_no_sanitize_attr_value (FILE *file, tree value)
7936 {
7937 unsigned int flags = tree_to_uhwi (value);
7938 bool first = true;
7939 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7940 {
7941 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7942 {
7943 if (!first)
7944 fprintf (file, " | ");
7945 fprintf (file, "%s", sanitizer_opts[i].name);
7946 first = false;
7947 }
7948 }
7949 }
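
/* As an illustration, for a no_sanitize attribute value with the bits
   for the address and shift sanitizers set, the function above prints
   the matching option names separated by " | ", e.g. roughly
   "address | shift"; the exact names and their order come from the
   sanitizer_opts table. */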
7950
7951 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7952 dumpfile.h). */
7953
7954 void
7955 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7956 {
7957 tree arg, var, old_current_fndecl = current_function_decl;
7958 struct function *dsf;
7959 bool ignore_topmost_bind = false, any_var = false;
7960 basic_block bb;
7961 tree chain;
7962 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7963 && decl_is_tm_clone (fndecl));
7964 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7965
7966 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7967 {
7968 fprintf (file, "__attribute__((");
7969
7970 bool first = true;
7971 tree chain;
7972 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7973 first = false, chain = TREE_CHAIN (chain))
7974 {
7975 if (!first)
7976 fprintf (file, ", ");
7977
7978 tree name = get_attribute_name (chain);
7979 print_generic_expr (file, name, dump_flags);
7980 if (TREE_VALUE (chain) != NULL_TREE)
7981 {
7982 fprintf (file, " (");
7983
7984 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7985 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7986 else
7987 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7988 fprintf (file, ")");
7989 }
7990 }
7991
7992 fprintf (file, "))\n");
7993 }
7994
7995 current_function_decl = fndecl;
7996 if (flags & TDF_GIMPLE)
7997 {
7998 static bool hotness_bb_param_printed = false;
7999 if (profile_info != NULL
8000 && !hotness_bb_param_printed)
8001 {
8002 hotness_bb_param_printed = true;
8003 fprintf (file,
8004 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8005 " */\n", get_hot_bb_threshold ());
8006 }
8007
8008 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8009 dump_flags | TDF_SLIM);
8010 fprintf (file, " __GIMPLE (%s",
8011 (fun->curr_properties & PROP_ssa) ? "ssa"
8012 : (fun->curr_properties & PROP_cfg) ? "cfg"
8013 : "");
8014
8015 if (cfun->cfg)
8016 {
8017 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8018 if (bb->count.initialized_p ())
8019 fprintf (file, ",%s(%d)",
8020 profile_quality_as_string (bb->count.quality ()),
8021 bb->count.value ());
8022 fprintf (file, ")\n%s (", function_name (fun));
8023 }
8024 }
8025 else
8026 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
8027
8028 arg = DECL_ARGUMENTS (fndecl);
8029 while (arg)
8030 {
8031 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8032 fprintf (file, " ");
8033 print_generic_expr (file, arg, dump_flags);
8034 if (DECL_CHAIN (arg))
8035 fprintf (file, ", ");
8036 arg = DECL_CHAIN (arg);
8037 }
8038 fprintf (file, ")\n");
8039
8040 dsf = DECL_STRUCT_FUNCTION (fndecl);
8041 if (dsf && (flags & TDF_EH))
8042 dump_eh_tree (file, dsf);
8043
8044 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8045 {
8046 dump_node (fndecl, TDF_SLIM | flags, file);
8047 current_function_decl = old_current_fndecl;
8048 return;
8049 }
8050
8051 /* When GIMPLE is lowered, the variables are no longer available in
8052 BIND_EXPRs, so display them separately. */
8053 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8054 {
8055 unsigned ix;
8056 ignore_topmost_bind = true;
8057
8058 fprintf (file, "{\n");
8059 if (gimple_in_ssa_p (fun)
8060 && (flags & TDF_ALIAS))
8061 {
8062 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8063 arg = DECL_CHAIN (arg))
8064 {
8065 tree def = ssa_default_def (fun, arg);
8066 if (def)
8067 dump_default_def (file, def, 2, flags);
8068 }
8069
8070 tree res = DECL_RESULT (fun->decl);
8071 if (res != NULL_TREE
8072 && DECL_BY_REFERENCE (res))
8073 {
8074 tree def = ssa_default_def (fun, res);
8075 if (def)
8076 dump_default_def (file, def, 2, flags);
8077 }
8078
8079 tree static_chain = fun->static_chain_decl;
8080 if (static_chain != NULL_TREE)
8081 {
8082 tree def = ssa_default_def (fun, static_chain);
8083 if (def)
8084 dump_default_def (file, def, 2, flags);
8085 }
8086 }
8087
8088 if (!vec_safe_is_empty (fun->local_decls))
8089 FOR_EACH_LOCAL_DECL (fun, ix, var)
8090 {
8091 print_generic_decl (file, var, flags);
8092 fprintf (file, "\n");
8093
8094 any_var = true;
8095 }
8096
8097 tree name;
8098
8099 if (gimple_in_ssa_p (cfun))
8100 FOR_EACH_SSA_NAME (ix, name, cfun)
8101 {
8102 if (!SSA_NAME_VAR (name))
8103 {
8104 fprintf (file, " ");
8105 print_generic_expr (file, TREE_TYPE (name), flags);
8106 fprintf (file, " ");
8107 print_generic_expr (file, name, flags);
8108 fprintf (file, ";\n");
8109
8110 any_var = true;
8111 }
8112 }
8113 }
8114
8115 if (fun && fun->decl == fndecl
8116 && fun->cfg
8117 && basic_block_info_for_fn (fun))
8118 {
8119 /* If the CFG has been built, emit a CFG-based dump. */
8120 if (!ignore_topmost_bind)
8121 fprintf (file, "{\n");
8122
8123 if (any_var && n_basic_blocks_for_fn (fun))
8124 fprintf (file, "\n");
8125
8126 FOR_EACH_BB_FN (bb, fun)
8127 dump_bb (file, bb, 2, flags);
8128
8129 fprintf (file, "}\n");
8130 }
8131 else if (fun->curr_properties & PROP_gimple_any)
8132 {
8133 /* The function is now in GIMPLE form but the CFG has not been
8134 built yet. Emit the single sequence of GIMPLE statements
8135 that make up its body. */
8136 gimple_seq body = gimple_body (fndecl);
8137
8138 if (gimple_seq_first_stmt (body)
8139 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8140 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8141 print_gimple_seq (file, body, 0, flags);
8142 else
8143 {
8144 if (!ignore_topmost_bind)
8145 fprintf (file, "{\n");
8146
8147 if (any_var)
8148 fprintf (file, "\n");
8149
8150 print_gimple_seq (file, body, 2, flags);
8151 fprintf (file, "}\n");
8152 }
8153 }
8154 else
8155 {
8156 int indent;
8157
8158 /* Make a tree based dump. */
8159 chain = DECL_SAVED_TREE (fndecl);
8160 if (chain && TREE_CODE (chain) == BIND_EXPR)
8161 {
8162 if (ignore_topmost_bind)
8163 {
8164 chain = BIND_EXPR_BODY (chain);
8165 indent = 2;
8166 }
8167 else
8168 indent = 0;
8169 }
8170 else
8171 {
8172 if (!ignore_topmost_bind)
8173 {
8174 fprintf (file, "{\n");
8175 /* No topmost bind, pretend it's ignored for later. */
8176 ignore_topmost_bind = true;
8177 }
8178 indent = 2;
8179 }
8180
8181 if (any_var)
8182 fprintf (file, "\n");
8183
8184 print_generic_stmt_indented (file, chain, flags, indent);
8185 if (ignore_topmost_bind)
8186 fprintf (file, "}\n");
8187 }
8188
8189 if (flags & TDF_ENUMERATE_LOCALS)
8190 dump_enumerated_decls (file, flags);
8191 fprintf (file, "\n\n");
8192
8193 current_function_decl = old_current_fndecl;
8194 }
8195
8196 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8197
8198 DEBUG_FUNCTION void
8199 debug_function (tree fn, dump_flags_t flags)
8200 {
8201 dump_function_to_file (fn, stderr, flags);
8202 }
8203
8204
8205 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8206
8207 static void
8208 print_pred_bbs (FILE *file, basic_block bb)
8209 {
8210 edge e;
8211 edge_iterator ei;
8212
8213 FOR_EACH_EDGE (e, ei, bb->preds)
8214 fprintf (file, "bb_%d ", e->src->index);
8215 }
8216
8217
8218 /* Print on FILE the indexes for the successors of basic_block BB. */
8219
8220 static void
8221 print_succ_bbs (FILE *file, basic_block bb)
8222 {
8223 edge e;
8224 edge_iterator ei;
8225
8226 FOR_EACH_EDGE (e, ei, bb->succs)
8227 fprintf (file, "bb_%d ", e->dest->index);
8228 }
8229
8230 /* Print basic block BB to FILE according to the VERBOSITY level. */
8231
8232 void
8233 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8234 {
8235 char *s_indent = (char *) alloca ((size_t) indent + 1);
8236 memset ((void *) s_indent, ' ', (size_t) indent);
8237 s_indent[indent] = '\0';
8238
8239 /* Print basic_block's header. */
8240 if (verbosity >= 2)
8241 {
8242 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8243 print_pred_bbs (file, bb);
8244 fprintf (file, "}, succs = {");
8245 print_succ_bbs (file, bb);
8246 fprintf (file, "})\n");
8247 }
8248
8249 /* Print basic_block's body. */
8250 if (verbosity >= 3)
8251 {
8252 fprintf (file, "%s {\n", s_indent);
8253 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8254 fprintf (file, "%s }\n", s_indent);
8255 }
8256 }
8257
8258 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8259
8260 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
8261 the VERBOSITY level, this outputs the contents of the loop, or just
8262 its structure. */
8263
8264 static void
8265 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8266 {
8267 char *s_indent;
8268 basic_block bb;
8269
8270 if (loop == NULL)
8271 return;
8272
8273 s_indent = (char *) alloca ((size_t) indent + 1);
8274 memset ((void *) s_indent, ' ', (size_t) indent);
8275 s_indent[indent] = '\0';
8276
8277 /* Print loop's header. */
8278 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8279 if (loop->header)
8280 fprintf (file, "header = %d", loop->header->index);
8281 else
8282 {
8283 fprintf (file, "deleted)\n");
8284 return;
8285 }
8286 if (loop->latch)
8287 fprintf (file, ", latch = %d", loop->latch->index);
8288 else
8289 fprintf (file, ", multiple latches");
8290 fprintf (file, ", niter = ");
8291 print_generic_expr (file, loop->nb_iterations);
8292
8293 if (loop->any_upper_bound)
8294 {
8295 fprintf (file, ", upper_bound = ");
8296 print_decu (loop->nb_iterations_upper_bound, file);
8297 }
8298 if (loop->any_likely_upper_bound)
8299 {
8300 fprintf (file, ", likely_upper_bound = ");
8301 print_decu (loop->nb_iterations_likely_upper_bound, file);
8302 }
8303
8304 if (loop->any_estimate)
8305 {
8306 fprintf (file, ", estimate = ");
8307 print_decu (loop->nb_iterations_estimate, file);
8308 }
8309 if (loop->unroll)
8310 fprintf (file, ", unroll = %d", loop->unroll);
8311 fprintf (file, ")\n");
8312
8313 /* Print loop's body. */
8314 if (verbosity >= 1)
8315 {
8316 fprintf (file, "%s{\n", s_indent);
8317 FOR_EACH_BB_FN (bb, cfun)
8318 if (bb->loop_father == loop)
8319 print_loops_bb (file, bb, indent, verbosity);
8320
8321 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8322 fprintf (file, "%s}\n", s_indent);
8323 }
8324 }
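
/* A header line printed by print_loop might look like (values
   illustrative):

     loop_2 (header = 3, latch = 5, niter = , upper_bound = 15, unroll = 4)

   where the upper_bound, likely_upper_bound, estimate and unroll
   clauses appear only when the corresponding data has been recorded. */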
8325
8326 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8327 spaces. Depending on the VERBOSITY level, this outputs the contents
8328 of the loop, or just its structure. */
8329
8330 static void
8331 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8332 int verbosity)
8333 {
8334 if (loop == NULL)
8335 return;
8336
8337 print_loop (file, loop, indent, verbosity);
8338 print_loop_and_siblings (file, loop->next, indent, verbosity);
8339 }
8340
8341 /* Follow a CFG edge from the entry point of the program, and on entry
8342 of a loop, pretty print the loop structure on FILE. */
8343
8344 void
8345 print_loops (FILE *file, int verbosity)
8346 {
8347 basic_block bb;
8348
8349 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8350 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8351 if (bb && bb->loop_father)
8352 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8353 }
8354
8355 /* Dump a loop. */
8356
8357 DEBUG_FUNCTION void
8358 debug (class loop &ref)
8359 {
8360 print_loop (stderr, &ref, 0, /*verbosity*/0);
8361 }
8362
8363 DEBUG_FUNCTION void
8364 debug (class loop *ptr)
8365 {
8366 if (ptr)
8367 debug (*ptr);
8368 else
8369 fprintf (stderr, "<nil>\n");
8370 }
8371
8372 /* Dump a loop verbosely. */
8373
8374 DEBUG_FUNCTION void
8375 debug_verbose (class loop &ref)
8376 {
8377 print_loop (stderr, &ref, 0, /*verbosity*/3);
8378 }
8379
8380 DEBUG_FUNCTION void
8381 debug_verbose (class loop *ptr)
8382 {
8383 if (ptr)
8384 debug_verbose (*ptr);
8385 else
8386 fprintf (stderr, "<nil>\n");
8387 }
8388
8389
8390 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8391
8392 DEBUG_FUNCTION void
8393 debug_loops (int verbosity)
8394 {
8395 print_loops (stderr, verbosity);
8396 }
8397
8398 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8399
8400 DEBUG_FUNCTION void
8401 debug_loop (class loop *loop, int verbosity)
8402 {
8403 print_loop (stderr, loop, 0, verbosity);
8404 }
8405
8406 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8407 level. */
8408
8409 DEBUG_FUNCTION void
8410 debug_loop_num (unsigned num, int verbosity)
8411 {
8412 debug_loop (get_loop (cfun, num), verbosity);
8413 }
8414
8415 /* Return true if BB ends with a call, possibly followed by some
8416 instructions that must stay with the call. Return false
8417 otherwise. */
8418
8419 static bool
8420 gimple_block_ends_with_call_p (basic_block bb)
8421 {
8422 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8423 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8424 }
8425
8426
8427 /* Return true if BB ends with a conditional branch. Return false
8428 otherwise. */
8429
8430 static bool
8431 gimple_block_ends_with_condjump_p (const_basic_block bb)
8432 {
8433 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8434 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8435 }
8436
8437
8438 /* Return true if statement T may terminate execution of BB in ways not
8439 explicitly represented in the CFG. */
8440
8441 bool
8442 stmt_can_terminate_bb_p (gimple *t)
8443 {
8444 tree fndecl = NULL_TREE;
8445 int call_flags = 0;
8446
8447 /* An EH exception that is not handled internally terminates execution
8448 of the whole function. */
8449 if (stmt_can_throw_external (cfun, t))
8450 return true;
8451
8452 /* NORETURN and LONGJMP calls already have an edge to exit.
8453 CONST and PURE calls do not need one.
8454 We don't currently check for CONST and PURE here, although
8455 it would be a good idea, because those attributes are
8456 figured out from the RTL in mark_constant_function, and
8457 the counter incrementation code from -fprofile-arcs
8458 leads to different results from -fbranch-probabilities. */
8459 if (is_gimple_call (t))
8460 {
8461 fndecl = gimple_call_fndecl (t);
8462 call_flags = gimple_call_flags (t);
8463 }
8464
8465 if (is_gimple_call (t)
8466 && fndecl
8467 && fndecl_built_in_p (fndecl)
8468 && (call_flags & ECF_NOTHROW)
8469 && !(call_flags & ECF_RETURNS_TWICE)
8470 /* fork() doesn't really return twice, but the effect of
8471 wrapping it in __gcov_fork() which calls __gcov_dump() and
8472 __gcov_reset() and clears the counters before forking has the same
8473 effect as returning twice. Force a fake edge. */
8474 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8475 return false;
8476
8477 if (is_gimple_call (t))
8478 {
8479 edge_iterator ei;
8480 edge e;
8481 basic_block bb;
8482
8483 if (call_flags & (ECF_PURE | ECF_CONST)
8484 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8485 return false;
8486
8487 /* A function call may do a longjmp, terminate the program or do other
8488 things. Special-case noreturn calls that have non-abnormal edges out,
8489 as in that case the fact is sufficiently represented by the lack of edges out of T. */
8490 if (!(call_flags & ECF_NORETURN))
8491 return true;
8492
8493 bb = gimple_bb (t);
8494 FOR_EACH_EDGE (e, ei, bb->succs)
8495 if ((e->flags & EDGE_FAKE) == 0)
8496 return true;
8497 }
8498
8499 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8500 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8501 return true;
8502
8503 return false;
8504 }
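
/* Illustration (not from the sources): a call to a const nothrow
   builtin such as __builtin_popcount cannot terminate its block, so
   stmt_can_terminate_bb_p returns false for it; a call to an unknown
   external function yields true, since it may longjmp, call exit or
   otherwise never return. */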
8505
8506
8507 /* Add fake edges to the function exit for any non-constant and
8508 non-noreturn calls (or noreturn calls with EH/abnormal edges) and
8509 volatile inline assembly in the bitmap of blocks specified by BLOCKS,
8510 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8511 that were split.
8512
8513 The goal is to expose cases in which entering a basic block does
8514 not imply that all subsequent instructions must be executed. */
8515
8516 static int
8517 gimple_flow_call_edges_add (sbitmap blocks)
8518 {
8519 int i;
8520 int blocks_split = 0;
8521 int last_bb = last_basic_block_for_fn (cfun);
8522 bool check_last_block = false;
8523
8524 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8525 return 0;
8526
8527 if (! blocks)
8528 check_last_block = true;
8529 else
8530 check_last_block = bitmap_bit_p (blocks,
8531 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8532
8533 /* In the last basic block, before epilogue generation, there will be
8534 a fallthru edge to EXIT. Special care is required if the last insn
8535 of the last basic block is a call because make_edge folds duplicate
8536 edges, which would result in the fallthru edge also being marked
8537 fake, which would result in the fallthru edge being removed by
8538 remove_fake_edges, which would result in an invalid CFG.
8539
8540 Moreover, we can't elide the outgoing fake edge, since the block
8541 profiler needs to take this into account in order to solve the minimal
8542 spanning tree in the case that the call doesn't return.
8543
8544 Handle this by adding a dummy instruction in a new last basic block. */
8545 if (check_last_block)
8546 {
8547 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8548 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8549 gimple *t = NULL;
8550
8551 if (!gsi_end_p (gsi))
8552 t = gsi_stmt (gsi);
8553
8554 if (t && stmt_can_terminate_bb_p (t))
8555 {
8556 edge e;
8557
8558 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8559 if (e)
8560 {
8561 gsi_insert_on_edge (e, gimple_build_nop ());
8562 gsi_commit_edge_inserts ();
8563 }
8564 }
8565 }
8566
8567 /* Now add fake edges to the function exit for any non-constant
8568 calls, since there is no way that we can determine whether they
8569 will return or not... */
8570 for (i = 0; i < last_bb; i++)
8571 {
8572 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8573 gimple_stmt_iterator gsi;
8574 gimple *stmt, *last_stmt;
8575
8576 if (!bb)
8577 continue;
8578
8579 if (blocks && !bitmap_bit_p (blocks, i))
8580 continue;
8581
8582 gsi = gsi_last_nondebug_bb (bb);
8583 if (!gsi_end_p (gsi))
8584 {
8585 last_stmt = gsi_stmt (gsi);
8586 do
8587 {
8588 stmt = gsi_stmt (gsi);
8589 if (stmt_can_terminate_bb_p (stmt))
8590 {
8591 edge e;
8592
8593 /* The handling above of the final block before the
8594 epilogue should be enough to verify that there is
8595 no edge to the exit block in CFG already.
8596 Calling make_edge in such case would cause us to
8597 mark that edge as fake and remove it later. */
8598 if (flag_checking && stmt == last_stmt)
8599 {
8600 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8601 gcc_assert (e == NULL);
8602 }
8603
8604 /* Note that the following may create a new basic block
8605 and renumber the existing basic blocks. */
8606 if (stmt != last_stmt)
8607 {
8608 e = split_block (bb, stmt);
8609 if (e)
8610 blocks_split++;
8611 }
8612 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8613 e->probability = profile_probability::guessed_never ();
8614 }
8615 gsi_prev (&gsi);
8616 }
8617 while (!gsi_end_p (gsi));
8618 }
8619 }
8620
8621 if (blocks_split)
8622 checking_verify_flow_info ();
8623
8624 return blocks_split;
8625 }
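
/* Sketch of the transformation above (illustrative): given a block

     x_1 = foo ();
     y_2 = x_1 + 1;

   where foo may not return, the block is split after the call and an
   EDGE_FAKE edge with guessed-never probability is added from the
   first half to EXIT, so the profiler's spanning-tree computation
   accounts for executions in which foo never returns. */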
8626
8627 /* Removes edge E and all the blocks dominated by it, and updates dominance
8628 information. The IL in E->src needs to be updated separately.
8629 If dominance info is not available, only the edge E is removed. */
8630
8631 void
8632 remove_edge_and_dominated_blocks (edge e)
8633 {
8634 vec<basic_block> bbs_to_remove = vNULL;
8635 vec<basic_block> bbs_to_fix_dom = vNULL;
8636 edge f;
8637 edge_iterator ei;
8638 bool none_removed = false;
8639 unsigned i;
8640 basic_block bb, dbb;
8641 bitmap_iterator bi;
8642
8643 /* If we are removing a path inside a non-root loop, that may change
8644 loop ownership of blocks or remove loops; mark loops for fixup. */
8645 if (current_loops
8646 && loop_outer (e->src->loop_father) != NULL
8647 && e->src->loop_father == e->dest->loop_father)
8648 loops_state_set (LOOPS_NEED_FIXUP);
8649
8650 if (!dom_info_available_p (CDI_DOMINATORS))
8651 {
8652 remove_edge (e);
8653 return;
8654 }
8655
8656 /* No updating is needed for edges to exit. */
8657 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8658 {
8659 if (cfgcleanup_altered_bbs)
8660 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8661 remove_edge (e);
8662 return;
8663 }
8664
8665 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8666 that is not dominated by E->dest, then this set is empty. Otherwise,
8667 all the basic blocks dominated by E->dest are removed.
8668
8669 Also, to DF_IDOM we store the immediate dominators of the blocks in
8670 the dominance frontier of E (i.e., of the successors of the
8671 removed blocks, if there are any, and of E->dest otherwise). */
8672 FOR_EACH_EDGE (f, ei, e->dest->preds)
8673 {
8674 if (f == e)
8675 continue;
8676
8677 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8678 {
8679 none_removed = true;
8680 break;
8681 }
8682 }
8683
8684 auto_bitmap df, df_idom;
8685 if (none_removed)
8686 bitmap_set_bit (df_idom,
8687 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8688 else
8689 {
8690 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8691 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8692 {
8693 FOR_EACH_EDGE (f, ei, bb->succs)
8694 {
8695 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8696 bitmap_set_bit (df, f->dest->index);
8697 }
8698 }
8699 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8700 bitmap_clear_bit (df, bb->index);
8701
8702 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8703 {
8704 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8705 bitmap_set_bit (df_idom,
8706 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8707 }
8708 }
8709
8710 if (cfgcleanup_altered_bbs)
8711 {
8712 /* Record the set of the altered basic blocks. */
8713 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8714 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8715 }
8716
8717 /* Remove E and the cancelled blocks. */
8718 if (none_removed)
8719 remove_edge (e);
8720 else
8721 {
8722 /* Walk backwards so as to get a chance to substitute all
8723 released DEFs into debug stmts. See
8724 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8725 details. */
8726 for (i = bbs_to_remove.length (); i-- > 0; )
8727 delete_basic_block (bbs_to_remove[i]);
8728 }
8729
8730 /* Update the dominance information. The immediate dominator may change only
8731 for blocks whose immediate dominator belongs to DF_IDOM:
8732
8733 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8734 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8735 Z dominates X after the removal. Before removal, there exists a path P
8736 from Y to X that avoids Z. Let F be the last edge on P that is
8737 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8738 dominates W, and because of P, Z does not dominate W), and W belongs to
8739 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8740 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8741 {
8742 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8743 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8744 dbb;
8745 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8746 bbs_to_fix_dom.safe_push (dbb);
8747 }
8748
8749 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8750
8751 bbs_to_remove.release ();
8752 bbs_to_fix_dom.release ();
8753 }
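
/* Worked example (illustrative): in a diamond A -> {B, C} -> D where B
   has no predecessor other than A, removing the edge A->B deletes B
   itself, the dominance frontier bitmap DF ends up containing D, and
   DF_IDOM contains idom(D) = A, so only the dominator-tree children of
   A need to be re-examined by iterate_fix_dominators. */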
8754
8755 /* Purge dead EH edges from basic block BB. */
8756
8757 bool
8758 gimple_purge_dead_eh_edges (basic_block bb)
8759 {
8760 bool changed = false;
8761 edge e;
8762 edge_iterator ei;
8763 gimple *stmt = last_stmt (bb);
8764
8765 if (stmt && stmt_can_throw_internal (cfun, stmt))
8766 return false;
8767
8768 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8769 {
8770 if (e->flags & EDGE_EH)
8771 {
8772 remove_edge_and_dominated_blocks (e);
8773 changed = true;
8774 }
8775 else
8776 ei_next (&ei);
8777 }
8778
8779 return changed;
8780 }
8781
8782 /* Purge dead EH edges from basic block listed in BLOCKS. */
8783
8784 bool
8785 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8786 {
8787 bool changed = false;
8788 unsigned i;
8789 bitmap_iterator bi;
8790
8791 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8792 {
8793 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8794
8795 /* Earlier gimple_purge_dead_eh_edges could have removed
8796 this basic block already. */
8797 gcc_assert (bb || changed);
8798 if (bb != NULL)
8799 changed |= gimple_purge_dead_eh_edges (bb);
8800 }
8801
8802 return changed;
8803 }
8804
8805 /* Purge dead abnormal call edges from basic block BB. */
8806
8807 bool
8808 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8809 {
8810 bool changed = false;
8811 edge e;
8812 edge_iterator ei;
8813 gimple *stmt = last_stmt (bb);
8814
8815 if (!cfun->has_nonlocal_label
8816 && !cfun->calls_setjmp)
8817 return false;
8818
8819 if (stmt && stmt_can_make_abnormal_goto (stmt))
8820 return false;
8821
8822 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8823 {
8824 if (e->flags & EDGE_ABNORMAL)
8825 {
8826 if (e->flags & EDGE_FALLTHRU)
8827 e->flags &= ~EDGE_ABNORMAL;
8828 else
8829 remove_edge_and_dominated_blocks (e);
8830 changed = true;
8831 }
8832 else
8833 ei_next (&ei);
8834 }
8835
8836 return changed;
8837 }
8838
8839 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8840
8841 bool
8842 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8843 {
8844 bool changed = false;
8845 unsigned i;
8846 bitmap_iterator bi;
8847
8848 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8849 {
8850 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8851
8852 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8853 this basic block already. */
8854 gcc_assert (bb || changed);
8855 if (bb != NULL)
8856 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8857 }
8858
8859 return changed;
8860 }
8861
8862 /* This function is called whenever a new edge is created or
8863 redirected. */
8864
8865 static void
8866 gimple_execute_on_growing_pred (edge e)
8867 {
8868 basic_block bb = e->dest;
8869
8870 if (!gimple_seq_empty_p (phi_nodes (bb)))
8871 reserve_phi_args_for_new_edge (bb);
8872 }
8873
8874 /* This function is called immediately before edge E is removed from
8875 the edge vector E->dest->preds. */
8876
8877 static void
8878 gimple_execute_on_shrinking_pred (edge e)
8879 {
8880 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8881 remove_phi_args (e);
8882 }
8883
8884 /*---------------------------------------------------------------------------
8885 Helper functions for Loop versioning
8886 ---------------------------------------------------------------------------*/
8887
8888 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8889 of 'first'. Both of them are dominated by 'new_head' basic block. When
8890 'new_head' was created by splitting 'second's incoming edge, it received
8891 phi arguments on that edge from split_edge(). Later, an additional edge
8892 'e' was created to connect 'new_head' and 'first'. This routine now adds
8893 to edge 'e' the phi args that the new_head-to-second edge received as
8894 part of the edge splitting. */
8895
8896 static void
8897 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8898 basic_block new_head, edge e)
8899 {
8900 gphi *phi1, *phi2;
8901 gphi_iterator psi1, psi2;
8902 tree def;
8903 edge e2 = find_edge (new_head, second);
8904
8905 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8906 edge, we should always have an edge from NEW_HEAD to SECOND. */
8907 gcc_assert (e2 != NULL);
8908
8909 /* Browse all 'second' basic block phi nodes and add phi args to
8910 edge 'e' for 'first' head. PHI args are always in correct order. */
8911
8912 for (psi2 = gsi_start_phis (second),
8913 psi1 = gsi_start_phis (first);
8914 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8915 gsi_next (&psi2), gsi_next (&psi1))
8916 {
8917 phi1 = psi1.phi ();
8918 phi2 = psi2.phi ();
8919 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8920 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8921 }
8922 }
8923
8924
8925 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8926 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8927 the destination of the ELSE part. */
8928
8929 static void
8930 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8931 basic_block second_head ATTRIBUTE_UNUSED,
8932 basic_block cond_bb, void *cond_e)
8933 {
8934 gimple_stmt_iterator gsi;
8935 gimple *new_cond_expr;
8936 tree cond_expr = (tree) cond_e;
8937 edge e0;
8938
8939 /* Build the new conditional expr. */
8940 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8941 NULL_TREE, NULL_TREE);
8942
8943 /* Add new cond in cond_bb. */
8944 gsi = gsi_last_bb (cond_bb);
8945 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8946
8947 /* Adjust edges appropriately to connect new head with first head
8948 as well as second head. */
8949 e0 = single_succ_edge (cond_bb);
8950 e0->flags &= ~EDGE_FALLTHRU;
8951 e0->flags |= EDGE_FALSE_VALUE;
8952 }
8953
8954
8955 /* Do book-keeping of basic block BB for the profile consistency checker.
8956 Store the counts in RECORD. */
8957 static void
8958 gimple_account_profile_record (basic_block bb,
8959 struct profile_record *record)
8960 {
8961 gimple_stmt_iterator i;
8962 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8963 {
8964 record->size
8965 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8966 if (bb->count.initialized_p ())
8967 record->time
8968 += estimate_num_insns (gsi_stmt (i),
8969 &eni_time_weights) * bb->count.to_gcov_type ();
8970 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8971 record->time
8972 += estimate_num_insns (gsi_stmt (i),
8973 &eni_time_weights) * bb->count.to_frequency (cfun);
8974 }
8975 }
8976
8977 struct cfg_hooks gimple_cfg_hooks = {
8978 "gimple",
8979 gimple_verify_flow_info,
8980 gimple_dump_bb, /* dump_bb */
8981 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8982 create_bb, /* create_basic_block */
8983 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8984 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8985 gimple_can_remove_branch_p, /* can_remove_branch_p */
8986 remove_bb, /* delete_basic_block */
8987 gimple_split_block, /* split_block */
8988 gimple_move_block_after, /* move_block_after */
8989 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8990 gimple_merge_blocks, /* merge_blocks */
8991 gimple_predict_edge, /* predict_edge */
8992 gimple_predicted_by_p, /* predicted_by_p */
8993 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8994 gimple_duplicate_bb, /* duplicate_block */
8995 gimple_split_edge, /* split_edge */
8996 gimple_make_forwarder_block, /* make_forward_block */
8997 NULL, /* tidy_fallthru_edge */
8998 NULL, /* force_nonfallthru */
8999 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9000 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9001 gimple_flow_call_edges_add, /* flow_call_edges_add */
9002 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9003 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9004 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
9005 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9006 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9007 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9008 flush_pending_stmts, /* flush_pending_stmts */
9009 gimple_empty_block_p, /* block_empty_p */
9010 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9011 gimple_account_profile_record,
9012 };
9013
9014
9015 /* Split all critical edges. Split some extra (not necessarily critical) edges
9016 if FOR_EDGE_INSERTION_P is true. */
9017
9018 unsigned int
9019 split_critical_edges (bool for_edge_insertion_p /* = false */)
9020 {
9021 basic_block bb;
9022 edge e;
9023 edge_iterator ei;
9024
9025 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9026 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9027 mappings around the calls to split_edge. */
9028 start_recording_case_labels ();
9029 FOR_ALL_BB_FN (bb, cfun)
9030 {
9031 FOR_EACH_EDGE (e, ei, bb->succs)
9032 {
9033 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9034 split_edge (e);
9035 /* PRE inserts statements to edges and expects that
9036 since split_critical_edges was done beforehand, committing edge
9037 insertions will not split more edges. In addition to critical
9038 edges we must split edges that have multiple successors and
9039 end by control flow statements, such as RESX.
9040 Go ahead and split them too. This matches the logic in
9041 gimple_find_edge_insert_loc. */
9042 else if (for_edge_insertion_p
9043 && (!single_pred_p (e->dest)
9044 || !gimple_seq_empty_p (phi_nodes (e->dest))
9045 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9046 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9047 && !(e->flags & EDGE_ABNORMAL))
9048 {
9049 gimple_stmt_iterator gsi;
9050
9051 gsi = gsi_last_bb (e->src);
9052 if (!gsi_end_p (gsi)
9053 && stmt_ends_bb_p (gsi_stmt (gsi))
9054 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9055 && !gimple_call_builtin_p (gsi_stmt (gsi),
9056 BUILT_IN_RETURN)))
9057 split_edge (e);
9058 }
9059 }
9060 }
9061 end_recording_case_labels ();
9062 return 0;
9063 }
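
/* Background (illustrative): an edge is critical when its source has
   several successors and its destination has several predecessors.
   Statements cannot be inserted on such an edge without affecting the
   other paths, which is why split_edge interposes a new forwarder
   block on each one. */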
9064
9065 namespace {
9066
9067 const pass_data pass_data_split_crit_edges =
9068 {
9069 GIMPLE_PASS, /* type */
9070 "crited", /* name */
9071 OPTGROUP_NONE, /* optinfo_flags */
9072 TV_TREE_SPLIT_EDGES, /* tv_id */
9073 PROP_cfg, /* properties_required */
9074 PROP_no_crit_edges, /* properties_provided */
9075 0, /* properties_destroyed */
9076 0, /* todo_flags_start */
9077 0, /* todo_flags_finish */
9078 };
9079
9080 class pass_split_crit_edges : public gimple_opt_pass
9081 {
9082 public:
9083 pass_split_crit_edges (gcc::context *ctxt)
9084 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9085 {}
9086
9087 /* opt_pass methods: */
9088 virtual unsigned int execute (function *) { return split_critical_edges (); }
9089
9090 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9091 }; // class pass_split_crit_edges
9092
9093 } // anon namespace
9094
9095 gimple_opt_pass *
9096 make_pass_split_crit_edges (gcc::context *ctxt)
9097 {
9098 return new pass_split_crit_edges (ctxt);
9099 }
9100
9101
9102 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
9103 in basic block BB, splitting the block as appropriate
9104 and creating a new conditionally executed basic block.
9105 Update the profile so the new bb is visited with probability PROB.
9106 Return the created basic block. */
9107 basic_block
9108 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9109 profile_probability prob)
9110 {
9111 edge fall = split_block (bb, stmt);
9112 gimple_stmt_iterator iter = gsi_last_bb (bb);
9113 basic_block new_bb;
9114
9115 /* Insert cond statement. */
9116 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9117 if (gsi_end_p (iter))
9118 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9119 else
9120 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9121
9122 /* Create conditionally executed block. */
9123 new_bb = create_empty_bb (bb);
9124 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9125 e->probability = prob;
9126 new_bb->count = e->count ();
9127 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9128
9129 /* Fix edge for split bb. */
9130 fall->flags = EDGE_FALSE_VALUE;
9131 fall->probability -= e->probability;
9132
9133 /* Update dominance info. */
9134 if (dom_info_available_p (CDI_DOMINATORS))
9135 {
9136 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9137 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9138 }
9139
9140 /* Update loop info. */
9141 if (current_loops)
9142 add_bb_to_loop (new_bb, bb->loop_father);
9143
9144 return new_bb;
9145 }
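
/* Usage sketch (hypothetical caller; FLAG, BB and STMT are invented
   names): guard a rarely executed slow path behind a runtime test:

     gcond *cond = gimple_build_cond (NE_EXPR, flag, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block slow_bb
       = insert_cond_bb (bb, stmt, cond, profile_probability::unlikely ());

   Statements later added to slow_bb execute only on the
   EDGE_TRUE_VALUE path. */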
9146
9147 /* Build a ternary operation and gimplify it. Emit code before GSI.
9148 Return the gimple_val holding the result. */
9149
9150 tree
9151 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9152 tree type, tree a, tree b, tree c)
9153 {
9154 tree ret;
9155 location_t loc = gimple_location (gsi_stmt (*gsi));
9156
9157 ret = fold_build3_loc (loc, code, type, a, b, c);
9158 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9159 GSI_SAME_STMT);
9160 }
9161
9162 /* Build a binary operation and gimplify it. Emit code before GSI.
9163 Return the gimple_val holding the result. */
9164
9165 tree
9166 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9167 tree type, tree a, tree b)
9168 {
9169 tree ret;
9170
9171 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9172 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9173 GSI_SAME_STMT);
9174 }
9175
9176 /* Build a unary operation and gimplify it. Emit code before GSI.
9177 Return the gimple_val holding the result. */
9178
9179 tree
9180 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9181 tree a)
9182 {
9183 tree ret;
9184
9185 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9186 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9187 GSI_SAME_STMT);
9188 }
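
/* For example (illustrative), emitting (a + b) * c before GSI:

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression, gimplifies it before GSI and returns
   a gimple value that can be used as an operand of the next one. */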
9189
9190
9191 \f
9192 /* Given a basic block B which ends with a conditional and has
9193 precisely two successors, determine which of the edges is taken if
9194 the conditional is true and which is taken if the conditional is
9195 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9196
9197 void
9198 extract_true_false_edges_from_block (basic_block b,
9199 edge *true_edge,
9200 edge *false_edge)
9201 {
9202 edge e = EDGE_SUCC (b, 0);
9203
9204 if (e->flags & EDGE_TRUE_VALUE)
9205 {
9206 *true_edge = e;
9207 *false_edge = EDGE_SUCC (b, 1);
9208 }
9209 else
9210 {
9211 *false_edge = e;
9212 *true_edge = EDGE_SUCC (b, 1);
9213 }
9214 }
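
/* Typical use (sketch): for a block B ending in a GIMPLE_COND,

     edge true_e, false_e;
     extract_true_false_edges_from_block (b, &true_e, &false_e);

   after which true_e->dest is the block reached when the condition
   evaluates to true. */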
9215
9216
9217 /* From a controlling predicate in the immediate dominator DOM of
9218 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9219 predicate evaluates to true and false and store them to
9220 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9221 they are non-NULL. Returns true if the edges can be determined,
9222 false otherwise. */
9223
9224 bool
9225 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9226 edge *true_controlled_edge,
9227 edge *false_controlled_edge)
9228 {
9229 basic_block bb = phiblock;
9230 edge true_edge, false_edge, tem;
9231 edge e0 = NULL, e1 = NULL;
9232
9233 /* We have to verify that one edge into the PHI node is dominated
9234 by the true edge of the predicate block and the other edge
9235 dominated by the false edge. This ensures that the PHI argument
9236 we are going to take is completely determined by the path we
9237 take from the predicate block.
9238 We can only use BB dominance checks below if the destination of
9239 the true/false edges are dominated by their edge, thus only
9240 have a single predecessor. */
9241 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9242 tem = EDGE_PRED (bb, 0);
9243 if (tem == true_edge
9244 || (single_pred_p (true_edge->dest)
9245 && (tem->src == true_edge->dest
9246 || dominated_by_p (CDI_DOMINATORS,
9247 tem->src, true_edge->dest))))
9248 e0 = tem;
9249 else if (tem == false_edge
9250 || (single_pred_p (false_edge->dest)
9251 && (tem->src == false_edge->dest
9252 || dominated_by_p (CDI_DOMINATORS,
9253 tem->src, false_edge->dest))))
9254 e1 = tem;
9255 else
9256 return false;
9257 tem = EDGE_PRED (bb, 1);
9258 if (tem == true_edge
9259 || (single_pred_p (true_edge->dest)
9260 && (tem->src == true_edge->dest
9261 || dominated_by_p (CDI_DOMINATORS,
9262 tem->src, true_edge->dest))))
9263 e0 = tem;
9264 else if (tem == false_edge
9265 || (single_pred_p (false_edge->dest)
9266 && (tem->src == false_edge->dest
9267 || dominated_by_p (CDI_DOMINATORS,
9268 tem->src, false_edge->dest))))
9269 e1 = tem;
9270 else
9271 return false;
9272 if (!e0 || !e1)
9273 return false;
9274
9275 if (true_controlled_edge)
9276 *true_controlled_edge = e0;
9277 if (false_controlled_edge)
9278 *false_controlled_edge = e1;
9279
9280 return true;
9281 }
9282
9283 /* Generate a range test LHS CODE RHS that determines whether INDEX is in
9284 the range [LOW, HIGH]. Place associated stmts before the last stmt of BB. */
9285
9286 void
9287 generate_range_test (basic_block bb, tree index, tree low, tree high,
9288 tree *lhs, tree *rhs)
9289 {
9290 tree type = TREE_TYPE (index);
9291 tree utype = range_check_type (type);
9292
9293 low = fold_convert (utype, low);
9294 high = fold_convert (utype, high);
9295
9296 gimple_seq seq = NULL;
9297 index = gimple_convert (&seq, utype, index);
9298 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9299 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9300
9301 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9302 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9303 }
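
/* Worked example (illustrative): for INDEX in [3, 10] this emits
   _1 = (utype) index; _2 = _1 - 3; and sets *LHS = _2, *RHS = 7, so
   the single unsigned comparison _2 <= 7 replaces the signed pair
   3 <= index && index <= 10. */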
9304
9305 /* Return the basic block that belongs to label numbered INDEX
9306 of a switch statement. */
9307
9308 basic_block
9309 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9310 {
9311 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9312 }
9313
9314 /* Return the default basic block of a switch statement. */
9315
9316 basic_block
9317 gimple_switch_default_bb (function *ifun, gswitch *gs)
9318 {
9319 return gimple_switch_label_bb (ifun, gs, 0);
9320 }
9321
9322 /* Return the edge that belongs to label numbered INDEX
9323 of a switch statement. */
9324
9325 edge
9326 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9327 {
9328 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9329 }
9330
9331 /* Return the default edge of a switch statement. */
9332
9333 edge
9334 gimple_switch_default_edge (function *ifun, gswitch *gs)
9335 {
9336 return gimple_switch_edge (ifun, gs, 0);
9337 }
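
/* Note: in a GIMPLE_SWITCH the label at index 0 is always the default
   label, which is why the two default accessors above simply use index
   0. For "switch (x) { case 1: ...; default: ...; }",
   gimple_switch_label_bb (fun, gs, 1) yields the block of "case 1"
   (illustrative). */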
9338
9339
9340 /* Emit return warnings. */
9341
9342 namespace {
9343
9344 const pass_data pass_data_warn_function_return =
9345 {
9346 GIMPLE_PASS, /* type */
9347 "*warn_function_return", /* name */
9348 OPTGROUP_NONE, /* optinfo_flags */
9349 TV_NONE, /* tv_id */
9350 PROP_cfg, /* properties_required */
9351 0, /* properties_provided */
9352 0, /* properties_destroyed */
9353 0, /* todo_flags_start */
9354 0, /* todo_flags_finish */
9355 };
9356
9357 class pass_warn_function_return : public gimple_opt_pass
9358 {
9359 public:
9360 pass_warn_function_return (gcc::context *ctxt)
9361 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9362 {}
9363
9364 /* opt_pass methods: */
9365 virtual unsigned int execute (function *);
9366
9367 }; // class pass_warn_function_return
9368
9369 unsigned int
9370 pass_warn_function_return::execute (function *fun)
9371 {
9372 location_t location;
9373 gimple *last;
9374 edge e;
9375 edge_iterator ei;
9376
9377 if (!targetm.warn_func_return (fun->decl))
9378 return 0;
9379
9380 /* If we have a path to EXIT, then we do return. */
9381 if (TREE_THIS_VOLATILE (fun->decl)
9382 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9383 {
9384 location = UNKNOWN_LOCATION;
9385 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9386 (e = ei_safe_edge (ei)); )
9387 {
9388 last = last_stmt (e->src);
9389 if ((gimple_code (last) == GIMPLE_RETURN
9390 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9391 && location == UNKNOWN_LOCATION
9392 && ((location = LOCATION_LOCUS (gimple_location (last)))
9393 != UNKNOWN_LOCATION)
9394 && !optimize)
9395 break;
9396 /* When optimizing, replace return stmts in noreturn functions
9397 with __builtin_unreachable () calls. */
9398 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9399 {
9400 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9401 gimple *new_stmt = gimple_build_call (fndecl, 0);
9402 gimple_set_location (new_stmt, gimple_location (last));
9403 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9404 gsi_replace (&gsi, new_stmt, true);
9405 remove_edge (e);
9406 }
9407 else
9408 ei_next (&ei);
9409 }
9410 if (location == UNKNOWN_LOCATION)
9411 location = cfun->function_end_locus;
9412 warning_at (location, 0, "%<noreturn%> function does return");
9413 }
9414
9415 /* If we see "return;" in some basic block, then we do reach the end
9416 without returning a value. */
9417 else if (warn_return_type > 0
9418 && !TREE_NO_WARNING (fun->decl)
9419 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9420 {
9421 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9422 {
9423 gimple *last = last_stmt (e->src);
9424 greturn *return_stmt = dyn_cast <greturn *> (last);
9425 if (return_stmt
9426 && gimple_return_retval (return_stmt) == NULL
9427 && !gimple_no_warning_p (last))
9428 {
9429 location = gimple_location (last);
9430 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9431 location = fun->function_end_locus;
9432 if (warning_at (location, OPT_Wreturn_type,
9433 "control reaches end of non-void function"))
9434 TREE_NO_WARNING (fun->decl) = 1;
9435 break;
9436 }
9437 }
9438 /* The C++ FE turns fallthrough from the end of non-void function
9439 into __builtin_unreachable () call with BUILTINS_LOCATION.
9440 Recognize those too. */
9441 basic_block bb;
9442 if (!TREE_NO_WARNING (fun->decl))
9443 FOR_EACH_BB_FN (bb, fun)
9444 if (EDGE_COUNT (bb->succs) == 0)
9445 {
9446 gimple *last = last_stmt (bb);
9447 const enum built_in_function ubsan_missing_ret
9448 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9449 if (last
9450 && ((LOCATION_LOCUS (gimple_location (last))
9451 == BUILTINS_LOCATION
9452 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9453 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9454 {
9455 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9456 gsi_prev_nondebug (&gsi);
9457 gimple *prev = gsi_stmt (gsi);
9458 if (prev == NULL)
9459 location = UNKNOWN_LOCATION;
9460 else
9461 location = gimple_location (prev);
9462 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9463 location = fun->function_end_locus;
9464 if (warning_at (location, OPT_Wreturn_type,
9465 "control reaches end of non-void function"))
9466 TREE_NO_WARNING (fun->decl) = 1;
9467 break;
9468 }
9469 }
9470 }
9471 return 0;
9472 }
9473
9474 } // anon namespace
9475
9476 gimple_opt_pass *
9477 make_pass_warn_function_return (gcc::context *ctxt)
9478 {
9479 return new pass_warn_function_return (ctxt);
9480 }
9481
9482 /* Walk a gimplified function and warn for functions whose return value is
9483 ignored and attribute((warn_unused_result)) is set. This is done before
9484 inlining, so we don't have to worry about that. */
9485
9486 static void
9487 do_warn_unused_result (gimple_seq seq)
9488 {
9489 tree fdecl, ftype;
9490 gimple_stmt_iterator i;
9491
9492 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9493 {
9494 gimple *g = gsi_stmt (i);
9495
9496 switch (gimple_code (g))
9497 {
9498 case GIMPLE_BIND:
9499 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9500 break;
9501 case GIMPLE_TRY:
9502 do_warn_unused_result (gimple_try_eval (g));
9503 do_warn_unused_result (gimple_try_cleanup (g));
9504 break;
9505 case GIMPLE_CATCH:
9506 do_warn_unused_result (gimple_catch_handler (
9507 as_a <gcatch *> (g)));
9508 break;
9509 case GIMPLE_EH_FILTER:
9510 do_warn_unused_result (gimple_eh_filter_failure (g));
9511 break;
9512
9513 case GIMPLE_CALL:
9514 if (gimple_call_lhs (g))
9515 break;
9516 if (gimple_call_internal_p (g))
9517 break;
9518
9519 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9520 LHS. All calls whose value is ignored should be
9521 represented like this. Look for the attribute. */
9522 fdecl = gimple_call_fndecl (g);
9523 ftype = gimple_call_fntype (g);
9524
9525 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9526 {
9527 location_t loc = gimple_location (g);
9528
9529 if (fdecl)
9530 warning_at (loc, OPT_Wunused_result,
9531 "ignoring return value of %qD "
9532 "declared with attribute %<warn_unused_result%>",
9533 fdecl);
9534 else
9535 warning_at (loc, OPT_Wunused_result,
9536 "ignoring return value of function "
9537 "declared with attribute %<warn_unused_result%>");
9538 }
9539 break;
9540
9541 default:
9542 /* Not a container, not a call, or a call whose value is used. */
9543 break;
9544 }
9545 }
9546 }

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* Same scaling is also done by ipa_merge_profiles.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }
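
  /* A worked (hypothetical) instance of the scaling above: if IPA
     profile merging set node->count to 200 samples while the local
     entry-block count is 100, num/den becomes 200/100 and every block
     count below is doubled.  Relative block frequencies are preserved;
     only the absolute scale changes to match the IPA-level count.  */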

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep the access when the store has side effects, e.g. when
	     the source is volatile.  */
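	  /* For instance (hypothetical), with a global G whose varpool
	     node is marked write-only:

	       G = x;         <-- dead store, removed below
	       G = *vptr;     <-- if *vptr is volatile, the read is a
				  side effect, so the store is kept

	     The volatile read must be preserved even though the stored
	     value is never read back.  */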
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt)
	      && !optimize_debug)
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }
	  /* For calls we can simply remove the LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
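      /* A hypothetical source-level example of how that happens:

	   void fatal (void) __attribute__ ((noreturn));
	   void fatal (void) { }   <-- lies: it does return

	 After inlining such a call, the block holding the inlined body
	 has no successors yet ends in no control statement, so the
	 code below caps it with a call to __builtin_unreachable.  */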
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count);
		}
	    }
	}
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
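  /* E->goto_locus is a location_t, an integer encoding, so the BLOCK
     it refers to must be extracted and marked explicitly or the GC
     could collect it.  */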
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple CFG of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
     EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete",
   i.e. fully-connected, subgraph (where each of A, B, C and D below
   has an edge to every node of the subgraph, including itself).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
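  /* For n == 4 that is 2 + 4 * 4 == 18 edges: one from ENTRY, one to
     EXIT, plus the full n-by-n matrix of intra-subgraph edges.  */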
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc.  */
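
/* A minimal sketch of the "jumps to itself" case from the TODO above
   (hypothetical and untested; it follows the pattern of
   test_linear_chain):

     basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
     make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
     make_edge (bb_a, bb_a, 0);
     make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

   The self-edge should not change the dominator tree: ENTRY remains
   the immediate dominator of bb_a, because the only way into the
   self-loop is still through ENTRY.  */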

#endif /* CHECKING_P */