1 /* Control flow functions for trees.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
64 #include "asan.h"
65 #include "profile.h"
66
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
69
70 /* Local declarations. */
71
72 /* Initial capacity for the basic block array. */
73 static const int initial_cfg_capacity = 20;
74
75 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
76 which use a particular edge. The CASE_LABEL_EXPRs are chained together
77 via their CASE_CHAIN field, which we clear after we're done with the
78 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
79
80 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
81 update the case vector in response to edge redirections.
82
83 Right now this table is set up and torn down at key points in the
84 compilation process. It would be nice if we could make the table
85 more persistent. The key is getting notification of changes to
86 the CFG (particularly edge removal, creation and redirection). */
87
88 static hash_map<edge, tree> *edge_to_cases;
89
90 /* If we record edge_to_cases, this bitmap will hold indexes
91 of basic blocks that end in a GIMPLE_SWITCH which we touched
92 due to edge manipulations. */
93
94 static bitmap touched_switch_bbs;
95
96 /* CFG statistics. */
97 struct cfg_stats_d
98 {
99 long num_merged_labels;
100 };
101
102 static struct cfg_stats_d cfg_stats;
103
104 /* Data to pass to replace_block_vars_by_duplicates_1. */
105 struct replace_decls_d
106 {
107 hash_map<tree, tree> *vars_map;
108 tree to_context;
109 };
110
111 /* Hash table to store the last discriminator assigned for each locus. */
112 struct locus_discrim_map
113 {
114 int location_line;
115 int discriminator;
116 };
117
118 /* Hashtable helpers. */
119
120 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
121 {
122 static inline hashval_t hash (const locus_discrim_map *);
123 static inline bool equal (const locus_discrim_map *,
124 const locus_discrim_map *);
125 };
126
127 /* Trivial hash function for a location_t. ITEM is a pointer to
128 a hash table entry that maps a location_t to a discriminator. */
129
130 inline hashval_t
131 locus_discrim_hasher::hash (const locus_discrim_map *item)
132 {
133 return item->location_line;
134 }
135
136 /* Equality function for the locus-to-discriminator map. A and B
137 point to the two hash table entries to compare. */
138
139 inline bool
140 locus_discrim_hasher::equal (const locus_discrim_map *a,
141 const locus_discrim_map *b)
142 {
143 return a->location_line == b->location_line;
144 }
145
146 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
147
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq);
150
151 /* Edges. */
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block);
155 static void make_gimple_switch_edges (gswitch *, basic_block);
156 static bool make_goto_expr_edges (basic_block);
157 static void make_gimple_asm_edges (basic_block);
158 static edge gimple_redirect_edge_and_branch (edge, basic_block);
159 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
160
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple *, gimple *);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge);
165 static gimple *first_non_label_stmt (basic_block);
166 static bool verify_gimple_transaction (gtransaction *);
167 static bool call_can_make_abnormal_goto (gimple *);
168
169 /* Flowgraph optimization and cleanup. */
170 static void gimple_merge_blocks (basic_block, basic_block);
171 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
172 static void remove_bb (basic_block);
173 static edge find_taken_edge_computed_goto (basic_block, tree);
174 static edge find_taken_edge_cond_expr (const gcond *, tree);
175
176 void
177 init_empty_tree_cfg_for_function (struct function *fn)
178 {
179 /* Initialize the basic block array. */
180 init_flow (fn);
181 profile_status_for_fn (fn) = PROFILE_ABSENT;
182 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
183 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
184 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
185 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
186 initial_cfg_capacity);
187
188 /* Build a mapping of labels to their associated blocks. */
189 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
190 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
191 initial_cfg_capacity);
192
193 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
194 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
195
196 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
197 = EXIT_BLOCK_PTR_FOR_FN (fn);
198 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
199 = ENTRY_BLOCK_PTR_FOR_FN (fn);
200 }
201
202 void
203 init_empty_tree_cfg (void)
204 {
205 init_empty_tree_cfg_for_function (cfun);
206 }
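/* Note: every function's CFG always contains the two reserved blocks
   ENTRY_BLOCK and EXIT_BLOCK set up above (together NUM_FIXED_BLOCKS),
   so n_basic_blocks_for_fn starts at 2 even for an empty function.  */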
207
208 /*---------------------------------------------------------------------------
209 Create basic blocks
210 ---------------------------------------------------------------------------*/
211
212 /* Entry point to the CFG builder for trees. SEQ is the sequence of
213 statements to be added to the flowgraph. */
214
215 static void
216 build_gimple_cfg (gimple_seq seq)
217 {
218 /* Register specific gimple functions. */
219 gimple_register_cfg_hooks ();
220
221 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
222
223 init_empty_tree_cfg ();
224
225 make_blocks (seq);
226
227 /* Make sure there is always at least one block, even if it's empty. */
228 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
229 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
230
231 /* Adjust the size of the array. */
232 if (basic_block_info_for_fn (cfun)->length ()
233 < (size_t) n_basic_blocks_for_fn (cfun))
234 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
235 n_basic_blocks_for_fn (cfun));
236
237 /* To speed up statement iterator walks, we first purge dead labels. */
238 cleanup_dead_labels ();
239
240 /* Group case nodes to reduce the number of edges.
241 We do this after cleaning up dead labels because otherwise we miss
242 a lot of obvious case merging opportunities. */
243 group_case_labels ();
244
245 /* Create the edges of the flowgraph. */
246 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
247 make_edges ();
248 assign_discriminators ();
249 cleanup_dead_labels ();
250 delete discriminator_per_locus;
251 discriminator_per_locus = NULL;
252 }
253
254 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
255 them and propagate the information to LOOP. We assume that the annotations
256 come immediately before the condition in BB, if any. */
257
258 static void
259 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
260 {
261 gimple_stmt_iterator gsi = gsi_last_bb (bb);
262 gimple *stmt = gsi_stmt (gsi);
263
264 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
265 return;
266
267 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
268 {
269 stmt = gsi_stmt (gsi);
270 if (gimple_code (stmt) != GIMPLE_CALL)
271 break;
272 if (!gimple_call_internal_p (stmt)
273 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
274 break;
275
276 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
277 {
278 case annot_expr_ivdep_kind:
279 loop->safelen = INT_MAX;
280 break;
281 case annot_expr_unroll_kind:
282 loop->unroll
283 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
284 cfun->has_unroll = true;
285 break;
286 case annot_expr_no_vector_kind:
287 loop->dont_vectorize = true;
288 break;
289 case annot_expr_vector_kind:
290 loop->force_vectorize = true;
291 cfun->has_force_vectorize_loops = true;
292 break;
293 case annot_expr_parallel_kind:
294 loop->can_be_parallel = true;
295 loop->safelen = INT_MAX;
296 break;
297 default:
298 gcc_unreachable ();
299 }
300
301 stmt = gimple_build_assign (gimple_call_lhs (stmt),
302 gimple_call_arg (stmt, 0));
303 gsi_replace (&gsi, stmt, true);
304 }
305 }
306
307 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
308 them and propagate the information to the loop. We assume that the
309 annotations come immediately before the condition of the loop. */
310
311 static void
312 replace_loop_annotate (void)
313 {
314 class loop *loop;
315 basic_block bb;
316 gimple_stmt_iterator gsi;
317 gimple *stmt;
318
319 FOR_EACH_LOOP (loop, 0)
320 {
321 /* First look into the header. */
322 replace_loop_annotate_in_block (loop->header, loop);
323
324 /* Then look into the latch, if any. */
325 if (loop->latch)
326 replace_loop_annotate_in_block (loop->latch, loop);
327 }
328
329 /* Remove leftover IFN_ANNOTATE calls; a safeguard for the case loop->latch == NULL. */
330 FOR_EACH_BB_FN (bb, cfun)
331 {
332 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
333 {
334 stmt = gsi_stmt (gsi);
335 if (gimple_code (stmt) != GIMPLE_CALL)
336 continue;
337 if (!gimple_call_internal_p (stmt)
338 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
339 continue;
340
341 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
342 {
343 case annot_expr_ivdep_kind:
344 case annot_expr_unroll_kind:
345 case annot_expr_no_vector_kind:
346 case annot_expr_vector_kind:
347 case annot_expr_parallel_kind:
348 break;
349 default:
350 gcc_unreachable ();
351 }
352
353 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
354 stmt = gimple_build_assign (gimple_call_lhs (stmt),
355 gimple_call_arg (stmt, 0));
356 gsi_replace (&gsi, stmt, true);
357 }
358 }
359 }
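/* As an illustration (a sketch of the assumed front-end lowering, not
   taken from this file): a C loop such as

     #pragma GCC ivdep
     for (int i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   reaches this point with its exit condition wrapped in an
   .ANNOTATE (cond, annot_expr_ivdep_kind, ...) internal call, which
   replace_loop_annotate folds away after setting loop->safelen.  */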
360
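/* Build the CFG for the current function, release its GIMPLE body, and
   set up the loop structures, transferring loop annotations to them.  */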
361 static unsigned int
362 execute_build_cfg (void)
363 {
364 gimple_seq body = gimple_body (current_function_decl);
365
366 build_gimple_cfg (body);
367 gimple_set_body (current_function_decl, NULL);
368 if (dump_file && (dump_flags & TDF_DETAILS))
369 {
370 fprintf (dump_file, "Scope blocks:\n");
371 dump_scope_blocks (dump_file, dump_flags);
372 }
373 cleanup_tree_cfg ();
374 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
375 replace_loop_annotate ();
376 return 0;
377 }
378
379 namespace {
380
381 const pass_data pass_data_build_cfg =
382 {
383 GIMPLE_PASS, /* type */
384 "cfg", /* name */
385 OPTGROUP_NONE, /* optinfo_flags */
386 TV_TREE_CFG, /* tv_id */
387 PROP_gimple_leh, /* properties_required */
388 ( PROP_cfg | PROP_loops ), /* properties_provided */
389 0, /* properties_destroyed */
390 0, /* todo_flags_start */
391 0, /* todo_flags_finish */
392 };
393
394 class pass_build_cfg : public gimple_opt_pass
395 {
396 public:
397 pass_build_cfg (gcc::context *ctxt)
398 : gimple_opt_pass (pass_data_build_cfg, ctxt)
399 {}
400
401 /* opt_pass methods: */
402 virtual unsigned int execute (function *) { return execute_build_cfg (); }
403
404 }; // class pass_build_cfg
405
406 } // anon namespace
407
408 gimple_opt_pass *
409 make_pass_build_cfg (gcc::context *ctxt)
410 {
411 return new pass_build_cfg (ctxt);
412 }
413
414
415 /* Return true if T is a computed goto. */
416
417 bool
418 computed_goto_p (gimple *t)
419 {
420 return (gimple_code (t) == GIMPLE_GOTO
421 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
422 }
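/* For example, the GNU C extension

     static void *targets[] = { &&l0, &&l1 };
     goto *targets[i];

   produces a GIMPLE_GOTO whose destination is a pointer value rather
   than a LABEL_DECL, which is exactly what computed_goto_p detects.  */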
423
424 /* Returns true if the sequence of statements STMTS only contains
425 a call to __builtin_unreachable (). */
426
427 bool
428 gimple_seq_unreachable_p (gimple_seq stmts)
429 {
430 if (stmts == NULL
431 /* Return false if -fsanitize=unreachable is enabled; we don't
432 want to optimize away those calls, but rather turn them into
433 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
434 later. */
435 || sanitize_flags_p (SANITIZE_UNREACHABLE))
436 return false;
437
438 gimple_stmt_iterator gsi = gsi_last (stmts);
439
440 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
441 return false;
442
443 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
444 {
445 gimple *stmt = gsi_stmt (gsi);
446 if (gimple_code (stmt) != GIMPLE_LABEL
447 && !is_gimple_debug (stmt)
448 && !gimple_clobber_p (stmt))
449 return false;
450 }
451 return true;
452 }
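/* Illustrative example (hypothetical GIMPLE dump): a block whose body is
   just

     <D.1234>:
     x ={v} {CLOBBER};
     __builtin_unreachable ();

   satisfies gimple_seq_unreachable_p, since labels, debug stmts and
   clobbers before the trailing call are ignored.  */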
453
454 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
455 the other edge points to a bb with just __builtin_unreachable ().
456 I.e. return true for the C->M edge in:
457 <bb C>:
458 ...
459 if (something)
460 goto <bb N>;
461 else
462 goto <bb M>;
463 <bb N>:
464 __builtin_unreachable ();
465 <bb M>: */
466
467 bool
468 assert_unreachable_fallthru_edge_p (edge e)
469 {
470 basic_block pred_bb = e->src;
471 gimple *last = last_stmt (pred_bb);
472 if (last && gimple_code (last) == GIMPLE_COND)
473 {
474 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
475 if (other_bb == e->dest)
476 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
477 if (EDGE_COUNT (other_bb->succs) == 0)
478 return gimple_seq_unreachable_p (bb_seq (other_bb));
479 }
480 return false;
481 }
482
483
484 /* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates that the call
485 could alter control flow by means other than EH. We initialize the flag at
486 CFG build time and only ever clear it later. */
487
488 static void
489 gimple_call_initialize_ctrl_altering (gimple *stmt)
490 {
491 int flags = gimple_call_flags (stmt);
492
493 /* A call alters control flow if it can make an abnormal goto. */
494 if (call_can_make_abnormal_goto (stmt)
495 /* A call also alters control flow if it does not return. */
496 || flags & ECF_NORETURN
497 /* TM ending statements have backedges out of the transaction.
498 Return true so we split the basic block containing them.
499 Note that the TM_BUILTIN test is merely an optimization. */
500 || ((flags & ECF_TM_BUILTIN)
501 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
502 /* A BUILT_IN_RETURN call is the same as a return statement. */
503 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
504 /* IFN_UNIQUE should be the last insn, to make checking for it
505 as cheap as possible. */
506 || (gimple_call_internal_p (stmt)
507 && gimple_call_internal_unique_p (stmt)))
508 gimple_call_set_ctrl_altering (stmt, true);
509 else
510 gimple_call_set_ctrl_altering (stmt, false);
511 }
512
513
514 /* Insert SEQ after BB and build a flowgraph. */
515
516 static basic_block
517 make_blocks_1 (gimple_seq seq, basic_block bb)
518 {
519 gimple_stmt_iterator i = gsi_start (seq);
520 gimple *stmt = NULL;
521 gimple *prev_stmt = NULL;
522 bool start_new_block = true;
523 bool first_stmt_of_seq = true;
524
525 while (!gsi_end_p (i))
526 {
527 /* PREV_STMT should only be set to a debug stmt if the debug
528 stmt is before nondebug stmts. Once stmt reaches a nondebug
529 nonlabel, prev_stmt will be set to it, so that
530 stmt_starts_bb_p will know to start a new block if a label is
531 found. However, if stmt was a label after debug stmts only,
532 keep the label in prev_stmt even if we find further debug
533 stmts, for there may be other labels after them, and they
534 should land in the same block. */
535 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
536 prev_stmt = stmt;
537 stmt = gsi_stmt (i);
538
539 if (stmt && is_gimple_call (stmt))
540 gimple_call_initialize_ctrl_altering (stmt);
541
542 /* If the statement starts a new basic block or if we have determined
543 in a previous pass that we need to create a new block for STMT, do
544 so now. */
545 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
546 {
547 if (!first_stmt_of_seq)
548 gsi_split_seq_before (&i, &seq);
549 bb = create_basic_block (seq, bb);
550 start_new_block = false;
551 prev_stmt = NULL;
552 }
553
554 /* Now add STMT to BB and create the subgraphs for special statement
555 codes. */
556 gimple_set_bb (stmt, bb);
557
558 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
559 next iteration. */
560 if (stmt_ends_bb_p (stmt))
561 {
562 /* If the stmt can make abnormal goto use a new temporary
563 for the assignment to the LHS. This makes sure the old value
564 of the LHS is available on the abnormal edge. Otherwise
565 we will end up with overlapping life-ranges for abnormal
566 SSA names. */
567 if (gimple_has_lhs (stmt)
568 && stmt_can_make_abnormal_goto (stmt)
569 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
570 {
571 tree lhs = gimple_get_lhs (stmt);
572 tree tmp = create_tmp_var (TREE_TYPE (lhs));
573 gimple *s = gimple_build_assign (lhs, tmp);
574 gimple_set_location (s, gimple_location (stmt));
575 gimple_set_block (s, gimple_block (stmt));
576 gimple_set_lhs (stmt, tmp);
577 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
578 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
579 DECL_GIMPLE_REG_P (tmp) = 1;
580 gsi_insert_after (&i, s, GSI_SAME_STMT);
581 }
582 start_new_block = true;
583 }
584
585 gsi_next (&i);
586 first_stmt_of_seq = false;
587 }
588 return bb;
589 }
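/* Sketch of the LHS rewrite above, using a hypothetical returns-twice
   call: "x = setjmp (env);" becomes

     tmp = setjmp (env);   (ends the block; abnormal edge out)
     x = tmp;              (lands in the fallthru block)

   so X's SSA name need not be live across the abnormal edge.  */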
590
591 /* Build a flowgraph for the sequence of stmts SEQ. */
592
593 static void
594 make_blocks (gimple_seq seq)
595 {
596 /* Look for debug markers right before labels, and move the debug
597 stmts after the labels. Accepting labels among debug markers
598 adds no value, just complexity; if we wanted to annotate labels
599 with view numbers (so sequencing among markers would matter) or
600 somesuch, we're probably better off still moving the labels, but
601 adding other debug annotations in their original positions or
602 emitting nonbind or bind markers associated with the labels, in
603 the labels' original positions.
604
605 Moving labels would probably be simpler, but we can't do that:
606 moving labels assigns label ids to them, and doing so because of
607 debug markers makes for -fcompare-debug and possibly even codegen
608 differences. So, we have to move the debug stmts instead. To
609 that end, we scan SEQ backwards, marking the position of the
610 latest (earliest we find) label, and moving debug stmts that are
611 not separated from it by nondebug nonlabel stmts after the
612 label. */
613 if (MAY_HAVE_DEBUG_MARKER_STMTS)
614 {
615 gimple_stmt_iterator label = gsi_none ();
616
617 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
618 {
619 gimple *stmt = gsi_stmt (i);
620
621 /* If this is the first label we encounter (latest in SEQ)
622 before nondebug stmts, record its position. */
623 if (is_a <glabel *> (stmt))
624 {
625 if (gsi_end_p (label))
626 label = i;
627 continue;
628 }
629
630 /* Without a recorded label position to move debug stmts to,
631 there's nothing to do. */
632 if (gsi_end_p (label))
633 continue;
634
635 /* Move the debug stmt at I after LABEL. */
636 if (is_gimple_debug (stmt))
637 {
638 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
639 /* As STMT is removed, I advances to the stmt after
640 STMT, so the gsi_prev in the for "increment"
641 expression gets us to the stmt we're to visit after
642 STMT. LABEL, however, would advance to the moved
643 stmt if we passed it to gsi_move_after, so pass it a
644 copy instead, so as to keep LABEL pointing to the
645 LABEL. */
646 gimple_stmt_iterator copy = label;
647 gsi_move_after (&i, &copy);
648 continue;
649 }
650
651 /* There aren't any (more?) debug stmts before label, so
652 there isn't anything else to move after it. */
653 label = gsi_none ();
654 }
655 }
656
657 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
658 }
659
660 /* Create and return a new empty basic block after bb AFTER. */
661
662 static basic_block
663 create_bb (void *h, void *e, basic_block after)
664 {
665 basic_block bb;
666
667 gcc_assert (!e);
668
669 /* Create and initialize a new basic block. Since alloc_block uses
670 GC allocation that clears memory to allocate a basic block, we do
671 not have to clear the newly allocated basic block here. */
672 bb = alloc_block ();
673
674 bb->index = last_basic_block_for_fn (cfun);
675 bb->flags = BB_NEW;
676 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
677
678 /* Add the new block to the linked list of blocks. */
679 link_block (bb, after);
680
681 /* Grow the basic block array if needed. */
682 if ((size_t) last_basic_block_for_fn (cfun)
683 == basic_block_info_for_fn (cfun)->length ())
684 {
685 size_t new_size =
686 (last_basic_block_for_fn (cfun)
687 + (last_basic_block_for_fn (cfun) + 3) / 4);
688 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
689 }
690
691 /* Add the newly created block to the array. */
692 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
693
694 n_basic_blocks_for_fn (cfun)++;
695 last_basic_block_for_fn (cfun)++;
696
697 return bb;
698 }
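/* Note on the growth policy above: the basic block array is grown to
   last_basic_block + (last_basic_block + 3) / 4 entries, i.e. by
   roughly 25%, rounding up.  */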
699
700
701 /*---------------------------------------------------------------------------
702 Edge creation
703 ---------------------------------------------------------------------------*/
704
705 /* If basic block BB has an abnormal edge to a basic block
706 containing an IFN_ABNORMAL_DISPATCHER internal call, return
707 the dispatcher's basic block, otherwise return NULL. */
708
709 basic_block
710 get_abnormal_succ_dispatcher (basic_block bb)
711 {
712 edge e;
713 edge_iterator ei;
714
715 FOR_EACH_EDGE (e, ei, bb->succs)
716 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
717 {
718 gimple_stmt_iterator gsi
719 = gsi_start_nondebug_after_labels_bb (e->dest);
720 gimple *g = gsi_stmt (gsi);
721 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
722 return e->dest;
723 }
724 return NULL;
725 }
726
727 /* Helper function for make_edges. Create a basic block with
728 an ABNORMAL_DISPATCHER internal call in it if needed, and
729 create abnormal edges from BBS to it and from it to FOR_BB
730 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
731
732 static void
733 handle_abnormal_edges (basic_block *dispatcher_bbs,
734 basic_block for_bb, int *bb_to_omp_idx,
735 auto_vec<basic_block> *bbs, bool computed_goto)
736 {
737 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
738 unsigned int idx = 0;
739 basic_block bb;
740 bool inner = false;
741
742 if (bb_to_omp_idx)
743 {
744 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
745 if (bb_to_omp_idx[for_bb->index] != 0)
746 inner = true;
747 }
748
749 /* If the dispatcher has been created already, then there are basic
750 blocks with abnormal edges to it, so just make a new edge to
751 for_bb. */
752 if (*dispatcher == NULL)
753 {
754 /* Check if there are any basic blocks that need to have
755 abnormal edges to this dispatcher. If there are none, return
756 early. */
757 if (bb_to_omp_idx == NULL)
758 {
759 if (bbs->is_empty ())
760 return;
761 }
762 else
763 {
764 FOR_EACH_VEC_ELT (*bbs, idx, bb)
765 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
766 break;
767 if (bb == NULL)
768 return;
769 }
770
771 /* Create the dispatcher bb. */
772 *dispatcher = create_basic_block (NULL, for_bb);
773 if (computed_goto)
774 {
775 /* Factor computed gotos into a common computed goto site. Also
776 record the location of that site so that we can un-factor the
777 gotos after we have converted back to normal form. */
778 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
779
780 /* Create the destination of the factored goto. Each original
781 computed goto will put its desired destination into this
782 variable and jump to the label we create immediately below. */
783 tree var = create_tmp_var (ptr_type_node, "gotovar");
784
785 /* Build a label for the new block which will contain the
786 factored computed goto. */
787 tree factored_label_decl
788 = create_artificial_label (UNKNOWN_LOCATION);
789 gimple *factored_computed_goto_label
790 = gimple_build_label (factored_label_decl);
791 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
792
793 /* Build our new computed goto. */
794 gimple *factored_computed_goto = gimple_build_goto (var);
795 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
796
797 FOR_EACH_VEC_ELT (*bbs, idx, bb)
798 {
799 if (bb_to_omp_idx
800 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
801 continue;
802
803 gsi = gsi_last_bb (bb);
804 gimple *last = gsi_stmt (gsi);
805
806 gcc_assert (computed_goto_p (last));
807
808 /* Copy the original computed goto's destination into VAR. */
809 gimple *assignment
810 = gimple_build_assign (var, gimple_goto_dest (last));
811 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
812
813 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
814 e->goto_locus = gimple_location (last);
815 gsi_remove (&gsi, true);
816 }
817 }
818 else
819 {
820 tree arg = inner ? boolean_true_node : boolean_false_node;
821 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
822 1, arg);
823 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
824 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
825
826 /* Create predecessor edges of the dispatcher. */
827 FOR_EACH_VEC_ELT (*bbs, idx, bb)
828 {
829 if (bb_to_omp_idx
830 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
831 continue;
832 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
833 }
834 }
835 }
836
837 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
838 }
839
840 /* Creates outgoing edges for BB. Returns 1 when it ends with a
841 computed goto, returns 2 when it ends with a statement that
842 might return to this function via a nonlocal goto, otherwise
843 returns 0. Updates *PCUR_REGION with the OMP region this BB is in. */
844
845 static int
846 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
847 {
848 gimple *last = last_stmt (bb);
849 bool fallthru = false;
850 int ret = 0;
851
852 if (!last)
853 return ret;
854
855 switch (gimple_code (last))
856 {
857 case GIMPLE_GOTO:
858 if (make_goto_expr_edges (bb))
859 ret = 1;
860 fallthru = false;
861 break;
862 case GIMPLE_RETURN:
863 {
864 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
865 e->goto_locus = gimple_location (last);
866 fallthru = false;
867 }
868 break;
869 case GIMPLE_COND:
870 make_cond_expr_edges (bb);
871 fallthru = false;
872 break;
873 case GIMPLE_SWITCH:
874 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
875 fallthru = false;
876 break;
877 case GIMPLE_RESX:
878 make_eh_edges (last);
879 fallthru = false;
880 break;
881 case GIMPLE_EH_DISPATCH:
882 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
883 break;
884
885 case GIMPLE_CALL:
886 /* If this function receives a nonlocal goto, then we need to
887 make edges from this call site to all the nonlocal goto
888 handlers. */
889 if (stmt_can_make_abnormal_goto (last))
890 ret = 2;
891
892 /* If this statement has reachable exception handlers, then
893 create abnormal edges to them. */
894 make_eh_edges (last);
895
896 /* BUILTIN_RETURN is really a return statement. */
897 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
898 {
899 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
900 fallthru = false;
901 }
902 /* Some calls are known not to return. */
903 else
904 fallthru = !gimple_call_noreturn_p (last);
905 break;
906
907 case GIMPLE_ASSIGN:
908 /* A GIMPLE_ASSIGN may throw internally and thus be considered
909 control-altering. */
910 if (is_ctrl_altering_stmt (last))
911 make_eh_edges (last);
912 fallthru = true;
913 break;
914
915 case GIMPLE_ASM:
916 make_gimple_asm_edges (bb);
917 fallthru = true;
918 break;
919
920 CASE_GIMPLE_OMP:
921 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
922 break;
923
924 case GIMPLE_TRANSACTION:
925 {
926 gtransaction *txn = as_a <gtransaction *> (last);
927 tree label1 = gimple_transaction_label_norm (txn);
928 tree label2 = gimple_transaction_label_uninst (txn);
929
930 if (label1)
931 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
932 if (label2)
933 make_edge (bb, label_to_block (cfun, label2),
934 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
935
936 tree label3 = gimple_transaction_label_over (txn);
937 if (gimple_transaction_subcode (txn)
938 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
939 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
940
941 fallthru = false;
942 }
943 break;
944
945 default:
946 gcc_assert (!stmt_ends_bb_p (last));
947 fallthru = true;
948 break;
949 }
950
951 if (fallthru)
952 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
953
954 return ret;
955 }
956
957 /* Join all the blocks in the flowgraph. */
958
959 static void
960 make_edges (void)
961 {
962 basic_block bb;
963 struct omp_region *cur_region = NULL;
964 auto_vec<basic_block> ab_edge_goto;
965 auto_vec<basic_block> ab_edge_call;
966 int *bb_to_omp_idx = NULL;
967 int cur_omp_region_idx = 0;
968
969 /* Create an edge from entry to the first block with executable
970 statements in it. */
971 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
972 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
973 EDGE_FALLTHRU);
974
975 /* Traverse the basic block array placing edges. */
976 FOR_EACH_BB_FN (bb, cfun)
977 {
978 int mer;
979
980 if (bb_to_omp_idx)
981 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
982
983 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
984 if (mer == 1)
985 ab_edge_goto.safe_push (bb);
986 else if (mer == 2)
987 ab_edge_call.safe_push (bb);
988
989 if (cur_region && bb_to_omp_idx == NULL)
990 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
991 }
992
993 /* Computed gotos are hell to deal with, especially if there are
994 lots of them with a large number of destinations. So we factor
995 them to a common computed goto location before we build the
996 edge list. After we convert back to normal form, we will un-factor
997 the computed gotos since factoring introduces an unwanted jump.
998 For non-local gotos, and abnormal edges from calls to calls that return
999 twice or to forced labels, factor the abnormal edges too, by having all
1000 abnormal edges from the calls go to a common artificial basic block
1001 with an ABNORMAL_DISPATCHER internal call and abnormal edges from that
1002 basic block to all forced labels and calls returning twice.
1003 We do this per-OpenMP structured block, because those regions
1004 are guaranteed to be single entry single exit by the standard,
1005 so it is not allowed to enter or exit such regions abnormally this way,
1006 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1007 must not transfer control across SESE region boundaries. */
1008 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1009 {
1010 gimple_stmt_iterator gsi;
1011 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1012 basic_block *dispatcher_bbs = dispatcher_bb_array;
1013 int count = n_basic_blocks_for_fn (cfun);
1014
1015 if (bb_to_omp_idx)
1016 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1017
1018 FOR_EACH_BB_FN (bb, cfun)
1019 {
1020 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1021 {
1022 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1023 tree target;
1024
1025 if (!label_stmt)
1026 break;
1027
1028 target = gimple_label_label (label_stmt);
1029
1030 /* Make an edge to every label block that has been marked as a
1031 potential target for a computed goto or a non-local goto. */
1032 if (FORCED_LABEL (target))
1033 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1034 &ab_edge_goto, true);
1035 if (DECL_NONLOCAL (target))
1036 {
1037 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1038 &ab_edge_call, false);
1039 break;
1040 }
1041 }
1042
1043 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1044 gsi_next_nondebug (&gsi);
1045 if (!gsi_end_p (gsi))
1046 {
1047 /* Make an edge to every setjmp-like call. */
1048 gimple *call_stmt = gsi_stmt (gsi);
1049 if (is_gimple_call (call_stmt)
1050 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1051 || gimple_call_builtin_p (call_stmt,
1052 BUILT_IN_SETJMP_RECEIVER)))
1053 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1054 &ab_edge_call, false);
1055 }
1056 }
1057
1058 if (bb_to_omp_idx)
1059 XDELETE (dispatcher_bbs);
1060 }
1061
1062 XDELETE (bb_to_omp_idx);
1063
1064 omp_free_regions ();
1065 }
1066
1067 /* Add SEQ after GSI. Start a new bb after GSI, and create further bbs as
1068 needed. Returns true if new bbs were created.
1069 Note: This is transitional code, and should not be used for new code. We
1070 should be able to get rid of this by rewriting all target va-arg
1071 gimplification hooks to use an interface gimple_build_cond_value as described
1072 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1073
1074 bool
1075 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1076 {
1077 gimple *stmt = gsi_stmt (*gsi);
1078 basic_block bb = gimple_bb (stmt);
1079 basic_block lastbb, afterbb;
1080 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1081 edge e;
1082 lastbb = make_blocks_1 (seq, bb);
1083 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1084 return false;
1085 e = split_block (bb, stmt);
1086 /* Move e->dest to come after the new basic blocks. */
1087 afterbb = e->dest;
1088 unlink_block (afterbb);
1089 link_block (afterbb, lastbb);
1090 redirect_edge_succ (e, bb->next_bb);
1091 bb = bb->next_bb;
1092 while (bb != afterbb)
1093 {
1094 struct omp_region *cur_region = NULL;
1095 profile_count cnt = profile_count::zero ();
1096 bool all = true;
1097
1098 int cur_omp_region_idx = 0;
1099 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1100 gcc_assert (!mer && !cur_region);
1101 add_bb_to_loop (bb, afterbb->loop_father);
1102
1103 edge e;
1104 edge_iterator ei;
1105 FOR_EACH_EDGE (e, ei, bb->preds)
1106 {
1107 if (e->count ().initialized_p ())
1108 cnt += e->count ();
1109 else
1110 all = false;
1111 }
1112 tree_guess_outgoing_edge_probabilities (bb);
1113 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1114 bb->count = cnt;
1115
1116 bb = bb->next_bb;
1117 }
1118 return true;
1119 }
1120
1121 /* Find the next available discriminator value for LINE. The
1122 discriminator distinguishes among several basic blocks that
1123 share a common locus, allowing for more accurate sample-based
1124 profiling. */
1125
1126 static int
1127 next_discriminator_for_locus (int line)
1128 {
1129 struct locus_discrim_map item;
1130 struct locus_discrim_map **slot;
1131
1132 item.location_line = line;
1133 item.discriminator = 0;
1134 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1135 gcc_assert (slot);
1136 if (*slot == HTAB_EMPTY_ENTRY)
1137 {
1138 *slot = XNEW (struct locus_discrim_map);
1139 gcc_assert (*slot);
1140 (*slot)->location_line = line;
1141 (*slot)->discriminator = 0;
1142 }
1143 (*slot)->discriminator++;
1144 return (*slot)->discriminator;
1145 }
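/* For instance, when a loop header, body and latch all carry the locus
   of the same "for (...)" line, assign_discriminators below hands out
   distinct discriminators so a sample-based profiler can attribute
   counts to each block separately.  */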
1146
1147 /* Return TRUE if LOCUS1 (already expanded into FROM) and LOCUS2 refer to the same source line. */
1148
1149 static bool
1150 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1151 {
1152 expanded_location to;
1153
1154 if (locus1 == locus2)
1155 return true;
1156
1157 to = expand_location (locus2);
1158
1159 if (from->line != to.line)
1160 return false;
1161 if (from->file == to.file)
1162 return true;
1163 return (from->file != NULL
1164 && to.file != NULL
1165 && filename_cmp (from->file, to.file) == 0);
1166 }
1167
1168 /* Assign discriminators to each basic block. */
1169
1170 static void
1171 assign_discriminators (void)
1172 {
1173 basic_block bb;
1174
1175 FOR_EACH_BB_FN (bb, cfun)
1176 {
1177 edge e;
1178 edge_iterator ei;
1179 gimple *last = last_stmt (bb);
1180 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1181
1182 if (locus == UNKNOWN_LOCATION)
1183 continue;
1184
1185 expanded_location locus_e = expand_location (locus);
1186
1187 FOR_EACH_EDGE (e, ei, bb->succs)
1188 {
1189 gimple *first = first_non_label_stmt (e->dest);
1190 gimple *last = last_stmt (e->dest);
1191 if ((first && same_line_p (locus, &locus_e,
1192 gimple_location (first)))
1193 || (last && same_line_p (locus, &locus_e,
1194 gimple_location (last))))
1195 {
1196 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1197 bb->discriminator
1198 = next_discriminator_for_locus (locus_e.line);
1199 else
1200 e->dest->discriminator
1201 = next_discriminator_for_locus (locus_e.line);
1202 }
1203 }
1204 }
1205 }
1206
1207 /* Create the edges for a GIMPLE_COND starting at block BB. */
1208
1209 static void
1210 make_cond_expr_edges (basic_block bb)
1211 {
1212 gcond *entry = as_a <gcond *> (last_stmt (bb));
1213 gimple *then_stmt, *else_stmt;
1214 basic_block then_bb, else_bb;
1215 tree then_label, else_label;
1216 edge e;
1217
1218 gcc_assert (entry);
1219 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1220
1221 /* Entry basic blocks for each component. */
1222 then_label = gimple_cond_true_label (entry);
1223 else_label = gimple_cond_false_label (entry);
1224 then_bb = label_to_block (cfun, then_label);
1225 else_bb = label_to_block (cfun, else_label);
1226 then_stmt = first_stmt (then_bb);
1227 else_stmt = first_stmt (else_bb);
1228
1229 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1230 e->goto_locus = gimple_location (then_stmt);
1231 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1232 if (e)
1233 e->goto_locus = gimple_location (else_stmt);
1234
1235 /* We do not need the labels anymore. */
1236 gimple_cond_set_true_label (entry, NULL_TREE);
1237 gimple_cond_set_false_label (entry, NULL_TREE);
1238 }
1239
1240
1241 /* Called for each element in the hash table as we delete the
1242 edge-to-cases hash table.
1243
1244 Clear all the CASE_CHAINs to prevent problems with copying of
1245 SWITCH_EXPRs and structure sharing rules, then free the hash table
1246 element. */
1247
1248 bool
1249 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1250 {
1251 tree t, next;
1252
1253 for (t = value; t; t = next)
1254 {
1255 next = CASE_CHAIN (t);
1256 CASE_CHAIN (t) = NULL;
1257 }
1258
1259 return true;
1260 }
1261
1262 /* Start recording information mapping edges to case labels. */
1263
1264 void
1265 start_recording_case_labels (void)
1266 {
1267 gcc_assert (edge_to_cases == NULL);
1268 edge_to_cases = new hash_map<edge, tree>;
1269 touched_switch_bbs = BITMAP_ALLOC (NULL);
1270 }
1271
1272 /* Return nonzero if we are recording information for case labels. */
1273
1274 static bool
1275 recording_case_labels_p (void)
1276 {
1277 return (edge_to_cases != NULL);
1278 }
1279
1280 /* Stop recording information mapping edges to case labels and
1281 remove any information we have recorded. */
1282 void
1283 end_recording_case_labels (void)
1284 {
1285 bitmap_iterator bi;
1286 unsigned i;
1287 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1288 delete edge_to_cases;
1289 edge_to_cases = NULL;
1290 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1291 {
1292 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1293 if (bb)
1294 {
1295 gimple *stmt = last_stmt (bb);
1296 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1297 group_case_labels_stmt (as_a <gswitch *> (stmt));
1298 }
1299 }
1300 BITMAP_FREE (touched_switch_bbs);
1301 }
1302
1303 /* If we are inside a {start,end}_recording_cases block, then return
1304 a chain of CASE_LABEL_EXPRs from T which reference E.
1305
1306 Otherwise return NULL. */
1307
1308 static tree
1309 get_cases_for_edge (edge e, gswitch *t)
1310 {
1311 tree *slot;
1312 size_t i, n;
1313
1314 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1315 chains available. Return NULL so the caller can detect this case. */
1316 if (!recording_case_labels_p ())
1317 return NULL;
1318
1319 slot = edge_to_cases->get (e);
1320 if (slot)
1321 return *slot;
1322
1323 /* If we did not find E in the hash table, then this must be the first
1324 time we have been queried for information about E & T. Add all the
1325 elements from T to the hash table then perform the query again. */
1326
1327 n = gimple_switch_num_labels (t);
1328 for (i = 0; i < n; i++)
1329 {
1330 tree elt = gimple_switch_label (t, i);
1331 tree lab = CASE_LABEL (elt);
1332 basic_block label_bb = label_to_block (cfun, lab);
1333 edge this_edge = find_edge (e->src, label_bb);
1334
1335 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1336 a new chain. */
1337 tree &s = edge_to_cases->get_or_insert (this_edge);
1338 CASE_CHAIN (elt) = s;
1339 s = elt;
1340 }
1341
1342 return *edge_to_cases->get (e);
1343 }
1344
1345 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1346
1347 static void
1348 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1349 {
1350 size_t i, n;
1351
1352 n = gimple_switch_num_labels (entry);
1353
1354 for (i = 0; i < n; ++i)
1355 {
1356 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1357 make_edge (bb, label_bb, 0);
1358 }
1359 }
1360
1361
1362 /* Return the basic block holding label DEST. */
1363
1364 basic_block
1365 label_to_block (struct function *ifun, tree dest)
1366 {
1367 int uid = LABEL_DECL_UID (dest);
1368
1369 /* We would die hard when faced with an undefined label. Emit a label
1370 to the very first basic block. This will hopefully make even the
1371 dataflow and undefined variable warnings come out right. */
1372 if (seen_error () && uid < 0)
1373 {
1374 gimple_stmt_iterator gsi =
1375 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1376 gimple *stmt;
1377
1378 stmt = gimple_build_label (dest);
1379 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1380 uid = LABEL_DECL_UID (dest);
1381 }
1382 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1383 return NULL;
1384 return (*ifun->cfg->x_label_to_block_map)[uid];
1385 }
1386
1387 /* Create edges for a goto statement at block BB. Returns true
1388 if abnormal edges should be created. */
1389
1390 static bool
1391 make_goto_expr_edges (basic_block bb)
1392 {
1393 gimple_stmt_iterator last = gsi_last_bb (bb);
1394 gimple *goto_t = gsi_stmt (last);
1395
1396 /* A simple GOTO creates normal edges. */
1397 if (simple_goto_p (goto_t))
1398 {
1399 tree dest = gimple_goto_dest (goto_t);
1400 basic_block label_bb = label_to_block (cfun, dest);
1401 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1402 e->goto_locus = gimple_location (goto_t);
1403 gsi_remove (&last, true);
1404 return false;
1405 }
1406
1407 /* A computed GOTO creates abnormal edges. */
1408 return true;
1409 }
1410
1411 /* Create edges for an asm statement with labels at block BB. */
1412
1413 static void
1414 make_gimple_asm_edges (basic_block bb)
1415 {
1416 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1417 int i, n = gimple_asm_nlabels (stmt);
1418
1419 for (i = 0; i < n; ++i)
1420 {
1421 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1422 basic_block label_bb = label_to_block (cfun, label);
1423 make_edge (bb, label_bb, 0);
1424 }
1425 }
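/* For example, a GNU C asm goto such as

     asm goto ("jc %l0" : : : : failed);

   carries "failed" as a label operand, and the function above turns
   each such label operand into an outgoing CFG edge.  */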
1426
1427 /*---------------------------------------------------------------------------
1428 Flowgraph analysis
1429 ---------------------------------------------------------------------------*/
1430
1431 /* Cleanup useless labels in basic blocks. This is something we wish
1432 to do early because it allows us to group case labels before creating
1433 the edges for the CFG, and it speeds up block statement iterators in
1434 all passes later on.
1435 We rerun this pass after CFG is created, to get rid of the labels that
1436 are no longer referenced. After that we do not run it anymore, since
1437 (almost) no new labels should be created. */
1438
1439 /* A map from basic block index to the leading label of that block. */
1440 struct label_record
1441 {
1442 /* The label. */
1443 tree label;
1444
1445 /* True if the label is referenced from somewhere. */
1446 bool used;
1447 };
1448
1449 /* Given LABEL return the first label in the same basic block. */
1450
1451 static tree
1452 main_block_label (tree label, label_record *label_for_bb)
1453 {
1454 basic_block bb = label_to_block (cfun, label);
1455 tree main_label = label_for_bb[bb->index].label;
1456
1457 /* label_to_block may have inserted an undefined label into the chain. */
1458 if (!main_label)
1459 {
1460 label_for_bb[bb->index].label = label;
1461 main_label = label;
1462 }
1463
1464 label_for_bb[bb->index].used = true;
1465 return main_label;
1466 }
1467
1468 /* Clean up redundant labels within the exception tree. */
1469
1470 static void
1471 cleanup_dead_labels_eh (label_record *label_for_bb)
1472 {
1473 eh_landing_pad lp;
1474 eh_region r;
1475 tree lab;
1476 int i;
1477
1478 if (cfun->eh == NULL)
1479 return;
1480
1481 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1482 if (lp && lp->post_landing_pad)
1483 {
1484 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1485 if (lab != lp->post_landing_pad)
1486 {
1487 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1488 EH_LANDING_PAD_NR (lab) = lp->index;
1489 }
1490 }
1491
1492 FOR_ALL_EH_REGION (r)
1493 switch (r->type)
1494 {
1495 case ERT_CLEANUP:
1496 case ERT_MUST_NOT_THROW:
1497 break;
1498
1499 case ERT_TRY:
1500 {
1501 eh_catch c;
1502 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1503 {
1504 lab = c->label;
1505 if (lab)
1506 c->label = main_block_label (lab, label_for_bb);
1507 }
1508 }
1509 break;
1510
1511 case ERT_ALLOWED_EXCEPTIONS:
1512 lab = r->u.allowed.label;
1513 if (lab)
1514 r->u.allowed.label = main_block_label (lab, label_for_bb);
1515 break;
1516 }
1517 }
1518
1519
1520 /* Cleanup redundant labels. This is a three-step process:
1521 1) Find the leading label for each block.
1522 2) Redirect all references to labels to the leading labels.
1523 3) Cleanup all useless labels. */
1524
1525 void
1526 cleanup_dead_labels (void)
1527 {
1528 basic_block bb;
1529 label_record *label_for_bb = XCNEWVEC (struct label_record,
1530 last_basic_block_for_fn (cfun));
1531
1532 /* Find a suitable label for each block. We use the first user-defined
1533 label if there is one, or otherwise just the first label we see. */
1534 FOR_EACH_BB_FN (bb, cfun)
1535 {
1536 gimple_stmt_iterator i;
1537
1538 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1539 {
1540 tree label;
1541 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1542
1543 if (!label_stmt)
1544 break;
1545
1546 label = gimple_label_label (label_stmt);
1547
1548 /* If we have not yet seen a label for the current block,
1549 remember this one and see if there are more labels. */
1550 if (!label_for_bb[bb->index].label)
1551 {
1552 label_for_bb[bb->index].label = label;
1553 continue;
1554 }
1555
1556 /* If we did see a label for the current block already, but it
1557 is an artificially created label, replace it if the current
1558 label is a user-defined label. */
1559 if (!DECL_ARTIFICIAL (label)
1560 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1561 {
1562 label_for_bb[bb->index].label = label;
1563 break;
1564 }
1565 }
1566 }
1567
1568 /* Now redirect all jumps/branches to the selected label.
1569 First do so for each block ending in a control statement. */
1570 FOR_EACH_BB_FN (bb, cfun)
1571 {
1572 gimple *stmt = last_stmt (bb);
1573 tree label, new_label;
1574
1575 if (!stmt)
1576 continue;
1577
1578 switch (gimple_code (stmt))
1579 {
1580 case GIMPLE_COND:
1581 {
1582 gcond *cond_stmt = as_a <gcond *> (stmt);
1583 label = gimple_cond_true_label (cond_stmt);
1584 if (label)
1585 {
1586 new_label = main_block_label (label, label_for_bb);
1587 if (new_label != label)
1588 gimple_cond_set_true_label (cond_stmt, new_label);
1589 }
1590
1591 label = gimple_cond_false_label (cond_stmt);
1592 if (label)
1593 {
1594 new_label = main_block_label (label, label_for_bb);
1595 if (new_label != label)
1596 gimple_cond_set_false_label (cond_stmt, new_label);
1597 }
1598 }
1599 break;
1600
1601 case GIMPLE_SWITCH:
1602 {
1603 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1604 size_t i, n = gimple_switch_num_labels (switch_stmt);
1605
1606 /* Replace all destination labels. */
1607 for (i = 0; i < n; ++i)
1608 {
1609 tree case_label = gimple_switch_label (switch_stmt, i);
1610 label = CASE_LABEL (case_label);
1611 new_label = main_block_label (label, label_for_bb);
1612 if (new_label != label)
1613 CASE_LABEL (case_label) = new_label;
1614 }
1615 break;
1616 }
1617
1618 case GIMPLE_ASM:
1619 {
1620 gasm *asm_stmt = as_a <gasm *> (stmt);
1621 int i, n = gimple_asm_nlabels (asm_stmt);
1622
1623 for (i = 0; i < n; ++i)
1624 {
1625 tree cons = gimple_asm_label_op (asm_stmt, i);
1626 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1627 TREE_VALUE (cons) = label;
1628 }
1629 break;
1630 }
1631
1632 /* We have to handle gotos until they're removed, and we don't
1633 remove them until after we've created the CFG edges. */
1634 case GIMPLE_GOTO:
1635 if (!computed_goto_p (stmt))
1636 {
1637 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1638 label = gimple_goto_dest (goto_stmt);
1639 new_label = main_block_label (label, label_for_bb);
1640 if (new_label != label)
1641 gimple_goto_set_dest (goto_stmt, new_label);
1642 }
1643 break;
1644
1645 case GIMPLE_TRANSACTION:
1646 {
1647 gtransaction *txn = as_a <gtransaction *> (stmt);
1648
1649 label = gimple_transaction_label_norm (txn);
1650 if (label)
1651 {
1652 new_label = main_block_label (label, label_for_bb);
1653 if (new_label != label)
1654 gimple_transaction_set_label_norm (txn, new_label);
1655 }
1656
1657 label = gimple_transaction_label_uninst (txn);
1658 if (label)
1659 {
1660 new_label = main_block_label (label, label_for_bb);
1661 if (new_label != label)
1662 gimple_transaction_set_label_uninst (txn, new_label);
1663 }
1664
1665 label = gimple_transaction_label_over (txn);
1666 if (label)
1667 {
1668 new_label = main_block_label (label, label_for_bb);
1669 if (new_label != label)
1670 gimple_transaction_set_label_over (txn, new_label);
1671 }
1672 }
1673 break;
1674
1675 default:
1676 break;
1677 }
1678 }
1679
1680 /* Do the same for the exception region tree labels. */
1681 cleanup_dead_labels_eh (label_for_bb);
1682
1683 /* Finally, purge dead labels. All user-defined labels and labels that
1684 can be the target of non-local gotos and labels which have their
1685 address taken are preserved. */
1686 FOR_EACH_BB_FN (bb, cfun)
1687 {
1688 gimple_stmt_iterator i;
1689 tree label_for_this_bb = label_for_bb[bb->index].label;
1690
1691 if (!label_for_this_bb)
1692 continue;
1693
1694 /* If the main label of the block is unused, we may still remove it. */
1695 if (!label_for_bb[bb->index].used)
1696 label_for_this_bb = NULL;
1697
1698 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1699 {
1700 tree label;
1701 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1702
1703 if (!label_stmt)
1704 break;
1705
1706 label = gimple_label_label (label_stmt);
1707
1708 if (label == label_for_this_bb
1709 || !DECL_ARTIFICIAL (label)
1710 || DECL_NONLOCAL (label)
1711 || FORCED_LABEL (label))
1712 gsi_next (&i);
1713 else
1714 gsi_remove (&i, true);
1715 }
1716 }
1717
1718 free (label_for_bb);
1719 }
1720
1721 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1722 the ones jumping to the same label.
1723 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1724
1725 bool
1726 group_case_labels_stmt (gswitch *stmt)
1727 {
1728 int old_size = gimple_switch_num_labels (stmt);
1729 int i, next_index, new_size;
1730 basic_block default_bb = NULL;
1731
1732 default_bb = gimple_switch_default_bb (cfun, stmt);
1733
1734 /* Look for possible opportunities to merge cases. */
1735 new_size = i = 1;
1736 while (i < old_size)
1737 {
1738 tree base_case, base_high;
1739 basic_block base_bb;
1740
1741 base_case = gimple_switch_label (stmt, i);
1742
1743 gcc_assert (base_case);
1744 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1745
1746 /* Discard cases that have the same destination as the default case or
1747 whose destination blocks have already been removed as unreachable. */
1748 if (base_bb == NULL || base_bb == default_bb)
1749 {
1750 i++;
1751 continue;
1752 }
1753
1754 base_high = CASE_HIGH (base_case)
1755 ? CASE_HIGH (base_case)
1756 : CASE_LOW (base_case);
1757 next_index = i + 1;
1758
1759 /* Try to merge case labels. Break out when we reach the end
1760 of the label vector or when we cannot merge the next case
1761 label with the current one. */
1762 while (next_index < old_size)
1763 {
1764 tree merge_case = gimple_switch_label (stmt, next_index);
1765 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1766 wide_int bhp1 = wi::to_wide (base_high) + 1;
1767
1768 /* Merge the cases if they jump to the same place,
1769 and their ranges are consecutive. */
1770 if (merge_bb == base_bb
1771 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1772 {
1773 base_high = CASE_HIGH (merge_case) ?
1774 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1775 CASE_HIGH (base_case) = base_high;
1776 next_index++;
1777 }
1778 else
1779 break;
1780 }
1781
1782 /* Discard cases that have an unreachable destination block. */
1783 if (EDGE_COUNT (base_bb->succs) == 0
1784 && gimple_seq_unreachable_p (bb_seq (base_bb))
1785 /* Don't optimize this if the __builtin_unreachable () is the
1786 one implicitly added by the C++ FE, so as not to do this too
1787 early, before -Wreturn-type can be diagnosed. We'll optimize
1788 it later, during the switchconv pass or any other cfg cleanup. */
1789 && (gimple_in_ssa_p (cfun)
1790 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1791 != BUILTINS_LOCATION)))
1792 {
1793 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1794 if (base_edge != NULL)
1795 remove_edge_and_dominated_blocks (base_edge);
1796 i = next_index;
1797 continue;
1798 }
1799
1800 if (new_size < i)
1801 gimple_switch_set_label (stmt, new_size,
1802 gimple_switch_label (stmt, i));
1803 i = next_index;
1804 new_size++;
1805 }
1806
1807 gcc_assert (new_size <= old_size);
1808
1809 if (new_size < old_size)
1810 gimple_switch_set_num_labels (stmt, new_size);
1811
1812 return new_size < old_size;
1813 }
1814
1815 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1816 and scan the sorted vector of cases. Combine the ones jumping to the
1817 same label. */
1818
1819 bool
1820 group_case_labels (void)
1821 {
1822 basic_block bb;
1823 bool changed = false;
1824
1825 FOR_EACH_BB_FN (bb, cfun)
1826 {
1827 gimple *stmt = last_stmt (bb);
1828 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1829 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1830 }
1831
1832 return changed;
1833 }
1834
1835 /* Checks whether we can merge block B into block A. */
1836
1837 static bool
1838 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1839 {
1840 gimple *stmt;
1841
1842 if (!single_succ_p (a))
1843 return false;
1844
1845 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1846 return false;
1847
1848 if (single_succ (a) != b)
1849 return false;
1850
1851 if (!single_pred_p (b))
1852 return false;
1853
1854 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1855 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1856 return false;
1857
1858 /* If A ends by a statement causing exceptions or something similar, we
1859 cannot merge the blocks. */
1860 stmt = last_stmt (a);
1861 if (stmt && stmt_ends_bb_p (stmt))
1862 return false;
1863
1864 /* Do not allow a block with only a non-local label to be merged. */
1865 if (stmt)
1866 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1867 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1868 return false;
1869
1870 /* Examine the labels at the beginning of B. */
1871 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1872 gsi_next (&gsi))
1873 {
1874 tree lab;
1875 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1876 if (!label_stmt)
1877 break;
1878 lab = gimple_label_label (label_stmt);
1879
1880 /* Do not remove user-forced labels or, at -O0, any user labels. */
1881 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1882 return false;
1883 }
1884
1885 /* Protect simple loop latches. We only want to avoid merging
1886 the latch with the loop header or with a block in another
1887 loop in this case. */
1888 if (current_loops
1889 && b->loop_father->latch == b
1890 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1891 && (b->loop_father->header == a
1892 || b->loop_father != a->loop_father))
1893 return false;
1894
1895 /* It must be possible to eliminate all phi nodes in B. If ssa form
1896 is not up-to-date and a name-mapping is registered, we cannot eliminate
1897 any phis. Symbols marked for renaming are never a problem though. */
1898 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1899 gsi_next (&gsi))
1900 {
1901 gphi *phi = gsi.phi ();
1902 /* Technically only new names matter. */
1903 if (name_registered_for_update_p (PHI_RESULT (phi)))
1904 return false;
1905 }
1906
1907 /* When not optimizing, don't merge if we'd lose goto_locus. */
1908 if (!optimize
1909 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1910 {
1911 location_t goto_locus = single_succ_edge (a)->goto_locus;
1912 gimple_stmt_iterator prev, next;
1913 prev = gsi_last_nondebug_bb (a);
1914 next = gsi_after_labels (b);
1915 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1916 gsi_next_nondebug (&next);
1917 if ((gsi_end_p (prev)
1918 || gimple_location (gsi_stmt (prev)) != goto_locus)
1919 && (gsi_end_p (next)
1920 || gimple_location (gsi_stmt (next)) != goto_locus))
1921 return false;
1922 }
1923
1924 return true;
1925 }
1926
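/* Illustrative usage (an assumption about a typical caller, not code
   from this file): a CFG cleanup would pair this predicate with the
   generic hooks from cfghooks.h roughly as

	if (single_succ_p (a)
	    && can_merge_blocks_p (a, single_succ (a)))
	  merge_blocks (a, single_succ (a));

   where can_merge_blocks_p () dispatches here for GIMPLE and
   merge_blocks () ends up in gimple_merge_blocks () below.  */
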
1927 /* Replaces all uses of NAME by VAL. */
1928
1929 void
1930 replace_uses_by (tree name, tree val)
1931 {
1932 imm_use_iterator imm_iter;
1933 use_operand_p use;
1934 gimple *stmt;
1935 edge e;
1936
1937 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1938 {
1939 /* Mark the block if we change the last stmt in it. */
1940 if (cfgcleanup_altered_bbs
1941 && stmt_ends_bb_p (stmt))
1942 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1943
1944 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1945 {
1946 replace_exp (use, val);
1947
1948 if (gimple_code (stmt) == GIMPLE_PHI)
1949 {
1950 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1951 PHI_ARG_INDEX_FROM_USE (use));
1952 if (e->flags & EDGE_ABNORMAL
1953 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1954 {
1955 /* This can only occur for virtual operands, since
1956 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1957 would prevent replacement. */
1958 gcc_checking_assert (virtual_operand_p (name));
1959 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1960 }
1961 }
1962 }
1963
1964 if (gimple_code (stmt) != GIMPLE_PHI)
1965 {
1966 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1967 gimple *orig_stmt = stmt;
1968 size_t i;
1969
1970 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1971 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1972 only change sth from non-invariant to invariant, and only
1973 when propagating constants. */
1974 if (is_gimple_min_invariant (val))
1975 for (i = 0; i < gimple_num_ops (stmt); i++)
1976 {
1977 tree op = gimple_op (stmt, i);
1978 /* Operands may be empty here. For example, the labels
1979 of a GIMPLE_COND are nulled out following the creation
1980 of the corresponding CFG edges. */
1981 if (op && TREE_CODE (op) == ADDR_EXPR)
1982 recompute_tree_invariant_for_addr_expr (op);
1983 }
1984
1985 if (fold_stmt (&gsi))
1986 stmt = gsi_stmt (gsi);
1987
1988 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1989 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1990
1991 update_stmt (stmt);
1992 }
1993 }
1994
1995 gcc_checking_assert (has_zero_uses (name));
1996
1997 /* Also update the trees stored in loop structures. */
1998 if (current_loops)
1999 {
2000 class loop *loop;
2001
2002 FOR_EACH_LOOP (loop, 0)
2003 {
2004 substitute_in_loop_info (loop, name, val);
2005 }
2006 }
2007 }
2008
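/* Worked example (illustrative): with SSA statements

	x_1 = y_2;
	z_3 = x_1 + 1;

   calling replace_uses_by (x_1, y_2) rewrites the second statement to
   z_3 = y_2 + 1, folds and updates it, and leaves x_1 with zero uses,
   which the final assert above checks.  */
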
2009 /* Merge block B into block A. */
2010
2011 static void
2012 gimple_merge_blocks (basic_block a, basic_block b)
2013 {
2014 gimple_stmt_iterator last, gsi;
2015 gphi_iterator psi;
2016
2017 if (dump_file)
2018 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2019
2020 /* Remove all single-valued PHI nodes from block B of the form
2021 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2022 gsi = gsi_last_bb (a);
2023 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2024 {
2025 gimple *phi = gsi_stmt (psi);
2026 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2027 gimple *copy;
2028 bool may_replace_uses = (virtual_operand_p (def)
2029 || may_propagate_copy (def, use));
2030
2031 /* In case we maintain loop closed ssa form, do not propagate arguments
2032 of loop exit phi nodes. */
2033 if (current_loops
2034 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2035 && !virtual_operand_p (def)
2036 && TREE_CODE (use) == SSA_NAME
2037 && a->loop_father != b->loop_father)
2038 may_replace_uses = false;
2039
2040 if (!may_replace_uses)
2041 {
2042 gcc_assert (!virtual_operand_p (def));
2043
2044 /* Note that just emitting the copies is fine -- there is no problem
2045 with ordering of phi nodes. This is because A is the single
2046 predecessor of B, therefore results of the phi nodes cannot
2047 appear as arguments of the phi nodes. */
2048 copy = gimple_build_assign (def, use);
2049 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2050 remove_phi_node (&psi, false);
2051 }
2052 else
2053 {
2054 /* If we deal with a PHI for virtual operands, we can simply
2055 propagate these without fussing with folding or updating
2056 the stmt. */
2057 if (virtual_operand_p (def))
2058 {
2059 imm_use_iterator iter;
2060 use_operand_p use_p;
2061 gimple *stmt;
2062
2063 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2064 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2065 SET_USE (use_p, use);
2066
2067 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2068 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2069 }
2070 else
2071 replace_uses_by (def, use);
2072
2073 remove_phi_node (&psi, true);
2074 }
2075 }
2076
2077 /* Ensure that B follows A. */
2078 move_block_after (b, a);
2079
2080 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2081 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2082
2083 /* Remove labels from B and set gimple_bb to A for other statements. */
2084 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2085 {
2086 gimple *stmt = gsi_stmt (gsi);
2087 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2088 {
2089 tree label = gimple_label_label (label_stmt);
2090 int lp_nr;
2091
2092 gsi_remove (&gsi, false);
2093
2094 /* Now that we can thread computed gotos, we might have
2095 a situation where we have a forced label in block B.
2096 However, the label at the start of block B might still be
2097 used in other ways (think about the runtime checking for
2098 Fortran assigned gotos). So we cannot just delete the
2099 label. Instead we move the label to the start of block A. */
2100 if (FORCED_LABEL (label))
2101 {
2102 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2103 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2104 }
2105 /* Other user labels are kept around in the form of a debug stmt. */
2106 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2107 {
2108 gimple *dbg = gimple_build_debug_bind (label,
2109 integer_zero_node,
2110 stmt);
2111 gimple_debug_bind_reset_value (dbg);
2112 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2113 }
2114
2115 lp_nr = EH_LANDING_PAD_NR (label);
2116 if (lp_nr)
2117 {
2118 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2119 lp->post_landing_pad = NULL;
2120 }
2121 }
2122 else
2123 {
2124 gimple_set_bb (stmt, a);
2125 gsi_next (&gsi);
2126 }
2127 }
2128
2129 /* When merging two BBs, if their counts are different, the larger count
2130 is selected as the new bb count. This is to handle inconsistent
2131 profiles. */
2132 if (a->loop_father == b->loop_father)
2133 {
2134 a->count = a->count.merge (b->count);
2135 }
2136
2137 /* Merge the sequences. */
2138 last = gsi_last_bb (a);
2139 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2140 set_bb_seq (b, NULL);
2141
2142 if (cfgcleanup_altered_bbs)
2143 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2144 }
2145
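/* For illustration: when A is B's only predecessor and B is A's only
   successor, a degenerate PHI in B such as

	B: v_5 = PHI <v_4(A)>

   is removed by propagating v_4 into v_5's uses (or, when copy
   propagation is not allowed, by appending v_5 = v_4 to A), after
   which B's labels are moved or dropped and its statement sequence
   is spliced onto the end of A.  */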
2146
2147 /* Return the one of BB's two successors that is not reached by a
2148 complex edge, if there is one; otherwise return BB. We use
2149 this in optimizations that use post-dominators for their heuristics,
2150 to catch the cases in C++ where function calls are involved. */
2151
2152 basic_block
2153 single_noncomplex_succ (basic_block bb)
2154 {
2155 edge e0, e1;
2156 if (EDGE_COUNT (bb->succs) != 2)
2157 return bb;
2158
2159 e0 = EDGE_SUCC (bb, 0);
2160 e1 = EDGE_SUCC (bb, 1);
2161 if (e0->flags & EDGE_COMPLEX)
2162 return e1->dest;
2163 if (e1->flags & EDGE_COMPLEX)
2164 return e0->dest;
2165
2166 return bb;
2167 }
2168
2169 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags it implies. */
2170
2171 void
2172 notice_special_calls (gcall *call)
2173 {
2174 int flags = gimple_call_flags (call);
2175
2176 if (flags & ECF_MAY_BE_ALLOCA)
2177 cfun->calls_alloca = true;
2178 if (flags & ECF_RETURNS_TWICE)
2179 cfun->calls_setjmp = true;
2180 }
2181
2182
2183 /* Clear flags set by notice_special_calls. Used by dead code removal
2184 to update the flags. */
2185
2186 void
2187 clear_special_calls (void)
2188 {
2189 cfun->calls_alloca = false;
2190 cfun->calls_setjmp = false;
2191 }
2192
2193 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2194
2195 static void
2196 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2197 {
2198 /* Since this block is no longer reachable, we can just delete all
2199 of its PHI nodes. */
2200 remove_phi_nodes (bb);
2201
2202 /* Remove edges to BB's successors. */
2203 while (EDGE_COUNT (bb->succs) > 0)
2204 remove_edge (EDGE_SUCC (bb, 0));
2205 }
2206
2207
2208 /* Remove statements of basic block BB. */
2209
2210 static void
2211 remove_bb (basic_block bb)
2212 {
2213 gimple_stmt_iterator i;
2214
2215 if (dump_file)
2216 {
2217 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2218 if (dump_flags & TDF_DETAILS)
2219 {
2220 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2221 fprintf (dump_file, "\n");
2222 }
2223 }
2224
2225 if (current_loops)
2226 {
2227 class loop *loop = bb->loop_father;
2228
2229 /* If a loop gets removed, clean up the information associated
2230 with it. */
2231 if (loop->latch == bb
2232 || loop->header == bb)
2233 free_numbers_of_iterations_estimates (loop);
2234 }
2235
2236 /* Remove all the instructions in the block. */
2237 if (bb_seq (bb) != NULL)
2238 {
2239 /* Walk backwards so as to get a chance to substitute all
2240 released DEFs into debug stmts. See
2241 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2242 details. */
2243 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2244 {
2245 gimple *stmt = gsi_stmt (i);
2246 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2247 if (label_stmt
2248 && (FORCED_LABEL (gimple_label_label (label_stmt))
2249 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2250 {
2251 basic_block new_bb;
2252 gimple_stmt_iterator new_gsi;
2253
2254 /* A non-reachable non-local label may still be referenced.
2255 But it no longer needs to carry the extra semantics of
2256 non-locality. */
2257 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2258 {
2259 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2260 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2261 }
2262
2263 new_bb = bb->prev_bb;
2264 /* Don't move any labels into ENTRY block. */
2265 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2266 {
2267 new_bb = single_succ (new_bb);
2268 gcc_assert (new_bb != bb);
2269 }
2270 new_gsi = gsi_after_labels (new_bb);
2271 gsi_remove (&i, false);
2272 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2273 }
2274 else
2275 {
2276 /* Release SSA definitions. */
2277 release_defs (stmt);
2278 gsi_remove (&i, true);
2279 }
2280
2281 if (gsi_end_p (i))
2282 i = gsi_last_bb (bb);
2283 else
2284 gsi_prev (&i);
2285 }
2286 }
2287
2288 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2289 bb->il.gimple.seq = NULL;
2290 bb->il.gimple.phi_nodes = NULL;
2291 }
2292
2293
2294 /* Given a basic block BB and a value VAL for use in the final statement
2295 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2296 the edge that will be taken out of the block.
2297 If VAL is NULL_TREE, then the current value of the final statement's
2298 predicate or index is used.
2299 If the value does not match a unique edge, NULL is returned. */
2300
2301 edge
2302 find_taken_edge (basic_block bb, tree val)
2303 {
2304 gimple *stmt;
2305
2306 stmt = last_stmt (bb);
2307
2308 /* Handle ENTRY and EXIT. */
2309 if (!stmt)
2310 return NULL;
2311
2312 if (gimple_code (stmt) == GIMPLE_COND)
2313 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2314
2315 if (gimple_code (stmt) == GIMPLE_SWITCH)
2316 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2317
2318 if (computed_goto_p (stmt))
2319 {
2320 /* Only optimize if the argument is a label, if the argument is
2321 not a label then we cannot construct a proper CFG.
2322
2323 It may be the case that we only need to allow the LABEL_REF to
2324 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2325 appear inside a LABEL_EXPR just to be safe. */
2326 if (val
2327 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2328 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2329 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2330 }
2331
2332 /* Otherwise we only know the taken successor edge if it's unique. */
2333 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2334 }
2335
2336 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2337 statement, determine which of the outgoing edges will be taken out of the
2338 block. Return NULL if either edge may be taken. */
2339
2340 static edge
2341 find_taken_edge_computed_goto (basic_block bb, tree val)
2342 {
2343 basic_block dest;
2344 edge e = NULL;
2345
2346 dest = label_to_block (cfun, val);
2347 if (dest)
2348 e = find_edge (bb, dest);
2349
2350 /* It's possible for find_edge to return NULL here on invalid code
2351 that abuses the labels-as-values extension (e.g. code that attempts to
2352 jump *between* functions via stored labels-as-values; PR 84136).
2353 If so, then we simply return that NULL for the edge.
2354 We don't currently have a way of detecting such invalid code, so we
2355 can't assert that it was the case when a NULL edge occurs here. */
2356
2357 return e;
2358 }
2359
2360 /* Given COND_STMT and a constant value VAL for use as the predicate,
2361 determine which of the two edges will be taken out of
2362 the statement's block. Return NULL if either edge may be taken.
2363 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2364 is used. */
2365
2366 static edge
2367 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2368 {
2369 edge true_edge, false_edge;
2370
2371 if (val == NULL_TREE)
2372 {
2373 /* Use the current value of the predicate. */
2374 if (gimple_cond_true_p (cond_stmt))
2375 val = integer_one_node;
2376 else if (gimple_cond_false_p (cond_stmt))
2377 val = integer_zero_node;
2378 else
2379 return NULL;
2380 }
2381 else if (TREE_CODE (val) != INTEGER_CST)
2382 return NULL;
2383
2384 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2385 &true_edge, &false_edge);
2386
2387 return (integer_zerop (val) ? false_edge : true_edge);
2388 }
2389
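/* E.g. (illustrative) for a gimple_cond that folded to  if (0 != 0),
   gimple_cond_false_p holds, VAL becomes integer_zero_node, and the
   false edge is returned; a non-constant VAL yields NULL.  */
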
2390 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2391 which edge will be taken out of the statement's block. Return NULL if any
2392 edge may be taken.
2393 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2394 is used. */
2395
2396 edge
2397 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2398 {
2399 basic_block dest_bb;
2400 edge e;
2401 tree taken_case;
2402
2403 if (gimple_switch_num_labels (switch_stmt) == 1)
2404 taken_case = gimple_switch_default_label (switch_stmt);
2405 else
2406 {
2407 if (val == NULL_TREE)
2408 val = gimple_switch_index (switch_stmt);
2409 if (TREE_CODE (val) != INTEGER_CST)
2410 return NULL;
2411 else
2412 taken_case = find_case_label_for_value (switch_stmt, val);
2413 }
2414 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2415
2416 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2417 gcc_assert (e);
2418 return e;
2419 }
2420
2421
2422 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2423 We can make optimal use here of the fact that the case labels are
2424 sorted: We can do a binary search for a case matching VAL. */
2425
2426 tree
2427 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2428 {
2429 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2430 tree default_case = gimple_switch_default_label (switch_stmt);
2431
2432 for (low = 0, high = n; high - low > 1; )
2433 {
2434 size_t i = (high + low) / 2;
2435 tree t = gimple_switch_label (switch_stmt, i);
2436 int cmp;
2437
2438 /* Cache the result of comparing CASE_LOW and val. */
2439 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2440
2441 if (cmp > 0)
2442 high = i;
2443 else
2444 low = i;
2445
2446 if (CASE_HIGH (t) == NULL)
2447 {
2448 /* A single-valued case label. */
2449 if (cmp == 0)
2450 return t;
2451 }
2452 else
2453 {
2454 /* A case range. We can only handle integer ranges. */
2455 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2456 return t;
2457 }
2458 }
2459
2460 return default_case;
2461 }
2462
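/* Worked example (illustrative): for the sorted label vector
   { default, 1, 5 ... 10, 20 } and VAL == 7, the first probe hits the
   5 ... 10 range, where CASE_LOW <= VAL and VAL <= CASE_HIGH both
   hold, so that label is returned.  For VAL == 4 every probe misses
   and the default case is returned.  The default label at index 0 is
   never probed because the midpoint stays at index 1 or above.  */
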
2463
2464 /* Dump a basic block on stderr. */
2465
2466 void
2467 gimple_debug_bb (basic_block bb)
2468 {
2469 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2470 }
2471
2472
2473 /* Dump basic block with index N on stderr. */
2474
2475 basic_block
2476 gimple_debug_bb_n (int n)
2477 {
2478 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2479 return BASIC_BLOCK_FOR_FN (cfun, n);
2480 }
2481
2482
2483 /* Dump the CFG on stderr.
2484
2485 FLAGS are the same used by the tree dumping functions
2486 (see TDF_* in dumpfile.h). */
2487
2488 void
2489 gimple_debug_cfg (dump_flags_t flags)
2490 {
2491 gimple_dump_cfg (stderr, flags);
2492 }
2493
2494
2495 /* Dump the program showing basic block boundaries on the given FILE.
2496
2497 FLAGS are the same used by the tree dumping functions (see TDF_* in
2498 tree.h). */
2499
2500 void
2501 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2502 {
2503 if (flags & TDF_DETAILS)
2504 {
2505 dump_function_header (file, current_function_decl, flags);
2506 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2507 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2508 last_basic_block_for_fn (cfun));
2509
2510 brief_dump_cfg (file, flags);
2511 fprintf (file, "\n");
2512 }
2513
2514 if (flags & TDF_STATS)
2515 dump_cfg_stats (file);
2516
2517 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2518 }
2519
2520
2521 /* Dump CFG statistics on FILE. */
2522
2523 void
2524 dump_cfg_stats (FILE *file)
2525 {
2526 static long max_num_merged_labels = 0;
2527 unsigned long size, total = 0;
2528 long num_edges;
2529 basic_block bb;
2530 const char * const fmt_str = "%-30s%-13s%12s\n";
2531 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2532 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2533 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2534 const char *funcname = current_function_name ();
2535
2536 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2537
2538 fprintf (file, "---------------------------------------------------------\n");
2539 fprintf (file, fmt_str, "", " Number of ", "Memory");
2540 fprintf (file, fmt_str, "", " instances ", "used ");
2541 fprintf (file, "---------------------------------------------------------\n");
2542
2543 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2544 total += size;
2545 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2546 SIZE_AMOUNT (size));
2547
2548 num_edges = 0;
2549 FOR_EACH_BB_FN (bb, cfun)
2550 num_edges += EDGE_COUNT (bb->succs);
2551 size = num_edges * sizeof (class edge_def);
2552 total += size;
2553 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2554
2555 fprintf (file, "---------------------------------------------------------\n");
2556 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2557 SIZE_AMOUNT (total));
2558 fprintf (file, "---------------------------------------------------------\n");
2559 fprintf (file, "\n");
2560
2561 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2562 max_num_merged_labels = cfg_stats.num_merged_labels;
2563
2564 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2565 cfg_stats.num_merged_labels, max_num_merged_labels);
2566
2567 fprintf (file, "\n");
2568 }
2569
2570
2571 /* Dump CFG statistics on stderr. Keep extern so that it's always
2572 linked in the final executable. */
2573
2574 DEBUG_FUNCTION void
2575 debug_cfg_stats (void)
2576 {
2577 dump_cfg_stats (stderr);
2578 }
2579
2580 /*---------------------------------------------------------------------------
2581 Miscellaneous helpers
2582 ---------------------------------------------------------------------------*/
2583
2584 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2585 flow. Transfers of control flow associated with EH are excluded. */
2586
2587 static bool
2588 call_can_make_abnormal_goto (gimple *t)
2589 {
2590 /* If the function has no non-local labels, then a call cannot make an
2591 abnormal transfer of control. */
2592 if (!cfun->has_nonlocal_label
2593 && !cfun->calls_setjmp)
2594 return false;
2595
2596 /* Likewise if the call has no side effects. */
2597 if (!gimple_has_side_effects (t))
2598 return false;
2599
2600 /* Likewise if the called function is leaf. */
2601 if (gimple_call_flags (t) & ECF_LEAF)
2602 return false;
2603
2604 return true;
2605 }
2606
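/* Illustrative example: in a function that calls setjmp (),
   cfun->calls_setjmp is set, so an ordinary call like  foo ()  (one
   with side effects and without the ECF_LEAF flag) is conservatively
   assumed to be able to return abnormally via a longjmp back to the
   setjmp, and gets an abnormal outgoing edge.  */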
2607
2608 /* Return true if T can make an abnormal transfer of control flow.
2609 Transfers of control flow associated with EH are excluded. */
2610
2611 bool
2612 stmt_can_make_abnormal_goto (gimple *t)
2613 {
2614 if (computed_goto_p (t))
2615 return true;
2616 if (is_gimple_call (t))
2617 return call_can_make_abnormal_goto (t);
2618 return false;
2619 }
2620
2621
2622 /* Return true if T represents a stmt that always transfers control. */
2623
2624 bool
2625 is_ctrl_stmt (gimple *t)
2626 {
2627 switch (gimple_code (t))
2628 {
2629 case GIMPLE_COND:
2630 case GIMPLE_SWITCH:
2631 case GIMPLE_GOTO:
2632 case GIMPLE_RETURN:
2633 case GIMPLE_RESX:
2634 return true;
2635 default:
2636 return false;
2637 }
2638 }
2639
2640
2641 /* Return true if T is a statement that may alter the flow of control
2642 (e.g., a call to a non-returning function). */
2643
2644 bool
2645 is_ctrl_altering_stmt (gimple *t)
2646 {
2647 gcc_assert (t);
2648
2649 switch (gimple_code (t))
2650 {
2651 case GIMPLE_CALL:
2652 /* The per-stmt call flag indicates whether the call could alter
2653 control flow. */
2654 if (gimple_call_ctrl_altering_p (t))
2655 return true;
2656 break;
2657
2658 case GIMPLE_EH_DISPATCH:
2659 /* EH_DISPATCH branches to the individual catch handlers at
2660 this level of a try or allowed-exceptions region. It can
2661 fallthru to the next statement as well. */
2662 return true;
2663
2664 case GIMPLE_ASM:
2665 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2666 return true;
2667 break;
2668
2669 CASE_GIMPLE_OMP:
2670 /* OpenMP directives alter control flow. */
2671 return true;
2672
2673 case GIMPLE_TRANSACTION:
2674 /* A transaction start alters control flow. */
2675 return true;
2676
2677 default:
2678 break;
2679 }
2680
2681 /* If a statement can throw, it alters control flow. */
2682 return stmt_can_throw_internal (cfun, t);
2683 }
2684
2685
2686 /* Return true if T is a simple local goto. */
2687
2688 bool
2689 simple_goto_p (gimple *t)
2690 {
2691 return (gimple_code (t) == GIMPLE_GOTO
2692 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2693 }
2694
2695
2696 /* Return true if STMT should start a new basic block. PREV_STMT is
2697 the statement preceding STMT. It is used when STMT is a label or a
2698 case label. Labels should only start a new basic block if their
2699 previous statement wasn't a label. Otherwise, a sequence of labels
2700 would generate unnecessary basic blocks that only contain a single
2701 label. */
2702
2703 static inline bool
2704 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2705 {
2706 if (stmt == NULL)
2707 return false;
2708
2709 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2710 any nondebug stmts in the block. We don't want to start another
2711 block in this case: the debug stmt will already have started the
2712 one STMT would start if we weren't outputting debug stmts. */
2713 if (prev_stmt && is_gimple_debug (prev_stmt))
2714 return false;
2715
2716 /* Labels start a new basic block only if the preceding statement
2717 wasn't a label of the same type. This prevents the creation of
2718 consecutive blocks that have nothing but a single label. */
2719 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2720 {
2721 /* Nonlocal and computed GOTO targets always start a new block. */
2722 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2723 || FORCED_LABEL (gimple_label_label (label_stmt)))
2724 return true;
2725
2726 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2727 {
2728 if (DECL_NONLOCAL (gimple_label_label (plabel))
2729 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2730 return true;
2731
2732 cfg_stats.num_merged_labels++;
2733 return false;
2734 }
2735 else
2736 return true;
2737 }
2738 else if (gimple_code (stmt) == GIMPLE_CALL)
2739 {
2740 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2741 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2742 start a new block. */
2743 return true;
2744 if (gimple_call_internal_p (stmt, IFN_PHI)
2745 && prev_stmt
2746 && gimple_code (prev_stmt) != GIMPLE_LABEL
2747 && (gimple_code (prev_stmt) != GIMPLE_CALL
2748 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2749 /* PHI nodes start a new block unless preceded by a label
2750 or another PHI. */
2751 return true;
2752 }
2753
2754 return false;
2755 }
2756
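/* For illustration: in the gimplifier output

	<D.1234>:
	<D.1235>:
	x = 1;

   the second label is artificial and not nonlocal, so it does not
   start a new block; both labels land in one block and
   cfg_stats.num_merged_labels is incremented.  */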
2757
2758 /* Return true if T should end a basic block. */
2759
2760 bool
2761 stmt_ends_bb_p (gimple *t)
2762 {
2763 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2764 }
2765
2766 /* Remove block annotations and other data structures. */
2767
2768 void
2769 delete_tree_cfg_annotations (struct function *fn)
2770 {
2771 vec_free (label_to_block_map_for_fn (fn));
2772 }
2773
2774 /* Return the virtual phi in BB. */
2775
2776 gphi *
2777 get_virtual_phi (basic_block bb)
2778 {
2779 for (gphi_iterator gsi = gsi_start_phis (bb);
2780 !gsi_end_p (gsi);
2781 gsi_next (&gsi))
2782 {
2783 gphi *phi = gsi.phi ();
2784
2785 if (virtual_operand_p (PHI_RESULT (phi)))
2786 return phi;
2787 }
2788
2789 return NULL;
2790 }
2791
2792 /* Return the first statement in basic block BB. */
2793
2794 gimple *
2795 first_stmt (basic_block bb)
2796 {
2797 gimple_stmt_iterator i = gsi_start_bb (bb);
2798 gimple *stmt = NULL;
2799
2800 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2801 {
2802 gsi_next (&i);
2803 stmt = NULL;
2804 }
2805 return stmt;
2806 }
2807
2808 /* Return the first non-label statement in basic block BB. */
2809
2810 static gimple *
2811 first_non_label_stmt (basic_block bb)
2812 {
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817 }
2818
2819 /* Return the last statement in basic block BB. */
2820
2821 gimple *
2822 last_stmt (basic_block bb)
2823 {
2824 gimple_stmt_iterator i = gsi_last_bb (bb);
2825 gimple *stmt = NULL;
2826
2827 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2828 {
2829 gsi_prev (&i);
2830 stmt = NULL;
2831 }
2832 return stmt;
2833 }
2834
2835 /* Return the last statement of an otherwise empty block. Return NULL
2836 if the block is totally empty, or if it contains more than one
2837 statement. */
2838
2839 gimple *
2840 last_and_only_stmt (basic_block bb)
2841 {
2842 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2843 gimple *last, *prev;
2844
2845 if (gsi_end_p (i))
2846 return NULL;
2847
2848 last = gsi_stmt (i);
2849 gsi_prev_nondebug (&i);
2850 if (gsi_end_p (i))
2851 return last;
2852
2853 /* Empty statements should no longer appear in the instruction stream.
2854 Everything that might have appeared before should be deleted by
2855 remove_useless_stmts, and the optimizers should just gsi_remove
2856 instead of smashing with build_empty_stmt.
2857
2858 Thus the only thing that should appear here in a block containing
2859 one executable statement is a label. */
2860 prev = gsi_stmt (i);
2861 if (gimple_code (prev) == GIMPLE_LABEL)
2862 return last;
2863 else
2864 return NULL;
2865 }
2866
2867 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2868
2869 static void
2870 reinstall_phi_args (edge new_edge, edge old_edge)
2871 {
2872 edge_var_map *vm;
2873 int i;
2874 gphi_iterator phis;
2875
2876 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2877 if (!v)
2878 return;
2879
2880 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2881 v->iterate (i, &vm) && !gsi_end_p (phis);
2882 i++, gsi_next (&phis))
2883 {
2884 gphi *phi = phis.phi ();
2885 tree result = redirect_edge_var_map_result (vm);
2886 tree arg = redirect_edge_var_map_def (vm);
2887
2888 gcc_assert (result == gimple_phi_result (phi));
2889
2890 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2891 }
2892
2893 redirect_edge_var_map_clear (old_edge);
2894 }
2895
2896 /* Returns the basic block after which the new basic block created
2897 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2898 near its "logical" location. This is of most help to humans looking
2899 at debugging dumps. */
2900
2901 basic_block
2902 split_edge_bb_loc (edge edge_in)
2903 {
2904 basic_block dest = edge_in->dest;
2905 basic_block dest_prev = dest->prev_bb;
2906
2907 if (dest_prev)
2908 {
2909 edge e = find_edge (dest_prev, dest);
2910 if (e && !(e->flags & EDGE_COMPLEX))
2911 return edge_in->src;
2912 }
2913 return dest_prev;
2914 }
2915
2916 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2917 Abort on abnormal edges. */
2918
2919 static basic_block
2920 gimple_split_edge (edge edge_in)
2921 {
2922 basic_block new_bb, after_bb, dest;
2923 edge new_edge, e;
2924
2925 /* Abnormal edges cannot be split. */
2926 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2927
2928 dest = edge_in->dest;
2929
2930 after_bb = split_edge_bb_loc (edge_in);
2931
2932 new_bb = create_empty_bb (after_bb);
2933 new_bb->count = edge_in->count ();
2934
2935 e = redirect_edge_and_branch (edge_in, new_bb);
2936 gcc_assert (e == edge_in);
2937
2938 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2939 reinstall_phi_args (new_edge, e);
2940
2941 return new_bb;
2942 }
2943
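/* Illustrative picture: splitting the critical edge bb1->bb3 in

	bb1 -> bb3		bb1 -> new_bb -> bb3
	bb1 -> bb2	 =>	bb1 -> bb2
	bb2 -> bb3		bb2 -> bb3

   inserts an empty new_bb on that edge only, giving edge insertions
   and PHI argument copies a private landing spot; callers normally
   reach this through the generic split_edge () hook.  */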
2944
2945 /* Verify properties of the address expression T whose base should be
2946 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2947
2948 static bool
2949 verify_address (tree t, bool verify_addressable)
2950 {
2951 bool old_constant;
2952 bool old_side_effects;
2953 bool new_constant;
2954 bool new_side_effects;
2955
2956 old_constant = TREE_CONSTANT (t);
2957 old_side_effects = TREE_SIDE_EFFECTS (t);
2958
2959 recompute_tree_invariant_for_addr_expr (t);
2960 new_side_effects = TREE_SIDE_EFFECTS (t);
2961 new_constant = TREE_CONSTANT (t);
2962
2963 if (old_constant != new_constant)
2964 {
2965 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2966 return true;
2967 }
2968 if (old_side_effects != new_side_effects)
2969 {
2970 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2971 return true;
2972 }
2973
2974 tree base = TREE_OPERAND (t, 0);
2975 while (handled_component_p (base))
2976 base = TREE_OPERAND (base, 0);
2977
2978 if (!(VAR_P (base)
2979 || TREE_CODE (base) == PARM_DECL
2980 || TREE_CODE (base) == RESULT_DECL))
2981 return false;
2982
2983 if (DECL_GIMPLE_REG_P (base))
2984 {
2985 error ("%<DECL_GIMPLE_REG_P%> set on a variable with address taken");
2986 return true;
2987 }
2988
2989 if (verify_addressable && !TREE_ADDRESSABLE (base))
2990 {
2991 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
2992 return true;
2993 }
2994
2995 return false;
2996 }
2997
2998
2999 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3000 Returns true if there is an error, otherwise false. */
3001
3002 static bool
3003 verify_types_in_gimple_min_lval (tree expr)
3004 {
3005 tree op;
3006
3007 if (is_gimple_id (expr))
3008 return false;
3009
3010 if (TREE_CODE (expr) != TARGET_MEM_REF
3011 && TREE_CODE (expr) != MEM_REF)
3012 {
3013 error ("invalid expression for min lvalue");
3014 return true;
3015 }
3016
3017 /* TARGET_MEM_REFs are strange beasts. */
3018 if (TREE_CODE (expr) == TARGET_MEM_REF)
3019 return false;
3020
3021 op = TREE_OPERAND (expr, 0);
3022 if (!is_gimple_val (op))
3023 {
3024 error ("invalid operand in indirect reference");
3025 debug_generic_stmt (op);
3026 return true;
3027 }
3028 /* Memory references now generally can involve a value conversion. */
3029
3030 return false;
3031 }
3032
3033 /* Verify if EXPR is a valid GIMPLE reference expression. If
3034 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3035 if there is an error, otherwise false. */
3036
3037 static bool
3038 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3039 {
3040 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3041
3042 if (TREE_CODE (expr) == REALPART_EXPR
3043 || TREE_CODE (expr) == IMAGPART_EXPR
3044 || TREE_CODE (expr) == BIT_FIELD_REF)
3045 {
3046 tree op = TREE_OPERAND (expr, 0);
3047 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3048 {
3049 error ("non-scalar %qs", code_name);
3050 return true;
3051 }
3052
3053 if (TREE_CODE (expr) == BIT_FIELD_REF)
3054 {
3055 tree t1 = TREE_OPERAND (expr, 1);
3056 tree t2 = TREE_OPERAND (expr, 2);
3057 poly_uint64 size, bitpos;
3058 if (!poly_int_tree_p (t1, &size)
3059 || !poly_int_tree_p (t2, &bitpos)
3060 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3061 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3062 {
3063 error ("invalid position or size operand to %qs", code_name);
3064 return true;
3065 }
3066 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3067 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3068 {
3069 error ("integral result type precision does not match "
3070 "field size of %qs", code_name);
3071 return true;
3072 }
3073 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3074 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3075 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3076 size))
3077 {
3078 error ("mode size of non-integral result does not "
3079 "match field size of %qs",
3080 code_name);
3081 return true;
3082 }
3083 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3084 && !type_has_mode_precision_p (TREE_TYPE (op)))
3085 {
3086 error ("%qs of non-mode-precision operand", code_name);
3087 return true;
3088 }
3089 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3090 && maybe_gt (size + bitpos,
3091 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3092 {
3093 error ("position plus size exceeds size of referenced object in "
3094 "%qs", code_name);
3095 return true;
3096 }
3097 }
3098
3099 if ((TREE_CODE (expr) == REALPART_EXPR
3100 || TREE_CODE (expr) == IMAGPART_EXPR)
3101 && !useless_type_conversion_p (TREE_TYPE (expr),
3102 TREE_TYPE (TREE_TYPE (op))))
3103 {
3104 error ("type mismatch in %qs reference", code_name);
3105 debug_generic_stmt (TREE_TYPE (expr));
3106 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3107 return true;
3108 }
3109 expr = op;
3110 }
3111
3112 while (handled_component_p (expr))
3113 {
3114 code_name = get_tree_code_name (TREE_CODE (expr));
3115
3116 if (TREE_CODE (expr) == REALPART_EXPR
3117 || TREE_CODE (expr) == IMAGPART_EXPR
3118 || TREE_CODE (expr) == BIT_FIELD_REF)
3119 {
3120 error ("non-top-level %qs", code_name);
3121 return true;
3122 }
3123
3124 tree op = TREE_OPERAND (expr, 0);
3125
3126 if (TREE_CODE (expr) == ARRAY_REF
3127 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3128 {
3129 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3130 || (TREE_OPERAND (expr, 2)
3131 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3132 || (TREE_OPERAND (expr, 3)
3133 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3134 {
3135 error ("invalid operands to %qs", code_name);
3136 debug_generic_stmt (expr);
3137 return true;
3138 }
3139 }
3140
3141 /* Verify if the reference array element types are compatible. */
3142 if (TREE_CODE (expr) == ARRAY_REF
3143 && !useless_type_conversion_p (TREE_TYPE (expr),
3144 TREE_TYPE (TREE_TYPE (op))))
3145 {
3146 error ("type mismatch in %qs", code_name);
3147 debug_generic_stmt (TREE_TYPE (expr));
3148 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3149 return true;
3150 }
3151 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3152 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3153 TREE_TYPE (TREE_TYPE (op))))
3154 {
3155 error ("type mismatch in %qs", code_name);
3156 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3157 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3158 return true;
3159 }
3160
3161 if (TREE_CODE (expr) == COMPONENT_REF)
3162 {
3163 if (TREE_OPERAND (expr, 2)
3164 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3165 {
3166 error ("invalid %qs offset operator", code_name);
3167 return true;
3168 }
3169 if (!useless_type_conversion_p (TREE_TYPE (expr),
3170 TREE_TYPE (TREE_OPERAND (expr, 1))))
3171 {
3172 error ("type mismatch in %qs", code_name);
3173 debug_generic_stmt (TREE_TYPE (expr));
3174 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3175 return true;
3176 }
3177 }
3178
3179 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3180 {
3181 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3182 that their operand is not an SSA name or an invariant when
3183 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3184 bug). Otherwise there is nothing to verify, gross mismatches at
3185 most invoke undefined behavior. */
3186 if (require_lvalue
3187 && (TREE_CODE (op) == SSA_NAME
3188 || is_gimple_min_invariant (op)))
3189 {
3190 error ("conversion of %qs on the left hand side of %qs",
3191 get_tree_code_name (TREE_CODE (op)), code_name);
3192 debug_generic_stmt (expr);
3193 return true;
3194 }
3195 else if (TREE_CODE (op) == SSA_NAME
3196 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3197 {
3198 error ("conversion of register to a different size in %qs",
3199 code_name);
3200 debug_generic_stmt (expr);
3201 return true;
3202 }
3203 else if (!handled_component_p (op))
3204 return false;
3205 }
3206
3207 expr = op;
3208 }
3209
3210 code_name = get_tree_code_name (TREE_CODE (expr));
3211
3212 if (TREE_CODE (expr) == MEM_REF)
3213 {
3214 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3215 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3216 && verify_address (TREE_OPERAND (expr, 0), false)))
3217 {
3218 error ("invalid address operand in %qs", code_name);
3219 debug_generic_stmt (expr);
3220 return true;
3221 }
3222 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3223 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3224 {
3225 error ("invalid offset operand in %qs", code_name);
3226 debug_generic_stmt (expr);
3227 return true;
3228 }
3229 }
3230 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3231 {
3232 if (!TMR_BASE (expr)
3233 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3234 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3235 && verify_address (TMR_BASE (expr), false)))
3236 {
3237 error ("invalid address operand in %qs", code_name);
3238 return true;
3239 }
3240 if (!TMR_OFFSET (expr)
3241 || !poly_int_tree_p (TMR_OFFSET (expr))
3242 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3243 {
3244 error ("invalid offset operand in %qs", code_name);
3245 debug_generic_stmt (expr);
3246 return true;
3247 }
3248 }
3249 else if (TREE_CODE (expr) == INDIRECT_REF)
3250 {
3251 error ("%qs in gimple IL", code_name);
3252 debug_generic_stmt (expr);
3253 return true;
3254 }
3255
3256 return ((require_lvalue || !is_gimple_min_invariant (expr))
3257 && verify_types_in_gimple_min_lval (expr));
3258 }
3259
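/* E.g. (illustrative) BIT_FIELD_REF <x, 8, 16>, which reads 8 bits at
   bit offset 16, must have a result type of exactly 8 bits precision
   (or an 8-bit mode for non-integral types), and 8 + 16 must not run
   past TYPE_SIZE of x's type for non-aggregate x.  */
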
3260 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3261 list of pointer-to types that is trivially convertible to DEST. */
3262
3263 static bool
3264 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3265 {
3266 tree src;
3267
3268 if (!TYPE_POINTER_TO (src_obj))
3269 return true;
3270
3271 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3272 if (useless_type_conversion_p (dest, src))
3273 return true;
3274
3275 return false;
3276 }
3277
3278 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3279 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3280
3281 static bool
3282 valid_fixed_convert_types_p (tree type1, tree type2)
3283 {
3284 return (FIXED_POINT_TYPE_P (type1)
3285 && (INTEGRAL_TYPE_P (type2)
3286 || SCALAR_FLOAT_TYPE_P (type2)
3287 || FIXED_POINT_TYPE_P (type2)));
3288 }
3289
3290 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3291 is a problem, otherwise false. */
3292
3293 static bool
3294 verify_gimple_call (gcall *stmt)
3295 {
3296 tree fn = gimple_call_fn (stmt);
3297 tree fntype, fndecl;
3298 unsigned i;
3299
3300 if (gimple_call_internal_p (stmt))
3301 {
3302 if (fn)
3303 {
3304 error ("gimple call has two targets");
3305 debug_generic_stmt (fn);
3306 return true;
3307 }
3308 }
3309 else
3310 {
3311 if (!fn)
3312 {
3313 error ("gimple call has no target");
3314 return true;
3315 }
3316 }
3317
3318 if (fn && !is_gimple_call_addr (fn))
3319 {
3320 error ("invalid function in gimple call");
3321 debug_generic_stmt (fn);
3322 return true;
3323 }
3324
3325 if (fn
3326 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3327 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3328 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3329 {
3330 error ("non-function in gimple call");
3331 return true;
3332 }
3333
3334 fndecl = gimple_call_fndecl (stmt);
3335 if (fndecl
3336 && TREE_CODE (fndecl) == FUNCTION_DECL
3337 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3338 && !DECL_PURE_P (fndecl)
3339 && !TREE_READONLY (fndecl))
3340 {
3341 error ("invalid pure const state for function");
3342 return true;
3343 }
3344
3345 tree lhs = gimple_call_lhs (stmt);
3346 if (lhs
3347 && (!is_gimple_lvalue (lhs)
3348 || verify_types_in_gimple_reference (lhs, true)))
3349 {
3350 error ("invalid LHS in gimple call");
3351 return true;
3352 }
3353
3354 if (gimple_call_ctrl_altering_p (stmt)
3355 && gimple_call_noreturn_p (stmt)
3356 && should_remove_lhs_p (lhs))
3357 {
3358 error ("LHS in %<noreturn%> call");
3359 return true;
3360 }
3361
3362 fntype = gimple_call_fntype (stmt);
3363 if (fntype
3364 && lhs
3365 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3366 /* ??? At least C++ misses conversions at assignments from
3367 void * call results.
3368 For now simply allow arbitrary pointer type conversions. */
3369 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3370 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3371 {
3372 error ("invalid conversion in gimple call");
3373 debug_generic_stmt (TREE_TYPE (lhs));
3374 debug_generic_stmt (TREE_TYPE (fntype));
3375 return true;
3376 }
3377
3378 if (gimple_call_chain (stmt)
3379 && !is_gimple_val (gimple_call_chain (stmt)))
3380 {
3381 error ("invalid static chain in gimple call");
3382 debug_generic_stmt (gimple_call_chain (stmt));
3383 return true;
3384 }
3385
3386 /* If there is a static chain argument, the call should either be
3387 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3388 if (gimple_call_chain (stmt)
3389 && fndecl
3390 && !DECL_STATIC_CHAIN (fndecl))
3391 {
3392 error ("static chain with function that doesn%'t use one");
3393 return true;
3394 }
3395
3396 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3397 {
3398 switch (DECL_FUNCTION_CODE (fndecl))
3399 {
3400 case BUILT_IN_UNREACHABLE:
3401 case BUILT_IN_TRAP:
3402 if (gimple_call_num_args (stmt) > 0)
3403 {
3404 /* Built-in unreachable with parameters might not be caught by
3405 undefined behavior sanitizer. Front ends check that users do not
3406 call them that way, but we also produce calls to
3407 __builtin_unreachable internally, for example when IPA figures
3408 out a call cannot happen in a legal program. In such cases,
3409 we must make sure arguments are stripped off. */
3410 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3411 "with arguments");
3412 return true;
3413 }
3414 break;
3415 default:
3416 break;
3417 }
3418 }
3419
3420 /* ??? The C frontend passes unpromoted arguments in case it
3421 didn't see a function declaration before the call. So for now
3422 leave the call arguments mostly unverified. Once we gimplify
3423 unit-at-a-time we have a chance to fix this. */
3424
3425 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3426 {
3427 tree arg = gimple_call_arg (stmt, i);
3428 if ((is_gimple_reg_type (TREE_TYPE (arg))
3429 && !is_gimple_val (arg))
3430 || (!is_gimple_reg_type (TREE_TYPE (arg))
3431 && !is_gimple_lvalue (arg)))
3432 {
3433 error ("invalid argument to gimple call");
3434 debug_generic_expr (arg);
3435 return true;
3436 }
3437 }
3438
3439 return false;
3440 }
3441
3442 /* Verifies the gimple comparison with the result type TYPE and
3443 the operands OP0 and OP1, comparison code is CODE. */
3444
3445 static bool
3446 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3447 {
3448 tree op0_type = TREE_TYPE (op0);
3449 tree op1_type = TREE_TYPE (op1);
3450
3451 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3452 {
3453 error ("invalid operands in gimple comparison");
3454 return true;
3455 }
3456
3457 /* For comparisons we do not have the operation's type as the
3458 effective type the comparison is carried out in. Instead
3459 we require that either the first operand is trivially
3460 convertible into the second, or the other way around.
3461 Because we special-case pointers to void we allow
3462 comparisons of pointers with the same mode as well. */
3463 if (!useless_type_conversion_p (op0_type, op1_type)
3464 && !useless_type_conversion_p (op1_type, op0_type)
3465 && (!POINTER_TYPE_P (op0_type)
3466 || !POINTER_TYPE_P (op1_type)
3467 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3468 {
3469 error ("mismatching comparison operand types");
3470 debug_generic_expr (op0_type);
3471 debug_generic_expr (op1_type);
3472 return true;
3473 }
3474
3475 /* The resulting type of a comparison may be an effective boolean type. */
3476 if (INTEGRAL_TYPE_P (type)
3477 && (TREE_CODE (type) == BOOLEAN_TYPE
3478 || TYPE_PRECISION (type) == 1))
3479 {
3480 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3481 || TREE_CODE (op1_type) == VECTOR_TYPE)
3482 && code != EQ_EXPR && code != NE_EXPR
3483 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3484 && !VECTOR_INTEGER_TYPE_P (op0_type))
3485 {
3486 error ("unsupported operation or type for vector comparison"
3487 " returning a boolean");
3488 debug_generic_expr (op0_type);
3489 debug_generic_expr (op1_type);
3490 return true;
3491 }
3492 }
3493 /* Or a boolean vector type with the same element count
3494 as the comparison operand types. */
3495 else if (TREE_CODE (type) == VECTOR_TYPE
3496 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3497 {
3498 if (TREE_CODE (op0_type) != VECTOR_TYPE
3499 || TREE_CODE (op1_type) != VECTOR_TYPE)
3500 {
3501 error ("non-vector operands in vector comparison");
3502 debug_generic_expr (op0_type);
3503 debug_generic_expr (op1_type);
3504 return true;
3505 }
3506
3507 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3508 TYPE_VECTOR_SUBPARTS (op0_type)))
3509 {
3510 error ("invalid vector comparison resulting type");
3511 debug_generic_expr (type);
3512 return true;
3513 }
3514 }
3515 else
3516 {
3517 error ("bogus comparison result type");
3518 debug_generic_expr (type);
3519 return true;
3520 }
3521
3522 return false;
3523 }
3524
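/* E.g. (illustrative)  _1 = a_2 < b_3  with two int operands and a
   _Bool (or 1-bit integral) LHS is fine; if the LHS is a boolean
   vector, both operands must be vectors with the same number of
   elements as the result.  */
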
3525 /* Verify a gimple assignment statement STMT with an unary rhs.
3526 Returns true if anything is wrong. */
3527
3528 static bool
3529 verify_gimple_assign_unary (gassign *stmt)
3530 {
3531 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3532 tree lhs = gimple_assign_lhs (stmt);
3533 tree lhs_type = TREE_TYPE (lhs);
3534 tree rhs1 = gimple_assign_rhs1 (stmt);
3535 tree rhs1_type = TREE_TYPE (rhs1);
3536
3537 if (!is_gimple_reg (lhs))
3538 {
3539 error ("non-register as LHS of unary operation");
3540 return true;
3541 }
3542
3543 if (!is_gimple_val (rhs1))
3544 {
3545 error ("invalid operand in unary operation");
3546 return true;
3547 }
3548
3549 const char* const code_name = get_tree_code_name (rhs_code);
3550
3551 /* First handle conversions. */
3552 switch (rhs_code)
3553 {
3554 CASE_CONVERT:
3555 {
3556 /* Allow conversions between vectors with the same number of elements,
3557 provided that the conversion is OK for the element types too. */
3558 if (VECTOR_TYPE_P (lhs_type)
3559 && VECTOR_TYPE_P (rhs1_type)
3560 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3561 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3562 {
3563 lhs_type = TREE_TYPE (lhs_type);
3564 rhs1_type = TREE_TYPE (rhs1_type);
3565 }
3566 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3567 {
3568 error ("invalid vector types in nop conversion");
3569 debug_generic_expr (lhs_type);
3570 debug_generic_expr (rhs1_type);
3571 return true;
3572 }
3573
3574 /* Allow conversions from pointer type to integral type only if
3575 there is no sign or zero extension involved.
3576 For targets where the precision of ptrofftype doesn't match that
3577 of pointers we need to allow arbitrary conversions to ptrofftype. */
3578 if ((POINTER_TYPE_P (lhs_type)
3579 && INTEGRAL_TYPE_P (rhs1_type))
3580 || (POINTER_TYPE_P (rhs1_type)
3581 && INTEGRAL_TYPE_P (lhs_type)
3582 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3583 || ptrofftype_p (lhs_type))))
3584 return false;
3585
3586 /* Allow conversion from integral to offset type and vice versa. */
3587 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3588 && INTEGRAL_TYPE_P (rhs1_type))
3589 || (INTEGRAL_TYPE_P (lhs_type)
3590 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3591 return false;
3592
3593 /* Otherwise assert we are converting between types of the
3594 same kind. */
3595 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3596 {
3597 error ("invalid types in nop conversion");
3598 debug_generic_expr (lhs_type);
3599 debug_generic_expr (rhs1_type);
3600 return true;
3601 }
3602
3603 return false;
3604 }
3605
3606 case ADDR_SPACE_CONVERT_EXPR:
3607 {
3608 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3609 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3610 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3611 {
3612 error ("invalid types in address space conversion");
3613 debug_generic_expr (lhs_type);
3614 debug_generic_expr (rhs1_type);
3615 return true;
3616 }
3617
3618 return false;
3619 }
3620
3621 case FIXED_CONVERT_EXPR:
3622 {
3623 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3624 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3625 {
3626 error ("invalid types in fixed-point conversion");
3627 debug_generic_expr (lhs_type);
3628 debug_generic_expr (rhs1_type);
3629 return true;
3630 }
3631
3632 return false;
3633 }
3634
3635 case FLOAT_EXPR:
3636 {
3637 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3638 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3639 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3640 {
3641 error ("invalid types in conversion to floating-point");
3642 debug_generic_expr (lhs_type);
3643 debug_generic_expr (rhs1_type);
3644 return true;
3645 }
3646
3647 return false;
3648 }
3649
3650 case FIX_TRUNC_EXPR:
3651 {
3652 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3653 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3654 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3655 {
3656 error ("invalid types in conversion to integer");
3657 debug_generic_expr (lhs_type);
3658 debug_generic_expr (rhs1_type);
3659 return true;
3660 }
3661
3662 return false;
3663 }
3664
3665 case VEC_UNPACK_HI_EXPR:
3666 case VEC_UNPACK_LO_EXPR:
3667 case VEC_UNPACK_FLOAT_HI_EXPR:
3668 case VEC_UNPACK_FLOAT_LO_EXPR:
3669 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3670 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3671 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3672 || TREE_CODE (lhs_type) != VECTOR_TYPE
3673 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3674 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3675 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3676 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3677 || ((rhs_code == VEC_UNPACK_HI_EXPR
3678 || rhs_code == VEC_UNPACK_LO_EXPR)
3679 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3680 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3681 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3682 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3683 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3684 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3685 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3686 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3687 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3688 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3689 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3690 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3691 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3692 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3693 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3694 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3695 {
3696 error ("type mismatch in %qs expression", code_name);
3697 debug_generic_expr (lhs_type);
3698 debug_generic_expr (rhs1_type);
3699 return true;
3700 }
3701
3702 return false;
3703
3704 case NEGATE_EXPR:
3705 case ABS_EXPR:
3706 case BIT_NOT_EXPR:
3707 case PAREN_EXPR:
3708 case CONJ_EXPR:
3709 break;
3710
3711 case ABSU_EXPR:
3712 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3713 || !TYPE_UNSIGNED (lhs_type)
3714 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3715 || TYPE_UNSIGNED (rhs1_type)
3716 || element_precision (lhs_type) != element_precision (rhs1_type))
3717 {
3718 error ("invalid types for %qs", code_name);
3719 debug_generic_expr (lhs_type);
3720 debug_generic_expr (rhs1_type);
3721 return true;
3722 }
3723 return false;
3724
3725 case VEC_DUPLICATE_EXPR:
3726 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3727 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3728 {
3729 error ("%qs should be from a scalar to a like vector", code_name);
3730 debug_generic_expr (lhs_type);
3731 debug_generic_expr (rhs1_type);
3732 return true;
3733 }
3734 return false;
3735
3736 default:
3737 gcc_unreachable ();
3738 }
3739
3740 /* For the remaining codes assert there is no conversion involved. */
3741 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3742 {
3743 error ("non-trivial conversion in unary operation");
3744 debug_generic_expr (lhs_type);
3745 debug_generic_expr (rhs1_type);
3746 return true;
3747 }
3748
3749 return false;
3750 }
3751
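/* E.g. (illustrative) for CASE_CONVERT, (long) ptr_1 is accepted on a
   target with 64-bit pointers since no extension is involved, while
   (__int128) ptr_1 would be rejected unless __int128 were ptrofftype;
   vector conversions additionally require equal element counts.  */
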
3752 /* Verify a gimple assignment statement STMT with a binary rhs.
3753 Returns true if anything is wrong. */
3754
3755 static bool
3756 verify_gimple_assign_binary (gassign *stmt)
3757 {
3758 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3759 tree lhs = gimple_assign_lhs (stmt);
3760 tree lhs_type = TREE_TYPE (lhs);
3761 tree rhs1 = gimple_assign_rhs1 (stmt);
3762 tree rhs1_type = TREE_TYPE (rhs1);
3763 tree rhs2 = gimple_assign_rhs2 (stmt);
3764 tree rhs2_type = TREE_TYPE (rhs2);
3765
3766 if (!is_gimple_reg (lhs))
3767 {
3768 error ("non-register as LHS of binary operation");
3769 return true;
3770 }
3771
3772 if (!is_gimple_val (rhs1)
3773 || !is_gimple_val (rhs2))
3774 {
3775 error ("invalid operands in binary operation");
3776 return true;
3777 }
3778
3779 const char* const code_name = get_tree_code_name (rhs_code);
3780
3781 /* First handle operations that involve different types. */
3782 switch (rhs_code)
3783 {
3784 case COMPLEX_EXPR:
3785 {
3786 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3787 || !(INTEGRAL_TYPE_P (rhs1_type)
3788 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3789 || !(INTEGRAL_TYPE_P (rhs2_type)
3790 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3791 {
3792 error ("type mismatch in %qs", code_name);
3793 debug_generic_expr (lhs_type);
3794 debug_generic_expr (rhs1_type);
3795 debug_generic_expr (rhs2_type);
3796 return true;
3797 }
3798
3799 return false;
3800 }
3801
3802 case LSHIFT_EXPR:
3803 case RSHIFT_EXPR:
3804 case LROTATE_EXPR:
3805 case RROTATE_EXPR:
3806 {
3807 /* Shifts and rotates are ok on integral types, fixed point
3808 types and integer vector types. */
3809 if ((!INTEGRAL_TYPE_P (rhs1_type)
3810 && !FIXED_POINT_TYPE_P (rhs1_type)
3811 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3812 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3813 || (!INTEGRAL_TYPE_P (rhs2_type)
3814 /* Vector shifts of vectors are also ok. */
3815 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3816 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3817 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3818 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3819 || !useless_type_conversion_p (lhs_type, rhs1_type))
3820 {
3821 error ("type mismatch in %qs", code_name);
3822 debug_generic_expr (lhs_type);
3823 debug_generic_expr (rhs1_type);
3824 debug_generic_expr (rhs2_type);
3825 return true;
3826 }
3827
3828 return false;
3829 }
3830
3831 case WIDEN_LSHIFT_EXPR:
3832 {
3833 if (!INTEGRAL_TYPE_P (lhs_type)
3834 || !INTEGRAL_TYPE_P (rhs1_type)
3835 || TREE_CODE (rhs2) != INTEGER_CST
3836 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3837 {
3838 error ("type mismatch in %qs", code_name);
3839 debug_generic_expr (lhs_type);
3840 debug_generic_expr (rhs1_type);
3841 debug_generic_expr (rhs2_type);
3842 return true;
3843 }
3844
3845 return false;
3846 }
3847
3848 case VEC_WIDEN_LSHIFT_HI_EXPR:
3849 case VEC_WIDEN_LSHIFT_LO_EXPR:
3850 {
3851 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3852 || TREE_CODE (lhs_type) != VECTOR_TYPE
3853 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3854 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3855 || TREE_CODE (rhs2) != INTEGER_CST
3856 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3857 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3858 {
3859 error ("type mismatch in %qs", code_name);
3860 debug_generic_expr (lhs_type);
3861 debug_generic_expr (rhs1_type);
3862 debug_generic_expr (rhs2_type);
3863 return true;
3864 }
3865
3866 return false;
3867 }
3868
3869 case PLUS_EXPR:
3870 case MINUS_EXPR:
3871 {
3872 tree lhs_etype = lhs_type;
3873 tree rhs1_etype = rhs1_type;
3874 tree rhs2_etype = rhs2_type;
3875 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3876 {
3877 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3878 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3879 {
3880 error ("invalid non-vector operands to %qs", code_name);
3881 return true;
3882 }
3883 lhs_etype = TREE_TYPE (lhs_type);
3884 rhs1_etype = TREE_TYPE (rhs1_type);
3885 rhs2_etype = TREE_TYPE (rhs2_type);
3886 }
3887 if (POINTER_TYPE_P (lhs_etype)
3888 || POINTER_TYPE_P (rhs1_etype)
3889 || POINTER_TYPE_P (rhs2_etype))
3890 {
3891 error ("invalid (pointer) operands %qs", code_name);
3892 return true;
3893 }
3894
3895 /* Continue with generic binary expression handling. */
3896 break;
3897 }
3898
3899 case POINTER_PLUS_EXPR:
3900 {
3901 if (!POINTER_TYPE_P (rhs1_type)
3902 || !useless_type_conversion_p (lhs_type, rhs1_type)
3903 || !ptrofftype_p (rhs2_type))
3904 {
3905 error ("type mismatch in %qs", code_name);
3906 debug_generic_stmt (lhs_type);
3907 debug_generic_stmt (rhs1_type);
3908 debug_generic_stmt (rhs2_type);
3909 return true;
3910 }
3911
3912 return false;
3913 }
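
      /* A minimal sketch of what this accepts, assuming the usual
         gimplification of C pointer arithmetic (illustrative, not part
         of this file):

             int *f (int *p) { return p + 4; }

         becomes roughly (assuming 4-byte int)

             _1 = p_2(D) + 16;

         where rhs1 and the lhs share the pointer type and the byte
         offset 16 is a sizetype constant, so ptrofftype_p holds for
         rhs2.  */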
3914
3915 case POINTER_DIFF_EXPR:
3916 {
3917 if (!POINTER_TYPE_P (rhs1_type)
3918 || !POINTER_TYPE_P (rhs2_type)
3919 /* Because we special-case pointers to void we allow difference
3920 of arbitrary pointers with the same mode. */
3921 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3922 || TREE_CODE (lhs_type) != INTEGER_TYPE
3923 || TYPE_UNSIGNED (lhs_type)
3924 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3925 {
3926 error ("type mismatch in %qs", code_name);
3927 debug_generic_stmt (lhs_type);
3928 debug_generic_stmt (rhs1_type);
3929 debug_generic_stmt (rhs2_type);
3930 return true;
3931 }
3932
3933 return false;
3934 }
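
      /* Illustrative sketch (not from this file): for

             ptrdiff_t f (int *p, int *q) { return p - q; }

         the gimplifier emits roughly (assuming 4-byte int)

             _1 = p_2(D) - q_3(D);   <-- POINTER_DIFF_EXPR
             _4 = _1 /[ex] 4;

         so the raw difference is a signed integer with the precision
         of the pointers; the division by the element size is a
         separate EXACT_DIV_EXPR statement.  */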
3935
3936 case TRUTH_ANDIF_EXPR:
3937 case TRUTH_ORIF_EXPR:
3938 case TRUTH_AND_EXPR:
3939 case TRUTH_OR_EXPR:
3940 case TRUTH_XOR_EXPR:
3941
3942 gcc_unreachable ();
3943
3944 case LT_EXPR:
3945 case LE_EXPR:
3946 case GT_EXPR:
3947 case GE_EXPR:
3948 case EQ_EXPR:
3949 case NE_EXPR:
3950 case UNORDERED_EXPR:
3951 case ORDERED_EXPR:
3952 case UNLT_EXPR:
3953 case UNLE_EXPR:
3954 case UNGT_EXPR:
3955 case UNGE_EXPR:
3956 case UNEQ_EXPR:
3957 case LTGT_EXPR:
3958 /* Comparisons are also binary, but the result type is not
3959 connected to the operand types. */
3960 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3961
3962 case WIDEN_MULT_EXPR:
3963 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3964 return true;
3965 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3966 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3967
3968 case WIDEN_SUM_EXPR:
3969 {
3970 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3971 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3972 && ((!INTEGRAL_TYPE_P (rhs1_type)
3973 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3974 || (!INTEGRAL_TYPE_P (lhs_type)
3975 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3976 || !useless_type_conversion_p (lhs_type, rhs2_type)
3977 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3978 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3979 {
3980 error ("type mismatch in %qs", code_name);
3981 debug_generic_expr (lhs_type);
3982 debug_generic_expr (rhs1_type);
3983 debug_generic_expr (rhs2_type);
3984 return true;
3985 }
3986 return false;
3987 }
3988
3989 case VEC_WIDEN_MULT_HI_EXPR:
3990 case VEC_WIDEN_MULT_LO_EXPR:
3991 case VEC_WIDEN_MULT_EVEN_EXPR:
3992 case VEC_WIDEN_MULT_ODD_EXPR:
3993 {
3994 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3995 || TREE_CODE (lhs_type) != VECTOR_TYPE
3996 || !types_compatible_p (rhs1_type, rhs2_type)
3997 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3998 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3999 {
4000 error ("type mismatch in %qs", code_name);
4001 debug_generic_expr (lhs_type);
4002 debug_generic_expr (rhs1_type);
4003 debug_generic_expr (rhs2_type);
4004 return true;
4005 }
4006 return false;
4007 }
4008
4009 case VEC_PACK_TRUNC_EXPR:
4010 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4011 vector boolean types. */
4012 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4013 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4014 && types_compatible_p (rhs1_type, rhs2_type)
4015 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4016 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4017 return false;
4018
4019 /* Fallthru. */
4020 case VEC_PACK_SAT_EXPR:
4021 case VEC_PACK_FIX_TRUNC_EXPR:
4022 {
4023 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4024 || TREE_CODE (lhs_type) != VECTOR_TYPE
4025 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4026 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4027 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4028 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4029 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4030 || !types_compatible_p (rhs1_type, rhs2_type)
4031 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4032 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4033 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4034 TYPE_VECTOR_SUBPARTS (lhs_type)))
4035 {
4036 error ("type mismatch in %qs", code_name);
4037 debug_generic_expr (lhs_type);
4038 debug_generic_expr (rhs1_type);
4039 debug_generic_expr (rhs2_type);
4040 return true;
4041 }
4042
4043 return false;
4044 }
4045
4046 case VEC_PACK_FLOAT_EXPR:
4047 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4048 || TREE_CODE (lhs_type) != VECTOR_TYPE
4049 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4050 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4051 || !types_compatible_p (rhs1_type, rhs2_type)
4052 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4053 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4054 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4055 TYPE_VECTOR_SUBPARTS (lhs_type)))
4056 {
4057 error ("type mismatch in %qs", code_name);
4058 debug_generic_expr (lhs_type);
4059 debug_generic_expr (rhs1_type);
4060 debug_generic_expr (rhs2_type);
4061 return true;
4062 }
4063
4064 return false;
4065
4066 case MULT_EXPR:
4067 case MULT_HIGHPART_EXPR:
4068 case TRUNC_DIV_EXPR:
4069 case CEIL_DIV_EXPR:
4070 case FLOOR_DIV_EXPR:
4071 case ROUND_DIV_EXPR:
4072 case TRUNC_MOD_EXPR:
4073 case CEIL_MOD_EXPR:
4074 case FLOOR_MOD_EXPR:
4075 case ROUND_MOD_EXPR:
4076 case RDIV_EXPR:
4077 case EXACT_DIV_EXPR:
4078 case MIN_EXPR:
4079 case MAX_EXPR:
4080 case BIT_IOR_EXPR:
4081 case BIT_XOR_EXPR:
4082 case BIT_AND_EXPR:
4083 /* Continue with generic binary expression handling. */
4084 break;
4085
4086 case VEC_SERIES_EXPR:
4087 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4088 {
4089 error ("type mismatch in %qs", code_name);
4090 debug_generic_expr (rhs1_type);
4091 debug_generic_expr (rhs2_type);
4092 return true;
4093 }
4094 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4095 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4096 {
4097 error ("vector type expected in %qs", code_name);
4098 debug_generic_expr (lhs_type);
4099 return true;
4100 }
4101 return false;
4102
4103 default:
4104 gcc_unreachable ();
4105 }
4106
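  /* For example (illustrative): an assignment like

         long_var = int_var1 + int_var2;

     is not valid GIMPLE for the codes that reach this point; the
     gimplifier instead computes the int sum into a temporary and
     emits the widening conversion as a separate statement, so that
     here the lhs type and both rhs types must agree exactly.  */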
4107 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4108 || !useless_type_conversion_p (lhs_type, rhs2_type))
4109 {
4110 error ("type mismatch in binary expression");
4111 debug_generic_stmt (lhs_type);
4112 debug_generic_stmt (rhs1_type);
4113 debug_generic_stmt (rhs2_type);
4114 return true;
4115 }
4116
4117 return false;
4118 }
4119
4120 /* Verify a gimple assignment statement STMT with a ternary rhs.
4121 Returns true if anything is wrong. */
4122
4123 static bool
4124 verify_gimple_assign_ternary (gassign *stmt)
4125 {
4126 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4127 tree lhs = gimple_assign_lhs (stmt);
4128 tree lhs_type = TREE_TYPE (lhs);
4129 tree rhs1 = gimple_assign_rhs1 (stmt);
4130 tree rhs1_type = TREE_TYPE (rhs1);
4131 tree rhs2 = gimple_assign_rhs2 (stmt);
4132 tree rhs2_type = TREE_TYPE (rhs2);
4133 tree rhs3 = gimple_assign_rhs3 (stmt);
4134 tree rhs3_type = TREE_TYPE (rhs3);
4135
4136 if (!is_gimple_reg (lhs))
4137 {
4138 error ("non-register as LHS of ternary operation");
4139 return true;
4140 }
4141
4142 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4143 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4144 || !is_gimple_val (rhs2)
4145 || !is_gimple_val (rhs3))
4146 {
4147 error ("invalid operands in ternary operation");
4148 return true;
4149 }
4150
4151 const char* const code_name = get_tree_code_name (rhs_code);
4152
4153 /* First handle operations that involve different types. */
4154 switch (rhs_code)
4155 {
4156 case WIDEN_MULT_PLUS_EXPR:
4157 case WIDEN_MULT_MINUS_EXPR:
4158 if ((!INTEGRAL_TYPE_P (rhs1_type)
4159 && !FIXED_POINT_TYPE_P (rhs1_type))
4160 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4161 || !useless_type_conversion_p (lhs_type, rhs3_type)
4162 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4163 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4164 {
4165 error ("type mismatch in %qs", code_name);
4166 debug_generic_expr (lhs_type);
4167 debug_generic_expr (rhs1_type);
4168 debug_generic_expr (rhs2_type);
4169 debug_generic_expr (rhs3_type);
4170 return true;
4171 }
4172 break;
4173
4174 case VEC_COND_EXPR:
4175 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4176 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4177 TYPE_VECTOR_SUBPARTS (lhs_type)))
4178 {
4179 error ("the first argument of a %qs must be of a "
4180 "boolean vector type of the same number of elements "
4181 "as the result", code_name);
4182 debug_generic_expr (lhs_type);
4183 debug_generic_expr (rhs1_type);
4184 return true;
4185 }
4186 /* Fallthrough. */
4187 case COND_EXPR:
4188 if (!is_gimple_val (rhs1)
4189 && verify_gimple_comparison (TREE_TYPE (rhs1),
4190 TREE_OPERAND (rhs1, 0),
4191 TREE_OPERAND (rhs1, 1),
4192 TREE_CODE (rhs1)))
4193 return true;
4194 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4195 || !useless_type_conversion_p (lhs_type, rhs3_type))
4196 {
4197 error ("type mismatch in %qs", code_name);
4198 debug_generic_expr (lhs_type);
4199 debug_generic_expr (rhs2_type);
4200 debug_generic_expr (rhs3_type);
4201 return true;
4202 }
4203 break;
4204
4205 case VEC_PERM_EXPR:
4206 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4207 || !useless_type_conversion_p (lhs_type, rhs2_type))
4208 {
4209 error ("type mismatch in %qs", code_name);
4210 debug_generic_expr (lhs_type);
4211 debug_generic_expr (rhs1_type);
4212 debug_generic_expr (rhs2_type);
4213 debug_generic_expr (rhs3_type);
4214 return true;
4215 }
4216
4217 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4218 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4219 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4220 {
4221 error ("vector types expected in %qs", code_name);
4222 debug_generic_expr (lhs_type);
4223 debug_generic_expr (rhs1_type);
4224 debug_generic_expr (rhs2_type);
4225 debug_generic_expr (rhs3_type);
4226 return true;
4227 }
4228
4229 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4230 TYPE_VECTOR_SUBPARTS (rhs2_type))
4231 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4232 TYPE_VECTOR_SUBPARTS (rhs3_type))
4233 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4234 TYPE_VECTOR_SUBPARTS (lhs_type)))
4235 {
4236 error ("vectors with different element number found in %qs",
4237 code_name);
4238 debug_generic_expr (lhs_type);
4239 debug_generic_expr (rhs1_type);
4240 debug_generic_expr (rhs2_type);
4241 debug_generic_expr (rhs3_type);
4242 return true;
4243 }
4244
4245 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4246 || (TREE_CODE (rhs3) != VECTOR_CST
4247 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4248 (TREE_TYPE (rhs3_type)))
4249 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4250 (TREE_TYPE (rhs1_type))))))
4251 {
4252 error ("invalid mask type in %qs", code_name);
4253 debug_generic_expr (lhs_type);
4254 debug_generic_expr (rhs1_type);
4255 debug_generic_expr (rhs2_type);
4256 debug_generic_expr (rhs3_type);
4257 return true;
4258 }
4259
4260 return false;
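
      /* Semantics sketch (illustrative): with 4-lane inputs a and b
         and sel = { 0, 4, 1, 5 },

             c = VEC_PERM_EXPR <a, b, sel>;

         yields c[i] = sel[i] < 4 ? a[sel[i]] : b[sel[i] - 4], an
         interleave of the two low halves.  Hence the checks above:
         all four vector types must agree in lane count, and a
         non-constant mask must have integer elements of the same mode
         width as the data elements.  */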
4261
4262 case SAD_EXPR:
4263 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4264 || !useless_type_conversion_p (lhs_type, rhs3_type)
4265 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4266 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4267 {
4268 error ("type mismatch in %qs", code_name);
4269 debug_generic_expr (lhs_type);
4270 debug_generic_expr (rhs1_type);
4271 debug_generic_expr (rhs2_type);
4272 debug_generic_expr (rhs3_type);
4273 return true;
4274 }
4275
4276 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4277 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4278 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4279 {
4280 error ("vector types expected in %qs", code_name);
4281 debug_generic_expr (lhs_type);
4282 debug_generic_expr (rhs1_type);
4283 debug_generic_expr (rhs2_type);
4284 debug_generic_expr (rhs3_type);
4285 return true;
4286 }
4287
4288 return false;
4289
4290 case BIT_INSERT_EXPR:
4291 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4292 {
4293 error ("type mismatch in %qs", code_name);
4294 debug_generic_expr (lhs_type);
4295 debug_generic_expr (rhs1_type);
4296 return true;
4297 }
4298 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4299 && INTEGRAL_TYPE_P (rhs2_type))
4300 /* Vector element insert. */
4301 || (VECTOR_TYPE_P (rhs1_type)
4302 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4303 /* Aligned sub-vector insert. */
4304 || (VECTOR_TYPE_P (rhs1_type)
4305 && VECTOR_TYPE_P (rhs2_type)
4306 && types_compatible_p (TREE_TYPE (rhs1_type),
4307 TREE_TYPE (rhs2_type))
4308 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4309 TYPE_VECTOR_SUBPARTS (rhs2_type))
4310 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4311 {
4312 error ("not allowed type combination in %qs", code_name);
4313 debug_generic_expr (rhs1_type);
4314 debug_generic_expr (rhs2_type);
4315 return true;
4316 }
4317 if (! tree_fits_uhwi_p (rhs3)
4318 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4319 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4320 {
4321 error ("invalid position or size in %qs", code_name);
4322 return true;
4323 }
4324 if (INTEGRAL_TYPE_P (rhs1_type)
4325 && !type_has_mode_precision_p (rhs1_type))
4326 {
4327 error ("%qs into non-mode-precision operand", code_name);
4328 return true;
4329 }
4330 if (INTEGRAL_TYPE_P (rhs1_type))
4331 {
4332 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4333 if (bitpos >= TYPE_PRECISION (rhs1_type)
4334 || (bitpos + TYPE_PRECISION (rhs2_type)
4335 > TYPE_PRECISION (rhs1_type)))
4336 {
4337 error ("insertion out of range in %qs", code_name);
4338 return true;
4339 }
4340 }
4341 else if (VECTOR_TYPE_P (rhs1_type))
4342 {
4343 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4344 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4345 if (bitpos % bitsize != 0)
4346 {
4347 error ("%qs not at element boundary", code_name);
4348 return true;
4349 }
4350 }
4351 return false;
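
      /* Illustrative sketch: for a vector of four 32-bit elements,

             v2 = BIT_INSERT_EXPR <v, s_1, 64>;

         replaces lane 2 (bit position 64 is a multiple of the 32-bit
         element size, as required above).  rhs3 is always a
         bitsizetype constant giving the bit position.  */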
4352
4353 case DOT_PROD_EXPR:
4354 {
4355 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4356 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4357 && ((!INTEGRAL_TYPE_P (rhs1_type)
4358 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4359 || (!INTEGRAL_TYPE_P (lhs_type)
4360 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4361 || !types_compatible_p (rhs1_type, rhs2_type)
4362 || !useless_type_conversion_p (lhs_type, rhs3_type)
4363 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4364 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4365 {
4366 error ("type mismatch in %qs", code_name);
4367 debug_generic_expr (lhs_type);
4368 debug_generic_expr (rhs1_type);
4369 debug_generic_expr (rhs2_type);
4370 return true;
4371 }
4372 return false;
4373 }
4374
4375 case REALIGN_LOAD_EXPR:
4376 /* FIXME. */
4377 return false;
4378
4379 default:
4380 gcc_unreachable ();
4381 }
4382 return false;
4383 }
4384
4385 /* Verify a gimple assignment statement STMT with a single rhs.
4386 Returns true if anything is wrong. */
4387
4388 static bool
4389 verify_gimple_assign_single (gassign *stmt)
4390 {
4391 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4392 tree lhs = gimple_assign_lhs (stmt);
4393 tree lhs_type = TREE_TYPE (lhs);
4394 tree rhs1 = gimple_assign_rhs1 (stmt);
4395 tree rhs1_type = TREE_TYPE (rhs1);
4396 bool res = false;
4397
4398 const char* const code_name = get_tree_code_name (rhs_code);
4399
4400 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4401 {
4402 error ("non-trivial conversion in %qs", code_name);
4403 debug_generic_expr (lhs_type);
4404 debug_generic_expr (rhs1_type);
4405 return true;
4406 }
4407
4408 if (gimple_clobber_p (stmt)
4409 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4410 {
4411 error ("%qs LHS in clobber statement",
4412 get_tree_code_name (TREE_CODE (lhs)));
4413 debug_generic_expr (lhs);
4414 return true;
4415 }
4416
4417 if (handled_component_p (lhs)
4418 || TREE_CODE (lhs) == MEM_REF
4419 || TREE_CODE (lhs) == TARGET_MEM_REF)
4420 res |= verify_types_in_gimple_reference (lhs, true);
4421
4422 /* Special codes we cannot handle via their class. */
4423 switch (rhs_code)
4424 {
4425 case ADDR_EXPR:
4426 {
4427 tree op = TREE_OPERAND (rhs1, 0);
4428 if (!is_gimple_addressable (op))
4429 {
4430 error ("invalid operand in %qs", code_name);
4431 return true;
4432 }
4433
4434 /* Technically there is no longer a need for matching types, but
4435 gimple hygiene asks for this check. In LTO we can end up
4436 combining incompatible units and thus end up with addresses
4437 of globals that change their type to a common one. */
4438 if (!in_lto_p
4439 && !types_compatible_p (TREE_TYPE (op),
4440 TREE_TYPE (TREE_TYPE (rhs1)))
4441 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4442 TREE_TYPE (op)))
4443 {
4444 error ("type mismatch in %qs", code_name);
4445 debug_generic_stmt (TREE_TYPE (rhs1));
4446 debug_generic_stmt (TREE_TYPE (op));
4447 return true;
4448 }
4449
4450 return (verify_address (rhs1, true)
4451 || verify_types_in_gimple_reference (op, true));
4452 }
4453
4454 /* tcc_reference */
4455 case INDIRECT_REF:
4456 error ("%qs in gimple IL", code_name);
4457 return true;
4458
4459 case COMPONENT_REF:
4460 case BIT_FIELD_REF:
4461 case ARRAY_REF:
4462 case ARRAY_RANGE_REF:
4463 case VIEW_CONVERT_EXPR:
4464 case REALPART_EXPR:
4465 case IMAGPART_EXPR:
4466 case TARGET_MEM_REF:
4467 case MEM_REF:
4468 if (!is_gimple_reg (lhs)
4469 && is_gimple_reg_type (TREE_TYPE (lhs)))
4470 {
4471 error ("invalid RHS for gimple memory store: %qs", code_name);
4472 debug_generic_stmt (lhs);
4473 debug_generic_stmt (rhs1);
4474 return true;
4475 }
4476 return res || verify_types_in_gimple_reference (rhs1, false);
4477
4478 /* tcc_constant */
4479 case SSA_NAME:
4480 case INTEGER_CST:
4481 case REAL_CST:
4482 case FIXED_CST:
4483 case COMPLEX_CST:
4484 case VECTOR_CST:
4485 case STRING_CST:
4486 return res;
4487
4488 /* tcc_declaration */
4489 case CONST_DECL:
4490 return res;
4491 case VAR_DECL:
4492 case PARM_DECL:
4493 if (!is_gimple_reg (lhs)
4494 && !is_gimple_reg (rhs1)
4495 && is_gimple_reg_type (TREE_TYPE (lhs)))
4496 {
4497 error ("invalid RHS for gimple memory store: %qs", code_name);
4498 debug_generic_stmt (lhs);
4499 debug_generic_stmt (rhs1);
4500 return true;
4501 }
4502 return res;
4503
4504 case CONSTRUCTOR:
4505 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4506 {
4507 unsigned int i;
4508 tree elt_i, elt_v, elt_t = NULL_TREE;
4509
4510 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4511 return res;
4512 /* For vector CONSTRUCTORs we require that either it is an empty
4513 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4514 (then the element count must be correct to cover the whole
4515 outer vector and the index must be NULL on all elements), or it
4516 is a CONSTRUCTOR of scalar elements, where we as an exception
4517 allow a smaller number of elements (assuming zero filling) and
4518 consecutive indexes as compared to NULL indexes (such
4519 CONSTRUCTORs can appear in the IL from FEs). */
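      /* For instance (illustrative): for a vector(4) int result both

             { _1, _2, _3, _4 }   and   { _1, _2 }

         are accepted as scalar-element CONSTRUCTORs (the second
         zero-fills the upper lanes), while a CONSTRUCTOR built from
         vector(2) int pieces must supply exactly both halves.  */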
4520 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4521 {
4522 if (elt_t == NULL_TREE)
4523 {
4524 elt_t = TREE_TYPE (elt_v);
4525 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4526 {
4527 tree elt_t = TREE_TYPE (elt_v);
4528 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4529 TREE_TYPE (elt_t)))
4530 {
4531 error ("incorrect type of vector %qs elements",
4532 code_name);
4533 debug_generic_stmt (rhs1);
4534 return true;
4535 }
4536 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4537 * TYPE_VECTOR_SUBPARTS (elt_t),
4538 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4539 {
4540 error ("incorrect number of vector %qs elements",
4541 code_name);
4542 debug_generic_stmt (rhs1);
4543 return true;
4544 }
4545 }
4546 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4547 elt_t))
4548 {
4549 error ("incorrect type of vector %qs elements",
4550 code_name);
4551 debug_generic_stmt (rhs1);
4552 return true;
4553 }
4554 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4555 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4556 {
4557 error ("incorrect number of vector %qs elements",
4558 code_name);
4559 debug_generic_stmt (rhs1);
4560 return true;
4561 }
4562 }
4563 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4564 {
4565 error ("incorrect type of vector CONSTRUCTOR elements");
4566 debug_generic_stmt (rhs1);
4567 return true;
4568 }
4569 if (elt_i != NULL_TREE
4570 && (TREE_CODE (elt_t) == VECTOR_TYPE
4571 || TREE_CODE (elt_i) != INTEGER_CST
4572 || compare_tree_int (elt_i, i) != 0))
4573 {
4574 error ("vector %qs with non-NULL element index",
4575 code_name);
4576 debug_generic_stmt (rhs1);
4577 return true;
4578 }
4579 if (!is_gimple_val (elt_v))
4580 {
4581 error ("vector %qs element is not a GIMPLE value",
4582 code_name);
4583 debug_generic_stmt (rhs1);
4584 return true;
4585 }
4586 }
4587 }
4588 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4589 {
4590 error ("non-vector %qs with elements", code_name);
4591 debug_generic_stmt (rhs1);
4592 return true;
4593 }
4594 return res;
4595
4596 case ASSERT_EXPR:
4597 /* FIXME. */
4598 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4599 if (rhs1 == boolean_false_node)
4600 {
4601 error ("%qs with an always-false condition", code_name);
4602 debug_generic_stmt (rhs1);
4603 return true;
4604 }
4605 break;
4606
4607 case OBJ_TYPE_REF:
4608 case WITH_SIZE_EXPR:
4609 /* FIXME. */
4610 return res;
4611
4612 default:;
4613 }
4614
4615 return res;
4616 }
4617
4618 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4619 is a problem, otherwise false. */
4620
4621 static bool
4622 verify_gimple_assign (gassign *stmt)
4623 {
4624 switch (gimple_assign_rhs_class (stmt))
4625 {
4626 case GIMPLE_SINGLE_RHS:
4627 return verify_gimple_assign_single (stmt);
4628
4629 case GIMPLE_UNARY_RHS:
4630 return verify_gimple_assign_unary (stmt);
4631
4632 case GIMPLE_BINARY_RHS:
4633 return verify_gimple_assign_binary (stmt);
4634
4635 case GIMPLE_TERNARY_RHS:
4636 return verify_gimple_assign_ternary (stmt);
4637
4638 default:
4639 gcc_unreachable ();
4640 }
4641 }
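
/* An illustrative mapping from assignments to rhs classes (not
   exhaustive; the class is derived from the rhs code):

       x = y;             GIMPLE_SINGLE_RHS
       x = -y;            GIMPLE_UNARY_RHS
       x = y + z;         GIMPLE_BINARY_RHS
       x = y ? z : w;     GIMPLE_TERNARY_RHS (e.g. a COND_EXPR)  */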
4642
4643 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4644 is a problem, otherwise false. */
4645
4646 static bool
4647 verify_gimple_return (greturn *stmt)
4648 {
4649 tree op = gimple_return_retval (stmt);
4650 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4651
4652 /* We cannot test for present return values as we do not fix up missing
4653 return values from the original source. */
4654 if (op == NULL)
4655 return false;
4656
4657 if (!is_gimple_val (op)
4658 && TREE_CODE (op) != RESULT_DECL)
4659 {
4660 error ("invalid operand in return statement");
4661 debug_generic_stmt (op);
4662 return true;
4663 }
4664
4665 if ((TREE_CODE (op) == RESULT_DECL
4666 && DECL_BY_REFERENCE (op))
4667 || (TREE_CODE (op) == SSA_NAME
4668 && SSA_NAME_VAR (op)
4669 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4670 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4671 op = TREE_TYPE (op);
4672
4673 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4674 {
4675 error ("invalid conversion in return statement");
4676 debug_generic_stmt (restype);
4677 debug_generic_stmt (TREE_TYPE (op));
4678 return true;
4679 }
4680
4681 return false;
4682 }
4683
4684
4685 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4686 is a problem, otherwise false. */
4687
4688 static bool
4689 verify_gimple_goto (ggoto *stmt)
4690 {
4691 tree dest = gimple_goto_dest (stmt);
4692
4693 /* ??? We have two canonical forms of direct goto destinations, a
4694 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4695 if (TREE_CODE (dest) != LABEL_DECL
4696 && (!is_gimple_val (dest)
4697 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4698 {
4699 error ("goto destination is neither a label nor a pointer");
4700 return true;
4701 }
4702
4703 return false;
4704 }
4705
4706 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4707 is a problem, otherwise false. */
4708
4709 static bool
4710 verify_gimple_switch (gswitch *stmt)
4711 {
4712 unsigned int i, n;
4713 tree elt, prev_upper_bound = NULL_TREE;
4714 tree index_type, elt_type = NULL_TREE;
4715
4716 if (!is_gimple_val (gimple_switch_index (stmt)))
4717 {
4718 error ("invalid operand to switch statement");
4719 debug_generic_stmt (gimple_switch_index (stmt));
4720 return true;
4721 }
4722
4723 index_type = TREE_TYPE (gimple_switch_index (stmt));
4724 if (! INTEGRAL_TYPE_P (index_type))
4725 {
4726 error ("non-integral type switch statement");
4727 debug_generic_expr (index_type);
4728 return true;
4729 }
4730
4731 elt = gimple_switch_label (stmt, 0);
4732 if (CASE_LOW (elt) != NULL_TREE
4733 || CASE_HIGH (elt) != NULL_TREE
4734 || CASE_CHAIN (elt) != NULL_TREE)
4735 {
4736 error ("invalid default case label in switch statement");
4737 debug_generic_expr (elt);
4738 return true;
4739 }
4740
4741 n = gimple_switch_num_labels (stmt);
4742 for (i = 1; i < n; i++)
4743 {
4744 elt = gimple_switch_label (stmt, i);
4745
4746 if (CASE_CHAIN (elt))
4747 {
4748 error ("invalid %<CASE_CHAIN%>");
4749 debug_generic_expr (elt);
4750 return true;
4751 }
4752 if (! CASE_LOW (elt))
4753 {
4754 error ("invalid case label in switch statement");
4755 debug_generic_expr (elt);
4756 return true;
4757 }
4758 if (CASE_HIGH (elt)
4759 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4760 {
4761 error ("invalid case range in switch statement");
4762 debug_generic_expr (elt);
4763 return true;
4764 }
4765
4766 if (elt_type)
4767 {
4768 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4769 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4770 {
4771 error ("type mismatch for case label in switch statement");
4772 debug_generic_expr (elt);
4773 return true;
4774 }
4775 }
4776 else
4777 {
4778 elt_type = TREE_TYPE (CASE_LOW (elt));
4779 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4780 {
4781 error ("type precision mismatch in switch statement");
4782 return true;
4783 }
4784 }
4785
4786 if (prev_upper_bound)
4787 {
4788 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4789 {
4790 error ("case labels not sorted in switch statement");
4791 return true;
4792 }
4793 }
4794
4795 prev_upper_bound = CASE_HIGH (elt);
4796 if (! prev_upper_bound)
4797 prev_upper_bound = CASE_LOW (elt);
4798 }
4799
4800 return false;
4801 }
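
/* A sketch of the canonical form checked above (illustrative; the
   exact dump syntax varies between GCC versions):

       switch (i_1) <default: <L2>, case 1: <L0>, case 5 ... 7: <L1>>

   The default label comes first with NULL CASE_LOW and CASE_HIGH, the
   remaining labels are sorted in increasing order, ranges have
   CASE_LOW < CASE_HIGH, and all labels share one type whose precision
   does not exceed that of the switch index.  */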
4802
4803 /* Verify a gimple debug statement STMT.
4804 Returns true if anything is wrong. */
4805
4806 static bool
4807 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4808 {
4809 /* There isn't much that could be wrong in a gimple debug stmt. A
4810 gimple debug bind stmt, for example, maps a tree (usually a
4811 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4812 or member of an aggregate type) to another tree, which can be an
4813 arbitrary expression. These stmts expand into debug insns, and
4814 are converted to debug notes by var-tracking.c. */
4815 return false;
4816 }
4817
4818 /* Verify a gimple label statement STMT.
4819 Returns true if anything is wrong. */
4820
4821 static bool
4822 verify_gimple_label (glabel *stmt)
4823 {
4824 tree decl = gimple_label_label (stmt);
4825 int uid;
4826 bool err = false;
4827
4828 if (TREE_CODE (decl) != LABEL_DECL)
4829 return true;
4830 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4831 && DECL_CONTEXT (decl) != current_function_decl)
4832 {
4833 error ("label context is not the current function declaration");
4834 err |= true;
4835 }
4836
4837 uid = LABEL_DECL_UID (decl);
4838 if (cfun->cfg
4839 && (uid == -1
4840 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4841 {
4842 error ("incorrect entry in %<label_to_block_map%>");
4843 err |= true;
4844 }
4845
4846 uid = EH_LANDING_PAD_NR (decl);
4847 if (uid)
4848 {
4849 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4850 if (decl != lp->post_landing_pad)
4851 {
4852 error ("incorrect setting of landing pad number");
4853 err |= true;
4854 }
4855 }
4856
4857 return err;
4858 }
4859
4860 /* Verify a gimple cond statement STMT.
4861 Returns true if anything is wrong. */
4862
4863 static bool
4864 verify_gimple_cond (gcond *stmt)
4865 {
4866 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4867 {
4868 error ("invalid comparison code in gimple cond");
4869 return true;
4870 }
4871 if (!(!gimple_cond_true_label (stmt)
4872 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4873 || !(!gimple_cond_false_label (stmt)
4874 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4875 {
4876 error ("invalid labels in gimple cond");
4877 return true;
4878 }
4879
4880 return verify_gimple_comparison (boolean_type_node,
4881 gimple_cond_lhs (stmt),
4882 gimple_cond_rhs (stmt),
4883 gimple_cond_code (stmt));
4884 }
4885
4886 /* Verify the GIMPLE statement STMT. Returns true if there is an
4887 error, otherwise false. */
4888
4889 static bool
4890 verify_gimple_stmt (gimple *stmt)
4891 {
4892 switch (gimple_code (stmt))
4893 {
4894 case GIMPLE_ASSIGN:
4895 return verify_gimple_assign (as_a <gassign *> (stmt));
4896
4897 case GIMPLE_LABEL:
4898 return verify_gimple_label (as_a <glabel *> (stmt));
4899
4900 case GIMPLE_CALL:
4901 return verify_gimple_call (as_a <gcall *> (stmt));
4902
4903 case GIMPLE_COND:
4904 return verify_gimple_cond (as_a <gcond *> (stmt));
4905
4906 case GIMPLE_GOTO:
4907 return verify_gimple_goto (as_a <ggoto *> (stmt));
4908
4909 case GIMPLE_SWITCH:
4910 return verify_gimple_switch (as_a <gswitch *> (stmt));
4911
4912 case GIMPLE_RETURN:
4913 return verify_gimple_return (as_a <greturn *> (stmt));
4914
4915 case GIMPLE_ASM:
4916 return false;
4917
4918 case GIMPLE_TRANSACTION:
4919 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4920
4921 /* Tuples that do not have tree operands. */
4922 case GIMPLE_NOP:
4923 case GIMPLE_PREDICT:
4924 case GIMPLE_RESX:
4925 case GIMPLE_EH_DISPATCH:
4926 case GIMPLE_EH_MUST_NOT_THROW:
4927 return false;
4928
4929 CASE_GIMPLE_OMP:
4930 /* OpenMP directives are validated by the FE and never operated
4931 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4932 non-gimple expressions when the main index variable has had
4933 its address taken. This does not affect the loop itself
4934 because the header of a GIMPLE_OMP_FOR is merely used to determine
4935 how to set up the parallel iteration. */
4936 return false;
4937
4938 case GIMPLE_DEBUG:
4939 return verify_gimple_debug (stmt);
4940
4941 default:
4942 gcc_unreachable ();
4943 }
4944 }
4945
4946 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4947 and false otherwise. */
4948
4949 static bool
4950 verify_gimple_phi (gphi *phi)
4951 {
4952 bool err = false;
4953 unsigned i;
4954 tree phi_result = gimple_phi_result (phi);
4955 bool virtual_p;
4956
4957 if (!phi_result)
4958 {
4959 error ("invalid %<PHI%> result");
4960 return true;
4961 }
4962
4963 virtual_p = virtual_operand_p (phi_result);
4964 if (TREE_CODE (phi_result) != SSA_NAME
4965 || (virtual_p
4966 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4967 {
4968 error ("invalid %<PHI%> result");
4969 err = true;
4970 }
4971
4972 for (i = 0; i < gimple_phi_num_args (phi); i++)
4973 {
4974 tree t = gimple_phi_arg_def (phi, i);
4975
4976 if (!t)
4977 {
4978 error ("missing %<PHI%> def");
4979 err |= true;
4980 continue;
4981 }
4982 /* Addressable variables do have SSA_NAMEs but they
4983 are not considered gimple values. */
4984 else if ((TREE_CODE (t) == SSA_NAME
4985 && virtual_p != virtual_operand_p (t))
4986 || (virtual_p
4987 && (TREE_CODE (t) != SSA_NAME
4988 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4989 || (!virtual_p
4990 && !is_gimple_val (t)))
4991 {
4992 error ("invalid %<PHI%> argument");
4993 debug_generic_expr (t);
4994 err |= true;
4995 }
4996 #ifdef ENABLE_TYPES_CHECKING
4997 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4998 {
4999 error ("incompatible types in %<PHI%> argument %u", i);
5000 debug_generic_stmt (TREE_TYPE (phi_result));
5001 debug_generic_stmt (TREE_TYPE (t));
5002 err |= true;
5003 }
5004 #endif
5005 }
5006
5007 return err;
5008 }
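
/* An illustrative well-formed PHI in dump syntax:

       # x_3 = PHI <x_1(2), 0(4)>

   The result is an SSA name and each argument is a gimple value (an
   SSA name of the same kind, or a constant for non-virtual PHIs);
   a virtual PHI such as

       # .MEM_5 = PHI <.MEM_1(2), .MEM_4(4)>

   must use SSA names of the single virtual operand throughout.  */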
5009
5010 /* Verify the GIMPLE statements inside the sequence STMTS. */
5011
5012 static bool
5013 verify_gimple_in_seq_2 (gimple_seq stmts)
5014 {
5015 gimple_stmt_iterator ittr;
5016 bool err = false;
5017
5018 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5019 {
5020 gimple *stmt = gsi_stmt (ittr);
5021
5022 switch (gimple_code (stmt))
5023 {
5024 case GIMPLE_BIND:
5025 err |= verify_gimple_in_seq_2 (
5026 gimple_bind_body (as_a <gbind *> (stmt)));
5027 break;
5028
5029 case GIMPLE_TRY:
5030 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5031 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5032 break;
5033
5034 case GIMPLE_EH_FILTER:
5035 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5036 break;
5037
5038 case GIMPLE_EH_ELSE:
5039 {
5040 geh_else *eh_else = as_a <geh_else *> (stmt);
5041 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5042 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5043 }
5044 break;
5045
5046 case GIMPLE_CATCH:
5047 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5048 as_a <gcatch *> (stmt)));
5049 break;
5050
5051 case GIMPLE_TRANSACTION:
5052 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5053 break;
5054
5055 default:
5056 {
5057 bool err2 = verify_gimple_stmt (stmt);
5058 if (err2)
5059 debug_gimple_stmt (stmt);
5060 err |= err2;
5061 }
5062 }
5063 }
5064
5065 return err;
5066 }
5067
5068 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5069 is a problem, otherwise false. */
5070
5071 static bool
5072 verify_gimple_transaction (gtransaction *stmt)
5073 {
5074 tree lab;
5075
5076 lab = gimple_transaction_label_norm (stmt);
5077 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5078 return true;
5079 lab = gimple_transaction_label_uninst (stmt);
5080 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5081 return true;
5082 lab = gimple_transaction_label_over (stmt);
5083 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5084 return true;
5085
5086 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5087 }
5088
5089
5090 /* Verify the GIMPLE statements inside the statement list STMTS. */
5091
5092 DEBUG_FUNCTION void
5093 verify_gimple_in_seq (gimple_seq stmts)
5094 {
5095 timevar_push (TV_TREE_STMT_VERIFY);
5096 if (verify_gimple_in_seq_2 (stmts))
5097 internal_error ("%<verify_gimple%> failed");
5098 timevar_pop (TV_TREE_STMT_VERIFY);
5099 }
5100
5101 /* Return true when T can be shared. */
5102
5103 static bool
5104 tree_node_can_be_shared (tree t)
5105 {
5106 if (IS_TYPE_OR_DECL_P (t)
5107 || TREE_CODE (t) == SSA_NAME
5108 || TREE_CODE (t) == IDENTIFIER_NODE
5109 || TREE_CODE (t) == CASE_LABEL_EXPR
5110 || is_gimple_min_invariant (t))
5111 return true;
5112
5113 if (t == error_mark_node)
5114 return true;
5115
5116 return false;
5117 }
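
/* For example (illustrative): two statements may both refer to the
   same VAR_DECL or SSA name, but a COMPONENT_REF tree such as a.b must
   be a distinct tree node in each statement that uses it; passes
   building such duplicates are expected to go through unshare_expr.  */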
5118
5119 /* Called via walk_tree. Verify tree sharing. */
5120
5121 static tree
5122 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5123 {
5124 hash_set<void *> *visited = (hash_set<void *> *) data;
5125
5126 if (tree_node_can_be_shared (*tp))
5127 {
5128 *walk_subtrees = false;
5129 return NULL;
5130 }
5131
5132 if (visited->add (*tp))
5133 return *tp;
5134
5135 return NULL;
5136 }
5137
5138 /* Called via walk_gimple_stmt. Verify tree sharing. */
5139
5140 static tree
5141 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5142 {
5143 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5144 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5145 }
5146
5147 static bool eh_error_found;
5148 bool
5149 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5150 hash_set<gimple *> *visited)
5151 {
5152 if (!visited->contains (stmt))
5153 {
5154 error ("dead statement in EH table");
5155 debug_gimple_stmt (stmt);
5156 eh_error_found = true;
5157 }
5158 return true;
5159 }
5160
5161 /* Verify that the block of location LOC is in BLOCKS. */
5162
5163 static bool
5164 verify_location (hash_set<tree> *blocks, location_t loc)
5165 {
5166 tree block = LOCATION_BLOCK (loc);
5167 if (block != NULL_TREE
5168 && !blocks->contains (block))
5169 {
5170 error ("location references block not in block tree");
5171 return true;
5172 }
5173 if (block != NULL_TREE)
5174 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5175 return false;
5176 }
5177
5178 /* Called via walk_tree. Verify that expressions have no blocks. */
5179
5180 static tree
5181 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5182 {
5183 if (!EXPR_P (*tp))
5184 {
5185 *walk_subtrees = false;
5186 return NULL;
5187 }
5188
5189 location_t loc = EXPR_LOCATION (*tp);
5190 if (LOCATION_BLOCK (loc) != NULL)
5191 return *tp;
5192
5193 return NULL;
5194 }
5195
5196 /* Called via walk_tree. Verify locations of expressions. */
5197
5198 static tree
5199 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5200 {
5201 hash_set<tree> *blocks = (hash_set<tree> *) data;
5202 tree t = *tp;
5203
5204 /* ??? This doesn't really belong here but there's no good place to
5205 stick this remainder of old verify_expr. */
5206 /* ??? This barfs on debug stmts which contain binds to vars with
5207 different function context. */
5208 #if 0
5209 if (VAR_P (t)
5210 || TREE_CODE (t) == PARM_DECL
5211 || TREE_CODE (t) == RESULT_DECL)
5212 {
5213 tree context = decl_function_context (t);
5214 if (context != cfun->decl
5215 && !SCOPE_FILE_SCOPE_P (context)
5216 && !TREE_STATIC (t)
5217 && !DECL_EXTERNAL (t))
5218 {
5219 error ("local declaration from a different function");
5220 return t;
5221 }
5222 }
5223 #endif
5224
5225 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5226 {
5227 tree x = DECL_DEBUG_EXPR (t);
5228 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5229 if (addr)
5230 return addr;
5231 }
5232 if ((VAR_P (t)
5233 || TREE_CODE (t) == PARM_DECL
5234 || TREE_CODE (t) == RESULT_DECL)
5235 && DECL_HAS_VALUE_EXPR_P (t))
5236 {
5237 tree x = DECL_VALUE_EXPR (t);
5238 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5239 if (addr)
5240 return addr;
5241 }
5242
5243 if (!EXPR_P (t))
5244 {
5245 *walk_subtrees = false;
5246 return NULL;
5247 }
5248
5249 location_t loc = EXPR_LOCATION (t);
5250 if (verify_location (blocks, loc))
5251 return t;
5252
5253 return NULL;
5254 }
5255
5256 /* Called via walk_gimple_op. Verify locations of expressions. */
5257
5258 static tree
5259 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5260 {
5261 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5262 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5263 }
5264
5265 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5266
5267 static void
5268 collect_subblocks (hash_set<tree> *blocks, tree block)
5269 {
5270 tree t;
5271 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5272 {
5273 blocks->add (t);
5274 collect_subblocks (blocks, t);
5275 }
5276 }
5277
5278 /* Disable warnings about missing quoting in GCC diagnostics for
5279 the verification errors. Their format strings don't follow
5280 GCC diagnostic conventions and trigger an ICE in the end. */
5281 #if __GNUC__ >= 10
5282 # pragma GCC diagnostic push
5283 # pragma GCC diagnostic ignored "-Wformat-diag"
5284 #endif
5285
5286 /* Verify the GIMPLE statements in the CFG of FN. */
5287
5288 DEBUG_FUNCTION void
5289 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5290 {
5291 basic_block bb;
5292 bool err = false;
5293
5294 timevar_push (TV_TREE_STMT_VERIFY);
5295 hash_set<void *> visited;
5296 hash_set<gimple *> visited_throwing_stmts;
5297
5298 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5299 hash_set<tree> blocks;
5300 if (DECL_INITIAL (fn->decl))
5301 {
5302 blocks.add (DECL_INITIAL (fn->decl));
5303 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5304 }
5305
5306 FOR_EACH_BB_FN (bb, fn)
5307 {
5308 gimple_stmt_iterator gsi;
5309 edge_iterator ei;
5310 edge e;
5311
5312 for (gphi_iterator gpi = gsi_start_phis (bb);
5313 !gsi_end_p (gpi);
5314 gsi_next (&gpi))
5315 {
5316 gphi *phi = gpi.phi ();
5317 bool err2 = false;
5318 unsigned i;
5319
5320 if (gimple_bb (phi) != bb)
5321 {
5322 error ("gimple_bb (phi) is set to a wrong basic block");
5323 err2 = true;
5324 }
5325
5326 err2 |= verify_gimple_phi (phi);
5327
5328 /* Only PHI arguments have locations. */
5329 if (gimple_location (phi) != UNKNOWN_LOCATION)
5330 {
5331 error ("PHI node with location");
5332 err2 = true;
5333 }
5334
5335 for (i = 0; i < gimple_phi_num_args (phi); i++)
5336 {
5337 tree arg = gimple_phi_arg_def (phi, i);
5338 tree addr = walk_tree (&arg, verify_node_sharing_1,
5339 &visited, NULL);
5340 if (addr)
5341 {
5342 error ("incorrect sharing of tree nodes");
5343 debug_generic_expr (addr);
5344 err2 |= true;
5345 }
5346 location_t loc = gimple_phi_arg_location (phi, i);
5347 if (virtual_operand_p (gimple_phi_result (phi))
5348 && loc != UNKNOWN_LOCATION)
5349 {
5350 error ("virtual PHI with argument locations");
5351 err2 = true;
5352 }
5353 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5354 if (addr)
5355 {
5356 debug_generic_expr (addr);
5357 err2 = true;
5358 }
5359 err2 |= verify_location (&blocks, loc);
5360 }
5361
5362 if (err2)
5363 debug_gimple_stmt (phi);
5364 err |= err2;
5365 }
5366
5367 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5368 {
5369 gimple *stmt = gsi_stmt (gsi);
5370 bool err2 = false;
5371 struct walk_stmt_info wi;
5372 tree addr;
5373 int lp_nr;
5374
5375 if (gimple_bb (stmt) != bb)
5376 {
5377 error ("gimple_bb (stmt) is set to a wrong basic block");
5378 err2 = true;
5379 }
5380
5381 err2 |= verify_gimple_stmt (stmt);
5382 err2 |= verify_location (&blocks, gimple_location (stmt));
5383
5384 memset (&wi, 0, sizeof (wi));
5385 wi.info = (void *) &visited;
5386 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5387 if (addr)
5388 {
5389 error ("incorrect sharing of tree nodes");
5390 debug_generic_expr (addr);
5391 err2 |= true;
5392 }
5393
5394 memset (&wi, 0, sizeof (wi));
5395 wi.info = (void *) &blocks;
5396 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5397 if (addr)
5398 {
5399 debug_generic_expr (addr);
5400 err2 |= true;
5401 }
5402
5403 /* If the statement is marked as part of an EH region, then it is
5404 expected that the statement could throw. Verify that when we
5405 have optimizations that simplify statements such that we prove
5406 that they cannot throw, that we update other data structures
5407 to match. */
5408 lp_nr = lookup_stmt_eh_lp (stmt);
5409 if (lp_nr != 0)
5410 visited_throwing_stmts.add (stmt);
5411 if (lp_nr > 0)
5412 {
5413 if (!stmt_could_throw_p (cfun, stmt))
5414 {
5415 if (verify_nothrow)
5416 {
5417 error ("statement marked for throw, but doesn%'t");
5418 err2 |= true;
5419 }
5420 }
5421 else if (!gsi_one_before_end_p (gsi))
5422 {
5423 error ("statement marked for throw in middle of block");
5424 err2 |= true;
5425 }
5426 }
5427
5428 if (err2)
5429 debug_gimple_stmt (stmt);
5430 err |= err2;
5431 }
5432
5433 FOR_EACH_EDGE (e, ei, bb->succs)
5434 if (e->goto_locus != UNKNOWN_LOCATION)
5435 err |= verify_location (&blocks, e->goto_locus);
5436 }
5437
5438 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5439 eh_error_found = false;
5440 if (eh_table)
5441 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5442 (&visited_throwing_stmts);
5443
5444 if (err || eh_error_found)
5445 internal_error ("verify_gimple failed");
5446
5447 verify_histograms ();
5448 timevar_pop (TV_TREE_STMT_VERIFY);
5449 }
5450
5451
5452 /* Verifies that the flow information is OK. */
5453
5454 static int
5455 gimple_verify_flow_info (void)
5456 {
5457 int err = 0;
5458 basic_block bb;
5459 gimple_stmt_iterator gsi;
5460 gimple *stmt;
5461 edge e;
5462 edge_iterator ei;
5463
5464 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5465 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5466 {
5467 error ("ENTRY_BLOCK has IL associated with it");
5468 err = 1;
5469 }
5470
5471 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5472 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5473 {
5474 error ("EXIT_BLOCK has IL associated with it");
5475 err = 1;
5476 }
5477
5478 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5479 if (e->flags & EDGE_FALLTHRU)
5480 {
5481 error ("fallthru to exit from bb %d", e->src->index);
5482 err = 1;
5483 }
5484
5485 FOR_EACH_BB_FN (bb, cfun)
5486 {
5487 bool found_ctrl_stmt = false;
5488
5489 stmt = NULL;
5490
5491 /* Skip labels on the start of basic block. */
5492 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5493 {
5494 tree label;
5495 gimple *prev_stmt = stmt;
5496
5497 stmt = gsi_stmt (gsi);
5498
5499 if (gimple_code (stmt) != GIMPLE_LABEL)
5500 break;
5501
5502 label = gimple_label_label (as_a <glabel *> (stmt));
5503 if (prev_stmt && DECL_NONLOCAL (label))
5504 {
5505 error ("nonlocal label ");
5506 print_generic_expr (stderr, label);
5507 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5508 bb->index);
5509 err = 1;
5510 }
5511
5512 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5513 {
5514 error ("EH landing pad label ");
5515 print_generic_expr (stderr, label);
5516 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5517 bb->index);
5518 err = 1;
5519 }
5520
5521 if (label_to_block (cfun, label) != bb)
5522 {
5523 error ("label ");
5524 print_generic_expr (stderr, label);
5525 fprintf (stderr, " to block does not match in bb %d",
5526 bb->index);
5527 err = 1;
5528 }
5529
5530 if (decl_function_context (label) != current_function_decl)
5531 {
5532 error ("label ");
5533 print_generic_expr (stderr, label);
5534 fprintf (stderr, " has incorrect context in bb %d",
5535 bb->index);
5536 err = 1;
5537 }
5538 }
5539
5540 /* Verify that the body of basic block BB is free of control flow. */
5541 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5542 {
5543 gimple *stmt = gsi_stmt (gsi);
5544
5545 if (found_ctrl_stmt)
5546 {
5547 error ("control flow in the middle of basic block %d",
5548 bb->index);
5549 err = 1;
5550 }
5551
5552 if (stmt_ends_bb_p (stmt))
5553 found_ctrl_stmt = true;
5554
5555 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5556 {
5557 error ("label ");
5558 print_generic_expr (stderr, gimple_label_label (label_stmt));
5559 fprintf (stderr, " in the middle of basic block %d", bb->index);
5560 err = 1;
5561 }
5562 }
5563
5564 gsi = gsi_last_nondebug_bb (bb);
5565 if (gsi_end_p (gsi))
5566 continue;
5567
5568 stmt = gsi_stmt (gsi);
5569
5570 if (gimple_code (stmt) == GIMPLE_LABEL)
5571 continue;
5572
5573 err |= verify_eh_edges (stmt);
5574
5575 if (is_ctrl_stmt (stmt))
5576 {
5577 FOR_EACH_EDGE (e, ei, bb->succs)
5578 if (e->flags & EDGE_FALLTHRU)
5579 {
5580 error ("fallthru edge after a control statement in bb %d",
5581 bb->index);
5582 err = 1;
5583 }
5584 }
5585
5586 if (gimple_code (stmt) != GIMPLE_COND)
5587 {
5588 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5589 after anything else but if statement. */
5590 FOR_EACH_EDGE (e, ei, bb->succs)
5591 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5592 {
5593 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5594 bb->index);
5595 err = 1;
5596 }
5597 }
5598
5599 switch (gimple_code (stmt))
5600 {
5601 case GIMPLE_COND:
5602 {
5603 edge true_edge;
5604 edge false_edge;
5605
5606 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5607
5608 if (!true_edge
5609 || !false_edge
5610 || !(true_edge->flags & EDGE_TRUE_VALUE)
5611 || !(false_edge->flags & EDGE_FALSE_VALUE)
5612 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5613 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5614 || EDGE_COUNT (bb->succs) >= 3)
5615 {
5616 error ("wrong outgoing edge flags at end of bb %d",
5617 bb->index);
5618 err = 1;
5619 }
5620 }
5621 break;
5622
5623 case GIMPLE_GOTO:
5624 if (simple_goto_p (stmt))
5625 {
5626 error ("explicit goto at end of bb %d", bb->index);
5627 err = 1;
5628 }
5629 else
5630 {
5631 /* FIXME. We should double check that the labels in the
5632 destination blocks have their address taken. */
5633 FOR_EACH_EDGE (e, ei, bb->succs)
5634 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5635 | EDGE_FALSE_VALUE))
5636 || !(e->flags & EDGE_ABNORMAL))
5637 {
5638 error ("wrong outgoing edge flags at end of bb %d",
5639 bb->index);
5640 err = 1;
5641 }
5642 }
5643 break;
5644
5645 case GIMPLE_CALL:
5646 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5647 break;
5648 /* fallthru */
5649 case GIMPLE_RETURN:
5650 if (!single_succ_p (bb)
5651 || (single_succ_edge (bb)->flags
5652 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5653 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5654 {
5655 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5656 err = 1;
5657 }
5658 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5659 {
5660 error ("return edge does not point to exit in bb %d",
5661 bb->index);
5662 err = 1;
5663 }
5664 break;
5665
5666 case GIMPLE_SWITCH:
5667 {
5668 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5669 tree prev;
5670 edge e;
5671 size_t i, n;
5672
5673 n = gimple_switch_num_labels (switch_stmt);
5674
5675 /* Mark all the destination basic blocks. */
5676 for (i = 0; i < n; ++i)
5677 {
5678 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5679 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5680 label_bb->aux = (void *)1;
5681 }
5682
5683 /* Verify that the case labels are sorted. */
5684 prev = gimple_switch_label (switch_stmt, 0);
5685 for (i = 1; i < n; ++i)
5686 {
5687 tree c = gimple_switch_label (switch_stmt, i);
5688 if (!CASE_LOW (c))
5689 {
5690 error ("found default case not at the start of "
5691 "case vector");
5692 err = 1;
5693 continue;
5694 }
5695 if (CASE_LOW (prev)
5696 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5697 {
5698 error ("case labels not sorted: ");
5699 print_generic_expr (stderr, prev);
5700 fprintf (stderr," is greater than ");
5701 print_generic_expr (stderr, c);
5702 fprintf (stderr," but comes before it.\n");
5703 err = 1;
5704 }
5705 prev = c;
5706 }
5707 /* VRP will remove the default case if it can prove it will
5708 never be executed. So do not verify there always exists
5709 a default case here. */
5710
5711 FOR_EACH_EDGE (e, ei, bb->succs)
5712 {
5713 if (!e->dest->aux)
5714 {
5715 error ("extra outgoing edge %d->%d",
5716 bb->index, e->dest->index);
5717 err = 1;
5718 }
5719
5720 e->dest->aux = (void *)2;
5721 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5722 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5723 {
5724 error ("wrong outgoing edge flags at end of bb %d",
5725 bb->index);
5726 err = 1;
5727 }
5728 }
5729
5730 /* Check that we have all of them. */
5731 for (i = 0; i < n; ++i)
5732 {
5733 basic_block label_bb = gimple_switch_label_bb (cfun,
5734 switch_stmt, i);
5735
5736 if (label_bb->aux != (void *)2)
5737 {
5738 error ("missing edge %i->%i", bb->index, label_bb->index);
5739 err = 1;
5740 }
5741 }
5742
5743 FOR_EACH_EDGE (e, ei, bb->succs)
5744 e->dest->aux = (void *)0;
5745 }
5746 break;
5747
5748 case GIMPLE_EH_DISPATCH:
5749 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5750 break;
5751
5752 default:
5753 break;
5754 }
5755 }
5756
5757 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5758 verify_dominators (CDI_DOMINATORS);
5759
5760 return err;
5761 }
5762
5763 #if __GNUC__ >= 10
5764 # pragma GCC diagnostic pop
5765 #endif
5766
5767 /* Updates phi nodes after creating a forwarder block joined
5768 by edge FALLTHRU. */
5769
5770 static void
5771 gimple_make_forwarder_block (edge fallthru)
5772 {
5773 edge e;
5774 edge_iterator ei;
5775 basic_block dummy, bb;
5776 tree var;
5777 gphi_iterator gsi;
5778 bool forward_location_p;
5779
5780 dummy = fallthru->src;
5781 bb = fallthru->dest;
5782
5783 if (single_pred_p (bb))
5784 return;
5785
5786 /* We can forward location info if we have only one predecessor. */
5787 forward_location_p = single_pred_p (dummy);
5788
5789 /* If we redirected a branch we must create new PHI nodes at the
5790 start of BB. */
5791 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5792 {
5793 gphi *phi, *new_phi;
5794
5795 phi = gsi.phi ();
5796 var = gimple_phi_result (phi);
5797 new_phi = create_phi_node (var, bb);
5798 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5799 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5800 forward_location_p
5801 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5802 }
5803
5804 /* Add the arguments we have stored on edges. */
5805 FOR_EACH_EDGE (e, ei, bb->preds)
5806 {
5807 if (e == fallthru)
5808 continue;
5809
5810 flush_pending_stmts (e);
5811 }
5812 }
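
/* Illustrative summary: after make_forwarder_block splits a block,
   the original PHIs remain in the forwarder DUMMY; the loop above
   creates a fresh PHI in BB for each of them whose argument on
   FALLTHRU is the renamed result of the old PHI, and
   flush_pending_stmts fills in the arguments for the remaining
   (redirected) predecessor edges of BB.  */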
5813
5814
5815 /* Return a non-special label at the head of basic block BB.
5816 Create one if it doesn't exist. */
5817
5818 tree
5819 gimple_block_label (basic_block bb)
5820 {
5821 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5822 bool first = true;
5823 tree label;
5824 glabel *stmt;
5825
5826 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5827 {
5828 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5829 if (!stmt)
5830 break;
5831 label = gimple_label_label (stmt);
5832 if (!DECL_NONLOCAL (label))
5833 {
5834 if (!first)
5835 gsi_move_before (&i, &s);
5836 return label;
5837 }
5838 }
5839
5840 label = create_artificial_label (UNKNOWN_LOCATION);
5841 stmt = gimple_build_label (label);
5842 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5843 return label;
5844 }
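
/* A minimal usage sketch, mirroring the GIMPLE_SWITCH case in
   gimple_redirect_edge_and_branch below: to make DEST reachable by
   name, fetch (or create) its label and point the construct at it:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   where ELT stands for some case of a GIMPLE_SWITCH whose edge is
   being redirected to DEST.  */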
5845
5846
5847 /* Attempt to perform edge redirection by replacing a possibly complex
5848 jump instruction by a goto or by removing the jump completely.
5849 This can apply only if all edges now point to the same block. The
5850 parameters and return values are equivalent to
5851 redirect_edge_and_branch. */
5852
5853 static edge
5854 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5855 {
5856 basic_block src = e->src;
5857 gimple_stmt_iterator i;
5858 gimple *stmt;
5859
5860 /* We can replace or remove a complex jump only when we have exactly
5861 two edges. */
5862 if (EDGE_COUNT (src->succs) != 2
5863 /* Verify that all targets will be TARGET. Specifically, the
5864 edge that is not E must also go to TARGET. */
5865 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5866 return NULL;
5867
5868 i = gsi_last_bb (src);
5869 if (gsi_end_p (i))
5870 return NULL;
5871
5872 stmt = gsi_stmt (i);
5873
5874 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5875 {
5876 gsi_remove (&i, true);
5877 e = ssa_redirect_edge (e, target);
5878 e->flags = EDGE_FALLTHRU;
5879 return e;
5880 }
5881
5882 return NULL;
5883 }
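
/* For example, given

     <bb src>:
       if (x_1 > 0)
         goto <bb target>;
       else
         goto <bb other>;

   when the edge other than E already reaches TARGET, the GIMPLE_COND
   is deleted, E is redirected to TARGET and the surviving edge
   becomes a plain fallthru edge SRC -> TARGET.  */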
5884
5885
5886 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5887 edge representing the redirected branch. */
5888
5889 static edge
5890 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5891 {
5892 basic_block bb = e->src;
5893 gimple_stmt_iterator gsi;
5894 edge ret;
5895 gimple *stmt;
5896
5897 if (e->flags & EDGE_ABNORMAL)
5898 return NULL;
5899
5900 if (e->dest == dest)
5901 return NULL;
5902
5903 if (e->flags & EDGE_EH)
5904 return redirect_eh_edge (e, dest);
5905
5906 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5907 {
5908 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5909 if (ret)
5910 return ret;
5911 }
5912
5913 gsi = gsi_last_nondebug_bb (bb);
5914 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5915
5916 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5917 {
5918 case GIMPLE_COND:
5919 /* For COND_EXPR, we only need to redirect the edge. */
5920 break;
5921
5922 case GIMPLE_GOTO:
5923 /* No non-abnormal edges should lead from a non-simple goto, and
5924 simple ones should be represented implicitly. */
5925 gcc_unreachable ();
5926
5927 case GIMPLE_SWITCH:
5928 {
5929 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5930 tree label = gimple_block_label (dest);
5931 tree cases = get_cases_for_edge (e, switch_stmt);
5932
5933 /* If we have a list of cases associated with E, then use it
5934 as it's a lot faster than walking the entire case vector. */
5935 if (cases)
5936 {
5937 edge e2 = find_edge (e->src, dest);
5938 tree last, first;
5939
5940 first = cases;
5941 while (cases)
5942 {
5943 last = cases;
5944 CASE_LABEL (cases) = label;
5945 cases = CASE_CHAIN (cases);
5946 }
5947
5948 /* If there was already an edge in the CFG, then we need
5949 to move all the cases associated with E to E2. */
5950 if (e2)
5951 {
5952 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5953
5954 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5955 CASE_CHAIN (cases2) = first;
5956 }
5957 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5958 }
5959 else
5960 {
5961 size_t i, n = gimple_switch_num_labels (switch_stmt);
5962
5963 for (i = 0; i < n; i++)
5964 {
5965 tree elt = gimple_switch_label (switch_stmt, i);
5966 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5967 CASE_LABEL (elt) = label;
5968 }
5969 }
5970 }
5971 break;
5972
5973 case GIMPLE_ASM:
5974 {
5975 gasm *asm_stmt = as_a <gasm *> (stmt);
5976 int i, n = gimple_asm_nlabels (asm_stmt);
5977 tree label = NULL;
5978
5979 for (i = 0; i < n; ++i)
5980 {
5981 tree cons = gimple_asm_label_op (asm_stmt, i);
5982 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5983 {
5984 if (!label)
5985 label = gimple_block_label (dest);
5986 TREE_VALUE (cons) = label;
5987 }
5988 }
5989
5990 /* If we didn't find any label matching the former edge in the
5991 asm labels, we must be redirecting the fallthrough
5992 edge. */
5993 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5994 }
5995 break;
5996
5997 case GIMPLE_RETURN:
5998 gsi_remove (&gsi, true);
5999 e->flags |= EDGE_FALLTHRU;
6000 break;
6001
6002 case GIMPLE_OMP_RETURN:
6003 case GIMPLE_OMP_CONTINUE:
6004 case GIMPLE_OMP_SECTIONS_SWITCH:
6005 case GIMPLE_OMP_FOR:
6006 /* The edges from OMP constructs can be simply redirected. */
6007 break;
6008
6009 case GIMPLE_EH_DISPATCH:
6010 if (!(e->flags & EDGE_FALLTHRU))
6011 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6012 break;
6013
6014 case GIMPLE_TRANSACTION:
6015 if (e->flags & EDGE_TM_ABORT)
6016 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6017 gimple_block_label (dest));
6018 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6019 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6020 gimple_block_label (dest));
6021 else
6022 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6023 gimple_block_label (dest));
6024 break;
6025
6026 default:
6027 /* Otherwise it must be a fallthru edge, and we don't need to
6028 do anything besides redirecting it. */
6029 gcc_assert (e->flags & EDGE_FALLTHRU);
6030 break;
6031 }
6032
6033 /* Update/insert PHI nodes as necessary. */
6034
6035 /* Now update the edges in the CFG. */
6036 e = ssa_redirect_edge (e, dest);
6037
6038 return e;
6039 }
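
/* For example, redirecting the edge for 'case 2' of

     switch (x_1) <default: <L0>, case 1: <L1>, case 2: <L2>>

   to DEST rewrites that CASE_LABEL_EXPR (via the per-edge case list
   when edge_to_cases is recorded, or by scanning the whole case
   vector otherwise) to DEST's label before ssa_redirect_edge updates
   the CFG edge itself.  */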
6040
6041 /* Returns true if it is possible to remove edge E by redirecting
6042 it to the destination of the other edge from E->src. */
6043
6044 static bool
6045 gimple_can_remove_branch_p (const_edge e)
6046 {
6047 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6048 return false;
6049
6050 return true;
6051 }
6052
6053 /* Simple wrapper, as we can always redirect fallthru edges. */
6054
6055 static basic_block
6056 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6057 {
6058 e = gimple_redirect_edge_and_branch (e, dest);
6059 gcc_assert (e);
6060
6061 return NULL;
6062 }
6063
6064
6065 /* Splits basic block BB after statement STMT (but at least after the
6066 labels). If STMT is NULL, BB is split just after the labels. */
6067
6068 static basic_block
6069 gimple_split_block (basic_block bb, void *stmt)
6070 {
6071 gimple_stmt_iterator gsi;
6072 gimple_stmt_iterator gsi_tgt;
6073 gimple_seq list;
6074 basic_block new_bb;
6075 edge e;
6076 edge_iterator ei;
6077
6078 new_bb = create_empty_bb (bb);
6079
6080 /* Redirect the outgoing edges. */
6081 new_bb->succs = bb->succs;
6082 bb->succs = NULL;
6083 FOR_EACH_EDGE (e, ei, new_bb->succs)
6084 e->src = new_bb;
6085
6086 /* Get a stmt iterator pointing to the first stmt to move. */
6087 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6088 gsi = gsi_after_labels (bb);
6089 else
6090 {
6091 gsi = gsi_for_stmt ((gimple *) stmt);
6092 gsi_next (&gsi);
6093 }
6094
6095 /* Move everything from GSI to the new basic block. */
6096 if (gsi_end_p (gsi))
6097 return new_bb;
6098
6099 /* Split the statement list - avoid re-creating new containers as this
6100 brings ugly quadratic memory consumption in the inliner.
6101 (We are still quadratic since we need to update stmt BB pointers,
6102 sadly.) */
6103 gsi_split_seq_before (&gsi, &list);
6104 set_bb_seq (new_bb, list);
6105 for (gsi_tgt = gsi_start (list);
6106 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6107 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6108
6109 return new_bb;
6110 }
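
/* For example, splitting

     <bb 2>:
       a_1 = f ();
       b_2 = g ();

   after 'a_1 = f ();' leaves that statement in bb 2 and moves
   'b_2 = g ();' into the returned block, which also takes over all of
   bb 2's outgoing edges; the generic split_block wrapper then creates
   the fallthru edge from bb 2 to the new block.  */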
6111
6112
6113 /* Moves basic block BB after block AFTER. */
6114
6115 static bool
6116 gimple_move_block_after (basic_block bb, basic_block after)
6117 {
6118 if (bb->prev_bb == after)
6119 return true;
6120
6121 unlink_block (bb);
6122 link_block (bb, after);
6123
6124 return true;
6125 }
6126
6127
6128 /* Return TRUE if block BB has no executable statements, otherwise return
6129 FALSE. */
6130
6131 static bool
6132 gimple_empty_block_p (basic_block bb)
6133 {
6134 /* BB must have no executable statements. */
6135 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6136 if (phi_nodes (bb))
6137 return false;
6138 while (!gsi_end_p (gsi))
6139 {
6140 gimple *stmt = gsi_stmt (gsi);
6141 if (is_gimple_debug (stmt))
6142 ;
6143 else if (gimple_code (stmt) == GIMPLE_NOP
6144 || gimple_code (stmt) == GIMPLE_PREDICT)
6145 ;
6146 else
6147 return false;
6148 gsi_next (&gsi);
6149 }
6150 return true;
6151 }
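
/* Thus a block holding only labels, debug binds, GIMPLE_NOPs and
   GIMPLE_PREDICT statements counts as empty, since none of those
   generate code; a block with PHI nodes is never considered empty
   here.  */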
6152
6153
6154 /* Split a basic block if it ends with a conditional branch and if the
6155 other part of the block is not empty. */
6156
6157 static basic_block
6158 gimple_split_block_before_cond_jump (basic_block bb)
6159 {
6160 gimple *last, *split_point;
6161 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6162 if (gsi_end_p (gsi))
6163 return NULL;
6164 last = gsi_stmt (gsi);
6165 if (gimple_code (last) != GIMPLE_COND
6166 && gimple_code (last) != GIMPLE_SWITCH)
6167 return NULL;
6168 gsi_prev (&gsi);
6169 split_point = gsi_stmt (gsi);
6170 return split_block (bb, split_point)->dest;
6171 }
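
/* For example,

     <bb 3>:
       a_1 = f ();
       if (a_1 > 0) goto <bb 4>; else goto <bb 5>;

   is split so that 'a_1 = f ();' stays in bb 3 while the GIMPLE_COND
   starts the returned block.  */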
6172
6173
6174 /* Return true if basic block BB can be duplicated.  */
6175
6176 static bool
6177 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6178 {
6179 return true;
6180 }
6181
6182 /* Create a duplicate of the basic block BB. NOTE: This does not
6183 preserve SSA form. */
6184
6185 static basic_block
6186 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6187 {
6188 basic_block new_bb;
6189 gimple_stmt_iterator gsi_tgt;
6190
6191 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6192
6193 /* Copy the PHI nodes. We ignore PHI node arguments here because
6194 the incoming edges have not been set up yet. */
6195 for (gphi_iterator gpi = gsi_start_phis (bb);
6196 !gsi_end_p (gpi);
6197 gsi_next (&gpi))
6198 {
6199 gphi *phi, *copy;
6200 phi = gpi.phi ();
6201 copy = create_phi_node (NULL_TREE, new_bb);
6202 create_new_def_for (gimple_phi_result (phi), copy,
6203 gimple_phi_result_ptr (copy));
6204 gimple_set_uid (copy, gimple_uid (phi));
6205 }
6206
6207 gsi_tgt = gsi_start_bb (new_bb);
6208 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6209 !gsi_end_p (gsi);
6210 gsi_next (&gsi))
6211 {
6212 def_operand_p def_p;
6213 ssa_op_iter op_iter;
6214 tree lhs;
6215 gimple *stmt, *copy;
6216
6217 stmt = gsi_stmt (gsi);
6218 if (gimple_code (stmt) == GIMPLE_LABEL)
6219 continue;
6220
6221 /* Don't duplicate label debug stmts. */
6222 if (gimple_debug_bind_p (stmt)
6223 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6224 == LABEL_DECL)
6225 continue;
6226
6227 /* Create a new copy of STMT and duplicate STMT's virtual
6228 operands. */
6229 copy = gimple_copy (stmt);
6230 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6231
6232 maybe_duplicate_eh_stmt (copy, stmt);
6233 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6234
6235 /* When copying around a stmt writing into a local non-user
6236 aggregate, make sure it won't share stack slot with other
6237 vars. */
6238 lhs = gimple_get_lhs (stmt);
6239 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6240 {
6241 tree base = get_base_address (lhs);
6242 if (base
6243 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6244 && DECL_IGNORED_P (base)
6245 && !TREE_STATIC (base)
6246 && !DECL_EXTERNAL (base)
6247 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6248 DECL_NONSHAREABLE (base) = 1;
6249 }
6250
6251 /* If requested remap dependence info of cliques brought in
6252 via inlining. */
6253 if (id)
6254 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6255 {
6256 tree op = gimple_op (copy, i);
6257 if (!op)
6258 continue;
6259 if (TREE_CODE (op) == ADDR_EXPR
6260 || TREE_CODE (op) == WITH_SIZE_EXPR)
6261 op = TREE_OPERAND (op, 0);
6262 while (handled_component_p (op))
6263 op = TREE_OPERAND (op, 0);
6264 if ((TREE_CODE (op) == MEM_REF
6265 || TREE_CODE (op) == TARGET_MEM_REF)
6266 && MR_DEPENDENCE_CLIQUE (op) > 1
6267 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6268 {
6269 if (!id->dependence_map)
6270 id->dependence_map = new hash_map<dependence_hash,
6271 unsigned short>;
6272 bool existed;
6273 unsigned short &newc = id->dependence_map->get_or_insert
6274 (MR_DEPENDENCE_CLIQUE (op), &existed);
6275 if (!existed)
6276 {
6277 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6278 newc = ++cfun->last_clique;
6279 }
6280 MR_DEPENDENCE_CLIQUE (op) = newc;
6281 }
6282 }
6283
6284 /* Create new names for all the definitions created by COPY and
6285 add replacement mappings for each new name. */
6286 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6287 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6288 }
6289
6290 return new_bb;
6291 }
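
/* A sketch of the dependence info remapping above: if inlining
   produced a MEM_REF carrying 'clique 2 base 1' and clique 2 is not
   the clique owned by BB's loop, the copy is moved to a fresh clique,
   say 'clique 3 base 1'.  ID->dependence_map memoizes the old-to-new
   mapping, so all references copied from one original clique end up
   together in the same new clique.  */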
6292
6293 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6294
6295 static void
6296 add_phi_args_after_copy_edge (edge e_copy)
6297 {
6298 basic_block bb, bb_copy = e_copy->src, dest;
6299 edge e;
6300 edge_iterator ei;
6301 gphi *phi, *phi_copy;
6302 tree def;
6303 gphi_iterator psi, psi_copy;
6304
6305 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6306 return;
6307
6308 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6309
6310 if (e_copy->dest->flags & BB_DUPLICATED)
6311 dest = get_bb_original (e_copy->dest);
6312 else
6313 dest = e_copy->dest;
6314
6315 e = find_edge (bb, dest);
6316 if (!e)
6317 {
6318 /* During loop unrolling the target of the latch edge is copied.
6319 In this case we are not looking for the edge to DEST, but for
6320 the edge to the duplicated block whose original was DEST. */
6321 FOR_EACH_EDGE (e, ei, bb->succs)
6322 {
6323 if ((e->dest->flags & BB_DUPLICATED)
6324 && get_bb_original (e->dest) == dest)
6325 break;
6326 }
6327
6328 gcc_assert (e != NULL);
6329 }
6330
6331 for (psi = gsi_start_phis (e->dest),
6332 psi_copy = gsi_start_phis (e_copy->dest);
6333 !gsi_end_p (psi);
6334 gsi_next (&psi), gsi_next (&psi_copy))
6335 {
6336 phi = psi.phi ();
6337 phi_copy = psi_copy.phi ();
6338 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6339 add_phi_arg (phi_copy, def, e_copy,
6340 gimple_phi_arg_location_from_edge (phi, e));
6341 }
6342 }
6343
6344
6345 /* Basic block BB_COPY was created by code duplication. Add phi node
6346 arguments for edges going out of BB_COPY. The blocks that were
6347 duplicated have BB_DUPLICATED set. */
6348
6349 void
6350 add_phi_args_after_copy_bb (basic_block bb_copy)
6351 {
6352 edge e_copy;
6353 edge_iterator ei;
6354
6355 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6356 {
6357 add_phi_args_after_copy_edge (e_copy);
6358 }
6359 }
6360
6361 /* Blocks in REGION_COPY array of length N_REGION were created by
6362 duplication of basic blocks. Add phi node arguments for edges
6363 going from these blocks. If E_COPY is not NULL, also add
6364 phi node arguments for its destination. */
6365
6366 void
6367 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6368 edge e_copy)
6369 {
6370 unsigned i;
6371
6372 for (i = 0; i < n_region; i++)
6373 region_copy[i]->flags |= BB_DUPLICATED;
6374
6375 for (i = 0; i < n_region; i++)
6376 add_phi_args_after_copy_bb (region_copy[i]);
6377 if (e_copy)
6378 add_phi_args_after_copy_edge (e_copy);
6379
6380 for (i = 0; i < n_region; i++)
6381 region_copy[i]->flags &= ~BB_DUPLICATED;
6382 }
6383
6384 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6385 important exit edge EXIT. By important we mean that no SSA name defined
6386 inside region is live over the other exit edges of the region. All entry
6387 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6388 to the duplicate of the region. Dominance and loop information is
6389 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6390 UPDATE_DOMINANCE is false then we assume that the caller will update the
6391 dominance information after calling this function. The new basic
6392 blocks are stored to REGION_COPY in the same order as they had in REGION,
6393 provided that REGION_COPY is not NULL.
6394 The function returns false if it is unable to copy the region,
6395 true otherwise. */
6396
6397 bool
6398 gimple_duplicate_sese_region (edge entry, edge exit,
6399 basic_block *region, unsigned n_region,
6400 basic_block *region_copy,
6401 bool update_dominance)
6402 {
6403 unsigned i;
6404 bool free_region_copy = false, copying_header = false;
6405 class loop *loop = entry->dest->loop_father;
6406 edge exit_copy;
6407 vec<basic_block> doms = vNULL;
6408 edge redirected;
6409 profile_count total_count = profile_count::uninitialized ();
6410 profile_count entry_count = profile_count::uninitialized ();
6411
6412 if (!can_copy_bbs_p (region, n_region))
6413 return false;
6414
6415 /* Some sanity checking. Note that we do not check for all possible
6416 misuses of the function. I.e. if you ask to copy something weird,
6417 it will work, but the state of structures probably will not be
6418 correct. */
6419 for (i = 0; i < n_region; i++)
6420 {
6421 /* We do not handle subloops, i.e. all the blocks must belong to the
6422 same loop. */
6423 if (region[i]->loop_father != loop)
6424 return false;
6425
6426 if (region[i] != entry->dest
6427 && region[i] == loop->header)
6428 return false;
6429 }
6430
6431 /* In case the function is used for loop header copying (which is the primary
6432 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6433 if (loop->header == entry->dest)
6434 {
6435 copying_header = true;
6436
6437 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6438 return false;
6439
6440 for (i = 0; i < n_region; i++)
6441 if (region[i] != exit->src
6442 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6443 return false;
6444 }
6445
6446 initialize_original_copy_tables ();
6447
6448 if (copying_header)
6449 set_loop_copy (loop, loop_outer (loop));
6450 else
6451 set_loop_copy (loop, loop);
6452
6453 if (!region_copy)
6454 {
6455 region_copy = XNEWVEC (basic_block, n_region);
6456 free_region_copy = true;
6457 }
6458
6459 /* Record blocks outside the region that are dominated by something
6460 inside. */
6461 if (update_dominance)
6462 {
6463 doms.create (0);
6464 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6465 }
6466
6467 if (entry->dest->count.initialized_p ())
6468 {
6469 total_count = entry->dest->count;
6470 entry_count = entry->count ();
6471 /* Fix up corner cases, to avoid division by zero or creation of negative
6472 frequencies. */
6473 if (entry_count > total_count)
6474 entry_count = total_count;
6475 }
6476
6477 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6478 split_edge_bb_loc (entry), update_dominance);
6479 if (total_count.initialized_p () && entry_count.initialized_p ())
6480 {
6481 scale_bbs_frequencies_profile_count (region, n_region,
6482 total_count - entry_count,
6483 total_count);
6484 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6485 total_count);
6486 }
6487
6488 if (copying_header)
6489 {
6490 loop->header = exit->dest;
6491 loop->latch = exit->src;
6492 }
6493
6494 /* Redirect the entry and add the phi node arguments. */
6495 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6496 gcc_assert (redirected != NULL);
6497 flush_pending_stmts (entry);
6498
6499 /* Concerning updating of dominators: We must recount dominators
6500 for entry block and its copy. Anything that is outside of the
6501 region, but was dominated by something inside needs recounting as
6502 well. */
6503 if (update_dominance)
6504 {
6505 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6506 doms.safe_push (get_bb_original (entry->dest));
6507 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6508 doms.release ();
6509 }
6510
6511 /* Add the other PHI node arguments. */
6512 add_phi_args_after_copy (region_copy, n_region, NULL);
6513
6514 if (free_region_copy)
6515 free (region_copy);
6516
6517 free_original_copy_tables ();
6518 return true;
6519 }
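
/* In the primary use, loop header copying, this turns

     while (cond) { body; }

   into the equivalent of

     if (cond)
       do { body; } while (cond);

   ENTRY is the preheader edge, the region is the loop header, and the
   copy of the header placed on ENTRY acts as the initial 'if (cond)'
   guard.  */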
6520
6521 /* Checks if BB is part of the region defined by the N_REGION blocks in BBS. */
6522 static bool
6523 bb_part_of_region_p (basic_block bb, basic_block *bbs, unsigned n_region)
6524 {
6525 unsigned int n;
6526
6527 for (n = 0; n < n_region; n++)
6528 {
6529 if (bb == bbs[n])
6530 return true;
6531 }
6532 return false;
6533 }
6534
6535 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6536 are stored to REGION_COPY in the same order in which they appear
6537 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6538 the region, EXIT an exit from it. The condition guarding EXIT
6539 is moved to ENTRY. Returns true if duplication succeeds, false
6540 otherwise.
6541
6542 For example,
6543
6544 some_code;
6545 if (cond)
6546 A;
6547 else
6548 B;
6549
6550 is transformed to
6551
6552 if (cond)
6553 {
6554 some_code;
6555 A;
6556 }
6557 else
6558 {
6559 some_code;
6560 B;
6561 }
6562 */
6563
6564 bool
6565 gimple_duplicate_sese_tail (edge entry, edge exit,
6566 basic_block *region, unsigned n_region,
6567 basic_block *region_copy)
6568 {
6569 unsigned i;
6570 bool free_region_copy = false;
6571 class loop *loop = exit->dest->loop_father;
6572 class loop *orig_loop = entry->dest->loop_father;
6573 basic_block switch_bb, entry_bb, nentry_bb;
6574 vec<basic_block> doms;
6575 profile_count total_count = profile_count::uninitialized (),
6576 exit_count = profile_count::uninitialized ();
6577 edge exits[2], nexits[2], e;
6578 gimple_stmt_iterator gsi;
6579 gimple *cond_stmt;
6580 edge sorig, snew;
6581 basic_block exit_bb;
6582 gphi_iterator psi;
6583 gphi *phi;
6584 tree def;
6585 class loop *target, *aloop, *cloop;
6586
6587 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6588 exits[0] = exit;
6589 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6590
6591 if (!can_copy_bbs_p (region, n_region))
6592 return false;
6593
6594 initialize_original_copy_tables ();
6595 set_loop_copy (orig_loop, loop);
6596
6597 target = loop;
6598 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6599 {
6600 if (bb_part_of_region_p (aloop->header, region, n_region))
6601 {
6602 cloop = duplicate_loop (aloop, target);
6603 duplicate_subloops (aloop, cloop);
6604 }
6605 }
6606
6607 if (!region_copy)
6608 {
6609 region_copy = XNEWVEC (basic_block, n_region);
6610 free_region_copy = true;
6611 }
6612
6613 gcc_assert (!need_ssa_update_p (cfun));
6614
6615 /* Record blocks outside the region that are dominated by something
6616 inside. */
6617 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6618
6619 total_count = exit->src->count;
6620 exit_count = exit->count ();
6621 /* Fix up corner cases, to avoid division by zero or creation of negative
6622 frequencies. */
6623 if (exit_count > total_count)
6624 exit_count = total_count;
6625
6626 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6627 split_edge_bb_loc (exit), true);
6628 if (total_count.initialized_p () && exit_count.initialized_p ())
6629 {
6630 scale_bbs_frequencies_profile_count (region, n_region,
6631 total_count - exit_count,
6632 total_count);
6633 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6634 total_count);
6635 }
6636
6637 /* Create the switch block, and put the exit condition to it. */
6638 entry_bb = entry->dest;
6639 nentry_bb = get_bb_copy (entry_bb);
6640 if (!last_stmt (entry->src)
6641 || !stmt_ends_bb_p (last_stmt (entry->src)))
6642 switch_bb = entry->src;
6643 else
6644 switch_bb = split_edge (entry);
6645 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6646
6647 gsi = gsi_last_bb (switch_bb);
6648 cond_stmt = last_stmt (exit->src);
6649 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6650 cond_stmt = gimple_copy (cond_stmt);
6651
6652 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6653
6654 sorig = single_succ_edge (switch_bb);
6655 sorig->flags = exits[1]->flags;
6656 sorig->probability = exits[1]->probability;
6657 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6658 snew->probability = exits[0]->probability;
6659
6660
6661 /* Register the new edge from SWITCH_BB in loop exit lists. */
6662 rescan_loop_exit (snew, true, false);
6663
6664 /* Add the PHI node arguments. */
6665 add_phi_args_after_copy (region_copy, n_region, snew);
6666
6667 /* Get rid of now superfluous conditions and associated edges (and phi node
6668 arguments). */
6669 exit_bb = exit->dest;
6670
6671 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6672 PENDING_STMT (e) = NULL;
6673
6674 /* The latch of ORIG_LOOP was copied, and so was the backedge
6675 to the original header. We redirect this backedge to EXIT_BB. */
6676 for (i = 0; i < n_region; i++)
6677 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6678 {
6679 gcc_assert (single_succ_edge (region_copy[i]));
6680 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6681 PENDING_STMT (e) = NULL;
6682 for (psi = gsi_start_phis (exit_bb);
6683 !gsi_end_p (psi);
6684 gsi_next (&psi))
6685 {
6686 phi = psi.phi ();
6687 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6688 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6689 }
6690 }
6691 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6692 PENDING_STMT (e) = NULL;
6693
6694 /* Anything that is outside of the region, but was dominated by something
6695 inside needs to update dominance info. */
6696 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6697 doms.release ();
6698 /* Update the SSA web. */
6699 update_ssa (TODO_update_ssa);
6700
6701 if (free_region_copy)
6702 free (region_copy);
6703
6704 free_original_copy_tables ();
6705 return true;
6706 }
6707
6708 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6709 adding blocks when the dominator traversal reaches EXIT. This
6710 function silently assumes that ENTRY strictly dominates EXIT. */
6711
6712 void
6713 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6714 vec<basic_block> *bbs_p)
6715 {
6716 basic_block son;
6717
6718 for (son = first_dom_son (CDI_DOMINATORS, entry);
6719 son;
6720 son = next_dom_son (CDI_DOMINATORS, son))
6721 {
6722 bbs_p->safe_push (son);
6723 if (son != exit)
6724 gather_blocks_in_sese_region (son, exit, bbs_p);
6725 }
6726 }
6727
6728 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6729 The duplicates are recorded in VARS_MAP. */
6730
6731 static void
6732 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6733 tree to_context)
6734 {
6735 tree t = *tp, new_t;
6736 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6737
6738 if (DECL_CONTEXT (t) == to_context)
6739 return;
6740
6741 bool existed;
6742 tree &loc = vars_map->get_or_insert (t, &existed);
6743
6744 if (!existed)
6745 {
6746 if (SSA_VAR_P (t))
6747 {
6748 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6749 add_local_decl (f, new_t);
6750 }
6751 else
6752 {
6753 gcc_assert (TREE_CODE (t) == CONST_DECL);
6754 new_t = copy_node (t);
6755 }
6756 DECL_CONTEXT (new_t) = to_context;
6757
6758 loc = new_t;
6759 }
6760 else
6761 new_t = loc;
6762
6763 *tp = new_t;
6764 }
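
/* For example, moving a block that references a local 'int i' into
   TO_CONTEXT creates, on first sight, a duplicate VAR_DECL for 'i'
   with DECL_CONTEXT set to TO_CONTEXT, adds it to TO_CONTEXT's
   local_decls and rewrites *TP to it; later calls for the same decl
   reuse the entry recorded in VARS_MAP.  */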
6765
6766
6767 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6768 VARS_MAP maps old ssa names and var_decls to the new ones. */
6769
6770 static tree
6771 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6772 tree to_context)
6773 {
6774 tree new_name;
6775
6776 gcc_assert (!virtual_operand_p (name));
6777
6778 tree *loc = vars_map->get (name);
6779
6780 if (!loc)
6781 {
6782 tree decl = SSA_NAME_VAR (name);
6783 if (decl)
6784 {
6785 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6786 replace_by_duplicate_decl (&decl, vars_map, to_context);
6787 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6788 decl, SSA_NAME_DEF_STMT (name));
6789 }
6790 else
6791 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6792 name, SSA_NAME_DEF_STMT (name));
6793
6794 /* Now that we've used the def stmt to define new_name, make sure it
6795 doesn't define name anymore. */
6796 SSA_NAME_DEF_STMT (name) = NULL;
6797
6798 vars_map->put (name, new_name);
6799 }
6800 else
6801 new_name = *loc;
6802
6803 return new_name;
6804 }
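
/* For example, an SSA name 'i_3' based on a local VAR_DECL 'i' is
   recreated in TO_CONTEXT on top of the duplicate of 'i' obtained
   from replace_by_duplicate_decl, while an anonymous SSA name is
   simply copied into the destination function; either way the result
   is memoized in VARS_MAP and the def stmt is detached from the old
   name.  */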
6805
6806 struct move_stmt_d
6807 {
6808 tree orig_block;
6809 tree new_block;
6810 tree from_context;
6811 tree to_context;
6812 hash_map<tree, tree> *vars_map;
6813 htab_t new_label_map;
6814 hash_map<void *, void *> *eh_map;
6815 bool remap_decls_p;
6816 };
6817
6818 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6819 contained in *TP if it has been ORIG_BLOCK previously and change the
6820 DECL_CONTEXT of every local variable referenced in *TP. */
6821
6822 static tree
6823 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6824 {
6825 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6826 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6827 tree t = *tp;
6828
6829 if (EXPR_P (t))
6830 {
6831 tree block = TREE_BLOCK (t);
6832 if (block == NULL_TREE)
6833 ;
6834 else if (block == p->orig_block
6835 || p->orig_block == NULL_TREE)
6836 {
6837 /* tree_node_can_be_shared says we can share invariant
6838 addresses but unshare_expr copies them anyway. Make sure
6839 to unshare before adjusting the block in place - we do not
6840 always see a copy here. */
6841 if (TREE_CODE (t) == ADDR_EXPR
6842 && is_gimple_min_invariant (t))
6843 *tp = t = unshare_expr (t);
6844 TREE_SET_BLOCK (t, p->new_block);
6845 }
6846 else if (flag_checking)
6847 {
6848 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6849 block = BLOCK_SUPERCONTEXT (block);
6850 gcc_assert (block == p->orig_block);
6851 }
6852 }
6853 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6854 {
6855 if (TREE_CODE (t) == SSA_NAME)
6856 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6857 else if (TREE_CODE (t) == PARM_DECL
6858 && gimple_in_ssa_p (cfun))
6859 *tp = *(p->vars_map->get (t));
6860 else if (TREE_CODE (t) == LABEL_DECL)
6861 {
6862 if (p->new_label_map)
6863 {
6864 struct tree_map in, *out;
6865 in.base.from = t;
6866 out = (struct tree_map *)
6867 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6868 if (out)
6869 *tp = t = out->to;
6870 }
6871
6872 /* For FORCED_LABELs we can end up with references from other
6873 functions if some SESE regions are outlined. It is UB to
6874 jump in between them, but they could be used just for printing
6875 addresses etc. In that case, DECL_CONTEXT on the label should
6876 be the function containing the glabel stmt with that LABEL_DECL,
6877 rather than whatever function a reference to the label was seen
6878 last time. */
6879 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6880 DECL_CONTEXT (t) = p->to_context;
6881 }
6882 else if (p->remap_decls_p)
6883 {
6884 /* Replace T with its duplicate. T should no longer appear in the
6885 parent function, so this looks wasteful; however, it may appear
6886 in referenced_vars, and more importantly, as virtual operands of
6887 statements, and in alias lists of other variables. It would be
6888 quite difficult to expunge it from all those places. ??? It might
6889 suffice to do this for addressable variables. */
6890 if ((VAR_P (t) && !is_global_var (t))
6891 || TREE_CODE (t) == CONST_DECL)
6892 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6893 }
6894 *walk_subtrees = 0;
6895 }
6896 else if (TYPE_P (t))
6897 *walk_subtrees = 0;
6898
6899 return NULL_TREE;
6900 }
6901
6902 /* Helper for move_stmt_r. Given an EH region number for the source
6903 function, map that to the duplicate EH region number in the dest. */
6904
6905 static int
6906 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6907 {
6908 eh_region old_r, new_r;
6909
6910 old_r = get_eh_region_from_number (old_nr);
6911 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6912
6913 return new_r->index;
6914 }
6915
6916 /* Similar, but operate on INTEGER_CSTs. */
6917
6918 static tree
6919 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6920 {
6921 int old_nr, new_nr;
6922
6923 old_nr = tree_to_shwi (old_t_nr);
6924 new_nr = move_stmt_eh_region_nr (old_nr, p);
6925
6926 return build_int_cst (integer_type_node, new_nr);
6927 }
6928
6929 /* Like move_stmt_op, but for gimple statements.
6930
6931 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6932 contained in the current statement in *GSI_P and change the
6933 DECL_CONTEXT of every local variable referenced in the current
6934 statement. */
6935
6936 static tree
6937 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6938 struct walk_stmt_info *wi)
6939 {
6940 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6941 gimple *stmt = gsi_stmt (*gsi_p);
6942 tree block = gimple_block (stmt);
6943
6944 if (block == p->orig_block
6945 || (p->orig_block == NULL_TREE
6946 && block != NULL_TREE))
6947 gimple_set_block (stmt, p->new_block);
6948
6949 switch (gimple_code (stmt))
6950 {
6951 case GIMPLE_CALL:
6952 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6953 {
6954 tree r, fndecl = gimple_call_fndecl (stmt);
6955 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6956 switch (DECL_FUNCTION_CODE (fndecl))
6957 {
6958 case BUILT_IN_EH_COPY_VALUES:
6959 r = gimple_call_arg (stmt, 1);
6960 r = move_stmt_eh_region_tree_nr (r, p);
6961 gimple_call_set_arg (stmt, 1, r);
6962 /* FALLTHRU */
6963
6964 case BUILT_IN_EH_POINTER:
6965 case BUILT_IN_EH_FILTER:
6966 r = gimple_call_arg (stmt, 0);
6967 r = move_stmt_eh_region_tree_nr (r, p);
6968 gimple_call_set_arg (stmt, 0, r);
6969 break;
6970
6971 default:
6972 break;
6973 }
6974 }
6975 break;
6976
6977 case GIMPLE_RESX:
6978 {
6979 gresx *resx_stmt = as_a <gresx *> (stmt);
6980 int r = gimple_resx_region (resx_stmt);
6981 r = move_stmt_eh_region_nr (r, p);
6982 gimple_resx_set_region (resx_stmt, r);
6983 }
6984 break;
6985
6986 case GIMPLE_EH_DISPATCH:
6987 {
6988 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6989 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6990 r = move_stmt_eh_region_nr (r, p);
6991 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6992 }
6993 break;
6994
6995 case GIMPLE_OMP_RETURN:
6996 case GIMPLE_OMP_CONTINUE:
6997 break;
6998
6999 case GIMPLE_LABEL:
7000 {
7001 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7002 so that such labels can be referenced from other regions.
7003 Make sure to update it when seeing a GIMPLE_LABEL though,
7004 as that statement is the owner of the label. */
7005 walk_gimple_op (stmt, move_stmt_op, wi);
7006 *handled_ops_p = true;
7007 tree label = gimple_label_label (as_a <glabel *> (stmt));
7008 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7009 DECL_CONTEXT (label) = p->to_context;
7010 }
7011 break;
7012
7013 default:
7014 if (is_gimple_omp (stmt))
7015 {
7016 /* Do not remap variables inside OMP directives. Variables
7017 referenced in clauses and directive header belong to the
7018 parent function and should not be moved into the child
7019 function. */
7020 bool save_remap_decls_p = p->remap_decls_p;
7021 p->remap_decls_p = false;
7022 *handled_ops_p = true;
7023
7024 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7025 move_stmt_op, wi);
7026
7027 p->remap_decls_p = save_remap_decls_p;
7028 }
7029 break;
7030 }
7031
7032 return NULL_TREE;
7033 }
7034
7035 /* Move basic block BB from function CFUN to function DEST_FN. The
7036 block is moved out of the original linked list and placed after
7037 block AFTER in the new list. Also, the block is removed from the
7038 original array of blocks and placed in DEST_FN's array of blocks.
7039 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7040 updated to reflect the moved edges.
7041
7042 The local variables are remapped to new instances, VARS_MAP is used
7043 to record the mapping. */
7044
7045 static void
7046 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7047 basic_block after, bool update_edge_count_p,
7048 struct move_stmt_d *d)
7049 {
7050 struct control_flow_graph *cfg;
7051 edge_iterator ei;
7052 edge e;
7053 gimple_stmt_iterator si;
7054 unsigned old_len, new_len;
7055
7056 /* Remove BB from dominance structures. */
7057 delete_from_dominance_info (CDI_DOMINATORS, bb);
7058
7059 /* Move BB from its current loop to the copy in the new function. */
7060 if (current_loops)
7061 {
7062 class loop *new_loop = (class loop *)bb->loop_father->aux;
7063 if (new_loop)
7064 bb->loop_father = new_loop;
7065 }
7066
7067 /* Link BB to the new linked list. */
7068 move_block_after (bb, after);
7069
7070 /* Update the edge count in the corresponding flowgraphs. */
7071 if (update_edge_count_p)
7072 FOR_EACH_EDGE (e, ei, bb->succs)
7073 {
7074 cfun->cfg->x_n_edges--;
7075 dest_cfun->cfg->x_n_edges++;
7076 }
7077
7078 /* Remove BB from the original basic block array. */
7079 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7080 cfun->cfg->x_n_basic_blocks--;
7081
7082 /* Grow DEST_CFUN's basic block array if needed. */
7083 cfg = dest_cfun->cfg;
7084 cfg->x_n_basic_blocks++;
7085 if (bb->index >= cfg->x_last_basic_block)
7086 cfg->x_last_basic_block = bb->index + 1;
7087
7088 old_len = vec_safe_length (cfg->x_basic_block_info);
7089 if ((unsigned) cfg->x_last_basic_block >= old_len)
7090 {
7091 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7092 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7093 }
7094
7095 (*cfg->x_basic_block_info)[bb->index] = bb;
7096
7097 /* Remap the variables in phi nodes. */
7098 for (gphi_iterator psi = gsi_start_phis (bb);
7099 !gsi_end_p (psi); )
7100 {
7101 gphi *phi = psi.phi ();
7102 use_operand_p use;
7103 tree op = PHI_RESULT (phi);
7104 ssa_op_iter oi;
7105 unsigned i;
7106
7107 if (virtual_operand_p (op))
7108 {
7109 /* Remove the phi nodes for virtual operands (alias analysis will be
7110 run for the new function, anyway). But replace all uses that
7111 might be outside of the region we move. */
7112 use_operand_p use_p;
7113 imm_use_iterator iter;
7114 gimple *use_stmt;
7115 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7116 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7117 SET_USE (use_p, SSA_NAME_VAR (op));
7118 remove_phi_node (&psi, true);
7119 continue;
7120 }
7121
7122 SET_PHI_RESULT (phi,
7123 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7124 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7125 {
7126 op = USE_FROM_PTR (use);
7127 if (TREE_CODE (op) == SSA_NAME)
7128 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7129 }
7130
7131 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7132 {
7133 location_t locus = gimple_phi_arg_location (phi, i);
7134 tree block = LOCATION_BLOCK (locus);
7135
7136 if (locus == UNKNOWN_LOCATION)
7137 continue;
7138 if (d->orig_block == NULL_TREE || block == d->orig_block)
7139 {
7140 locus = set_block (locus, d->new_block);
7141 gimple_phi_arg_set_location (phi, i, locus);
7142 }
7143 }
7144
7145 gsi_next (&psi);
7146 }
7147
7148 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7149 {
7150 gimple *stmt = gsi_stmt (si);
7151 struct walk_stmt_info wi;
7152
7153 memset (&wi, 0, sizeof (wi));
7154 wi.info = d;
7155 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7156
7157 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7158 {
7159 tree label = gimple_label_label (label_stmt);
7160 int uid = LABEL_DECL_UID (label);
7161
7162 gcc_assert (uid > -1);
7163
7164 old_len = vec_safe_length (cfg->x_label_to_block_map);
7165 if (old_len <= (unsigned) uid)
7166 {
7167 new_len = 3 * uid / 2 + 1;
7168 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7169 }
7170
7171 (*cfg->x_label_to_block_map)[uid] = bb;
7172 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7173
7174 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7175
7176 if (uid >= dest_cfun->cfg->last_label_uid)
7177 dest_cfun->cfg->last_label_uid = uid + 1;
7178 }
7179
7180 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7181 remove_stmt_from_eh_lp_fn (cfun, stmt);
7182
7183 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7184 gimple_remove_stmt_histograms (cfun, stmt);
7185
7186 /* We cannot leave any operands allocated from the operand caches of
7187 the current function. */
7188 free_stmt_operands (cfun, stmt);
7189 push_cfun (dest_cfun);
7190 update_stmt (stmt);
7191 pop_cfun ();
7192 }
7193
7194 FOR_EACH_EDGE (e, ei, bb->succs)
7195 if (e->goto_locus != UNKNOWN_LOCATION)
7196 {
7197 tree block = LOCATION_BLOCK (e->goto_locus);
7198 if (d->orig_block == NULL_TREE
7199 || block == d->orig_block)
7200 e->goto_locus = set_block (e->goto_locus, d->new_block);
7201 }
7202 }
7203
7204 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7205 the outermost EH region. Use REGION as the incoming base EH region.
7206 If there is no single outermost region, return NULL and set *ALL to
7207 true. */
7208
7209 static eh_region
7210 find_outermost_region_in_block (struct function *src_cfun,
7211 basic_block bb, eh_region region,
7212 bool *all)
7213 {
7214 gimple_stmt_iterator si;
7215
7216 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7217 {
7218 gimple *stmt = gsi_stmt (si);
7219 eh_region stmt_region;
7220 int lp_nr;
7221
7222 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7223 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7224 if (stmt_region)
7225 {
7226 if (region == NULL)
7227 region = stmt_region;
7228 else if (stmt_region != region)
7229 {
7230 region = eh_region_outermost (src_cfun, stmt_region, region);
7231 if (region == NULL)
7232 {
7233 *all = true;
7234 return NULL;
7235 }
7236 }
7237 }
7238 }
7239
7240 return region;
7241 }
7242
7243 static tree
7244 new_label_mapper (tree decl, void *data)
7245 {
7246 htab_t hash = (htab_t) data;
7247 struct tree_map *m;
7248 void **slot;
7249
7250 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7251
7252 m = XNEW (struct tree_map);
7253 m->hash = DECL_UID (decl);
7254 m->base.from = decl;
7255 m->to = create_artificial_label (UNKNOWN_LOCATION);
7256 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7257 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7258 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7259
7260 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7261 gcc_assert (*slot == NULL);
7262
7263 *slot = m;
7264
7265 return m->to;
7266 }
7267
7268 /* Tree walker to replace the decls used inside value expressions by
7269 duplicates. */
7270
7271 static tree
7272 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7273 {
7274 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7275
7276 switch (TREE_CODE (*tp))
7277 {
7278 case VAR_DECL:
7279 case PARM_DECL:
7280 case RESULT_DECL:
7281 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7282 break;
7283 default:
7284 break;
7285 }
7286
7287 if (IS_TYPE_OR_DECL_P (*tp))
7288 *walk_subtrees = false;
7289
7290 return NULL;
7291 }
7292
7293 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7294 subblocks. */
7295
7296 static void
7297 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7298 tree to_context)
7299 {
7300 tree *tp, t;
7301
7302 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7303 {
7304 t = *tp;
7305 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7306 continue;
7307 replace_by_duplicate_decl (&t, vars_map, to_context);
7308 if (t != *tp)
7309 {
7310 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7311 {
7312 tree x = DECL_VALUE_EXPR (*tp);
7313 struct replace_decls_d rd = { vars_map, to_context };
7314 x = unshare_expr (x);
7315 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7316 SET_DECL_VALUE_EXPR (t, x);
7317 DECL_HAS_VALUE_EXPR_P (t) = 1;
7318 }
7319 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7320 *tp = t;
7321 }
7322 }
7323
7324 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7325 replace_block_vars_by_duplicates (block, vars_map, to_context);
7326 }
7327
7328 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7329 from FN1 to FN2. */
7330
7331 static void
7332 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7333 class loop *loop)
7334 {
7335 /* Discard it from the old loop array. */
7336 (*get_loops (fn1))[loop->num] = NULL;
7337
7338 /* Place it in the new loop array, assigning it a new number. */
7339 loop->num = number_of_loops (fn2);
7340 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7341
7342 /* Recurse to children. */
7343 for (loop = loop->inner; loop; loop = loop->next)
7344 fixup_loop_arrays_after_move (fn1, fn2, loop);
7345 }
7346
7347 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7348 delimited by ENTRY and EXIT, possibly containing noreturn blocks. */
7349
7350 DEBUG_FUNCTION void
7351 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7352 {
7353 basic_block bb;
7354 edge_iterator ei;
7355 edge e;
7356 bitmap bbs = BITMAP_ALLOC (NULL);
7357 int i;
7358
7359 gcc_assert (entry != NULL);
7360 gcc_assert (entry != exit);
7361 gcc_assert (bbs_p != NULL);
7362
7363 gcc_assert (bbs_p->length () > 0);
7364
7365 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7366 bitmap_set_bit (bbs, bb->index);
7367
7368 gcc_assert (bitmap_bit_p (bbs, entry->index));
7369 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7370
7371 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7372 {
7373 if (bb == entry)
7374 {
7375 gcc_assert (single_pred_p (entry));
7376 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7377 }
7378 else
7379 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7380 {
7381 e = ei_edge (ei);
7382 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7383 }
7384
7385 if (bb == exit)
7386 {
7387 gcc_assert (single_succ_p (exit));
7388 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7389 }
7390 else
7391 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7392 {
7393 e = ei_edge (ei);
7394 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7395 }
7396 }
7397
7398 BITMAP_FREE (bbs);
7399 }
7400
7401 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7402
7403 bool
7404 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7405 {
7406 bitmap release_names = (bitmap)data;
7407
7408 if (TREE_CODE (from) != SSA_NAME)
7409 return true;
7410
7411 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7412 return true;
7413 }
7414
7415 /* Return LOOP_DIST_ALIAS call if present in BB. */
7416
7417 static gimple *
7418 find_loop_dist_alias (basic_block bb)
7419 {
7420 gimple *g = last_stmt (bb);
7421 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7422 return NULL;
7423
7424 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7425 gsi_prev (&gsi);
7426 if (gsi_end_p (gsi))
7427 return NULL;
7428
7429 g = gsi_stmt (gsi);
7430 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7431 return g;
7432 return NULL;
7433 }
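
/* I.e. this matches the pattern

     _1 = .LOOP_DIST_ALIAS (orig_loop_num, default);
     if (_1 != 0) goto ...; else goto ...;

   at the end of BB; the first argument is the original loop number
   and the second is the value the call is folded to when only one
   version of the loop survives (see the uses below).  */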
7434
7435 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7436 to VALUE and update any immediate uses of its LHS. */
7437
7438 void
7439 fold_loop_internal_call (gimple *g, tree value)
7440 {
7441 tree lhs = gimple_call_lhs (g);
7442 use_operand_p use_p;
7443 imm_use_iterator iter;
7444 gimple *use_stmt;
7445 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7446
7447 update_call_from_tree (&gsi, value);
7448 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7449 {
7450 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7451 SET_USE (use_p, value);
7452 update_stmt (use_stmt);
7453 }
7454 }
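
/* For example, given

     _1 = .LOOP_VECTORIZED (1, 2);
     if (_1 != 0) goto <vector copy>; else goto <scalar copy>;

   calling fold_loop_internal_call (g, boolean_true_node) rewrites the
   call into '_1 = 1' and substitutes the value into the GIMPLE_COND,
   after which CFG cleanup can remove the dead copy of the loop.  */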
7455
7456 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7457 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7458 single basic block in the original CFG and the new basic block is
7459 returned. DEST_CFUN must not have a CFG yet.
7460
7461 Note that the region need not be a pure SESE region. Blocks inside
7462 the region may contain calls to abort/exit. The only restriction
7463 is that ENTRY_BB should be the only entry point and it must
7464 dominate EXIT_BB.
7465
7466 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7467 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7468 to the new function.
7469
7470 All local variables referenced in the region are assumed to be in
7471 the corresponding BLOCK_VARS and unexpanded variable lists
7472 associated with DEST_CFUN.
7473
7474 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7475 reimplement move_sese_region_to_fn by duplicating the region rather than
7476 moving it. */
7477
7478 basic_block
7479 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7480 basic_block exit_bb, tree orig_block)
7481 {
7482 vec<basic_block> bbs, dom_bbs;
7483 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7484 basic_block after, bb, *entry_pred, *exit_succ, abb;
7485 struct function *saved_cfun = cfun;
7486 int *entry_flag, *exit_flag;
7487 profile_probability *entry_prob, *exit_prob;
7488 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7489 edge e;
7490 edge_iterator ei;
7491 htab_t new_label_map;
7492 hash_map<void *, void *> *eh_map;
7493 class loop *loop = entry_bb->loop_father;
7494 class loop *loop0 = get_loop (saved_cfun, 0);
7495 struct move_stmt_d d;
7496
7497 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7498 region. */
7499 gcc_assert (entry_bb != exit_bb
7500 && (!exit_bb
7501 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7502
7503 /* Collect all the blocks in the region. Manually add ENTRY_BB
7504 because it won't be added by dfs_enumerate_from. */
7505 bbs.create (0);
7506 bbs.safe_push (entry_bb);
7507 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7508
7509 if (flag_checking)
7510 verify_sese (entry_bb, exit_bb, &bbs);
7511
7512 /* The blocks that used to be dominated by something in BBS will now be
7513 dominated by the new block. */
7514 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7515 bbs.address (),
7516 bbs.length ());
7517
7518 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7519 the predecessor edges to ENTRY_BB and the successor edges to
7520 EXIT_BB so that we can re-attach them to the new basic block that
7521 will replace the region. */
7522 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7523 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7524 entry_flag = XNEWVEC (int, num_entry_edges);
7525 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7526 i = 0;
7527 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7528 {
7529 entry_prob[i] = e->probability;
7530 entry_flag[i] = e->flags;
7531 entry_pred[i++] = e->src;
7532 remove_edge (e);
7533 }
7534
7535 if (exit_bb)
7536 {
7537 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7538 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7539 exit_flag = XNEWVEC (int, num_exit_edges);
7540 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7541 i = 0;
7542 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7543 {
7544 exit_prob[i] = e->probability;
7545 exit_flag[i] = e->flags;
7546 exit_succ[i++] = e->dest;
7547 remove_edge (e);
7548 }
7549 }
7550 else
7551 {
7552 num_exit_edges = 0;
7553 exit_succ = NULL;
7554 exit_flag = NULL;
7555 exit_prob = NULL;
7556 }
7557
7558 /* Switch context to the child function to initialize DEST_FN's CFG. */
7559 gcc_assert (dest_cfun->cfg == NULL);
7560 push_cfun (dest_cfun);
7561
7562 init_empty_tree_cfg ();
7563
7564 /* Initialize EH information for the new function. */
7565 eh_map = NULL;
7566 new_label_map = NULL;
7567 if (saved_cfun->eh)
7568 {
7569 eh_region region = NULL;
7570 bool all = false;
7571
7572 FOR_EACH_VEC_ELT (bbs, i, bb)
7573 {
7574 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7575 if (all)
7576 break;
7577 }
7578
7579 init_eh_for_function ();
7580 if (region != NULL || all)
7581 {
7582 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7583 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7584 new_label_mapper, new_label_map);
7585 }
7586 }
7587
7588 /* Initialize an empty loop tree. */
7589 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7590 init_loops_structure (dest_cfun, loops, 1);
7591 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7592 set_loops_for_fn (dest_cfun, loops);
7593
7594 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7595
7596 /* Move the outlined loop tree part. */
7597 num_nodes = bbs.length ();
7598 FOR_EACH_VEC_ELT (bbs, i, bb)
7599 {
7600 if (bb->loop_father->header == bb)
7601 {
7602 class loop *this_loop = bb->loop_father;
7603 class loop *outer = loop_outer (this_loop);
7604 if (outer == loop
7605 /* If the SESE region contains some bbs ending with
7606 a noreturn call, those are considered to belong
7607 to the outermost loop in saved_cfun, rather than
7608 the entry_bb's loop_father. */
7609 || outer == loop0)
7610 {
7611 if (outer != loop)
7612 num_nodes -= this_loop->num_nodes;
7613 flow_loop_tree_node_remove (bb->loop_father);
7614 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7615 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7616 }
7617 }
7618 else if (bb->loop_father == loop0 && loop0 != loop)
7619 num_nodes--;
7620
7621 /* Remove loop exits from the outlined region. */
7622 if (loops_for_fn (saved_cfun)->exits)
7623 FOR_EACH_EDGE (e, ei, bb->succs)
7624 {
7625 struct loops *l = loops_for_fn (saved_cfun);
7626 loop_exit **slot
7627 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7628 NO_INSERT);
7629 if (slot)
7630 l->exits->clear_slot (slot);
7631 }
7632 }
7633
7634 /* Adjust the number of blocks in the tree root of the outlined part. */
7635 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7636
7637 /* Setup a mapping to be used by move_block_to_fn. */
7638 loop->aux = current_loops->tree_root;
7639 loop0->aux = current_loops->tree_root;
7640
7641 /* Fix up orig_loop_num. If the block referenced in it has been moved
7642 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7643 class loop *dloop;
7644 signed char *moved_orig_loop_num = NULL;
7645 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7646 if (dloop->orig_loop_num)
7647 {
7648 if (moved_orig_loop_num == NULL)
7649 moved_orig_loop_num
7650 = XCNEWVEC (signed char, vec_safe_length (larray));
7651 if ((*larray)[dloop->orig_loop_num] != NULL
7652 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7653 {
7654 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7655 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7656 moved_orig_loop_num[dloop->orig_loop_num]++;
7657 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7658 }
7659 else
7660 {
7661 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7662 dloop->orig_loop_num = 0;
7663 }
7664 }
7665 pop_cfun ();
7666
7667 if (moved_orig_loop_num)
7668 {
7669 FOR_EACH_VEC_ELT (bbs, i, bb)
7670 {
7671 gimple *g = find_loop_dist_alias (bb);
7672 if (g == NULL)
7673 continue;
7674
7675 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7676 gcc_assert (orig_loop_num
7677 && (unsigned) orig_loop_num < vec_safe_length (larray));
7678 if (moved_orig_loop_num[orig_loop_num] == 2)
7679 {
7680 /* If we have moved both loops with this orig_loop_num into
7681 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7682 too, update the first argument. */
7683 gcc_assert ((*larray)[orig_loop_num] != NULL
7684 && (get_loop (saved_cfun, orig_loop_num)
7685 == NULL));
7686 tree t = build_int_cst (integer_type_node,
7687 (*larray)[orig_loop_num]->num);
7688 gimple_call_set_arg (g, 0, t);
7689 update_stmt (g);
7690 /* Make sure the following loop will not update it. */
7691 moved_orig_loop_num[orig_loop_num] = 0;
7692 }
7693 else
7694 /* Otherwise at least one of the loops stayed in saved_cfun.
7695 Remove the LOOP_DIST_ALIAS call. */
7696 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7697 }
7698 FOR_EACH_BB_FN (bb, saved_cfun)
7699 {
7700 gimple *g = find_loop_dist_alias (bb);
7701 if (g == NULL)
7702 continue;
7703 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7704 gcc_assert (orig_loop_num
7705 && (unsigned) orig_loop_num < vec_safe_length (larray));
7706 if (moved_orig_loop_num[orig_loop_num])
7707 /* LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7708 of the corresponding loops was moved, remove it. */
7709 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7710 }
7711 XDELETEVEC (moved_orig_loop_num);
7712 }
7713 ggc_free (larray);
7714
7715 /* Move blocks from BBS into DEST_CFUN. */
7716 gcc_assert (bbs.length () >= 2);
7717 after = dest_cfun->cfg->x_entry_block_ptr;
7718 hash_map<tree, tree> vars_map;
7719
7720 memset (&d, 0, sizeof (d));
7721 d.orig_block = orig_block;
7722 d.new_block = DECL_INITIAL (dest_cfun->decl);
7723 d.from_context = cfun->decl;
7724 d.to_context = dest_cfun->decl;
7725 d.vars_map = &vars_map;
7726 d.new_label_map = new_label_map;
7727 d.eh_map = eh_map;
7728 d.remap_decls_p = true;
7729
7730 if (gimple_in_ssa_p (cfun))
7731 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7732 {
7733 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7734 set_ssa_default_def (dest_cfun, arg, narg);
7735 vars_map.put (arg, narg);
7736 }
7737
7738 FOR_EACH_VEC_ELT (bbs, i, bb)
7739 {
7740 /* No need to update edge counts on the last block. It has
7741 already been updated earlier when we detached the region from
7742 the original CFG. */
7743 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7744 after = bb;
7745 }
7746
7747 loop->aux = NULL;
7748 loop0->aux = NULL;
7749 /* Loop sizes are no longer correct, fix them up. */
7750 loop->num_nodes -= num_nodes;
7751 for (class loop *outer = loop_outer (loop);
7752 outer; outer = loop_outer (outer))
7753 outer->num_nodes -= num_nodes;
7754 loop0->num_nodes -= bbs.length () - num_nodes;
7755
7756 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7757 {
7758 class loop *aloop;
7759 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7760 if (aloop != NULL)
7761 {
7762 if (aloop->simduid)
7763 {
7764 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7765 d.to_context);
7766 dest_cfun->has_simduid_loops = true;
7767 }
7768 if (aloop->force_vectorize)
7769 dest_cfun->has_force_vectorize_loops = true;
7770 }
7771 }
7772
7773 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7774 if (orig_block)
7775 {
7776 tree block;
7777 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7778 == NULL_TREE);
7779 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7780 = BLOCK_SUBBLOCKS (orig_block);
7781 for (block = BLOCK_SUBBLOCKS (orig_block);
7782 block; block = BLOCK_CHAIN (block))
7783 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7784 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7785 }
7786
7787 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7788 &vars_map, dest_cfun->decl);
7789
7790 if (new_label_map)
7791 htab_delete (new_label_map);
7792 if (eh_map)
7793 delete eh_map;
7794
7795 if (gimple_in_ssa_p (cfun))
7796 {
7797 /* We need to release ssa-names in a defined order, so first find them,
7798 and then iterate in ascending version order. */
7799 bitmap release_names = BITMAP_ALLOC (NULL);
7800 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7801 bitmap_iterator bi;
7802 unsigned i;
7803 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7804 release_ssa_name (ssa_name (i));
7805 BITMAP_FREE (release_names);
7806 }
7807
7808 /* Rewire the entry and exit blocks. The successor to the entry
7809 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7810 the child function. Similarly, the predecessor of DEST_FN's
7811 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7812 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7813 various CFG manipulation functions get to the right CFG.
7814
7815 FIXME, this is silly. The CFG ought to become a parameter to
7816 these helpers. */
7817 push_cfun (dest_cfun);
7818 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7819 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7820 if (exit_bb)
7821 {
7822 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7823 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7824 }
7825 else
7826 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7827 pop_cfun ();
7828
7829 /* Back in the original function, the SESE region has disappeared,
7830 create a new basic block in its place. */
7831 bb = create_empty_bb (entry_pred[0]);
7832 if (current_loops)
7833 add_bb_to_loop (bb, loop);
7834 for (i = 0; i < num_entry_edges; i++)
7835 {
7836 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7837 e->probability = entry_prob[i];
7838 }
7839
7840 for (i = 0; i < num_exit_edges; i++)
7841 {
7842 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7843 e->probability = exit_prob[i];
7844 }
7845
7846 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7847 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7848 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7849 dom_bbs.release ();
7850
7851 if (exit_bb)
7852 {
7853 free (exit_prob);
7854 free (exit_flag);
7855 free (exit_succ);
7856 }
7857 free (entry_prob);
7858 free (entry_flag);
7859 free (entry_pred);
7860 bbs.release ();
7861
7862 return bb;
7863 }
7864
7865 /* Dump default def DEF to file FILE using FLAGS and indentation
7866 SPC. */
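/* For instance, for a parameter 'int n' whose default definition is the
   SSA name n_1(D), this prints a line of the form

     int n_1(D) = n;

   possibly preceded by SSA info printed by dump_ssaname_info_to_file.  */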
7867
7868 static void
7869 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7870 {
7871 for (int i = 0; i < spc; ++i)
7872 fprintf (file, " ");
7873 dump_ssaname_info_to_file (file, def, spc);
7874
7875 print_generic_expr (file, TREE_TYPE (def), flags);
7876 fprintf (file, " ");
7877 print_generic_expr (file, def, flags);
7878 fprintf (file, " = ");
7879 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7880 fprintf (file, ";\n");
7881 }
7882
7883 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
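/* For example, a VALUE whose mask combines the "address" and "undefined"
   sanitizers (e.g. from __attribute__((no_sanitize ("address",
   "undefined")))) is printed as "address | undefined"; the names come
   from the sanitizer_opts table.  */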
7884
7885 static void
7886 print_no_sanitize_attr_value (FILE *file, tree value)
7887 {
7888 unsigned int flags = tree_to_uhwi (value);
7889 bool first = true;
7890 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7891 {
7892 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7893 {
7894 if (!first)
7895 fprintf (file, " | ");
7896 fprintf (file, "%s", sanitizer_opts[i].name);
7897 first = false;
7898 }
7899 }
7900 }
7901
7902 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7903 dumpfile.h). */
7904
7905 void
7906 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7907 {
7908 tree arg, var, old_current_fndecl = current_function_decl;
7909 struct function *dsf;
7910 bool ignore_topmost_bind = false, any_var = false;
7911 basic_block bb;
7912 tree chain;
7913 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7914 && decl_is_tm_clone (fndecl));
7915 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7916
7917 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7918 {
7919 fprintf (file, "__attribute__((");
7920
7921 bool first = true;
7922 tree chain;
7923 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7924 first = false, chain = TREE_CHAIN (chain))
7925 {
7926 if (!first)
7927 fprintf (file, ", ");
7928
7929 tree name = get_attribute_name (chain);
7930 print_generic_expr (file, name, dump_flags);
7931 if (TREE_VALUE (chain) != NULL_TREE)
7932 {
7933 fprintf (file, " (");
7934
7935 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7936 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7937 else
7938 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7939 fprintf (file, ")");
7940 }
7941 }
7942
7943 fprintf (file, "))\n");
7944 }
7945
7946 current_function_decl = fndecl;
7947 if (flags & TDF_GIMPLE)
7948 {
7949 static bool hotness_bb_param_printed = false;
7950 if (profile_info != NULL
7951 && !hotness_bb_param_printed)
7952 {
7953 hotness_bb_param_printed = true;
7954 fprintf (file,
7955 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
7956 " */\n", get_hot_bb_threshold ());
7957 }
7958
7959 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7960 dump_flags | TDF_SLIM);
7961 fprintf (file, " __GIMPLE (%s",
7962 (fun->curr_properties & PROP_ssa) ? "ssa"
7963 : (fun->curr_properties & PROP_cfg) ? "cfg"
7964 : "");
7965
7966 if (cfun->cfg)
7967 {
7968 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7969 if (bb->count.initialized_p ())
7970 fprintf (file, ",%s(%d)",
7971 profile_quality_as_string (bb->count.quality ()),
7972 bb->count.value ());
7973 fprintf (file, ")\n%s (", function_name (fun));
7974 }
7975 }
7976 else
7977 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7978
7979 arg = DECL_ARGUMENTS (fndecl);
7980 while (arg)
7981 {
7982 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7983 fprintf (file, " ");
7984 print_generic_expr (file, arg, dump_flags);
7985 if (DECL_CHAIN (arg))
7986 fprintf (file, ", ");
7987 arg = DECL_CHAIN (arg);
7988 }
7989 fprintf (file, ")\n");
7990
7991 dsf = DECL_STRUCT_FUNCTION (fndecl);
7992 if (dsf && (flags & TDF_EH))
7993 dump_eh_tree (file, dsf);
7994
7995 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7996 {
7997 dump_node (fndecl, TDF_SLIM | flags, file);
7998 current_function_decl = old_current_fndecl;
7999 return;
8000 }
8001
8002 /* When GIMPLE is lowered, the variables are no longer available in
8003 BIND_EXPRs, so display them separately. */
8004 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8005 {
8006 unsigned ix;
8007 ignore_topmost_bind = true;
8008
8009 fprintf (file, "{\n");
8010 if (gimple_in_ssa_p (fun)
8011 && (flags & TDF_ALIAS))
8012 {
8013 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8014 arg = DECL_CHAIN (arg))
8015 {
8016 tree def = ssa_default_def (fun, arg);
8017 if (def)
8018 dump_default_def (file, def, 2, flags);
8019 }
8020
8021 tree res = DECL_RESULT (fun->decl);
8022 if (res != NULL_TREE
8023 && DECL_BY_REFERENCE (res))
8024 {
8025 tree def = ssa_default_def (fun, res);
8026 if (def)
8027 dump_default_def (file, def, 2, flags);
8028 }
8029
8030 tree static_chain = fun->static_chain_decl;
8031 if (static_chain != NULL_TREE)
8032 {
8033 tree def = ssa_default_def (fun, static_chain);
8034 if (def)
8035 dump_default_def (file, def, 2, flags);
8036 }
8037 }
8038
8039 if (!vec_safe_is_empty (fun->local_decls))
8040 FOR_EACH_LOCAL_DECL (fun, ix, var)
8041 {
8042 print_generic_decl (file, var, flags);
8043 fprintf (file, "\n");
8044
8045 any_var = true;
8046 }
8047
8048 tree name;
8049
8050 if (gimple_in_ssa_p (cfun))
8051 FOR_EACH_SSA_NAME (ix, name, cfun)
8052 {
8053 if (!SSA_NAME_VAR (name))
8054 {
8055 fprintf (file, " ");
8056 print_generic_expr (file, TREE_TYPE (name), flags);
8057 fprintf (file, " ");
8058 print_generic_expr (file, name, flags);
8059 fprintf (file, ";\n");
8060
8061 any_var = true;
8062 }
8063 }
8064 }
8065
8066 if (fun && fun->decl == fndecl
8067 && fun->cfg
8068 && basic_block_info_for_fn (fun))
8069 {
8070 /* If the CFG has been built, emit a CFG-based dump. */
8071 if (!ignore_topmost_bind)
8072 fprintf (file, "{\n");
8073
8074 if (any_var && n_basic_blocks_for_fn (fun))
8075 fprintf (file, "\n");
8076
8077 FOR_EACH_BB_FN (bb, fun)
8078 dump_bb (file, bb, 2, flags);
8079
8080 fprintf (file, "}\n");
8081 }
8082 else if (fun->curr_properties & PROP_gimple_any)
8083 {
8084 /* The function is now in GIMPLE form but the CFG has not been
8085 built yet. Emit the single sequence of GIMPLE statements
8086 that make up its body. */
8087 gimple_seq body = gimple_body (fndecl);
8088
8089 if (gimple_seq_first_stmt (body)
8090 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8091 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8092 print_gimple_seq (file, body, 0, flags);
8093 else
8094 {
8095 if (!ignore_topmost_bind)
8096 fprintf (file, "{\n");
8097
8098 if (any_var)
8099 fprintf (file, "\n");
8100
8101 print_gimple_seq (file, body, 2, flags);
8102 fprintf (file, "}\n");
8103 }
8104 }
8105 else
8106 {
8107 int indent;
8108
8109 /* Make a tree based dump. */
8110 chain = DECL_SAVED_TREE (fndecl);
8111 if (chain && TREE_CODE (chain) == BIND_EXPR)
8112 {
8113 if (ignore_topmost_bind)
8114 {
8115 chain = BIND_EXPR_BODY (chain);
8116 indent = 2;
8117 }
8118 else
8119 indent = 0;
8120 }
8121 else
8122 {
8123 if (!ignore_topmost_bind)
8124 {
8125 fprintf (file, "{\n");
8126 /* No topmost bind, pretend it's ignored for later. */
8127 ignore_topmost_bind = true;
8128 }
8129 indent = 2;
8130 }
8131
8132 if (any_var)
8133 fprintf (file, "\n");
8134
8135 print_generic_stmt_indented (file, chain, flags, indent);
8136 if (ignore_topmost_bind)
8137 fprintf (file, "}\n");
8138 }
8139
8140 if (flags & TDF_ENUMERATE_LOCALS)
8141 dump_enumerated_decls (file, flags);
8142 fprintf (file, "\n\n");
8143
8144 current_function_decl = old_current_fndecl;
8145 }
8146
8147 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8148
8149 DEBUG_FUNCTION void
8150 debug_function (tree fn, dump_flags_t flags)
8151 {
8152 dump_function_to_file (fn, stderr, flags);
8153 }
8154
8155
8156 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8157
8158 static void
8159 print_pred_bbs (FILE *file, basic_block bb)
8160 {
8161 edge e;
8162 edge_iterator ei;
8163
8164 FOR_EACH_EDGE (e, ei, bb->preds)
8165 fprintf (file, "bb_%d ", e->src->index);
8166 }
8167
8168
8169 /* Print on FILE the indexes for the successors of basic_block BB. */
8170
8171 static void
8172 print_succ_bbs (FILE *file, basic_block bb)
8173 {
8174 edge e;
8175 edge_iterator ei;
8176
8177 FOR_EACH_EDGE (e, ei, bb->succs)
8178 fprintf (file, "bb_%d ", e->dest->index);
8179 }
8180
8181 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
8182
8183 void
8184 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8185 {
8186 char *s_indent = (char *) alloca ((size_t) indent + 1);
8187 memset ((void *) s_indent, ' ', (size_t) indent);
8188 s_indent[indent] = '\0';
8189
8190 /* Print basic_block's header. */
8191 if (verbosity >= 2)
8192 {
8193 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8194 print_pred_bbs (file, bb);
8195 fprintf (file, "}, succs = {");
8196 print_succ_bbs (file, bb);
8197 fprintf (file, "})\n");
8198 }
8199
8200 /* Print basic_block's body. */
8201 if (verbosity >= 3)
8202 {
8203 fprintf (file, "%s {\n", s_indent);
8204 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8205 fprintf (file, "%s }\n", s_indent);
8206 }
8207 }
8208
8209 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8210
8211 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
8212 the VERBOSITY level, this outputs the contents of the loop, or just
8213 its structure. */
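/* The header line printed here has the form

     loop_N (header = H, latch = L, niter = EXPR[, upper_bound = B]...)

   with the upper bound, likely upper bound, estimate and unroll factor
   printed only when the corresponding information is recorded on the
   loop.  */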
8214
8215 static void
8216 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8217 {
8218 char *s_indent;
8219 basic_block bb;
8220
8221 if (loop == NULL)
8222 return;
8223
8224 s_indent = (char *) alloca ((size_t) indent + 1);
8225 memset ((void *) s_indent, ' ', (size_t) indent);
8226 s_indent[indent] = '\0';
8227
8228 /* Print loop's header. */
8229 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8230 if (loop->header)
8231 fprintf (file, "header = %d", loop->header->index);
8232 else
8233 {
8234 fprintf (file, "deleted)\n");
8235 return;
8236 }
8237 if (loop->latch)
8238 fprintf (file, ", latch = %d", loop->latch->index);
8239 else
8240 fprintf (file, ", multiple latches");
8241 fprintf (file, ", niter = ");
8242 print_generic_expr (file, loop->nb_iterations);
8243
8244 if (loop->any_upper_bound)
8245 {
8246 fprintf (file, ", upper_bound = ");
8247 print_decu (loop->nb_iterations_upper_bound, file);
8248 }
8249 if (loop->any_likely_upper_bound)
8250 {
8251 fprintf (file, ", likely_upper_bound = ");
8252 print_decu (loop->nb_iterations_likely_upper_bound, file);
8253 }
8254
8255 if (loop->any_estimate)
8256 {
8257 fprintf (file, ", estimate = ");
8258 print_decu (loop->nb_iterations_estimate, file);
8259 }
8260 if (loop->unroll)
8261 fprintf (file, ", unroll = %d", loop->unroll);
8262 fprintf (file, ")\n");
8263
8264 /* Print loop's body. */
8265 if (verbosity >= 1)
8266 {
8267 fprintf (file, "%s{\n", s_indent);
8268 FOR_EACH_BB_FN (bb, cfun)
8269 if (bb->loop_father == loop)
8270 print_loops_bb (file, bb, indent, verbosity);
8271
8272 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8273 fprintf (file, "%s}\n", s_indent);
8274 }
8275 }
8276
8277 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8278 spaces. Depending on the VERBOSITY level, this outputs the contents
8279 of the loop, or just its structure. */
8280
8281 static void
8282 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8283 int verbosity)
8284 {
8285 if (loop == NULL)
8286 return;
8287
8288 print_loop (file, loop, indent, verbosity);
8289 print_loop_and_siblings (file, loop->next, indent, verbosity);
8290 }
8291
8292 /* Follow a CFG edge from the entry point of the function, and on entry
8293 of a loop, pretty print the loop structure on FILE. */
8294
8295 void
8296 print_loops (FILE *file, int verbosity)
8297 {
8298 basic_block bb;
8299
8300 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8301 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8302 if (bb && bb->loop_father)
8303 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8304 }
8305
8306 /* Dump a loop. */
8307
8308 DEBUG_FUNCTION void
8309 debug (class loop &ref)
8310 {
8311 print_loop (stderr, &ref, 0, /*verbosity*/0);
8312 }
8313
8314 DEBUG_FUNCTION void
8315 debug (class loop *ptr)
8316 {
8317 if (ptr)
8318 debug (*ptr);
8319 else
8320 fprintf (stderr, "<nil>\n");
8321 }
8322
8323 /* Dump a loop verbosely. */
8324
8325 DEBUG_FUNCTION void
8326 debug_verbose (class loop &ref)
8327 {
8328 print_loop (stderr, &ref, 0, /*verbosity*/3);
8329 }
8330
8331 DEBUG_FUNCTION void
8332 debug_verbose (class loop *ptr)
8333 {
8334 if (ptr)
8335 debug_verbose (*ptr);
8336 else
8337 fprintf (stderr, "<nil>\n");
8338 }
8339
8340
8341 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8342
8343 DEBUG_FUNCTION void
8344 debug_loops (int verbosity)
8345 {
8346 print_loops (stderr, verbosity);
8347 }
8348
8349 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8350
8351 DEBUG_FUNCTION void
8352 debug_loop (class loop *loop, int verbosity)
8353 {
8354 print_loop (stderr, loop, 0, verbosity);
8355 }
8356
8357 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8358 level. */
8359
8360 DEBUG_FUNCTION void
8361 debug_loop_num (unsigned num, int verbosity)
8362 {
8363 debug_loop (get_loop (cfun, num), verbosity);
8364 }
8365
8366 /* Return true if BB ends with a call, possibly followed by some
8367 instructions that must stay with the call. Return false
8368 otherwise. */
8369
8370 static bool
8371 gimple_block_ends_with_call_p (basic_block bb)
8372 {
8373 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8374 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8375 }
8376
8377
8378 /* Return true if BB ends with a conditional branch. Return false
8379 otherwise. */
8380
8381 static bool
8382 gimple_block_ends_with_condjump_p (const_basic_block bb)
8383 {
8384 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8385 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8386 }
8387
8388
8389 /* Return true if statement T may terminate execution of BB in ways not
8390 explicitly represented in the CFG. */
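/* For example, a call to an arbitrary external function may terminate the
   block (it could call exit or longjmp), while a nothrow const builtin
   such as __builtin_popcount cannot.  */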
8391
8392 bool
8393 stmt_can_terminate_bb_p (gimple *t)
8394 {
8395 tree fndecl = NULL_TREE;
8396 int call_flags = 0;
8397
8398 /* An EH exception not handled internally terminates execution of the
8399 whole function. */
8400 if (stmt_can_throw_external (cfun, t))
8401 return true;
8402
8403 /* NORETURN and LONGJMP calls already have an edge to exit.
8404 CONST and PURE calls do not need one.
8405 We don't currently check for CONST and PURE here, although
8406 it would be a good idea, because those attributes are
8407 figured out from the RTL in mark_constant_function, and
8408 the counter incrementation code from -fprofile-arcs
8409 leads to different results from -fbranch-probabilities. */
8410 if (is_gimple_call (t))
8411 {
8412 fndecl = gimple_call_fndecl (t);
8413 call_flags = gimple_call_flags (t);
8414 }
8415
8416 if (is_gimple_call (t)
8417 && fndecl
8418 && fndecl_built_in_p (fndecl)
8419 && (call_flags & ECF_NOTHROW)
8420 && !(call_flags & ECF_RETURNS_TWICE)
8421 /* fork() doesn't really return twice, but the effect of
8422 wrapping it in __gcov_fork() which calls __gcov_flush()
8423 and clears the counters before forking has the same
8424 effect as returning twice. Force a fake edge. */
8425 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8426 return false;
8427
8428 if (is_gimple_call (t))
8429 {
8430 edge_iterator ei;
8431 edge e;
8432 basic_block bb;
8433
8434 if (call_flags & (ECF_PURE | ECF_CONST)
8435 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8436 return false;
8437
8438 /* A function call may do a longjmp, terminate the program or do other
8439 things. Special-case noreturn calls: when all outgoing edges are fake,
8440 the fact that the call does not return is already represented by the
lack of edges out of T. */
8441 if (!(call_flags & ECF_NORETURN))
8442 return true;
8443
8444 bb = gimple_bb (t);
8445 FOR_EACH_EDGE (e, ei, bb->succs)
8446 if ((e->flags & EDGE_FAKE) == 0)
8447 return true;
8448 }
8449
8450 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8451 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8452 return true;
8453
8454 return false;
8455 }
8456
8457
8458 /* Add fake edges to the function exit for any non-constant and
8459 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8460 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8461 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8462 that were split.
8463
8464 The goal is to expose cases in which entering a basic block does
8465 not imply that all subsequent instructions must be executed. */
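/* For instance, in a block containing

     x = foo ();
     y = x + 1;

   foo may call exit, so the block is split after the call and a fake edge
   to EXIT is added: reaching the block no longer implies that the second
   statement executes.  */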
8466
8467 static int
8468 gimple_flow_call_edges_add (sbitmap blocks)
8469 {
8470 int i;
8471 int blocks_split = 0;
8472 int last_bb = last_basic_block_for_fn (cfun);
8473 bool check_last_block = false;
8474
8475 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8476 return 0;
8477
8478 if (! blocks)
8479 check_last_block = true;
8480 else
8481 check_last_block = bitmap_bit_p (blocks,
8482 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8483
8484 /* In the last basic block, before epilogue generation, there will be
8485 a fallthru edge to EXIT. Special care is required if the last insn
8486 of the last basic block is a call because make_edge folds duplicate
8487 edges, which would result in the fallthru edge also being marked
8488 fake, which would result in the fallthru edge being removed by
8489 remove_fake_edges, which would result in an invalid CFG.
8490
8491 Moreover, we can't elide the outgoing fake edge, since the block
8492 profiler needs to take this into account in order to solve the minimal
8493 spanning tree in the case that the call doesn't return.
8494
8495 Handle this by adding a dummy instruction in a new last basic block. */
8496 if (check_last_block)
8497 {
8498 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8499 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8500 gimple *t = NULL;
8501
8502 if (!gsi_end_p (gsi))
8503 t = gsi_stmt (gsi);
8504
8505 if (t && stmt_can_terminate_bb_p (t))
8506 {
8507 edge e;
8508
8509 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8510 if (e)
8511 {
8512 gsi_insert_on_edge (e, gimple_build_nop ());
8513 gsi_commit_edge_inserts ();
8514 }
8515 }
8516 }
8517
8518 /* Now add fake edges to the function exit for any non-constant
8519 calls, since there is no way that we can determine if they will
8520 return or not... */
8521 for (i = 0; i < last_bb; i++)
8522 {
8523 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8524 gimple_stmt_iterator gsi;
8525 gimple *stmt, *last_stmt;
8526
8527 if (!bb)
8528 continue;
8529
8530 if (blocks && !bitmap_bit_p (blocks, i))
8531 continue;
8532
8533 gsi = gsi_last_nondebug_bb (bb);
8534 if (!gsi_end_p (gsi))
8535 {
8536 last_stmt = gsi_stmt (gsi);
8537 do
8538 {
8539 stmt = gsi_stmt (gsi);
8540 if (stmt_can_terminate_bb_p (stmt))
8541 {
8542 edge e;
8543
8544 /* The handling above of the final block before the
8545 epilogue should be enough to verify that there is
8546 no edge to the exit block in CFG already.
8547 Calling make_edge in such case would cause us to
8548 mark that edge as fake and remove it later. */
8549 if (flag_checking && stmt == last_stmt)
8550 {
8551 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8552 gcc_assert (e == NULL);
8553 }
8554
8555 /* Note that the following may create a new basic block
8556 and renumber the existing basic blocks. */
8557 if (stmt != last_stmt)
8558 {
8559 e = split_block (bb, stmt);
8560 if (e)
8561 blocks_split++;
8562 }
8563 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8564 e->probability = profile_probability::guessed_never ();
8565 }
8566 gsi_prev (&gsi);
8567 }
8568 while (!gsi_end_p (gsi));
8569 }
8570 }
8571
8572 if (blocks_split)
8573 checking_verify_flow_info ();
8574
8575 return blocks_split;
8576 }
8577
8578 /* Removes edge E and all the blocks dominated by it, and updates dominance
8579 information. The IL in E->src needs to be updated separately.
8580 If dominance info is not available, only the edge E is removed. */
8581
8582 void
8583 remove_edge_and_dominated_blocks (edge e)
8584 {
8585 vec<basic_block> bbs_to_remove = vNULL;
8586 vec<basic_block> bbs_to_fix_dom = vNULL;
8587 edge f;
8588 edge_iterator ei;
8589 bool none_removed = false;
8590 unsigned i;
8591 basic_block bb, dbb;
8592 bitmap_iterator bi;
8593
8594 /* If we are removing a path inside a non-root loop, that may change
8595 loop ownership of blocks or remove loops; mark loops for fixup. */
8596 if (current_loops
8597 && loop_outer (e->src->loop_father) != NULL
8598 && e->src->loop_father == e->dest->loop_father)
8599 loops_state_set (LOOPS_NEED_FIXUP);
8600
8601 if (!dom_info_available_p (CDI_DOMINATORS))
8602 {
8603 remove_edge (e);
8604 return;
8605 }
8606
8607 /* No updating is needed for edges to exit. */
8608 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8609 {
8610 if (cfgcleanup_altered_bbs)
8611 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8612 remove_edge (e);
8613 return;
8614 }
8615
8616 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8617 that is not dominated by E->dest, then this set is empty. Otherwise,
8618 all the basic blocks dominated by E->dest are removed.
8619
8620 Also, to DF_IDOM we store the immediate dominators of the blocks in
8621 the dominance frontier of E (i.e., of the successors of the
8622 removed blocks, if there are any, and of E->dest otherwise). */
8623 FOR_EACH_EDGE (f, ei, e->dest->preds)
8624 {
8625 if (f == e)
8626 continue;
8627
8628 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8629 {
8630 none_removed = true;
8631 break;
8632 }
8633 }
8634
8635 auto_bitmap df, df_idom;
8636 if (none_removed)
8637 bitmap_set_bit (df_idom,
8638 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8639 else
8640 {
8641 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8642 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8643 {
8644 FOR_EACH_EDGE (f, ei, bb->succs)
8645 {
8646 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8647 bitmap_set_bit (df, f->dest->index);
8648 }
8649 }
8650 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8651 bitmap_clear_bit (df, bb->index);
8652
8653 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8654 {
8655 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8656 bitmap_set_bit (df_idom,
8657 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8658 }
8659 }
8660
8661 if (cfgcleanup_altered_bbs)
8662 {
8663 /* Record the set of the altered basic blocks. */
8664 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8665 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8666 }
8667
8668 /* Remove E and the cancelled blocks. */
8669 if (none_removed)
8670 remove_edge (e);
8671 else
8672 {
8673 /* Walk backwards so as to get a chance to substitute all
8674 released DEFs into debug stmts. See
8675 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8676 details. */
8677 for (i = bbs_to_remove.length (); i-- > 0; )
8678 delete_basic_block (bbs_to_remove[i]);
8679 }
8680
8681 /* Update the dominance information. The immediate dominator may change only
8682 for blocks whose immediate dominator belongs to DF_IDOM:
8683
8684 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8685 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8686 Z dominates X after the removal. Before removal, there exists a path P
8687 from Y to X that avoids Z. Let F be the last edge on P that is
8688 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8689 dominates W, and because of P, Z does not dominate W), and W belongs to
8690 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8691 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8692 {
8693 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8694 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8695 dbb;
8696 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8697 bbs_to_fix_dom.safe_push (dbb);
8698 }
8699
8700 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8701
8702 bbs_to_remove.release ();
8703 bbs_to_fix_dom.release ();
8704 }
8705
8706 /* Purge dead EH edges from basic block BB. */
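/* E.g. once the last statement of BB is known not to throw internally,
   any EH successor edges of BB are dead and can be removed together with
   the blocks they dominate.  */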
8707
8708 bool
8709 gimple_purge_dead_eh_edges (basic_block bb)
8710 {
8711 bool changed = false;
8712 edge e;
8713 edge_iterator ei;
8714 gimple *stmt = last_stmt (bb);
8715
8716 if (stmt && stmt_can_throw_internal (cfun, stmt))
8717 return false;
8718
8719 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8720 {
8721 if (e->flags & EDGE_EH)
8722 {
8723 remove_edge_and_dominated_blocks (e);
8724 changed = true;
8725 }
8726 else
8727 ei_next (&ei);
8728 }
8729
8730 return changed;
8731 }
8732
8733 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8734
8735 bool
8736 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8737 {
8738 bool changed = false;
8739 unsigned i;
8740 bitmap_iterator bi;
8741
8742 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8743 {
8744 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8745
8746 /* Earlier gimple_purge_dead_eh_edges could have removed
8747 this basic block already. */
8748 gcc_assert (bb || changed);
8749 if (bb != NULL)
8750 changed |= gimple_purge_dead_eh_edges (bb);
8751 }
8752
8753 return changed;
8754 }
8755
8756 /* Purge dead abnormal call edges from basic block BB. */
8757
8758 bool
8759 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8760 {
8761 bool changed = false;
8762 edge e;
8763 edge_iterator ei;
8764 gimple *stmt = last_stmt (bb);
8765
8766 if (!cfun->has_nonlocal_label
8767 && !cfun->calls_setjmp)
8768 return false;
8769
8770 if (stmt && stmt_can_make_abnormal_goto (stmt))
8771 return false;
8772
8773 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8774 {
8775 if (e->flags & EDGE_ABNORMAL)
8776 {
8777 if (e->flags & EDGE_FALLTHRU)
8778 e->flags &= ~EDGE_ABNORMAL;
8779 else
8780 remove_edge_and_dominated_blocks (e);
8781 changed = true;
8782 }
8783 else
8784 ei_next (&ei);
8785 }
8786
8787 return changed;
8788 }
8789
8790 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8791
8792 bool
8793 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8794 {
8795 bool changed = false;
8796 unsigned i;
8797 bitmap_iterator bi;
8798
8799 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8800 {
8801 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8802
8803 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8804 this basic block already. */
8805 gcc_assert (bb || changed);
8806 if (bb != NULL)
8807 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8808 }
8809
8810 return changed;
8811 }
8812
8813 /* This function is called whenever a new edge is created or
8814 redirected. */
8815
8816 static void
8817 gimple_execute_on_growing_pred (edge e)
8818 {
8819 basic_block bb = e->dest;
8820
8821 if (!gimple_seq_empty_p (phi_nodes (bb)))
8822 reserve_phi_args_for_new_edge (bb);
8823 }
8824
8825 /* This function is called immediately before edge E is removed from
8826 the edge vector E->dest->preds. */
8827
8828 static void
8829 gimple_execute_on_shrinking_pred (edge e)
8830 {
8831 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8832 remove_phi_args (e);
8833 }
8834
8835 /*---------------------------------------------------------------------------
8836 Helper functions for Loop versioning
8837 ---------------------------------------------------------------------------*/
8838
8839 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8840 of 'first'. Both of them are dominated by 'new_head' basic block. When
8841 'new_head' was created by splitting 'second's incoming edge, it received
8842 phi arguments on that edge from split_edge(). Later, an additional edge
8843 'e' was created to connect 'new_head' and 'first'. This routine now adds
8844 to that edge 'e' the phi args that the 'new_head' to 'second' edge
8845 received as part of the edge splitting. */
8846
8847 static void
8848 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8849 basic_block new_head, edge e)
8850 {
8851 gphi *phi1, *phi2;
8852 gphi_iterator psi1, psi2;
8853 tree def;
8854 edge e2 = find_edge (new_head, second);
8855
8856 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8857 edge, we should always have an edge from NEW_HEAD to SECOND. */
8858 gcc_assert (e2 != NULL);
8859
8860 /* Browse all 'second' basic block phi nodes and add phi args to
8861 edge 'e' for 'first' head. PHI args are always in correct order. */
8862
8863 for (psi2 = gsi_start_phis (second),
8864 psi1 = gsi_start_phis (first);
8865 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8866 gsi_next (&psi2), gsi_next (&psi1))
8867 {
8868 phi1 = psi1.phi ();
8869 phi2 = psi2.phi ();
8870 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8871 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8872 }
8873 }
8874
8875
8876 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8877 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8878 the destination of the ELSE part. */
8879
8880 static void
8881 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8882 basic_block second_head ATTRIBUTE_UNUSED,
8883 basic_block cond_bb, void *cond_e)
8884 {
8885 gimple_stmt_iterator gsi;
8886 gimple *new_cond_expr;
8887 tree cond_expr = (tree) cond_e;
8888 edge e0;
8889
8890 /* Build the new conditional expr. */
8891 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8892 NULL_TREE, NULL_TREE);
8893
8894 /* Add new cond in cond_bb. */
8895 gsi = gsi_last_bb (cond_bb);
8896 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8897
8898 /* Adjust edges appropriately to connect new head with first head
8899 as well as second head. */
8900 e0 = single_succ_edge (cond_bb);
8901 e0->flags &= ~EDGE_FALLTHRU;
8902 e0->flags |= EDGE_FALSE_VALUE;
8903 }
8904
8905
8906 /* Do book-keeping of basic block BB for the profile consistency checker.
8907 Store the counts in RECORD. */
8908 static void
8909 gimple_account_profile_record (basic_block bb,
8910 struct profile_record *record)
8911 {
8912 gimple_stmt_iterator i;
8913 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8914 {
8915 record->size
8916 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8917 if (bb->count.initialized_p ())
8918 record->time
8919 += estimate_num_insns (gsi_stmt (i),
8920 &eni_time_weights) * bb->count.to_gcov_type ();
8921 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8922 record->time
8923 += estimate_num_insns (gsi_stmt (i),
8924 &eni_time_weights) * bb->count.to_frequency (cfun);
8925 }
8926 }
8927
8928 struct cfg_hooks gimple_cfg_hooks = {
8929 "gimple",
8930 gimple_verify_flow_info,
8931 gimple_dump_bb, /* dump_bb */
8932 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8933 create_bb, /* create_basic_block */
8934 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8935 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8936 gimple_can_remove_branch_p, /* can_remove_branch_p */
8937 remove_bb, /* delete_basic_block */
8938 gimple_split_block, /* split_block */
8939 gimple_move_block_after, /* move_block_after */
8940 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8941 gimple_merge_blocks, /* merge_blocks */
8942 gimple_predict_edge, /* predict_edge */
8943 gimple_predicted_by_p, /* predicted_by_p */
8944 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8945 gimple_duplicate_bb, /* duplicate_block */
8946 gimple_split_edge, /* split_edge */
8947 gimple_make_forwarder_block, /* make_forwarder_block */
8948 NULL, /* tidy_fallthru_edge */
8949 NULL, /* force_nonfallthru */
8950 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8951 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8952 gimple_flow_call_edges_add, /* flow_call_edges_add */
8953 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8954 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8955 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8956 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8957 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8958 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8959 flush_pending_stmts, /* flush_pending_stmts */
8960 gimple_empty_block_p, /* block_empty_p */
8961 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8962 gimple_account_profile_record,
8963 };
8964
8965
8966 /* Split all critical edges. Split some extra (not necessarily critical) edges
8967 if FOR_EDGE_INSERTION_P is true. */
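/* An edge is critical when its source has more than one successor and its
   destination has more than one predecessor (see EDGE_CRITICAL_P); such an
   edge must be split before code can be inserted on it.  */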
8968
8969 unsigned int
8970 split_critical_edges (bool for_edge_insertion_p /* = false */)
8971 {
8972 basic_block bb;
8973 edge e;
8974 edge_iterator ei;
8975
8976 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8977 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8978 mappings around the calls to split_edge. */
8979 start_recording_case_labels ();
8980 FOR_ALL_BB_FN (bb, cfun)
8981 {
8982 FOR_EACH_EDGE (e, ei, bb->succs)
8983 {
8984 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8985 split_edge (e);
8986 /* PRE inserts statements onto edges and expects that,
8987 since split_critical_edges was done beforehand, committing edge
8988 insertions will not split more edges. In addition to critical
8989 edges we must split edges whose source has multiple successors
8990 and ends with a control flow statement, such as RESX.
8991 Go ahead and split them too. This matches the logic in
8992 gimple_find_edge_insert_loc. */
8993 else if (for_edge_insertion_p
8994 && (!single_pred_p (e->dest)
8995 || !gimple_seq_empty_p (phi_nodes (e->dest))
8996 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8997 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8998 && !(e->flags & EDGE_ABNORMAL))
8999 {
9000 gimple_stmt_iterator gsi;
9001
9002 gsi = gsi_last_bb (e->src);
9003 if (!gsi_end_p (gsi)
9004 && stmt_ends_bb_p (gsi_stmt (gsi))
9005 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9006 && !gimple_call_builtin_p (gsi_stmt (gsi),
9007 BUILT_IN_RETURN)))
9008 split_edge (e);
9009 }
9010 }
9011 }
9012 end_recording_case_labels ();
9013 return 0;
9014 }
9015
9016 namespace {
9017
9018 const pass_data pass_data_split_crit_edges =
9019 {
9020 GIMPLE_PASS, /* type */
9021 "crited", /* name */
9022 OPTGROUP_NONE, /* optinfo_flags */
9023 TV_TREE_SPLIT_EDGES, /* tv_id */
9024 PROP_cfg, /* properties_required */
9025 PROP_no_crit_edges, /* properties_provided */
9026 0, /* properties_destroyed */
9027 0, /* todo_flags_start */
9028 0, /* todo_flags_finish */
9029 };
9030
9031 class pass_split_crit_edges : public gimple_opt_pass
9032 {
9033 public:
9034 pass_split_crit_edges (gcc::context *ctxt)
9035 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9036 {}
9037
9038 /* opt_pass methods: */
9039 virtual unsigned int execute (function *) { return split_critical_edges (); }
9040
9041 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9042 }; // class pass_split_crit_edges
9043
9044 } // anon namespace
9045
9046 gimple_opt_pass *
9047 make_pass_split_crit_edges (gcc::context *ctxt)
9048 {
9049 return new pass_split_crit_edges (ctxt);
9050 }
9051
9052
9053 /* Insert COND expression, which is a GIMPLE_COND, after STMT
9054 in basic block BB, splitting the block as appropriate
9055 and creating a new conditionally executed basic block.
9056 Update the profile so the new bb is visited with probability PROB.
9057 Return the created basic block. */
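/* A typical use, as a sketch (VAL, BOUND and STMT are hypothetical):

     gcond *cond = gimple_build_cond (LT_EXPR, val, bound,
				      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond,
			 profile_probability::very_unlikely ());

   after which statements placed into THEN_BB run only when VAL < BOUND.  */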
9058 basic_block
9059 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9060 profile_probability prob)
9061 {
9062 edge fall = split_block (bb, stmt);
9063 gimple_stmt_iterator iter = gsi_last_bb (bb);
9064 basic_block new_bb;
9065
9066 /* Insert cond statement. */
9067 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9068 if (gsi_end_p (iter))
9069 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9070 else
9071 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9072
9073 /* Create conditionally executed block. */
9074 new_bb = create_empty_bb (bb);
9075 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9076 e->probability = prob;
9077 new_bb->count = e->count ();
9078 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9079
9080 /* Fix edge for split bb. */
9081 fall->flags = EDGE_FALSE_VALUE;
9082 fall->probability -= e->probability;
9083
9084 /* Update dominance info. */
9085 if (dom_info_available_p (CDI_DOMINATORS))
9086 {
9087 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9088 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9089 }
9090
9091 /* Update loop info. */
9092 if (current_loops)
9093 add_bb_to_loop (new_bb, bb->loop_father);
9094
9095 return new_bb;
9096 }
9097
9098 /* Build a ternary operation and gimplify it. Emit code before GSI.
9099 Return the gimple_val holding the result. */
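/* For instance, to emit a select of two precomputed values before GSI
   (a sketch; CMP, A and B are hypothetical gimple values):

     tree res = gimplify_build3 (gsi, COND_EXPR, type, cmp, a, b);

   RES is then a gimple value usable as an operand of further statements.  */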
9100
9101 tree
9102 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9103 tree type, tree a, tree b, tree c)
9104 {
9105 tree ret;
9106 location_t loc = gimple_location (gsi_stmt (*gsi));
9107
9108 ret = fold_build3_loc (loc, code, type, a, b, c);
9109 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9110 GSI_SAME_STMT);
9111 }
9112
9113 /* Build a binary operation and gimplify it. Emit code before GSI.
9114 Return the gimple_val holding the result. */
9115
9116 tree
9117 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9118 tree type, tree a, tree b)
9119 {
9120 tree ret;
9121
9122 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9123 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9124 GSI_SAME_STMT);
9125 }
9126
9127 /* Build a unary operation and gimplify it. Emit code before GSI.
9128 Return the gimple_val holding the result. */
9129
9130 tree
9131 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9132 tree a)
9133 {
9134 tree ret;
9135
9136 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9137 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9138 GSI_SAME_STMT);
9139 }
9140
9141
9142 \f
9143 /* Given a basic block B which ends with a conditional and has
9144 precisely two successors, determine which of the edges is taken if
9145 the conditional is true and which is taken if the conditional is
9146 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9147
9148 void
9149 extract_true_false_edges_from_block (basic_block b,
9150 edge *true_edge,
9151 edge *false_edge)
9152 {
9153 edge e = EDGE_SUCC (b, 0);
9154
9155 if (e->flags & EDGE_TRUE_VALUE)
9156 {
9157 *true_edge = e;
9158 *false_edge = EDGE_SUCC (b, 1);
9159 }
9160 else
9161 {
9162 *false_edge = e;
9163 *true_edge = EDGE_SUCC (b, 1);
9164 }
9165 }
9166
9167
9168 /* From a controlling predicate in the immediate dominator DOM of
9169 PHIBLOCK, determine the edges into PHIBLOCK that are chosen if the
9170 predicate evaluates to true or false, and store them to
9171 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9172 they are non-NULL. Returns true if the edges can be determined,
9173 false otherwise. */
9174
9175 bool
9176 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9177 edge *true_controlled_edge,
9178 edge *false_controlled_edge)
9179 {
9180 basic_block bb = phiblock;
9181 edge true_edge, false_edge, tem;
9182 edge e0 = NULL, e1 = NULL;
9183
9184 /* We have to verify that one edge into the PHI node is dominated
9185 by the true edge of the predicate block and the other edge
9186 dominated by the false edge. This ensures that the PHI argument
9187 we are going to take is completely determined by the path we
9188 take from the predicate block.
9189 We can only use BB dominance checks below if the destination of
9190 the true/false edges are dominated by their edge, thus only
9191 have a single predecessor. */
9192 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9193 tem = EDGE_PRED (bb, 0);
9194 if (tem == true_edge
9195 || (single_pred_p (true_edge->dest)
9196 && (tem->src == true_edge->dest
9197 || dominated_by_p (CDI_DOMINATORS,
9198 tem->src, true_edge->dest))))
9199 e0 = tem;
9200 else if (tem == false_edge
9201 || (single_pred_p (false_edge->dest)
9202 && (tem->src == false_edge->dest
9203 || dominated_by_p (CDI_DOMINATORS,
9204 tem->src, false_edge->dest))))
9205 e1 = tem;
9206 else
9207 return false;
9208 tem = EDGE_PRED (bb, 1);
9209 if (tem == true_edge
9210 || (single_pred_p (true_edge->dest)
9211 && (tem->src == true_edge->dest
9212 || dominated_by_p (CDI_DOMINATORS,
9213 tem->src, true_edge->dest))))
9214 e0 = tem;
9215 else if (tem == false_edge
9216 || (single_pred_p (false_edge->dest)
9217 && (tem->src == false_edge->dest
9218 || dominated_by_p (CDI_DOMINATORS,
9219 tem->src, false_edge->dest))))
9220 e1 = tem;
9221 else
9222 return false;
9223 if (!e0 || !e1)
9224 return false;
9225
9226 if (true_controlled_edge)
9227 *true_controlled_edge = e0;
9228 if (false_controlled_edge)
9229 *false_controlled_edge = e1;
9230
9231 return true;
9232 }
9233
9234 /* Generate a range test *LHS <= *RHS that determines whether INDEX is in
9235 the range [LOW, HIGH]. Place associated stmts before the last stmt of BB. */
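/* This is the usual unsigned range-check idiom: the caller compares the
   two outputs, and

     (utype) (INDEX - LOW) <= (utype) (HIGH - LOW)

   holds iff LOW <= INDEX && INDEX <= HIGH in the original type, with
   range_check_type picking a type in which the subtraction wraps safely.  */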
9236
9237 void
9238 generate_range_test (basic_block bb, tree index, tree low, tree high,
9239 tree *lhs, tree *rhs)
9240 {
9241 tree type = TREE_TYPE (index);
9242 tree utype = range_check_type (type);
9243
9244 low = fold_convert (utype, low);
9245 high = fold_convert (utype, high);
9246
9247 gimple_seq seq = NULL;
9248 index = gimple_convert (&seq, utype, index);
9249 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9250 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9251
9252 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9253 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9254 }
9255
9256 /* Return the basic block that belongs to label numbered INDEX
9257 of a switch statement. */
9258
9259 basic_block
9260 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9261 {
9262 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9263 }
9264
9265 /* Return the default basic block of a switch statement. */
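/* In GIMPLE the default case is always stored at label index 0 of the
   switch, so this simply looks up label 0.  */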
9266
9267 basic_block
9268 gimple_switch_default_bb (function *ifun, gswitch *gs)
9269 {
9270 return gimple_switch_label_bb (ifun, gs, 0);
9271 }
9272
9273 /* Return the edge that belongs to label numbered INDEX
9274 of a switch statement. */
9275
9276 edge
9277 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9278 {
9279 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9280 }
9281
9282 /* Return the default edge of a switch statement. */
9283
9284 edge
9285 gimple_switch_default_edge (function *ifun, gswitch *gs)
9286 {
9287 return gimple_switch_edge (ifun, gs, 0);
9288 }
9289
9290
9291 /* Emit return warnings. */
9292
9293 namespace {
9294
9295 const pass_data pass_data_warn_function_return =
9296 {
9297 GIMPLE_PASS, /* type */
9298 "*warn_function_return", /* name */
9299 OPTGROUP_NONE, /* optinfo_flags */
9300 TV_NONE, /* tv_id */
9301 PROP_cfg, /* properties_required */
9302 0, /* properties_provided */
9303 0, /* properties_destroyed */
9304 0, /* todo_flags_start */
9305 0, /* todo_flags_finish */
9306 };
9307
9308 class pass_warn_function_return : public gimple_opt_pass
9309 {
9310 public:
9311 pass_warn_function_return (gcc::context *ctxt)
9312 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9313 {}
9314
9315 /* opt_pass methods: */
9316 virtual unsigned int execute (function *);
9317
9318 }; // class pass_warn_function_return
9319
9320 unsigned int
9321 pass_warn_function_return::execute (function *fun)
9322 {
9323 location_t location;
9324 gimple *last;
9325 edge e;
9326 edge_iterator ei;
9327
9328 if (!targetm.warn_func_return (fun->decl))
9329 return 0;
9330
9331 /* If we have a path to EXIT, then we do return. */
9332 if (TREE_THIS_VOLATILE (fun->decl)
9333 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9334 {
9335 location = UNKNOWN_LOCATION;
9336 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9337 (e = ei_safe_edge (ei)); )
9338 {
9339 last = last_stmt (e->src);
9340 if ((gimple_code (last) == GIMPLE_RETURN
9341 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9342 && location == UNKNOWN_LOCATION
9343 && ((location = LOCATION_LOCUS (gimple_location (last)))
9344 != UNKNOWN_LOCATION)
9345 && !optimize)
9346 break;
9347 /* When optimizing, replace return stmts in noreturn functions
9348 with a __builtin_unreachable () call. */
9349 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9350 {
9351 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9352 gimple *new_stmt = gimple_build_call (fndecl, 0);
9353 gimple_set_location (new_stmt, gimple_location (last));
9354 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9355 gsi_replace (&gsi, new_stmt, true);
9356 remove_edge (e);
9357 }
9358 else
9359 ei_next (&ei);
9360 }
9361 if (location == UNKNOWN_LOCATION)
9362 location = cfun->function_end_locus;
9363 warning_at (location, 0, "%<noreturn%> function does return");
9364 }
9365
9366 /* If we see "return;" in some basic block, then we do reach the end
9367 without returning a value. */
9368 else if (warn_return_type > 0
9369 && !TREE_NO_WARNING (fun->decl)
9370 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9371 {
9372 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9373 {
9374 gimple *last = last_stmt (e->src);
9375 greturn *return_stmt = dyn_cast <greturn *> (last);
9376 if (return_stmt
9377 && gimple_return_retval (return_stmt) == NULL
9378 && !gimple_no_warning_p (last))
9379 {
9380 location = gimple_location (last);
9381 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9382 location = fun->function_end_locus;
9383 if (warning_at (location, OPT_Wreturn_type,
9384 "control reaches end of non-void function"))
9385 TREE_NO_WARNING (fun->decl) = 1;
9386 break;
9387 }
9388 }
9389 /* The C++ FE turns fallthrough from the end of a non-void function
9390 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9391 Recognize those too. */
9392 basic_block bb;
9393 if (!TREE_NO_WARNING (fun->decl))
9394 FOR_EACH_BB_FN (bb, fun)
9395 if (EDGE_COUNT (bb->succs) == 0)
9396 {
9397 gimple *last = last_stmt (bb);
9398 const enum built_in_function ubsan_missing_ret
9399 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9400 if (last
9401 && ((LOCATION_LOCUS (gimple_location (last))
9402 == BUILTINS_LOCATION
9403 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9404 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9405 {
9406 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9407 gsi_prev_nondebug (&gsi);
9408 gimple *prev = gsi_stmt (gsi);
9409 if (prev == NULL)
9410 location = UNKNOWN_LOCATION;
9411 else
9412 location = gimple_location (prev);
9413 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9414 location = fun->function_end_locus;
9415 if (warning_at (location, OPT_Wreturn_type,
9416 "control reaches end of non-void function"))
9417 TREE_NO_WARNING (fun->decl) = 1;
9418 break;
9419 }
9420 }
9421 }
9422 return 0;
9423 }
9424
9425 } // anon namespace
9426
9427 gimple_opt_pass *
9428 make_pass_warn_function_return (gcc::context *ctxt)
9429 {
9430 return new pass_warn_function_return (ctxt);
9431 }
9432
9433 /* Walk a gimplified function and warn about calls whose return value is
9434 ignored when the callee has attribute((warn_unused_result)) set. This is
9435 done before inlining, so we don't have to worry about that. */
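/* E.g. given

     __attribute__((warn_unused_result)) int must_check (void);

   a statement 'must_check ();' reaches here as a call without an LHS and
   triggers the warning; note GCC warns even for '(void) must_check ();',
   since the cast is discarded before this point.  */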
9436
9437 static void
9438 do_warn_unused_result (gimple_seq seq)
9439 {
9440 tree fdecl, ftype;
9441 gimple_stmt_iterator i;
9442
9443 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9444 {
9445 gimple *g = gsi_stmt (i);
9446
9447 switch (gimple_code (g))
9448 {
9449 case GIMPLE_BIND:
9450 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9451 break;
9452 case GIMPLE_TRY:
9453 do_warn_unused_result (gimple_try_eval (g));
9454 do_warn_unused_result (gimple_try_cleanup (g));
9455 break;
9456 case GIMPLE_CATCH:
9457 do_warn_unused_result (gimple_catch_handler (
9458 as_a <gcatch *> (g)));
9459 break;
9460 case GIMPLE_EH_FILTER:
9461 do_warn_unused_result (gimple_eh_filter_failure (g));
9462 break;
9463
9464 case GIMPLE_CALL:
9465 if (gimple_call_lhs (g))
9466 break;
9467 if (gimple_call_internal_p (g))
9468 break;
9469
9470 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9471 LHS. All calls whose value is ignored should be
9472 represented like this. Look for the attribute. */
9473 fdecl = gimple_call_fndecl (g);
9474 ftype = gimple_call_fntype (g);
9475
9476 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9477 {
9478 location_t loc = gimple_location (g);
9479
9480 if (fdecl)
9481 warning_at (loc, OPT_Wunused_result,
9482 "ignoring return value of %qD "
9483 "declared with attribute %<warn_unused_result%>",
9484 fdecl);
9485 else
9486 warning_at (loc, OPT_Wunused_result,
9487 "ignoring return value of function "
9488 "declared with attribute %<warn_unused_result%>");
9489 }
9490 break;
9491
9492 default:
9493 /* Not a container, not a call, or a call whose value is used. */
9494 break;
9495 }
9496 }
9497 }
9498
9499 namespace {
9500
9501 const pass_data pass_data_warn_unused_result =
9502 {
9503 GIMPLE_PASS, /* type */
9504 "*warn_unused_result", /* name */
9505 OPTGROUP_NONE, /* optinfo_flags */
9506 TV_NONE, /* tv_id */
9507 PROP_gimple_any, /* properties_required */
9508 0, /* properties_provided */
9509 0, /* properties_destroyed */
9510 0, /* todo_flags_start */
9511 0, /* todo_flags_finish */
9512 };
9513
9514 class pass_warn_unused_result : public gimple_opt_pass
9515 {
9516 public:
9517 pass_warn_unused_result (gcc::context *ctxt)
9518 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9519 {}
9520
9521 /* opt_pass methods: */
9522 virtual bool gate (function *) { return flag_warn_unused_result; }
9523 virtual unsigned int execute (function *)
9524 {
9525 do_warn_unused_result (gimple_body (current_function_decl));
9526 return 0;
9527 }
9528
9529 }; // class pass_warn_unused_result
9530
9531 } // anon namespace
9532
9533 gimple_opt_pass *
9534 make_pass_warn_unused_result (gcc::context *ctxt)
9535 {
9536 return new pass_warn_unused_result (ctxt);
9537 }
9538
9539 /* IPA passes, compilation of earlier functions or inlining
9540 might have changed some properties, such as having marked functions
9541 nothrow, pure, const or noreturn.
9542 Remove redundant edges and basic blocks, and create new ones if necessary.
9543
9544 This pass can't be executed as a standalone pass from the pass manager,
9545 because in between inlining and this fixup verify_flow_info would fail. */
9546
9547 unsigned int
9548 execute_fixup_cfg (void)
9549 {
9550 basic_block bb;
9551 gimple_stmt_iterator gsi;
9552 int todo = 0;
9553 cgraph_node *node = cgraph_node::get (current_function_decl);
9554 /* Same scaling is also done by ipa_merge_profiles. */
9555 profile_count num = node->count;
9556 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9557 bool scale = num.initialized_p () && !(num == den);
9558
9559 if (scale)
9560 {
9561 profile_count::adjust_for_ipa_scaling (&num, &den);
9562 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9563 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9564 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9565 }
9566
9567 FOR_EACH_BB_FN (bb, cfun)
9568 {
9569 if (scale)
9570 bb->count = bb->count.apply_scale (num, den);
9571 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9572 {
9573 gimple *stmt = gsi_stmt (gsi);
9574 tree decl = is_gimple_call (stmt)
9575 ? gimple_call_fndecl (stmt)
9576 : NULL;
9577 if (decl)
9578 {
9579 int flags = gimple_call_flags (stmt);
9580 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9581 {
9582 if (gimple_purge_dead_abnormal_call_edges (bb))
9583 todo |= TODO_cleanup_cfg;
9584
9585 if (gimple_in_ssa_p (cfun))
9586 {
9587 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9588 update_stmt (stmt);
9589 }
9590 }
9591
9592 if (flags & ECF_NORETURN
9593 && fixup_noreturn_call (stmt))
9594 todo |= TODO_cleanup_cfg;
9595 }
9596
9597 /* Remove stores to variables we marked write-only.
9598 Keep the access when the store has side effects, i.e. when the
9599 source is volatile. */
9600 if (gimple_store_p (stmt)
9601 && !gimple_has_side_effects (stmt)
9602 && !optimize_debug)
9603 {
9604 tree lhs = get_base_address (gimple_get_lhs (stmt));
9605
9606 if (VAR_P (lhs)
9607 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9608 && varpool_node::get (lhs)->writeonly)
9609 {
9610 unlink_stmt_vdef (stmt);
9611 gsi_remove (&gsi, true);
9612 release_defs (stmt);
9613 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9614 continue;
9615 }
9616 }
9617 /* For calls we can simply remove the LHS when it is known
9618 to be write-only. */
9619 if (is_gimple_call (stmt)
9620 && gimple_get_lhs (stmt))
9621 {
9622 tree lhs = get_base_address (gimple_get_lhs (stmt));
9623
9624 if (VAR_P (lhs)
9625 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9626 && varpool_node::get (lhs)->writeonly)
9627 {
9628 gimple_call_set_lhs (stmt, NULL);
9629 update_stmt (stmt);
9630 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9631 }
9632 }
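
/* A hypothetical source-level view of the two removals above: given
   `static int sink;' that the IPA analysis marked write-only (it is
   never read), the store in

     void put (int v) { sink = v; }

   is deleted outright, while in `sink = get ();' the call itself is
   kept for its side effects and only the LHS is dropped.  */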
9633
9634 if (maybe_clean_eh_stmt (stmt)
9635 && gimple_purge_dead_eh_edges (bb))
9636 todo |= TODO_cleanup_cfg;
9637 gsi_next (&gsi);
9638 }
9639
9640 /* If we have a basic block with no successors that does not
9641 end with a control statement or a noreturn call, end it with
9642 a call to __builtin_unreachable. This situation can occur
9643 when inlining a noreturn call that does in fact return. */
9644 if (EDGE_COUNT (bb->succs) == 0)
9645 {
9646 gimple *stmt = last_stmt (bb);
9647 if (!stmt
9648 || (!is_ctrl_stmt (stmt)
9649 && (!is_gimple_call (stmt)
9650 || !gimple_call_noreturn_p (stmt))))
9651 {
9652 if (stmt && is_gimple_call (stmt))
9653 gimple_call_set_ctrl_altering (stmt, false);
9654 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9655 stmt = gimple_build_call (fndecl, 0);
9656 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9657 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9658 if (!cfun->after_inlining)
9659 {
9660 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9661 node->create_edge (cgraph_node::get_create (fndecl),
9662 call_stmt, bb->count);
9663 }
9664 }
9665 }
9666 }
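
/* An illustrative case for the __builtin_unreachable () capping in
   the loop above (hypothetical user code): if `fatal' is declared
   noreturn but in fact returns, the statements after its call sites
   were already removed as unreachable, so after inlining it into `f'
   the inlined body falls off the end of a block with no successors:

     __attribute__ ((noreturn)) void fatal (void) { }

     int f (void) { fatal (); return 0; }  */
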
9667 if (scale)
9668 {
9669 update_max_bb_count ();
9670 compute_function_frequency ();
9671 }
9672
9673 if (current_loops
9674 && (todo & TODO_cleanup_cfg))
9675 loops_state_set (LOOPS_NEED_FIXUP);
9676
9677 return todo;
9678 }
9679
9680 namespace {
9681
9682 const pass_data pass_data_fixup_cfg =
9683 {
9684 GIMPLE_PASS, /* type */
9685 "fixup_cfg", /* name */
9686 OPTGROUP_NONE, /* optinfo_flags */
9687 TV_NONE, /* tv_id */
9688 PROP_cfg, /* properties_required */
9689 0, /* properties_provided */
9690 0, /* properties_destroyed */
9691 0, /* todo_flags_start */
9692 0, /* todo_flags_finish */
9693 };
9694
9695 class pass_fixup_cfg : public gimple_opt_pass
9696 {
9697 public:
9698 pass_fixup_cfg (gcc::context *ctxt)
9699 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9700 {}
9701
9702 /* opt_pass methods: */
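/* fixup_cfg is inserted more than once into the pass pipeline (see
   passes.def), so it must override clone (); the default
   implementation refuses to clone.  */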
9703 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9704 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9705
9706 }; // class pass_fixup_cfg
9707
9708 } // anon namespace
9709
9710 gimple_opt_pass *
9711 make_pass_fixup_cfg (gcc::context *ctxt)
9712 {
9713 return new pass_fixup_cfg (ctxt);
9714 }
9715
9716 /* Garbage collection support for edge_def. */
9717
9718 extern void gt_ggc_mx (tree&);
9719 extern void gt_ggc_mx (gimple *&);
9720 extern void gt_ggc_mx (rtx&);
9721 extern void gt_ggc_mx (basic_block&);
9722
9723 static void
9724 gt_ggc_mx (rtx_insn *& x)
9725 {
9726 if (x)
9727 gt_ggc_mx_rtx_def ((void *) x);
9728 }
9729
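/* edge_def::insns is a union of a gimple_seq and an rtx_insn *; the
   markers below consult current_ir_type () to know which member is
   live before walking it.  */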
9730 void
9731 gt_ggc_mx (edge_def *e)
9732 {
9733 tree block = LOCATION_BLOCK (e->goto_locus);
9734 gt_ggc_mx (e->src);
9735 gt_ggc_mx (e->dest);
9736 if (current_ir_type () == IR_GIMPLE)
9737 gt_ggc_mx (e->insns.g);
9738 else
9739 gt_ggc_mx (e->insns.r);
9740 gt_ggc_mx (block);
9741 }
9742
9743 /* PCH support for edge_def. */
9744
9745 extern void gt_pch_nx (tree&);
9746 extern void gt_pch_nx (gimple *&);
9747 extern void gt_pch_nx (rtx&);
9748 extern void gt_pch_nx (basic_block&);
9749
9750 static void
9751 gt_pch_nx (rtx_insn *& x)
9752 {
9753 if (x)
9754 gt_pch_nx_rtx_def ((void *) x);
9755 }
9756
9757 void
9758 gt_pch_nx (edge_def *e)
9759 {
9760 tree block = LOCATION_BLOCK (e->goto_locus);
9761 gt_pch_nx (e->src);
9762 gt_pch_nx (e->dest);
9763 if (current_ir_type () == IR_GIMPLE)
9764 gt_pch_nx (e->insns.g);
9765 else
9766 gt_pch_nx (e->insns.r);
9767 gt_pch_nx (block);
9768 }
9769
9770 void
9771 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9772 {
9773 tree block = LOCATION_BLOCK (e->goto_locus);
9774 op (&(e->src), cookie);
9775 op (&(e->dest), cookie);
9776 if (current_ir_type () == IR_GIMPLE)
9777 op (&(e->insns.g), cookie);
9778 else
9779 op (&(e->insns.r), cookie);
9780 op (&(block), cookie);
9781 }
9782
9783 #if CHECKING_P
9784
9785 namespace selftest {
9786
9787 /* Helper function for CFG selftests: create a dummy function decl
9788 and push it as cfun. */
9789
9790 static tree
9791 push_fndecl (const char *name)
9792 {
9793 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9794 /* FIXME: this uses input_location: */
9795 tree fndecl = build_fn_decl (name, fn_type);
9796 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9797 NULL_TREE, integer_type_node);
9798 DECL_RESULT (fndecl) = retval;
9799 push_struct_function (fndecl);
9800 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9801 ASSERT_TRUE (fun != NULL);
9802 init_empty_tree_cfg_for_function (fun);
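/* A freshly-initialized CFG contains just the ENTRY and EXIT blocks,
   with no edges between them yet. */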
9803 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9804 ASSERT_EQ (0, n_edges_for_fn (fun));
9805 return fndecl;
9806 }
9807
9808 /* These tests directly create CFGs.
9809 Compare with the static fns within tree-cfg.c:
9810 - build_gimple_cfg
9811 - make_blocks: calls create_basic_block (seq, bb);
9812 - make_edges. */
9813
9814 /* Verify a simple CFG of the form:
9815 ENTRY -> A -> B -> C -> EXIT. */
9816
9817 static void
9818 test_linear_chain ()
9819 {
9820 gimple_register_cfg_hooks ();
9821
9822 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9823 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9824
9825 /* Create some empty blocks. */
9826 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9827 basic_block bb_b = create_empty_bb (bb_a);
9828 basic_block bb_c = create_empty_bb (bb_b);
9829
9830 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9831 ASSERT_EQ (0, n_edges_for_fn (fun));
9832
9833 /* Create some edges: a simple linear chain of BBs. */
9834 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9835 make_edge (bb_a, bb_b, 0);
9836 make_edge (bb_b, bb_c, 0);
9837 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9838
9839 /* Verify the edges. */
9840 ASSERT_EQ (4, n_edges_for_fn (fun));
9841 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9842 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9843 ASSERT_EQ (1, bb_a->preds->length ());
9844 ASSERT_EQ (1, bb_a->succs->length ());
9845 ASSERT_EQ (1, bb_b->preds->length ());
9846 ASSERT_EQ (1, bb_b->succs->length ());
9847 ASSERT_EQ (1, bb_c->preds->length ());
9848 ASSERT_EQ (1, bb_c->succs->length ());
9849 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9850 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9851
9852 /* Verify the dominance information.
9853 Each BB in our simple chain should be dominated by the one before
9854 it. */
9855 calculate_dominance_info (CDI_DOMINATORS);
9856 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9857 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9858 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9859 ASSERT_EQ (1, dom_by_b.length ());
9860 ASSERT_EQ (bb_c, dom_by_b[0]);
9861 free_dominance_info (CDI_DOMINATORS);
9862 dom_by_b.release ();
9863
9864 /* Similarly for post-dominance: each BB in our chain is post-dominated
9865 by the one after it. */
9866 calculate_dominance_info (CDI_POST_DOMINATORS);
9867 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9868 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9869 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9870 ASSERT_EQ (1, postdom_by_b.length ());
9871 ASSERT_EQ (bb_a, postdom_by_b[0]);
9872 free_dominance_info (CDI_POST_DOMINATORS);
9873 postdom_by_b.release ();
9874
9875 pop_cfun ();
9876 }
9877
9878 /* Verify a simple CFG of the form:
9879 ENTRY
9880 |
9881 A
9882 / \
9883 /t \f
9884 B C
9885 \ /
9886 \ /
9887 D
9888 |
9889 EXIT. */
9890
9891 static void
9892 test_diamond ()
9893 {
9894 gimple_register_cfg_hooks ();
9895
9896 tree fndecl = push_fndecl ("cfg_test_diamond");
9897 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9898
9899 /* Create some empty blocks. */
9900 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9901 basic_block bb_b = create_empty_bb (bb_a);
9902 basic_block bb_c = create_empty_bb (bb_a);
9903 basic_block bb_d = create_empty_bb (bb_b);
9904
9905 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9906 ASSERT_EQ (0, n_edges_for_fn (fun));
9907
9908 /* Create the edges. */
9909 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9910 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9911 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9912 make_edge (bb_b, bb_d, 0);
9913 make_edge (bb_c, bb_d, 0);
9914 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9915
9916 /* Verify the edges. */
9917 ASSERT_EQ (6, n_edges_for_fn (fun));
9918 ASSERT_EQ (1, bb_a->preds->length ());
9919 ASSERT_EQ (2, bb_a->succs->length ());
9920 ASSERT_EQ (1, bb_b->preds->length ());
9921 ASSERT_EQ (1, bb_b->succs->length ());
9922 ASSERT_EQ (1, bb_c->preds->length ());
9923 ASSERT_EQ (1, bb_c->succs->length ());
9924 ASSERT_EQ (2, bb_d->preds->length ());
9925 ASSERT_EQ (1, bb_d->succs->length ());
9926
9927 /* Verify the dominance information. */
9928 calculate_dominance_info (CDI_DOMINATORS);
9929 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9930 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9931 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9932 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9933 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9934 dom_by_a.release ();
9935 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9936 ASSERT_EQ (0, dom_by_b.length ());
9937 dom_by_b.release ();
9938 free_dominance_info (CDI_DOMINATORS);
9939
9940 /* Similarly for post-dominance. */
9941 calculate_dominance_info (CDI_POST_DOMINATORS);
9942 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9943 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9944 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9945 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9946 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9947 postdom_by_d.release ();
9948 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9949 ASSERT_EQ (0, postdom_by_b.length ());
9950 postdom_by_b.release ();
9951 free_dominance_info (CDI_POST_DOMINATORS);
9952
9953 pop_cfun ();
9954 }
9955
9956 /* Verify that we can handle a CFG containing a "complete", i.e.
9957 fully-connected, subgraph (where each of A B C D below has edges
9958 pointing to every other node, and also to itself).
9959 e.g.:
9960 ENTRY EXIT
9961 | ^
9962 | /
9963 | /
9964 | /
9965 V/
9966 A<--->B
9967 ^^ ^^
9968 | \ / |
9969 | X |
9970 | / \ |
9971 VV VV
9972 C<--->D
9973 */
9974
9975 static void
9976 test_fully_connected ()
9977 {
9978 gimple_register_cfg_hooks ();
9979
9980 tree fndecl = push_fndecl ("cfg_fully_connected");
9981 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9982
9983 const int n = 4;
9984
9985 /* Create some empty blocks. */
9986 auto_vec <basic_block> subgraph_nodes;
9987 for (int i = 0; i < n; i++)
9988 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9989
9990 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9991 ASSERT_EQ (0, n_edges_for_fn (fun));
9992
9993 /* Create the edges. */
9994 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9995 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9996 for (int i = 0; i < n; i++)
9997 for (int j = 0; j < n; j++)
9998 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9999
10000 /* Verify the edges. */
10001 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10002 /* The first one is linked to ENTRY/EXIT as well as itself and
10003 everything else. */
10004 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10005 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10006 /* The other ones in the subgraph are linked to everything in
10007 the subgraph (including themselves). */
10008 for (int i = 1; i < n; i++)
10009 {
10010 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10011 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10012 }
10013
10014 /* Verify the dominance information. */
10015 calculate_dominance_info (CDI_DOMINATORS);
10016 /* The initial block in the subgraph should be dominated by ENTRY. */
10017 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10018 get_immediate_dominator (CDI_DOMINATORS,
10019 subgraph_nodes[0]));
10020 /* Every other block in the subgraph should be dominated by the
10021 initial block. */
10022 for (int i = 1; i < n; i++)
10023 ASSERT_EQ (subgraph_nodes[0],
10024 get_immediate_dominator (CDI_DOMINATORS,
10025 subgraph_nodes[i]));
10026 free_dominance_info (CDI_DOMINATORS);
10027
10028 /* Similarly for post-dominance. */
10029 calculate_dominance_info (CDI_POST_DOMINATORS);
10030 /* The initial block in the subgraph should be postdominated by EXIT. */
10031 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10032 get_immediate_dominator (CDI_POST_DOMINATORS,
10033 subgraph_nodes[0]));
10034 /* Every other block in the subgraph should be postdominated by the
10035 initial block, since that leads to EXIT. */
10036 for (int i = 1; i < n; i++)
10037 ASSERT_EQ (subgraph_nodes[0],
10038 get_immediate_dominator (CDI_POST_DOMINATORS,
10039 subgraph_nodes[i]));
10040 free_dominance_info (CDI_POST_DOMINATORS);
10041
10042 pop_cfun ();
10043 }
10044
10045 /* Run all of the selftests within this file. */
10046
10047 void
10048 tree_cfg_c_tests ()
10049 {
10050 test_linear_chain ();
10051 test_diamond ();
10052 test_fully_connected ();
10053 }
10054
10055 } // namespace selftest
10056
10057 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10058 - loop
10059 - nested loops
10060 - switch statement (a block with many out-edges)
10061 - something that jumps to itself
10062 - etc */
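
/* A sketch of one such test, following the pattern of the selftests
   above (an assumption of this sketch: it is not wired into
   tree_cfg_c_tests, and would live inside namespace selftest):

     static void
     test_self_loop ()
     {
       gimple_register_cfg_hooks ();

       tree fndecl = push_fndecl ("cfg_test_self_loop");
       function *fun = DECL_STRUCT_FUNCTION (fndecl);

       basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
       make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
       make_edge (bb_a, bb_a, 0);
       make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

       ASSERT_EQ (3, n_edges_for_fn (fun));
       ASSERT_EQ (2, bb_a->preds->length ());
       ASSERT_EQ (2, bb_a->succs->length ());

       pop_cfun ();
     }  */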
10063
10064 #endif /* CHECKING_P */