/* Control flow functions for trees.
   Copyright (C) 2001-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
64 #include "asan.h"
65 #include "profile.h"
66
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}
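
/* Map from source line to the last discriminator assigned for that
   line.  Allocated in build_gimple_cfg and live only while the edges
   of the flowgraph are being created.  */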
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
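
/* Likewise, but for the CFG of the current function (cfun).  */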
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_unroll_kind:
          loop->unroll
            = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
          cfun->has_unroll = true;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        case annot_expr_parallel_kind:
          loop->can_be_parallel = true;
          loop->safelen = INT_MAX;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_unroll_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
            case annot_expr_parallel_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}
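
/* Main worker for the CFG construction pass.  Build the CFG for the
   current function from its GIMPLE body, release the body, clean up
   the fresh CFG and initialize the loop structure, so that the loop
   annotations can be propagated to the loops.  */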
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace
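
/* Entry point to create an instance of the CFG construction pass for
   the pass manager.  */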
gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
         optimize away those calls, but rather turn them into
         __ubsan_handle_builtin_unreachable () or __builtin_trap ()
         later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}

/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
         stmt is before nondebug stmts.  Once stmt reaches a nondebug
         nonlabel, prev_stmt will be set to it, so that
         stmt_starts_bb_p will know to start a new block if a label is
         found.  However, if stmt was a label after debug stmts only,
         keep the label in prev_stmt even if we find further debug
         stmts, for there may be other labels after them, and they
         should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
        prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
          prev_stmt = NULL;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* If this is the first label we encounter (latest in SEQ)
             before nondebug stmts, record its position.  */
          if (is_a <glabel *> (stmt))
            {
              if (gsi_end_p (label))
                label = i;
              continue;
            }

          /* Without a recorded label position to move debug stmts to,
             there's nothing to do.  */
          if (gsi_end_p (label))
            continue;

          /* Move the debug stmt at I after LABEL.  */
          if (is_gimple_debug (stmt))
            {
              gcc_assert (gimple_debug_nonbind_marker_p (stmt));
              /* As STMT is removed, I advances to the stmt after
                 STMT, so the gsi_prev in the for "increment"
                 expression gets us to the stmt we're to visit after
                 STMT.  LABEL, however, would advance to the moved
                 stmt if we passed it to gsi_move_after, so pass it a
                 copy instead, so as to keep LABEL pointing to the
                 LABEL.  */
              gimple_stmt_iterator copy = label;
              gsi_move_after (&i, &copy);
              continue;
            }

          /* There aren't any (more?) debug stmts before label, so
             there isn't anything else to move after it.  */
          label = gsi_none ();
        }
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  H is the
   statement sequence to attach to the new block, if any; E is unused
   by this hook and must be NULL.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}

/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false, otherwise factor the computed gotos.
   BB_TO_OMP_IDX, if non-NULL, gives the index of the OpenMP region
   each basic block belongs to; a separate dispatcher is then used
   for each region.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (cfun, label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count ().initialized_p ())
            cnt += e->count ();
          else
            all = false;
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.
   FROM is the cached expanded location of LOCUS1.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, &locus_e,
                                     gimple_location (first)))
              || (last && same_line_p (locus, &locus_e,
                                       gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator
                  = next_discriminator_for_locus (locus_e.line);
              else
                e->dest->discriminator
                  = next_discriminator_for_locus (locus_e.line);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad, label_for_bb);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab, label_for_bb);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab, label_for_bb);
        break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
                                         last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label, label_for_bb);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *txn = as_a <gtransaction *> (stmt);

            label = gimple_transaction_label_norm (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_norm (txn, new_label);
              }

            label = gimple_transaction_label_uninst (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_uninst (txn, new_label);
              }

            label = gimple_transaction_label_over (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_over (txn, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
         whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
        {
          i++;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (next_index < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, next_index);
          basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
          wide_int bhp1 = wi::to_wide (base_high) + 1;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              next_index++;
            }
          else
            break;
        }

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
          && gimple_seq_unreachable_p (bb_seq (base_bb))
          /* Don't optimize this if __builtin_unreachable () is the
             implicitly added one by the C++ FE too early, before
             -Wreturn-type can be diagnosed.  We'll optimize it later
             during switchconv pass or any other cfg cleanup.  */
          && (gimple_in_ssa_p (cfun)
              || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
                  != BUILTINS_LOCATION)))
        {
          edge base_edge = find_edge (gimple_bb (stmt), base_bb);
          if (base_edge != NULL)
            remove_edge_and_dominated_blocks (base_edge);
          i = next_index;
          continue;
        }

      if (new_size < i)
        gimple_switch_set_label (stmt, new_size,
                                 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
1834
1835 /* Checks whether we can merge block B into block A. */
1836
1837 static bool
1838 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1839 {
1840 gimple *stmt;
1841
1842 if (!single_succ_p (a))
1843 return false;
1844
1845 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1846 return false;
1847
1848 if (single_succ (a) != b)
1849 return false;
1850
1851 if (!single_pred_p (b))
1852 return false;
1853
1854 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1855 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1856 return false;
1857
1858 /* If A ends by a statement causing exceptions or something similar, we
1859 cannot merge the blocks. */
1860 stmt = last_stmt (a);
1861 if (stmt && stmt_ends_bb_p (stmt))
1862 return false;
1863
1864 /* Do not allow a block with only a non-local label to be merged. */
1865 if (stmt)
1866 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1867 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1868 return false;
1869
1870 /* Examine the labels at the beginning of B. */
1871 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1872 gsi_next (&gsi))
1873 {
1874 tree lab;
1875 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1876 if (!label_stmt)
1877 break;
1878 lab = gimple_label_label (label_stmt);
1879
1880 /* Do not remove user-forced labels or, at -O0, any user labels. */
1881 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1882 return false;
1883 }
1884
1885 /* Protect simple loop latches. We only want to avoid merging
1886 the latch with the loop header or with a block in another
1887 loop in this case. */
1888 if (current_loops
1889 && b->loop_father->latch == b
1890 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1891 && (b->loop_father->header == a
1892 || b->loop_father != a->loop_father))
1893 return false;
1894
1895 /* It must be possible to eliminate all phi nodes in B. If ssa form
1896 is not up-to-date and a name-mapping is registered, we cannot eliminate
1897 any phis. Symbols marked for renaming are never a problem though. */
1898 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1899 gsi_next (&gsi))
1900 {
1901 gphi *phi = gsi.phi ();
1902 /* Technically only new names matter. */
1903 if (name_registered_for_update_p (PHI_RESULT (phi)))
1904 return false;
1905 }
1906
1907 /* When not optimizing, don't merge if we'd lose goto_locus. */
1908 if (!optimize
1909 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1910 {
1911 location_t goto_locus = single_succ_edge (a)->goto_locus;
1912 gimple_stmt_iterator prev, next;
1913 prev = gsi_last_nondebug_bb (a);
1914 next = gsi_after_labels (b);
1915 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1916 gsi_next_nondebug (&next);
1917 if ((gsi_end_p (prev)
1918 || gimple_location (gsi_stmt (prev)) != goto_locus)
1919 && (gsi_end_p (next)
1920 || gimple_location (gsi_stmt (next)) != goto_locus))
1921 return false;
1922 }
1923
1924 return true;
1925 }
1926
1927 /* Replaces all uses of NAME by VAL. */
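
/* A hypothetical GIMPLE fragment as illustration: if a cleanup proves
   that x_2 is a copy of y_1, then replace_uses_by (x_2, y_1) rewrites

     z_3 = x_2 + 1;
     if (x_2 > 0) goto <bb 3>; else goto <bb 4>;

   into

     z_3 = y_1 + 1;
     if (y_1 > 0) goto <bb 3>; else goto <bb 4>;

   folding and updating each statement it touches along the way.  */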
1928
1929 void
1930 replace_uses_by (tree name, tree val)
1931 {
1932 imm_use_iterator imm_iter;
1933 use_operand_p use;
1934 gimple *stmt;
1935 edge e;
1936
1937 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1938 {
1939 /* Mark the block if we change the last stmt in it. */
1940 if (cfgcleanup_altered_bbs
1941 && stmt_ends_bb_p (stmt))
1942 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1943
1944 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1945 {
1946 replace_exp (use, val);
1947
1948 if (gimple_code (stmt) == GIMPLE_PHI)
1949 {
1950 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1951 PHI_ARG_INDEX_FROM_USE (use));
1952 if (e->flags & EDGE_ABNORMAL
1953 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1954 {
1955 /* This can only occur for virtual operands, since
1956 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1957 would prevent replacement. */
1958 gcc_checking_assert (virtual_operand_p (name));
1959 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1960 }
1961 }
1962 }
1963
1964 if (gimple_code (stmt) != GIMPLE_PHI)
1965 {
1966 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1967 gimple *orig_stmt = stmt;
1968 size_t i;
1969
1970 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1971 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1972 only change sth from non-invariant to invariant, and only
1973 when propagating constants. */
1974 if (is_gimple_min_invariant (val))
1975 for (i = 0; i < gimple_num_ops (stmt); i++)
1976 {
1977 tree op = gimple_op (stmt, i);
1978 /* Operands may be empty here. For example, the labels
1979 of a GIMPLE_COND are nulled out following the creation
1980 of the corresponding CFG edges. */
1981 if (op && TREE_CODE (op) == ADDR_EXPR)
1982 recompute_tree_invariant_for_addr_expr (op);
1983 }
1984
1985 if (fold_stmt (&gsi))
1986 stmt = gsi_stmt (gsi);
1987
1988 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1989 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1990
1991 update_stmt (stmt);
1992 }
1993 }
1994
1995 gcc_checking_assert (has_zero_uses (name));
1996
1997 /* Also update the trees stored in loop structures. */
1998 if (current_loops)
1999 {
2000 class loop *loop;
2001
2002 FOR_EACH_LOOP (loop, 0)
2003 {
2004 substitute_in_loop_info (loop, name, val);
2005 }
2006 }
2007 }
2008
2009 /* Merge block B into block A. */
2010
2011 static void
2012 gimple_merge_blocks (basic_block a, basic_block b)
2013 {
2014 gimple_stmt_iterator last, gsi;
2015 gphi_iterator psi;
2016
2017 if (dump_file)
2018 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2019
2020 /* Remove all single-valued PHI nodes from block B of the form
2021 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2022 gsi = gsi_last_bb (a);
2023 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2024 {
2025 gimple *phi = gsi_stmt (psi);
2026 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2027 gimple *copy;
2028 bool may_replace_uses = (virtual_operand_p (def)
2029 || may_propagate_copy (def, use));
2030
2031 /* If we maintain loop-closed SSA form, do not propagate arguments
2032 of loop exit phi nodes. */
2033 if (current_loops
2034 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2035 && !virtual_operand_p (def)
2036 && TREE_CODE (use) == SSA_NAME
2037 && a->loop_father != b->loop_father)
2038 may_replace_uses = false;
2039
2040 if (!may_replace_uses)
2041 {
2042 gcc_assert (!virtual_operand_p (def));
2043
2044 /* Note that just emitting the copies is fine -- there is no problem
2045 with ordering of phi nodes. This is because A is the single
2046 predecessor of B, therefore results of the phi nodes cannot
2047 appear as arguments of the phi nodes. */
2048 copy = gimple_build_assign (def, use);
2049 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2050 remove_phi_node (&psi, false);
2051 }
2052 else
2053 {
2054 /* If we deal with a PHI for virtual operands, we can simply
2055 propagate these without fussing with folding or updating
2056 the stmt. */
2057 if (virtual_operand_p (def))
2058 {
2059 imm_use_iterator iter;
2060 use_operand_p use_p;
2061 gimple *stmt;
2062
2063 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2064 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2065 SET_USE (use_p, use);
2066
2067 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2068 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2069 }
2070 else
2071 replace_uses_by (def, use);
2072
2073 remove_phi_node (&psi, true);
2074 }
2075 }
2076
2077 /* Ensure that B follows A. */
2078 move_block_after (b, a);
2079
2080 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2081 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2082
2083 /* Remove labels from B and set gimple_bb to A for other statements. */
2084 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2085 {
2086 gimple *stmt = gsi_stmt (gsi);
2087 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2088 {
2089 tree label = gimple_label_label (label_stmt);
2090 int lp_nr;
2091
2092 gsi_remove (&gsi, false);
2093
2094 /* Now that we can thread computed gotos, we might have
2095 a situation where we have a forced label in block B.
2096 However, the label at the start of block B might still be
2097 used in other ways (think about the runtime checking for
2098 Fortran assigned gotos). So we cannot just delete the
2099 label. Instead we move the label to the start of block A. */
2100 if (FORCED_LABEL (label))
2101 {
2102 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2103 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2104 }
2105 /* Other user labels are kept around in the form of a debug stmt. */
2106 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2107 {
2108 gimple *dbg = gimple_build_debug_bind (label,
2109 integer_zero_node,
2110 stmt);
2111 gimple_debug_bind_reset_value (dbg);
2112 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2113 }
2114
2115 lp_nr = EH_LANDING_PAD_NR (label);
2116 if (lp_nr)
2117 {
2118 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2119 lp->post_landing_pad = NULL;
2120 }
2121 }
2122 else
2123 {
2124 gimple_set_bb (stmt, a);
2125 gsi_next (&gsi);
2126 }
2127 }
2128
2129 /* When merging two BBs, if their counts are different, the larger count
2130 is selected as the new bb count. This is to handle inconsistent
2131 profiles. */
2132 if (a->loop_father == b->loop_father)
2133 {
2134 a->count = a->count.merge (b->count);
2135 }
2136
2137 /* Merge the sequences. */
2138 last = gsi_last_bb (a);
2139 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2140 set_bb_seq (b, NULL);
2141
2142 if (cfgcleanup_altered_bbs)
2143 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2144 }
2145
2146
2147 /* Return the one of BB's two successors that is not reachable by a
2148 complex edge, if there is one. Else, return BB. We use
2149 this in optimizations that use post-dominators for their heuristics,
2150 to catch the cases in C++ where function calls are involved. */
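
/* For example, if BB's two successors are reached through a fallthru
   edge and an EH edge (the latter being complex), the fallthru
   destination is returned; if both outgoing edges are normal, BB
   itself is returned.  */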
2151
2152 basic_block
2153 single_noncomplex_succ (basic_block bb)
2154 {
2155 edge e0, e1;
2156 if (EDGE_COUNT (bb->succs) != 2)
2157 return bb;
2158
2159 e0 = EDGE_SUCC (bb, 0);
2160 e1 = EDGE_SUCC (bb, 1);
2161 if (e0->flags & EDGE_COMPLEX)
2162 return e1->dest;
2163 if (e1->flags & EDGE_COMPLEX)
2164 return e0->dest;
2165
2166 return bb;
2167 }
2168
2169 /* CALL is a GIMPLE_CALL. Set the current function's calls_alloca and calls_setjmp flags as appropriate. */
2170
2171 void
2172 notice_special_calls (gcall *call)
2173 {
2174 int flags = gimple_call_flags (call);
2175
2176 if (flags & ECF_MAY_BE_ALLOCA)
2177 cfun->calls_alloca = true;
2178 if (flags & ECF_RETURNS_TWICE)
2179 cfun->calls_setjmp = true;
2180 }
2181
2182
2183 /* Clear flags set by notice_special_calls. Used by dead code removal
2184 to update the flags. */
2185
2186 void
2187 clear_special_calls (void)
2188 {
2189 cfun->calls_alloca = false;
2190 cfun->calls_setjmp = false;
2191 }
2192
2193 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2194
2195 static void
2196 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2197 {
2198 /* Since this block is no longer reachable, we can just delete all
2199 of its PHI nodes. */
2200 remove_phi_nodes (bb);
2201
2202 /* Remove edges to BB's successors. */
2203 while (EDGE_COUNT (bb->succs) > 0)
2204 remove_edge (EDGE_SUCC (bb, 0));
2205 }
2206
2207
2208 /* Remove statements of basic block BB. */
2209
2210 static void
2211 remove_bb (basic_block bb)
2212 {
2213 gimple_stmt_iterator i;
2214
2215 if (dump_file)
2216 {
2217 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2218 if (dump_flags & TDF_DETAILS)
2219 {
2220 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2221 fprintf (dump_file, "\n");
2222 }
2223 }
2224
2225 if (current_loops)
2226 {
2227 class loop *loop = bb->loop_father;
2228
2229 /* If a loop gets removed, clean up the information associated
2230 with it. */
2231 if (loop->latch == bb
2232 || loop->header == bb)
2233 free_numbers_of_iterations_estimates (loop);
2234 }
2235
2236 /* Remove all the instructions in the block. */
2237 if (bb_seq (bb) != NULL)
2238 {
2239 /* Walk backwards so as to get a chance to substitute all
2240 released DEFs into debug stmts. See
2241 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2242 details. */
2243 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2244 {
2245 gimple *stmt = gsi_stmt (i);
2246 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2247 if (label_stmt
2248 && (FORCED_LABEL (gimple_label_label (label_stmt))
2249 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2250 {
2251 basic_block new_bb;
2252 gimple_stmt_iterator new_gsi;
2253
2254 /* A non-reachable non-local label may still be referenced.
2255 But it no longer needs to carry the extra semantics of
2256 non-locality. */
2257 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2258 {
2259 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2260 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2261 }
2262
2263 new_bb = bb->prev_bb;
2264 /* Don't move any labels into ENTRY block. */
2265 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2266 {
2267 new_bb = single_succ (new_bb);
2268 gcc_assert (new_bb != bb);
2269 }
2270 new_gsi = gsi_after_labels (new_bb);
2271 gsi_remove (&i, false);
2272 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2273 }
2274 else
2275 {
2276 /* Release SSA definitions. */
2277 release_defs (stmt);
2278 gsi_remove (&i, true);
2279 }
2280
2281 if (gsi_end_p (i))
2282 i = gsi_last_bb (bb);
2283 else
2284 gsi_prev (&i);
2285 }
2286 }
2287
2288 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2289 bb->il.gimple.seq = NULL;
2290 bb->il.gimple.phi_nodes = NULL;
2291 }
2292
2293
2294 /* Given a basic block BB and a value VAL for use in the final statement
2295 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2296 the edge that will be taken out of the block.
2297 If VAL is NULL_TREE, then the current value of the final statement's
2298 predicate or index is used.
2299 If the value does not match a unique edge, NULL is returned. */
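
/* A small sketch: if BB ends in "if (x_1 > 2)" and VAL is
   integer_one_node, the true edge out of BB is returned; if BB ends in
   a GIMPLE_SWITCH and VAL is an INTEGER_CST, the edge leading to the
   matching case label is returned.  */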
2300
2301 edge
2302 find_taken_edge (basic_block bb, tree val)
2303 {
2304 gimple *stmt;
2305
2306 stmt = last_stmt (bb);
2307
2308 /* Handle ENTRY and EXIT. */
2309 if (!stmt)
2310 return NULL;
2311
2312 if (gimple_code (stmt) == GIMPLE_COND)
2313 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2314
2315 if (gimple_code (stmt) == GIMPLE_SWITCH)
2316 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2317
2318 if (computed_goto_p (stmt))
2319 {
2320 /* Only optimize if the argument is a label; if the argument is
2321 not a label then we cannot construct a proper CFG.
2322
2323 It may be the case that we only need to allow the LABEL_REF to
2324 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2325 appear inside a LABEL_EXPR just to be safe. */
2326 if (val
2327 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2328 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2329 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2330 }
2331
2332 /* Otherwise we only know the taken successor edge if it's unique. */
2333 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2334 }
2335
2336 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2337 statement, determine which of the outgoing edges will be taken out of the
2338 block. Return NULL if either edge may be taken. */
2339
2340 static edge
2341 find_taken_edge_computed_goto (basic_block bb, tree val)
2342 {
2343 basic_block dest;
2344 edge e = NULL;
2345
2346 dest = label_to_block (cfun, val);
2347 if (dest)
2348 e = find_edge (bb, dest);
2349
2350 /* It's possible for find_edge to return NULL here on invalid code
2351 that abuses the labels-as-values extension (e.g. code that attempts to
2352 jump *between* functions via stored labels-as-values; PR 84136).
2353 If so, then we simply return NULL for the edge.
2354 We don't currently have a way of detecting such invalid code, so we
2355 can't assert that it was the case when a NULL edge occurs here. */
2356
2357 return e;
2358 }
2359
2360 /* Given COND_STMT and a constant value VAL for use as the predicate,
2361 determine which of the two edges will be taken out of
2362 the statement's block. Return NULL if either edge may be taken.
2363 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2364 is used. */
2365
2366 static edge
2367 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2368 {
2369 edge true_edge, false_edge;
2370
2371 if (val == NULL_TREE)
2372 {
2373 /* Use the current value of the predicate. */
2374 if (gimple_cond_true_p (cond_stmt))
2375 val = integer_one_node;
2376 else if (gimple_cond_false_p (cond_stmt))
2377 val = integer_zero_node;
2378 else
2379 return NULL;
2380 }
2381 else if (TREE_CODE (val) != INTEGER_CST)
2382 return NULL;
2383
2384 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2385 &true_edge, &false_edge);
2386
2387 return (integer_zerop (val) ? false_edge : true_edge);
2388 }
2389
2390 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2391 which edge will be taken out of the statement's block. Return NULL if any
2392 edge may be taken.
2393 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2394 is used. */
2395
2396 edge
2397 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2398 {
2399 basic_block dest_bb;
2400 edge e;
2401 tree taken_case;
2402
2403 if (gimple_switch_num_labels (switch_stmt) == 1)
2404 taken_case = gimple_switch_default_label (switch_stmt);
2405 else
2406 {
2407 if (val == NULL_TREE)
2408 val = gimple_switch_index (switch_stmt);
2409 if (TREE_CODE (val) != INTEGER_CST)
2410 return NULL;
2411 else
2412 taken_case = find_case_label_for_value (switch_stmt, val);
2413 }
2414 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2415
2416 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2417 gcc_assert (e);
2418 return e;
2419 }
2420
2421
2422 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2423 We can make optimal use here of the fact that the case labels are
2424 sorted: We can do a binary search for a case matching VAL. */
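
/* As a hypothetical example, given the sorted case vector

     default:  case 1:  case 4 ... 6:  case 9:

   a lookup with VAL == 5 narrows the [low, high) window onto the range
   "case 4 ... 6" and returns it, while VAL == 7 matches nothing and
   falls back to the default case.  */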
2425
2426 tree
2427 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2428 {
2429 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2430 tree default_case = gimple_switch_default_label (switch_stmt);
2431
2432 for (low = 0, high = n; high - low > 1; )
2433 {
2434 size_t i = (high + low) / 2;
2435 tree t = gimple_switch_label (switch_stmt, i);
2436 int cmp;
2437
2438 /* Cache the result of comparing CASE_LOW and val. */
2439 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2440
2441 if (cmp > 0)
2442 high = i;
2443 else
2444 low = i;
2445
2446 if (CASE_HIGH (t) == NULL)
2447 {
2448 /* A single-valued case label. */
2449 if (cmp == 0)
2450 return t;
2451 }
2452 else
2453 {
2454 /* A case range. We can only handle integer ranges. */
2455 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2456 return t;
2457 }
2458 }
2459
2460 return default_case;
2461 }
2462
2463
2464 /* Dump a basic block on stderr. */
2465
2466 void
2467 gimple_debug_bb (basic_block bb)
2468 {
2469 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2470 }
2471
2472
2473 /* Dump basic block with index N on stderr. */
2474
2475 basic_block
2476 gimple_debug_bb_n (int n)
2477 {
2478 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2479 return BASIC_BLOCK_FOR_FN (cfun, n);
2480 }
2481
2482
2483 /* Dump the CFG on stderr.
2484
2485 FLAGS are the same used by the tree dumping functions
2486 (see TDF_* in dumpfile.h). */
2487
2488 void
2489 gimple_debug_cfg (dump_flags_t flags)
2490 {
2491 gimple_dump_cfg (stderr, flags);
2492 }
2493
2494
2495 /* Dump the program showing basic block boundaries on the given FILE.
2496
2497 FLAGS are the same used by the tree dumping functions (see TDF_* in
2498 tree.h). */
2499
2500 void
2501 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2502 {
2503 if (flags & TDF_DETAILS)
2504 {
2505 dump_function_header (file, current_function_decl, flags);
2506 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2507 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2508 last_basic_block_for_fn (cfun));
2509
2510 brief_dump_cfg (file, flags);
2511 fprintf (file, "\n");
2512 }
2513
2514 if (flags & TDF_STATS)
2515 dump_cfg_stats (file);
2516
2517 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2518 }
2519
2520
2521 /* Dump CFG statistics on FILE. */
2522
2523 void
2524 dump_cfg_stats (FILE *file)
2525 {
2526 static long max_num_merged_labels = 0;
2527 unsigned long size, total = 0;
2528 long num_edges;
2529 basic_block bb;
2530 const char * const fmt_str = "%-30s%-13s%12s\n";
2531 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2532 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2533 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2534 const char *funcname = current_function_name ();
2535
2536 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2537
2538 fprintf (file, "---------------------------------------------------------\n");
2539 fprintf (file, fmt_str, "", " Number of ", "Memory");
2540 fprintf (file, fmt_str, "", " instances ", "used ");
2541 fprintf (file, "---------------------------------------------------------\n");
2542
2543 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2544 total += size;
2545 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2546 SIZE_AMOUNT (size));
2547
2548 num_edges = 0;
2549 FOR_EACH_BB_FN (bb, cfun)
2550 num_edges += EDGE_COUNT (bb->succs);
2551 size = num_edges * sizeof (class edge_def);
2552 total += size;
2553 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2554
2555 fprintf (file, "---------------------------------------------------------\n");
2556 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2557 SIZE_AMOUNT (total));
2558 fprintf (file, "---------------------------------------------------------\n");
2559 fprintf (file, "\n");
2560
2561 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2562 max_num_merged_labels = cfg_stats.num_merged_labels;
2563
2564 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2565 cfg_stats.num_merged_labels, max_num_merged_labels);
2566
2567 fprintf (file, "\n");
2568 }
2569
2570
2571 /* Dump CFG statistics on stderr. Keep extern so that it's always
2572 linked in the final executable. */
2573
2574 DEBUG_FUNCTION void
2575 debug_cfg_stats (void)
2576 {
2577 dump_cfg_stats (stderr);
2578 }
2579
2580 /*---------------------------------------------------------------------------
2581 Miscellaneous helpers
2582 ---------------------------------------------------------------------------*/
2583
2584 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2585 flow. Transfers of control flow associated with EH are excluded. */
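
/* The intent, sketched on a hypothetical function: when the current
   function contains a setjmp call or a nonlocal label, a call like

     foo ();

   may longjmp or perform a nonlocal goto back into this function, so
   it can transfer control abnormally -- unless the call has no side
   effects or the callee is known to be leaf.  */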
2586
2587 static bool
2588 call_can_make_abnormal_goto (gimple *t)
2589 {
2590 /* If the function has no non-local labels and doesn't call setjmp,
2591 then a call cannot make an abnormal transfer of control.
2592 if (!cfun->has_nonlocal_label
2593 && !cfun->calls_setjmp)
2594 return false;
2595
2596 /* Likewise if the call has no side effects. */
2597 if (!gimple_has_side_effects (t))
2598 return false;
2599
2600 /* Likewise if the called function is leaf. */
2601 if (gimple_call_flags (t) & ECF_LEAF)
2602 return false;
2603
2604 return true;
2605 }
2606
2607
2608 /* Return true if T can make an abnormal transfer of control flow.
2609 Transfers of control flow associated with EH are excluded. */
2610
2611 bool
2612 stmt_can_make_abnormal_goto (gimple *t)
2613 {
2614 if (computed_goto_p (t))
2615 return true;
2616 if (is_gimple_call (t))
2617 return call_can_make_abnormal_goto (t);
2618 return false;
2619 }
2620
2621
2622 /* Return true if T represents a stmt that always transfers control. */
2623
2624 bool
2625 is_ctrl_stmt (gimple *t)
2626 {
2627 switch (gimple_code (t))
2628 {
2629 case GIMPLE_COND:
2630 case GIMPLE_SWITCH:
2631 case GIMPLE_GOTO:
2632 case GIMPLE_RETURN:
2633 case GIMPLE_RESX:
2634 return true;
2635 default:
2636 return false;
2637 }
2638 }
2639
2640
2641 /* Return true if T is a statement that may alter the flow of control
2642 (e.g., a call to a non-returning function). */
2643
2644 bool
2645 is_ctrl_altering_stmt (gimple *t)
2646 {
2647 gcc_assert (t);
2648
2649 switch (gimple_code (t))
2650 {
2651 case GIMPLE_CALL:
2652 /* The per-stmt call flag indicates whether the call could alter
2653 control flow.
2654 if (gimple_call_ctrl_altering_p (t))
2655 return true;
2656 break;
2657
2658 case GIMPLE_EH_DISPATCH:
2659 /* EH_DISPATCH branches to the individual catch handlers at
2660 this level of a try or allowed-exceptions region. It can
2661 fallthru to the next statement as well. */
2662 return true;
2663
2664 case GIMPLE_ASM:
2665 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2666 return true;
2667 break;
2668
2669 CASE_GIMPLE_OMP:
2670 /* OpenMP directives alter control flow. */
2671 return true;
2672
2673 case GIMPLE_TRANSACTION:
2674 /* A transaction start alters control flow. */
2675 return true;
2676
2677 default:
2678 break;
2679 }
2680
2681 /* If a statement can throw, it alters control flow. */
2682 return stmt_can_throw_internal (cfun, t);
2683 }
2684
2685
2686 /* Return true if T is a simple local goto. */
2687
2688 bool
2689 simple_goto_p (gimple *t)
2690 {
2691 return (gimple_code (t) == GIMPLE_GOTO
2692 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2693 }
2694
2695
2696 /* Return true if STMT should start a new basic block. PREV_STMT is
2697 the statement preceding STMT. It is used when STMT is a label or a
2698 case label. Labels should only start a new basic block if their
2699 previous statement wasn't a label. Otherwise, a sequence of labels
2700 would generate unnecessary basic blocks that only contain a single
2701 label. */
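
/* E.g. in the (hypothetical) sequence

     <artificial L1>:
     <artificial L2>:
       x_1 = ...;

   only the first label starts a new block; the second is typically
   merged into it and counted in cfg_stats.num_merged_labels, since no
   control flow can pass between consecutive labels.  */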
2702
2703 static inline bool
2704 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2705 {
2706 if (stmt == NULL)
2707 return false;
2708
2709 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2710 any nondebug stmts in the block. We don't want to start another
2711 block in this case: the debug stmt will already have started the
2712 one STMT would start if we weren't outputting debug stmts. */
2713 if (prev_stmt && is_gimple_debug (prev_stmt))
2714 return false;
2715
2716 /* Labels start a new basic block only if the preceding statement
2717 wasn't a label of the same type. This prevents the creation of
2718 consecutive blocks that have nothing but a single label. */
2719 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2720 {
2721 /* Nonlocal and computed GOTO targets always start a new block. */
2722 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2723 || FORCED_LABEL (gimple_label_label (label_stmt)))
2724 return true;
2725
2726 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2727 {
2728 if (DECL_NONLOCAL (gimple_label_label (plabel))
2729 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2730 return true;
2731
2732 cfg_stats.num_merged_labels++;
2733 return false;
2734 }
2735 else
2736 return true;
2737 }
2738 else if (gimple_code (stmt) == GIMPLE_CALL)
2739 {
2740 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2741 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2742 start a new block. */
2743 return true;
2744 if (gimple_call_internal_p (stmt, IFN_PHI)
2745 && prev_stmt
2746 && gimple_code (prev_stmt) != GIMPLE_LABEL
2747 && (gimple_code (prev_stmt) != GIMPLE_CALL
2748 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2749 /* PHI nodes start a new block unless preceded by a label
2750 or another PHI. */
2751 return true;
2752 }
2753
2754 return false;
2755 }
2756
2757
2758 /* Return true if T should end a basic block. */
2759
2760 bool
2761 stmt_ends_bb_p (gimple *t)
2762 {
2763 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2764 }
2765
2766 /* Remove block annotations and other data structures. */
2767
2768 void
2769 delete_tree_cfg_annotations (struct function *fn)
2770 {
2771 vec_free (label_to_block_map_for_fn (fn));
2772 }
2773
2774 /* Return the virtual phi in BB. */
2775
2776 gphi *
2777 get_virtual_phi (basic_block bb)
2778 {
2779 for (gphi_iterator gsi = gsi_start_phis (bb);
2780 !gsi_end_p (gsi);
2781 gsi_next (&gsi))
2782 {
2783 gphi *phi = gsi.phi ();
2784
2785 if (virtual_operand_p (PHI_RESULT (phi)))
2786 return phi;
2787 }
2788
2789 return NULL;
2790 }
2791
2792 /* Return the first statement in basic block BB. */
2793
2794 gimple *
2795 first_stmt (basic_block bb)
2796 {
2797 gimple_stmt_iterator i = gsi_start_bb (bb);
2798 gimple *stmt = NULL;
2799
2800 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2801 {
2802 gsi_next (&i);
2803 stmt = NULL;
2804 }
2805 return stmt;
2806 }
2807
2808 /* Return the first non-label statement in basic block BB. */
2809
2810 static gimple *
2811 first_non_label_stmt (basic_block bb)
2812 {
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817 }
2818
2819 /* Return the last statement in basic block BB. */
2820
2821 gimple *
2822 last_stmt (basic_block bb)
2823 {
2824 gimple_stmt_iterator i = gsi_last_bb (bb);
2825 gimple *stmt = NULL;
2826
2827 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2828 {
2829 gsi_prev (&i);
2830 stmt = NULL;
2831 }
2832 return stmt;
2833 }
2834
2835 /* Return the last statement of an otherwise empty block. Return NULL
2836 if the block is totally empty, or if it contains more than one
2837 statement. */
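
/* For instance, a block containing just

     L5:
       return x_1;

   yields the GIMPLE_RETURN, whereas a totally empty block or one with
   two executable statements yields NULL.  */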
2838
2839 gimple *
2840 last_and_only_stmt (basic_block bb)
2841 {
2842 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2843 gimple *last, *prev;
2844
2845 if (gsi_end_p (i))
2846 return NULL;
2847
2848 last = gsi_stmt (i);
2849 gsi_prev_nondebug (&i);
2850 if (gsi_end_p (i))
2851 return last;
2852
2853 /* Empty statements should no longer appear in the instruction stream.
2854 Everything that might have appeared before should be deleted by
2855 remove_useless_stmts, and the optimizers should just gsi_remove
2856 instead of smashing with build_empty_stmt.
2857
2858 Thus the only thing that should appear here in a block containing
2859 one executable statement is a label. */
2860 prev = gsi_stmt (i);
2861 if (gimple_code (prev) == GIMPLE_LABEL)
2862 return last;
2863 else
2864 return NULL;
2865 }
2866
2867 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2868
2869 static void
2870 reinstall_phi_args (edge new_edge, edge old_edge)
2871 {
2872 edge_var_map *vm;
2873 int i;
2874 gphi_iterator phis;
2875
2876 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2877 if (!v)
2878 return;
2879
2880 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2881 v->iterate (i, &vm) && !gsi_end_p (phis);
2882 i++, gsi_next (&phis))
2883 {
2884 gphi *phi = phis.phi ();
2885 tree result = redirect_edge_var_map_result (vm);
2886 tree arg = redirect_edge_var_map_def (vm);
2887
2888 gcc_assert (result == gimple_phi_result (phi));
2889
2890 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2891 }
2892
2893 redirect_edge_var_map_clear (old_edge);
2894 }
2895
2896 /* Returns the basic block after which the new basic block created
2897 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2898 near its "logical" location. This is of most help to humans looking
2899 at debugging dumps. */
2900
2901 basic_block
2902 split_edge_bb_loc (edge edge_in)
2903 {
2904 basic_block dest = edge_in->dest;
2905 basic_block dest_prev = dest->prev_bb;
2906
2907 if (dest_prev)
2908 {
2909 edge e = find_edge (dest_prev, dest);
2910 if (e && !(e->flags & EDGE_COMPLEX))
2911 return edge_in->src;
2912 }
2913 return dest_prev;
2914 }
2915
2916 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2917 Abort on abnormal edges. */
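
/* A sketch of the transformation for an edge A->B:

     A ---> B    becomes    A ---> NEW ---> B

   where NEW is an empty block carrying the count of the original edge
   and reaching B through a single fallthru edge; PHI arguments queued
   on the redirected edge are reinstalled on the new edge into B.  */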
2918
2919 static basic_block
2920 gimple_split_edge (edge edge_in)
2921 {
2922 basic_block new_bb, after_bb, dest;
2923 edge new_edge, e;
2924
2925 /* Abnormal edges cannot be split. */
2926 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2927
2928 dest = edge_in->dest;
2929
2930 after_bb = split_edge_bb_loc (edge_in);
2931
2932 new_bb = create_empty_bb (after_bb);
2933 new_bb->count = edge_in->count ();
2934
2935 e = redirect_edge_and_branch (edge_in, new_bb);
2936 gcc_assert (e == edge_in);
2937
2938 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2939 reinstall_phi_args (new_edge, e);
2940
2941 return new_bb;
2942 }
2943
2944
2945 /* Verify properties of the address expression T whose base should be
2946 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2947
2948 static bool
2949 verify_address (tree t, bool verify_addressable)
2950 {
2951 bool old_constant;
2952 bool old_side_effects;
2953 bool new_constant;
2954 bool new_side_effects;
2955
2956 old_constant = TREE_CONSTANT (t);
2957 old_side_effects = TREE_SIDE_EFFECTS (t);
2958
2959 recompute_tree_invariant_for_addr_expr (t);
2960 new_side_effects = TREE_SIDE_EFFECTS (t);
2961 new_constant = TREE_CONSTANT (t);
2962
2963 if (old_constant != new_constant)
2964 {
2965 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2966 return true;
2967 }
2968 if (old_side_effects != new_side_effects)
2969 {
2970 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2971 return true;
2972 }
2973
2974 tree base = TREE_OPERAND (t, 0);
2975 while (handled_component_p (base))
2976 base = TREE_OPERAND (base, 0);
2977
2978 if (!(VAR_P (base)
2979 || TREE_CODE (base) == PARM_DECL
2980 || TREE_CODE (base) == RESULT_DECL))
2981 return false;
2982
2983 if (DECL_GIMPLE_REG_P (base))
2984 {
2985 error ("%<DECL_GIMPLE_REG_P%> set on a variable with address taken");
2986 return true;
2987 }
2988
2989 if (verify_addressable && !TREE_ADDRESSABLE (base))
2990 {
2991 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
2992 return true;
2993 }
2994
2995 return false;
2996 }
2997
2998
2999 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3000 Returns true if there is an error, otherwise false. */
3001
3002 static bool
3003 verify_types_in_gimple_min_lval (tree expr)
3004 {
3005 tree op;
3006
3007 if (is_gimple_id (expr))
3008 return false;
3009
3010 if (TREE_CODE (expr) != TARGET_MEM_REF
3011 && TREE_CODE (expr) != MEM_REF)
3012 {
3013 error ("invalid expression for min lvalue");
3014 return true;
3015 }
3016
3017 /* TARGET_MEM_REFs are strange beasts. */
3018 if (TREE_CODE (expr) == TARGET_MEM_REF)
3019 return false;
3020
3021 op = TREE_OPERAND (expr, 0);
3022 if (!is_gimple_val (op))
3023 {
3024 error ("invalid operand in indirect reference");
3025 debug_generic_stmt (op);
3026 return true;
3027 }
3028 /* Memory references now generally can involve a value conversion. */
3029
3030 return false;
3031 }
3032
3033 /* Verify if EXPR is a valid GIMPLE reference expression. If
3034 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3035 if there is an error, otherwise false. */
3036
3037 static bool
3038 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3039 {
3040 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3041
3042 if (TREE_CODE (expr) == REALPART_EXPR
3043 || TREE_CODE (expr) == IMAGPART_EXPR
3044 || TREE_CODE (expr) == BIT_FIELD_REF)
3045 {
3046 tree op = TREE_OPERAND (expr, 0);
3047 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3048 {
3049 error ("non-scalar %qs", code_name);
3050 return true;
3051 }
3052
3053 if (TREE_CODE (expr) == BIT_FIELD_REF)
3054 {
3055 tree t1 = TREE_OPERAND (expr, 1);
3056 tree t2 = TREE_OPERAND (expr, 2);
3057 poly_uint64 size, bitpos;
3058 if (!poly_int_tree_p (t1, &size)
3059 || !poly_int_tree_p (t2, &bitpos)
3060 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3061 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3062 {
3063 error ("invalid position or size operand to %qs", code_name);
3064 return true;
3065 }
3066 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3067 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3068 {
3069 error ("integral result type precision does not match "
3070 "field size of %qs", code_name);
3071 return true;
3072 }
3073 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3074 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3075 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3076 size))
3077 {
3078 error ("mode size of non-integral result does not "
3079 "match field size of %qs",
3080 code_name);
3081 return true;
3082 }
3083 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3084 && !type_has_mode_precision_p (TREE_TYPE (op)))
3085 {
3086 error ("%qs of non-mode-precision operand", code_name);
3087 return true;
3088 }
3089 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3090 && maybe_gt (size + bitpos,
3091 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3092 {
3093 error ("position plus size exceeds size of referenced object in "
3094 "%qs", code_name);
3095 return true;
3096 }
3097 }
3098
3099 if ((TREE_CODE (expr) == REALPART_EXPR
3100 || TREE_CODE (expr) == IMAGPART_EXPR)
3101 && !useless_type_conversion_p (TREE_TYPE (expr),
3102 TREE_TYPE (TREE_TYPE (op))))
3103 {
3104 error ("type mismatch in %qs reference", code_name);
3105 debug_generic_stmt (TREE_TYPE (expr));
3106 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3107 return true;
3108 }
3109 expr = op;
3110 }
3111
3112 while (handled_component_p (expr))
3113 {
3114 code_name = get_tree_code_name (TREE_CODE (expr));
3115
3116 if (TREE_CODE (expr) == REALPART_EXPR
3117 || TREE_CODE (expr) == IMAGPART_EXPR
3118 || TREE_CODE (expr) == BIT_FIELD_REF)
3119 {
3120 error ("non-top-level %qs", code_name);
3121 return true;
3122 }
3123
3124 tree op = TREE_OPERAND (expr, 0);
3125
3126 if (TREE_CODE (expr) == ARRAY_REF
3127 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3128 {
3129 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3130 || (TREE_OPERAND (expr, 2)
3131 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3132 || (TREE_OPERAND (expr, 3)
3133 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3134 {
3135 error ("invalid operands to %qs", code_name);
3136 debug_generic_stmt (expr);
3137 return true;
3138 }
3139 }
3140
3141 /* Verify if the reference array element types are compatible. */
3142 if (TREE_CODE (expr) == ARRAY_REF
3143 && !useless_type_conversion_p (TREE_TYPE (expr),
3144 TREE_TYPE (TREE_TYPE (op))))
3145 {
3146 error ("type mismatch in %qs", code_name);
3147 debug_generic_stmt (TREE_TYPE (expr));
3148 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3149 return true;
3150 }
3151 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3152 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3153 TREE_TYPE (TREE_TYPE (op))))
3154 {
3155 error ("type mismatch in %qs", code_name);
3156 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3157 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3158 return true;
3159 }
3160
3161 if (TREE_CODE (expr) == COMPONENT_REF)
3162 {
3163 if (TREE_OPERAND (expr, 2)
3164 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3165 {
3166 error ("invalid %qs offset operator", code_name);
3167 return true;
3168 }
3169 if (!useless_type_conversion_p (TREE_TYPE (expr),
3170 TREE_TYPE (TREE_OPERAND (expr, 1))))
3171 {
3172 error ("type mismatch in %qs", code_name);
3173 debug_generic_stmt (TREE_TYPE (expr));
3174 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3175 return true;
3176 }
3177 }
3178
3179 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3180 {
3181 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3182 that their operand is not an SSA name or an invariant when
3183 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3184 bug). Otherwise there is nothing to verify, gross mismatches at
3185 most invoke undefined behavior. */
3186 if (require_lvalue
3187 && (TREE_CODE (op) == SSA_NAME
3188 || is_gimple_min_invariant (op)))
3189 {
3190 error ("conversion of %qs on the left hand side of %qs",
3191 get_tree_code_name (TREE_CODE (op)), code_name);
3192 debug_generic_stmt (expr);
3193 return true;
3194 }
3195 else if (TREE_CODE (op) == SSA_NAME
3196 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3197 {
3198 error ("conversion of register to a different size in %qs",
3199 code_name);
3200 debug_generic_stmt (expr);
3201 return true;
3202 }
3203 else if (!handled_component_p (op))
3204 return false;
3205 }
3206
3207 expr = op;
3208 }
3209
3210 code_name = get_tree_code_name (TREE_CODE (expr));
3211
3212 if (TREE_CODE (expr) == MEM_REF)
3213 {
3214 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3215 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3216 && verify_address (TREE_OPERAND (expr, 0), false)))
3217 {
3218 error ("invalid address operand in %qs", code_name);
3219 debug_generic_stmt (expr);
3220 return true;
3221 }
3222 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3223 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3224 {
3225 error ("invalid offset operand in %qs", code_name);
3226 debug_generic_stmt (expr);
3227 return true;
3228 }
3229 }
3230 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3231 {
3232 if (!TMR_BASE (expr)
3233 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3234 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3235 && verify_address (TMR_BASE (expr), false)))
3236 {
3237 error ("invalid address operand in %qs", code_name);
3238 return true;
3239 }
3240 if (!TMR_OFFSET (expr)
3241 || !poly_int_tree_p (TMR_OFFSET (expr))
3242 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3243 {
3244 error ("invalid offset operand in %qs", code_name);
3245 debug_generic_stmt (expr);
3246 return true;
3247 }
3248 }
3249 else if (TREE_CODE (expr) == INDIRECT_REF)
3250 {
3251 error ("%qs in gimple IL", code_name);
3252 debug_generic_stmt (expr);
3253 return true;
3254 }
3255
3256 return ((require_lvalue || !is_gimple_min_invariant (expr))
3257 && verify_types_in_gimple_min_lval (expr));
3258 }
3259
3260 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3261 list of pointer-to types that is trivially convertible to DEST. */
3262
3263 static bool
3264 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3265 {
3266 tree src;
3267
3268 if (!TYPE_POINTER_TO (src_obj))
3269 return true;
3270
3271 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3272 if (useless_type_conversion_p (dest, src))
3273 return true;
3274
3275 return false;
3276 }
3277
3278 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3279 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3280
3281 static bool
3282 valid_fixed_convert_types_p (tree type1, tree type2)
3283 {
3284 return (FIXED_POINT_TYPE_P (type1)
3285 && (INTEGRAL_TYPE_P (type2)
3286 || SCALAR_FLOAT_TYPE_P (type2)
3287 || FIXED_POINT_TYPE_P (type2)));
3288 }
3289
3290 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3291 is a problem, otherwise false. */
3292
3293 static bool
3294 verify_gimple_call (gcall *stmt)
3295 {
3296 tree fn = gimple_call_fn (stmt);
3297 tree fntype, fndecl;
3298 unsigned i;
3299
3300 if (gimple_call_internal_p (stmt))
3301 {
3302 if (fn)
3303 {
3304 error ("gimple call has two targets");
3305 debug_generic_stmt (fn);
3306 return true;
3307 }
3308 }
3309 else
3310 {
3311 if (!fn)
3312 {
3313 error ("gimple call has no target");
3314 return true;
3315 }
3316 }
3317
3318 if (fn && !is_gimple_call_addr (fn))
3319 {
3320 error ("invalid function in gimple call");
3321 debug_generic_stmt (fn);
3322 return true;
3323 }
3324
3325 if (fn
3326 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3327 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3328 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3329 {
3330 error ("non-function in gimple call");
3331 return true;
3332 }
3333
3334 fndecl = gimple_call_fndecl (stmt);
3335 if (fndecl
3336 && TREE_CODE (fndecl) == FUNCTION_DECL
3337 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3338 && !DECL_PURE_P (fndecl)
3339 && !TREE_READONLY (fndecl))
3340 {
3341 error ("invalid pure const state for function");
3342 return true;
3343 }
3344
3345 tree lhs = gimple_call_lhs (stmt);
3346 if (lhs
3347 && (!is_gimple_lvalue (lhs)
3348 || verify_types_in_gimple_reference (lhs, true)))
3349 {
3350 error ("invalid LHS in gimple call");
3351 return true;
3352 }
3353
3354 if (gimple_call_ctrl_altering_p (stmt)
3355 && gimple_call_noreturn_p (stmt)
3356 && should_remove_lhs_p (lhs))
3357 {
3358 error ("LHS in %<noreturn%> call");
3359 return true;
3360 }
3361
3362 fntype = gimple_call_fntype (stmt);
3363 if (fntype
3364 && lhs
3365 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3366 /* ??? At least C++ misses conversions at assignments from
3367 void * call results.
3368 For now simply allow arbitrary pointer type conversions. */
3369 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3370 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3371 {
3372 error ("invalid conversion in gimple call");
3373 debug_generic_stmt (TREE_TYPE (lhs));
3374 debug_generic_stmt (TREE_TYPE (fntype));
3375 return true;
3376 }
3377
3378 if (gimple_call_chain (stmt)
3379 && !is_gimple_val (gimple_call_chain (stmt)))
3380 {
3381 error ("invalid static chain in gimple call");
3382 debug_generic_stmt (gimple_call_chain (stmt));
3383 return true;
3384 }
3385
3386 /* If there is a static chain argument, the call should either be
3387 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3388 if (gimple_call_chain (stmt)
3389 && fndecl
3390 && !DECL_STATIC_CHAIN (fndecl))
3391 {
3392 error ("static chain with function that doesn%'t use one");
3393 return true;
3394 }
3395
3396 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3397 {
3398 switch (DECL_FUNCTION_CODE (fndecl))
3399 {
3400 case BUILT_IN_UNREACHABLE:
3401 case BUILT_IN_TRAP:
3402 if (gimple_call_num_args (stmt) > 0)
3403 {
3404 /* Built-in unreachable with parameters might not be caught by
3405 undefined behavior sanitizer. Front-ends do check that users do
3406 not call them that way, but we also produce calls to
3407 __builtin_unreachable internally, for example when IPA figures
3408 out a call cannot happen in a legal program. In such cases,
3409 we must make sure arguments are stripped off. */
3410 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3411 "with arguments");
3412 return true;
3413 }
3414 break;
3415 default:
3416 break;
3417 }
3418 }
3419
3420 /* ??? The C frontend passes unpromoted arguments in case it
3421 didn't see a function declaration before the call. So for now
3422 leave the call arguments mostly unverified. Once we gimplify
3423 unit-at-a-time we have a chance to fix this. */
3424
3425 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3426 {
3427 tree arg = gimple_call_arg (stmt, i);
3428 if ((is_gimple_reg_type (TREE_TYPE (arg))
3429 && !is_gimple_val (arg))
3430 || (!is_gimple_reg_type (TREE_TYPE (arg))
3431 && !is_gimple_lvalue (arg)))
3432 {
3433 error ("invalid argument to gimple call");
3434 debug_generic_expr (arg);
3435 return true;
3436 }
3437 }
3438
3439 return false;
3440 }
3441
3442 /* Verifies the gimple comparison with the result type TYPE and
3443 the operands OP0 and OP1; the comparison code is CODE. */
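
/* For example, "_1 = a_2 < b_3" with two int operands and a
   boolean-typed result is accepted; mixing an int operand with a
   vector operand, or producing a vector result whose element count
   differs from the operands', is diagnosed below.  */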
3444
3445 static bool
3446 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3447 {
3448 tree op0_type = TREE_TYPE (op0);
3449 tree op1_type = TREE_TYPE (op1);
3450
3451 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3452 {
3453 error ("invalid operands in gimple comparison");
3454 return true;
3455 }
3456
3457 /* For comparisons we do not have the operation type as the
3458 effective type the comparison is carried out in. Instead
3459 we require that either the first operand is trivially
3460 convertible into the second, or the other way around.
3461 Because we special-case pointers to void we allow
3462 comparisons of pointers with the same mode as well. */
3463 if (!useless_type_conversion_p (op0_type, op1_type)
3464 && !useless_type_conversion_p (op1_type, op0_type)
3465 && (!POINTER_TYPE_P (op0_type)
3466 || !POINTER_TYPE_P (op1_type)
3467 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3468 {
3469 error ("mismatching comparison operand types");
3470 debug_generic_expr (op0_type);
3471 debug_generic_expr (op1_type);
3472 return true;
3473 }
3474
3475 /* The resulting type of a comparison may be an effective boolean type. */
3476 if (INTEGRAL_TYPE_P (type)
3477 && (TREE_CODE (type) == BOOLEAN_TYPE
3478 || TYPE_PRECISION (type) == 1))
3479 {
3480 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3481 || TREE_CODE (op1_type) == VECTOR_TYPE)
3482 && code != EQ_EXPR && code != NE_EXPR
3483 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3484 && !VECTOR_INTEGER_TYPE_P (op0_type))
3485 {
3486 error ("unsupported operation or type for vector comparison"
3487 " returning a boolean");
3488 debug_generic_expr (op0_type);
3489 debug_generic_expr (op1_type);
3490 return true;
3491 }
3492 }
3493 /* Or a boolean vector type with the same element count
3494 as the comparison operand types. */
3495 else if (TREE_CODE (type) == VECTOR_TYPE
3496 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3497 {
3498 if (TREE_CODE (op0_type) != VECTOR_TYPE
3499 || TREE_CODE (op1_type) != VECTOR_TYPE)
3500 {
3501 error ("non-vector operands in vector comparison");
3502 debug_generic_expr (op0_type);
3503 debug_generic_expr (op1_type);
3504 return true;
3505 }
3506
3507 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3508 TYPE_VECTOR_SUBPARTS (op0_type)))
3509 {
3510 error ("invalid vector comparison resulting type");
3511 debug_generic_expr (type);
3512 return true;
3513 }
3514 }
3515 else
3516 {
3517 error ("bogus comparison result type");
3518 debug_generic_expr (type);
3519 return true;
3520 }
3521
3522 return false;
3523 }
3524
3525 /* Verify a gimple assignment statement STMT with a unary rhs.
3526 Returns true if anything is wrong. */
3527
3528 static bool
3529 verify_gimple_assign_unary (gassign *stmt)
3530 {
3531 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3532 tree lhs = gimple_assign_lhs (stmt);
3533 tree lhs_type = TREE_TYPE (lhs);
3534 tree rhs1 = gimple_assign_rhs1 (stmt);
3535 tree rhs1_type = TREE_TYPE (rhs1);
3536
3537 if (!is_gimple_reg (lhs))
3538 {
3539 error ("non-register as LHS of unary operation");
3540 return true;
3541 }
3542
3543 if (!is_gimple_val (rhs1))
3544 {
3545 error ("invalid operand in unary operation");
3546 return true;
3547 }
3548
3549 const char* const code_name = get_tree_code_name (rhs_code);
3550
3551 /* First handle conversions. */
3552 switch (rhs_code)
3553 {
3554 CASE_CONVERT:
3555 {
3556 /* Allow conversions from pointer type to integral type only if
3557 there is no sign or zero extension involved.
3558 For targets where the precision of ptrofftype doesn't match that
3559 of pointers we need to allow arbitrary conversions to ptrofftype. */
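
/* As a hypothetical illustration: converting a pointer to an integer
   type no wider than the pointer involves no extension and is fine,
   while converting a 32-bit pointer to a 64-bit integer is rejected,
   unless the 64-bit type is the target's ptrofftype.  */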
3560 if ((POINTER_TYPE_P (lhs_type)
3561 && INTEGRAL_TYPE_P (rhs1_type))
3562 || (POINTER_TYPE_P (rhs1_type)
3563 && INTEGRAL_TYPE_P (lhs_type)
3564 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3565 || ptrofftype_p (lhs_type))))
3566 return false;
3567
3568 /* Allow conversion from integral to offset type and vice versa. */
3569 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3570 && INTEGRAL_TYPE_P (rhs1_type))
3571 || (INTEGRAL_TYPE_P (lhs_type)
3572 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3573 return false;
3574
3575 /* Otherwise assert we are converting between types of the
3576 same kind. */
3577 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3578 {
3579 error ("invalid types in nop conversion");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 return true;
3583 }
3584
3585 return false;
3586 }
3587
3588 case ADDR_SPACE_CONVERT_EXPR:
3589 {
3590 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3591 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3592 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3593 {
3594 error ("invalid types in address space conversion");
3595 debug_generic_expr (lhs_type);
3596 debug_generic_expr (rhs1_type);
3597 return true;
3598 }
3599
3600 return false;
3601 }
3602
3603 case FIXED_CONVERT_EXPR:
3604 {
3605 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3606 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3607 {
3608 error ("invalid types in fixed-point conversion");
3609 debug_generic_expr (lhs_type);
3610 debug_generic_expr (rhs1_type);
3611 return true;
3612 }
3613
3614 return false;
3615 }
3616
3617 case FLOAT_EXPR:
3618 {
3619 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3620 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3621 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3622 {
3623 error ("invalid types in conversion to floating-point");
3624 debug_generic_expr (lhs_type);
3625 debug_generic_expr (rhs1_type);
3626 return true;
3627 }
3628
3629 return false;
3630 }
3631
3632 case FIX_TRUNC_EXPR:
3633 {
3634 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3635 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3636 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3637 {
3638 error ("invalid types in conversion to integer");
3639 debug_generic_expr (lhs_type);
3640 debug_generic_expr (rhs1_type);
3641 return true;
3642 }
3643
3644 return false;
3645 }
3646
3647 case VEC_UNPACK_HI_EXPR:
3648 case VEC_UNPACK_LO_EXPR:
3649 case VEC_UNPACK_FLOAT_HI_EXPR:
3650 case VEC_UNPACK_FLOAT_LO_EXPR:
3651 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3652 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3653 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3654 || TREE_CODE (lhs_type) != VECTOR_TYPE
3655 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3656 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3657 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3658 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3659 || ((rhs_code == VEC_UNPACK_HI_EXPR
3660 || rhs_code == VEC_UNPACK_LO_EXPR)
3661 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3662 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3663 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3664 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3665 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3666 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3667 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3668 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3669 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3670 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3671 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3672 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3673 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3674 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3675 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3676 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3677 {
3678 error ("type mismatch in %qs expression", code_name);
3679 debug_generic_expr (lhs_type);
3680 debug_generic_expr (rhs1_type);
3681 return true;
3682 }
3683
3684 return false;
3685
3686 case NEGATE_EXPR:
3687 case ABS_EXPR:
3688 case BIT_NOT_EXPR:
3689 case PAREN_EXPR:
3690 case CONJ_EXPR:
3691 break;
3692
3693 case ABSU_EXPR:
3694 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3695 || !TYPE_UNSIGNED (lhs_type)
3696 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3697 || TYPE_UNSIGNED (rhs1_type)
3698 || element_precision (lhs_type) != element_precision (rhs1_type))
3699 {
3700 error ("invalid types for %qs", code_name);
3701 debug_generic_expr (lhs_type);
3702 debug_generic_expr (rhs1_type);
3703 return true;
3704 }
3705 return false;
3706
3707 case VEC_DUPLICATE_EXPR:
3708 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3709 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3710 {
3711 error ("%qs should be from a scalar to a like vector", code_name);
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 return true;
3715 }
3716 return false;
3717
3718 default:
3719 gcc_unreachable ();
3720 }
3721
3722 /* For the remaining codes assert there is no conversion involved. */
3723 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3724 {
3725 error ("non-trivial conversion in unary operation");
3726 debug_generic_expr (lhs_type);
3727 debug_generic_expr (rhs1_type);
3728 return true;
3729 }
3730
3731 return false;
3732 }
3733
3734 /* Verify a gimple assignment statement STMT with a binary rhs.
3735 Returns true if anything is wrong. */
3736
3737 static bool
3738 verify_gimple_assign_binary (gassign *stmt)
3739 {
3740 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3741 tree lhs = gimple_assign_lhs (stmt);
3742 tree lhs_type = TREE_TYPE (lhs);
3743 tree rhs1 = gimple_assign_rhs1 (stmt);
3744 tree rhs1_type = TREE_TYPE (rhs1);
3745 tree rhs2 = gimple_assign_rhs2 (stmt);
3746 tree rhs2_type = TREE_TYPE (rhs2);
3747
3748 if (!is_gimple_reg (lhs))
3749 {
3750 error ("non-register as LHS of binary operation");
3751 return true;
3752 }
3753
3754 if (!is_gimple_val (rhs1)
3755 || !is_gimple_val (rhs2))
3756 {
3757 error ("invalid operands in binary operation");
3758 return true;
3759 }
3760
3761 const char* const code_name = get_tree_code_name (rhs_code);
3762
3763 /* First handle operations that involve different types. */
3764 switch (rhs_code)
3765 {
3766 case COMPLEX_EXPR:
3767 {
3768 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3769 || !(INTEGRAL_TYPE_P (rhs1_type)
3770 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3771 || !(INTEGRAL_TYPE_P (rhs2_type)
3772 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3773 {
3774 error ("type mismatch in %qs", code_name);
3775 debug_generic_expr (lhs_type);
3776 debug_generic_expr (rhs1_type);
3777 debug_generic_expr (rhs2_type);
3778 return true;
3779 }
3780
3781 return false;
3782 }
3783
3784 case LSHIFT_EXPR:
3785 case RSHIFT_EXPR:
3786 case LROTATE_EXPR:
3787 case RROTATE_EXPR:
3788 {
3789 /* Shifts and rotates are ok on integral types, fixed point
3790 types and integer vector types. */
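/* E.g., schematically, x_1 = y_2 << 3 for scalars, v_1 = v_2 << s_3
   shifting every element of v_2 by the scalar count s_3, or a
   shift where the count is itself an integer vector giving a
   per-element count. */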
3791 if ((!INTEGRAL_TYPE_P (rhs1_type)
3792 && !FIXED_POINT_TYPE_P (rhs1_type)
3793 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3794 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3795 || (!INTEGRAL_TYPE_P (rhs2_type)
3796 /* Vector shifts of vectors are also ok. */
3797 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3798 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3799 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3800 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3801 || !useless_type_conversion_p (lhs_type, rhs1_type))
3802 {
3803 error ("type mismatch in %qs", code_name);
3804 debug_generic_expr (lhs_type);
3805 debug_generic_expr (rhs1_type);
3806 debug_generic_expr (rhs2_type);
3807 return true;
3808 }
3809
3810 return false;
3811 }
3812
3813 case WIDEN_LSHIFT_EXPR:
3814 {
3815 if (!INTEGRAL_TYPE_P (lhs_type)
3816 || !INTEGRAL_TYPE_P (rhs1_type)
3817 || TREE_CODE (rhs2) != INTEGER_CST
3818 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3819 {
3820 error ("type mismatch in %qs", code_name);
3821 debug_generic_expr (lhs_type);
3822 debug_generic_expr (rhs1_type);
3823 debug_generic_expr (rhs2_type);
3824 return true;
3825 }
3826
3827 return false;
3828 }
3829
3830 case VEC_WIDEN_LSHIFT_HI_EXPR:
3831 case VEC_WIDEN_LSHIFT_LO_EXPR:
3832 {
3833 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3834 || TREE_CODE (lhs_type) != VECTOR_TYPE
3835 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3836 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3837 || TREE_CODE (rhs2) != INTEGER_CST
3838 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3839 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3840 {
3841 error ("type mismatch in %qs", code_name);
3842 debug_generic_expr (lhs_type);
3843 debug_generic_expr (rhs1_type);
3844 debug_generic_expr (rhs2_type);
3845 return true;
3846 }
3847
3848 return false;
3849 }
3850
3851 case PLUS_EXPR:
3852 case MINUS_EXPR:
3853 {
3854 tree lhs_etype = lhs_type;
3855 tree rhs1_etype = rhs1_type;
3856 tree rhs2_etype = rhs2_type;
3857 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3858 {
3859 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3860 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3861 {
3862 error ("invalid non-vector operands to %qs", code_name);
3863 return true;
3864 }
3865 lhs_etype = TREE_TYPE (lhs_type);
3866 rhs1_etype = TREE_TYPE (rhs1_type);
3867 rhs2_etype = TREE_TYPE (rhs2_type);
3868 }
3869 if (POINTER_TYPE_P (lhs_etype)
3870 || POINTER_TYPE_P (rhs1_etype)
3871 || POINTER_TYPE_P (rhs2_etype))
3872 {
3873 error ("invalid (pointer) operands %qs", code_name);
3874 return true;
3875 }
3876
3877 /* Continue with generic binary expression handling. */
3878 break;
3879 }
3880
3881 case POINTER_PLUS_EXPR:
3882 {
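/* Pointer arithmetic is canonicalized to POINTER_PLUS_EXPR: the
   first operand is the pointer, the second a byte offset of
   sizetype, e.g. schematically p_1 = q_2 + 4 with 4 a sizetype
   constant. */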
3883 if (!POINTER_TYPE_P (rhs1_type)
3884 || !useless_type_conversion_p (lhs_type, rhs1_type)
3885 || !ptrofftype_p (rhs2_type))
3886 {
3887 error ("type mismatch in %qs", code_name);
3888 debug_generic_stmt (lhs_type);
3889 debug_generic_stmt (rhs1_type);
3890 debug_generic_stmt (rhs2_type);
3891 return true;
3892 }
3893
3894 return false;
3895 }
3896
3897 case POINTER_DIFF_EXPR:
3898 {
3899 if (!POINTER_TYPE_P (rhs1_type)
3900 || !POINTER_TYPE_P (rhs2_type)
3901 /* Because we special-case pointers to void we allow difference
3902 of arbitrary pointers with the same mode. */
3903 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3904 || TREE_CODE (lhs_type) != INTEGER_TYPE
3905 || TYPE_UNSIGNED (lhs_type)
3906 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3907 {
3908 error ("type mismatch in %qs", code_name);
3909 debug_generic_stmt (lhs_type);
3910 debug_generic_stmt (rhs1_type);
3911 debug_generic_stmt (rhs2_type);
3912 return true;
3913 }
3914
3915 return false;
3916 }
3917
3918 case TRUTH_ANDIF_EXPR:
3919 case TRUTH_ORIF_EXPR:
3920 case TRUTH_AND_EXPR:
3921 case TRUTH_OR_EXPR:
3922 case TRUTH_XOR_EXPR:
3923
3924 gcc_unreachable ();
3925
3926 case LT_EXPR:
3927 case LE_EXPR:
3928 case GT_EXPR:
3929 case GE_EXPR:
3930 case EQ_EXPR:
3931 case NE_EXPR:
3932 case UNORDERED_EXPR:
3933 case ORDERED_EXPR:
3934 case UNLT_EXPR:
3935 case UNLE_EXPR:
3936 case UNGT_EXPR:
3937 case UNGE_EXPR:
3938 case UNEQ_EXPR:
3939 case LTGT_EXPR:
3940 /* Comparisons are also binary, but the result type is not
3941 connected to the operand types. */
3942 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3943
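/* Schematically prod_1 = a_2 w* b_3, with a_2 and b_3 of equal
   precision and prod_1 an integer at least twice as wide ("w*" is
   how WIDEN_MULT_EXPR is rendered in GIMPLE dumps). */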
3944 case WIDEN_MULT_EXPR:
3945 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3946 return true;
3947 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3948 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3949
3950 case WIDEN_SUM_EXPR:
3951 {
3952 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3953 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3954 && ((!INTEGRAL_TYPE_P (rhs1_type)
3955 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3956 || (!INTEGRAL_TYPE_P (lhs_type)
3957 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3958 || !useless_type_conversion_p (lhs_type, rhs2_type)
3959 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3960 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3961 {
3962 error ("type mismatch in %qs", code_name);
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs1_type);
3965 debug_generic_expr (rhs2_type);
3966 return true;
3967 }
3968 return false;
3969 }
3970
3971 case VEC_WIDEN_MULT_HI_EXPR:
3972 case VEC_WIDEN_MULT_LO_EXPR:
3973 case VEC_WIDEN_MULT_EVEN_EXPR:
3974 case VEC_WIDEN_MULT_ODD_EXPR:
3975 {
3976 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3977 || TREE_CODE (lhs_type) != VECTOR_TYPE
3978 || !types_compatible_p (rhs1_type, rhs2_type)
3979 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3980 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3981 {
3982 error ("type mismatch in %qs", code_name);
3983 debug_generic_expr (lhs_type);
3984 debug_generic_expr (rhs1_type);
3985 debug_generic_expr (rhs2_type);
3986 return true;
3987 }
3988 return false;
3989 }
3990
3991 case VEC_PACK_TRUNC_EXPR:
3992 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
3993 vector boolean types. */
3994 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
3995 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3996 && types_compatible_p (rhs1_type, rhs2_type)
3997 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3998 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
3999 return false;
4000
4001 /* Fallthru. */
4002 case VEC_PACK_SAT_EXPR:
4003 case VEC_PACK_FIX_TRUNC_EXPR:
4004 {
4005 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4006 || TREE_CODE (lhs_type) != VECTOR_TYPE
4007 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4008 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4009 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4010 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4011 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4012 || !types_compatible_p (rhs1_type, rhs2_type)
4013 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4014 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4015 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4016 TYPE_VECTOR_SUBPARTS (lhs_type)))
4017 {
4018 error ("type mismatch in %qs", code_name);
4019 debug_generic_expr (lhs_type);
4020 debug_generic_expr (rhs1_type);
4021 debug_generic_expr (rhs2_type);
4022 return true;
4023 }
4024
4025 return false;
4026 }
4027
4028 case VEC_PACK_FLOAT_EXPR:
4029 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4030 || TREE_CODE (lhs_type) != VECTOR_TYPE
4031 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4032 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4033 || !types_compatible_p (rhs1_type, rhs2_type)
4034 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4035 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4036 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4037 TYPE_VECTOR_SUBPARTS (lhs_type)))
4038 {
4039 error ("type mismatch in %qs", code_name);
4040 debug_generic_expr (lhs_type);
4041 debug_generic_expr (rhs1_type);
4042 debug_generic_expr (rhs2_type);
4043 return true;
4044 }
4045
4046 return false;
4047
4048 case MULT_EXPR:
4049 case MULT_HIGHPART_EXPR:
4050 case TRUNC_DIV_EXPR:
4051 case CEIL_DIV_EXPR:
4052 case FLOOR_DIV_EXPR:
4053 case ROUND_DIV_EXPR:
4054 case TRUNC_MOD_EXPR:
4055 case CEIL_MOD_EXPR:
4056 case FLOOR_MOD_EXPR:
4057 case ROUND_MOD_EXPR:
4058 case RDIV_EXPR:
4059 case EXACT_DIV_EXPR:
4060 case MIN_EXPR:
4061 case MAX_EXPR:
4062 case BIT_IOR_EXPR:
4063 case BIT_XOR_EXPR:
4064 case BIT_AND_EXPR:
4065 /* Continue with generic binary expression handling. */
4066 break;
4067
4068 case VEC_SERIES_EXPR:
4069 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4070 {
4071 error ("type mismatch in %qs", code_name);
4072 debug_generic_expr (rhs1_type);
4073 debug_generic_expr (rhs2_type);
4074 return true;
4075 }
4076 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4077 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4078 {
4079 error ("vector type expected in %qs", code_name);
4080 debug_generic_expr (lhs_type);
4081 return true;
4082 }
4083 return false;
4084
4085 default:
4086 gcc_unreachable ();
4087 }
4088
4089 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4090 || !useless_type_conversion_p (lhs_type, rhs2_type))
4091 {
4092 error ("type mismatch in binary expression");
4093 debug_generic_stmt (lhs_type);
4094 debug_generic_stmt (rhs1_type);
4095 debug_generic_stmt (rhs2_type);
4096 return true;
4097 }
4098
4099 return false;
4100 }
4101
4102 /* Verify a gimple assignment statement STMT with a ternary rhs.
4103 Returns true if anything is wrong. */
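/* In schematic GIMPLE (made-up SSA names) a ternary rhs can be a
   conditional move x_1 = a_2 < b_3 ? c_4 : d_5 or a permutation
   v_1 = VEC_PERM_EXPR <u_2, w_3, sel_4>. */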
4104
4105 static bool
4106 verify_gimple_assign_ternary (gassign *stmt)
4107 {
4108 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4109 tree lhs = gimple_assign_lhs (stmt);
4110 tree lhs_type = TREE_TYPE (lhs);
4111 tree rhs1 = gimple_assign_rhs1 (stmt);
4112 tree rhs1_type = TREE_TYPE (rhs1);
4113 tree rhs2 = gimple_assign_rhs2 (stmt);
4114 tree rhs2_type = TREE_TYPE (rhs2);
4115 tree rhs3 = gimple_assign_rhs3 (stmt);
4116 tree rhs3_type = TREE_TYPE (rhs3);
4117
4118 if (!is_gimple_reg (lhs))
4119 {
4120 error ("non-register as LHS of ternary operation");
4121 return true;
4122 }
4123
4124 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4125 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4126 || !is_gimple_val (rhs2)
4127 || !is_gimple_val (rhs3))
4128 {
4129 error ("invalid operands in ternary operation");
4130 return true;
4131 }
4132
4133 const char* const code_name = get_tree_code_name (rhs_code);
4134
4135 /* First handle operations that involve different types. */
4136 switch (rhs_code)
4137 {
4138 case WIDEN_MULT_PLUS_EXPR:
4139 case WIDEN_MULT_MINUS_EXPR:
4140 if ((!INTEGRAL_TYPE_P (rhs1_type)
4141 && !FIXED_POINT_TYPE_P (rhs1_type))
4142 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4143 || !useless_type_conversion_p (lhs_type, rhs3_type)
4144 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4145 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4146 {
4147 error ("type mismatch in %qs", code_name);
4148 debug_generic_expr (lhs_type);
4149 debug_generic_expr (rhs1_type);
4150 debug_generic_expr (rhs2_type);
4151 debug_generic_expr (rhs3_type);
4152 return true;
4153 }
4154 break;
4155
4156 case VEC_COND_EXPR:
4157 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4158 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4159 TYPE_VECTOR_SUBPARTS (lhs_type)))
4160 {
4161 error ("the first argument of a %qs must be of a "
4162 "boolean vector type of the same number of elements "
4163 "as the result", code_name);
4164 debug_generic_expr (lhs_type);
4165 debug_generic_expr (rhs1_type);
4166 return true;
4167 }
4168 /* Fallthrough. */
4169 case COND_EXPR:
4170 if (!is_gimple_val (rhs1)
4171 && verify_gimple_comparison (TREE_TYPE (rhs1),
4172 TREE_OPERAND (rhs1, 0),
4173 TREE_OPERAND (rhs1, 1),
4174 TREE_CODE (rhs1)))
4175 return true;
4176 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4177 || !useless_type_conversion_p (lhs_type, rhs3_type))
4178 {
4179 error ("type mismatch in %qs", code_name);
4180 debug_generic_expr (lhs_type);
4181 debug_generic_expr (rhs2_type);
4182 debug_generic_expr (rhs3_type);
4183 return true;
4184 }
4185 break;
4186
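/* VEC_PERM_EXPR <a_2, b_3, sel_4> selects elements: element i of
   the result is taken from the concatenation of a_2 and b_3 at
   index sel_4[i] modulo twice the element count (schematic,
   made-up names). */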
4187 case VEC_PERM_EXPR:
4188 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4189 || !useless_type_conversion_p (lhs_type, rhs2_type))
4190 {
4191 error ("type mismatch in %qs", code_name);
4192 debug_generic_expr (lhs_type);
4193 debug_generic_expr (rhs1_type);
4194 debug_generic_expr (rhs2_type);
4195 debug_generic_expr (rhs3_type);
4196 return true;
4197 }
4198
4199 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4200 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4201 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4202 {
4203 error ("vector types expected in %qs", code_name);
4204 debug_generic_expr (lhs_type);
4205 debug_generic_expr (rhs1_type);
4206 debug_generic_expr (rhs2_type);
4207 debug_generic_expr (rhs3_type);
4208 return true;
4209 }
4210
4211 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4212 TYPE_VECTOR_SUBPARTS (rhs2_type))
4213 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4214 TYPE_VECTOR_SUBPARTS (rhs3_type))
4215 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4216 TYPE_VECTOR_SUBPARTS (lhs_type)))
4217 {
4218 error ("vectors with different element number found in %qs",
4219 code_name);
4220 debug_generic_expr (lhs_type);
4221 debug_generic_expr (rhs1_type);
4222 debug_generic_expr (rhs2_type);
4223 debug_generic_expr (rhs3_type);
4224 return true;
4225 }
4226
4227 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4228 || (TREE_CODE (rhs3) != VECTOR_CST
4229 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4230 (TREE_TYPE (rhs3_type)))
4231 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4232 (TREE_TYPE (rhs1_type))))))
4233 {
4234 error ("invalid mask type in %qs", code_name);
4235 debug_generic_expr (lhs_type);
4236 debug_generic_expr (rhs1_type);
4237 debug_generic_expr (rhs2_type);
4238 debug_generic_expr (rhs3_type);
4239 return true;
4240 }
4241
4242 return false;
4243
4244 case SAD_EXPR:
4245 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4246 || !useless_type_conversion_p (lhs_type, rhs3_type)
4247 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4248 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4249 {
4250 error ("type mismatch in %qs", code_name);
4251 debug_generic_expr (lhs_type);
4252 debug_generic_expr (rhs1_type);
4253 debug_generic_expr (rhs2_type);
4254 debug_generic_expr (rhs3_type);
4255 return true;
4256 }
4257
4258 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4259 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4260 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4261 {
4262 error ("vector types expected in %qs", code_name);
4263 debug_generic_expr (lhs_type);
4264 debug_generic_expr (rhs1_type);
4265 debug_generic_expr (rhs2_type);
4266 debug_generic_expr (rhs3_type);
4267 return true;
4268 }
4269
4270 return false;
4271
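/* BIT_INSERT_EXPR <a_2, b_3, pos> yields a copy of a_2 with the
   bits of b_3 replacing those at bit position pos; the width of
   the inserted field is the size of b_3's type (schematic,
   made-up names). */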
4272 case BIT_INSERT_EXPR:
4273 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4274 {
4275 error ("type mismatch in %qs", code_name);
4276 debug_generic_expr (lhs_type);
4277 debug_generic_expr (rhs1_type);
4278 return true;
4279 }
4280 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4281 && INTEGRAL_TYPE_P (rhs2_type))
4282 /* Vector element insert. */
4283 || (VECTOR_TYPE_P (rhs1_type)
4284 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4285 /* Aligned sub-vector insert. */
4286 || (VECTOR_TYPE_P (rhs1_type)
4287 && VECTOR_TYPE_P (rhs2_type)
4288 && types_compatible_p (TREE_TYPE (rhs1_type),
4289 TREE_TYPE (rhs2_type))
4290 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4291 TYPE_VECTOR_SUBPARTS (rhs2_type))
4292 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4293 {
4294 error ("not allowed type combination in %qs", code_name);
4295 debug_generic_expr (rhs1_type);
4296 debug_generic_expr (rhs2_type);
4297 return true;
4298 }
4299 if (! tree_fits_uhwi_p (rhs3)
4300 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4301 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4302 {
4303 error ("invalid position or size in %qs", code_name);
4304 return true;
4305 }
4306 if (INTEGRAL_TYPE_P (rhs1_type)
4307 && !type_has_mode_precision_p (rhs1_type))
4308 {
4309 error ("%qs into non-mode-precision operand", code_name);
4310 return true;
4311 }
4312 if (INTEGRAL_TYPE_P (rhs1_type))
4313 {
4314 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4315 if (bitpos >= TYPE_PRECISION (rhs1_type)
4316 || (bitpos + TYPE_PRECISION (rhs2_type)
4317 > TYPE_PRECISION (rhs1_type)))
4318 {
4319 error ("insertion out of range in %qs", code_name);
4320 return true;
4321 }
4322 }
4323 else if (VECTOR_TYPE_P (rhs1_type))
4324 {
4325 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4326 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4327 if (bitpos % bitsize != 0)
4328 {
4329 error ("%qs not at element boundary", code_name);
4330 return true;
4331 }
4332 }
4333 return false;
4334
4335 case DOT_PROD_EXPR:
4336 {
4337 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4338 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4339 && ((!INTEGRAL_TYPE_P (rhs1_type)
4340 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4341 || (!INTEGRAL_TYPE_P (lhs_type)
4342 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4343 || !types_compatible_p (rhs1_type, rhs2_type)
4344 || !useless_type_conversion_p (lhs_type, rhs3_type)
4345 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4346 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4347 {
4348 error ("type mismatch in %qs", code_name);
4349 debug_generic_expr (lhs_type);
4350 debug_generic_expr (rhs1_type);
4351 debug_generic_expr (rhs2_type);
4352 return true;
4353 }
4354 return false;
4355 }
4356
4357 case REALIGN_LOAD_EXPR:
4358 /* FIXME. */
4359 return false;
4360
4361 default:
4362 gcc_unreachable ();
4363 }
4364 return false;
4365 }
4366
4367 /* Verify a gimple assignment statement STMT with a single rhs.
4368 Returns true if anything is wrong. */
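/* A single rhs covers plain copies, loads, stores and constants,
   e.g. schematically a_1 = b_2, a_1 = x.f, x.f = a_1 or
   p_1 = &x. */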
4369
4370 static bool
4371 verify_gimple_assign_single (gassign *stmt)
4372 {
4373 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4374 tree lhs = gimple_assign_lhs (stmt);
4375 tree lhs_type = TREE_TYPE (lhs);
4376 tree rhs1 = gimple_assign_rhs1 (stmt);
4377 tree rhs1_type = TREE_TYPE (rhs1);
4378 bool res = false;
4379
4380 const char* const code_name = get_tree_code_name (rhs_code);
4381
4382 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4383 {
4384 error ("non-trivial conversion in %qs", code_name);
4385 debug_generic_expr (lhs_type);
4386 debug_generic_expr (rhs1_type);
4387 return true;
4388 }
4389
4390 if (gimple_clobber_p (stmt)
4391 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4392 {
4393 error ("%qs LHS in clobber statement",
4394 get_tree_code_name (TREE_CODE (lhs)));
4395 debug_generic_expr (lhs);
4396 return true;
4397 }
4398
4399 if (handled_component_p (lhs)
4400 || TREE_CODE (lhs) == MEM_REF
4401 || TREE_CODE (lhs) == TARGET_MEM_REF)
4402 res |= verify_types_in_gimple_reference (lhs, true);
4403
4404 /* Special codes we cannot handle via their class. */
4405 switch (rhs_code)
4406 {
4407 case ADDR_EXPR:
4408 {
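/* E.g. schematically p_1 = &a.b[i_2]; the operand of the
   ADDR_EXPR must be something whose address can be taken. */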
4409 tree op = TREE_OPERAND (rhs1, 0);
4410 if (!is_gimple_addressable (op))
4411 {
4412 error ("invalid operand in %qs", code_name);
4413 return true;
4414 }
4415
4416 /* Technically there is no longer a need for matching types, but
4417 gimple hygiene asks for this check. In LTO we can end up
4418 combining incompatible units and thus end up with addresses
4419 of globals that change their type to a common one. */
4420 if (!in_lto_p
4421 && !types_compatible_p (TREE_TYPE (op),
4422 TREE_TYPE (TREE_TYPE (rhs1)))
4423 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4424 TREE_TYPE (op)))
4425 {
4426 error ("type mismatch in %qs", code_name);
4427 debug_generic_stmt (TREE_TYPE (rhs1));
4428 debug_generic_stmt (TREE_TYPE (op));
4429 return true;
4430 }
4431
4432 return (verify_address (rhs1, true)
4433 || verify_types_in_gimple_reference (op, true));
4434 }
4435
4436 /* tcc_reference */
4437 case INDIRECT_REF:
4438 error ("%qs in gimple IL", code_name);
4439 return true;
4440
4441 case COMPONENT_REF:
4442 case BIT_FIELD_REF:
4443 case ARRAY_REF:
4444 case ARRAY_RANGE_REF:
4445 case VIEW_CONVERT_EXPR:
4446 case REALPART_EXPR:
4447 case IMAGPART_EXPR:
4448 case TARGET_MEM_REF:
4449 case MEM_REF:
4450 if (!is_gimple_reg (lhs)
4451 && is_gimple_reg_type (TREE_TYPE (lhs)))
4452 {
4453 error ("invalid RHS for gimple memory store: %qs", code_name);
4454 debug_generic_stmt (lhs);
4455 debug_generic_stmt (rhs1);
4456 return true;
4457 }
4458 return res || verify_types_in_gimple_reference (rhs1, false);
4459
4460 /* tcc_constant */
4461 case SSA_NAME:
4462 case INTEGER_CST:
4463 case REAL_CST:
4464 case FIXED_CST:
4465 case COMPLEX_CST:
4466 case VECTOR_CST:
4467 case STRING_CST:
4468 return res;
4469
4470 /* tcc_declaration */
4471 case CONST_DECL:
4472 return res;
4473 case VAR_DECL:
4474 case PARM_DECL:
4475 if (!is_gimple_reg (lhs)
4476 && !is_gimple_reg (rhs1)
4477 && is_gimple_reg_type (TREE_TYPE (lhs)))
4478 {
4479 error ("invalid RHS for gimple memory store: %qs", code_name);
4480 debug_generic_stmt (lhs);
4481 debug_generic_stmt (rhs1);
4482 return true;
4483 }
4484 return res;
4485
4486 case CONSTRUCTOR:
4487 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4488 {
4489 unsigned int i;
4490 tree elt_i, elt_v, elt_t = NULL_TREE;
4491
4492 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4493 return res;
4494 /* For vector CONSTRUCTORs we require that either it is an empty
4495 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4496 (then the element count must be correct to cover the whole
4497 outer vector and the index must be NULL on all elements), or it
4498 is a CONSTRUCTOR of scalar elements, where as an exception we
4499 allow a smaller number of elements (assuming zero filling) and
4500 consecutive indexes as compared to NULL indexes (such
4501 CONSTRUCTORs can appear in the IL from FEs). */
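/* Schematically, a V4SI vector built from scalars looks like
   v_1 = {i_2, i_3, i_4, i_5}, one built from two V2SI halves
   like v_1 = {u_2, u_3}. */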
4502 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4503 {
4504 if (elt_t == NULL_TREE)
4505 {
4506 elt_t = TREE_TYPE (elt_v);
4507 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4508 {
4509 tree elt_t = TREE_TYPE (elt_v);
4510 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4511 TREE_TYPE (elt_t)))
4512 {
4513 error ("incorrect type of vector %qs elements",
4514 code_name);
4515 debug_generic_stmt (rhs1);
4516 return true;
4517 }
4518 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4519 * TYPE_VECTOR_SUBPARTS (elt_t),
4520 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4521 {
4522 error ("incorrect number of vector %qs elements",
4523 code_name);
4524 debug_generic_stmt (rhs1);
4525 return true;
4526 }
4527 }
4528 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4529 elt_t))
4530 {
4531 error ("incorrect type of vector %qs elements",
4532 code_name);
4533 debug_generic_stmt (rhs1);
4534 return true;
4535 }
4536 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4537 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4538 {
4539 error ("incorrect number of vector %qs elements",
4540 code_name);
4541 debug_generic_stmt (rhs1);
4542 return true;
4543 }
4544 }
4545 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4546 {
4547 error ("incorrect type of vector CONSTRUCTOR elements");
4548 debug_generic_stmt (rhs1);
4549 return true;
4550 }
4551 if (elt_i != NULL_TREE
4552 && (TREE_CODE (elt_t) == VECTOR_TYPE
4553 || TREE_CODE (elt_i) != INTEGER_CST
4554 || compare_tree_int (elt_i, i) != 0))
4555 {
4556 error ("vector %qs with non-NULL element index",
4557 code_name);
4558 debug_generic_stmt (rhs1);
4559 return true;
4560 }
4561 if (!is_gimple_val (elt_v))
4562 {
4563 error ("vector %qs element is not a GIMPLE value",
4564 code_name);
4565 debug_generic_stmt (rhs1);
4566 return true;
4567 }
4568 }
4569 }
4570 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4571 {
4572 error ("non-vector %qs with elements", code_name);
4573 debug_generic_stmt (rhs1);
4574 return true;
4575 }
4576 return res;
4577
4578 case ASSERT_EXPR:
4579 /* FIXME. */
4580 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4581 if (rhs1 == boolean_false_node)
4582 {
4583 error ("%qs with an always-false condition", code_name);
4584 debug_generic_stmt (rhs1);
4585 return true;
4586 }
4587 break;
4588
4589 case OBJ_TYPE_REF:
4590 case WITH_SIZE_EXPR:
4591 /* FIXME. */
4592 return res;
4593
4594 default:;
4595 }
4596
4597 return res;
4598 }
4599
4600 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4601 is a problem, otherwise false. */
4602
4603 static bool
4604 verify_gimple_assign (gassign *stmt)
4605 {
4606 switch (gimple_assign_rhs_class (stmt))
4607 {
4608 case GIMPLE_SINGLE_RHS:
4609 return verify_gimple_assign_single (stmt);
4610
4611 case GIMPLE_UNARY_RHS:
4612 return verify_gimple_assign_unary (stmt);
4613
4614 case GIMPLE_BINARY_RHS:
4615 return verify_gimple_assign_binary (stmt);
4616
4617 case GIMPLE_TERNARY_RHS:
4618 return verify_gimple_assign_ternary (stmt);
4619
4620 default:
4621 gcc_unreachable ();
4622 }
4623 }
4624
4625 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4626 is a problem, otherwise false. */
4627
4628 static bool
4629 verify_gimple_return (greturn *stmt)
4630 {
4631 tree op = gimple_return_retval (stmt);
4632 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4633
4634 /* We cannot require a return value to be present, as we do not fix up
4635 missing return values from the original source. */
4636 if (op == NULL)
4637 return false;
4638
4639 if (!is_gimple_val (op)
4640 && TREE_CODE (op) != RESULT_DECL)
4641 {
4642 error ("invalid operand in return statement");
4643 debug_generic_stmt (op);
4644 return true;
4645 }
4646
4647 if ((TREE_CODE (op) == RESULT_DECL
4648 && DECL_BY_REFERENCE (op))
4649 || (TREE_CODE (op) == SSA_NAME
4650 && SSA_NAME_VAR (op)
4651 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4652 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4653 op = TREE_TYPE (op);
4654
4655 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4656 {
4657 error ("invalid conversion in return statement");
4658 debug_generic_stmt (restype);
4659 debug_generic_stmt (TREE_TYPE (op));
4660 return true;
4661 }
4662
4663 return false;
4664 }
4665
4666
4667 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4668 is a problem, otherwise false. */
4669
4670 static bool
4671 verify_gimple_goto (ggoto *stmt)
4672 {
4673 tree dest = gimple_goto_dest (stmt);
4674
4675 /* ??? We have two canonical forms of direct goto destinations, a
4676 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4677 if (TREE_CODE (dest) != LABEL_DECL
4678 && (!is_gimple_val (dest)
4679 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4680 {
4681 error ("goto destination is neither a label nor a pointer");
4682 return true;
4683 }
4684
4685 return false;
4686 }
4687
4688 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4689 is a problem, otherwise false. */
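/* A well-formed switch has the default label first, followed by
   unique, sorted case labels of a single type; roughly
   switch (i_1) <default: L0, case 1: L1, case 3 ... 5: L2> in
   dump syntax. */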
4690
4691 static bool
4692 verify_gimple_switch (gswitch *stmt)
4693 {
4694 unsigned int i, n;
4695 tree elt, prev_upper_bound = NULL_TREE;
4696 tree index_type, elt_type = NULL_TREE;
4697
4698 if (!is_gimple_val (gimple_switch_index (stmt)))
4699 {
4700 error ("invalid operand to switch statement");
4701 debug_generic_stmt (gimple_switch_index (stmt));
4702 return true;
4703 }
4704
4705 index_type = TREE_TYPE (gimple_switch_index (stmt));
4706 if (! INTEGRAL_TYPE_P (index_type))
4707 {
4708 error ("non-integral type switch statement");
4709 debug_generic_expr (index_type);
4710 return true;
4711 }
4712
4713 elt = gimple_switch_label (stmt, 0);
4714 if (CASE_LOW (elt) != NULL_TREE
4715 || CASE_HIGH (elt) != NULL_TREE
4716 || CASE_CHAIN (elt) != NULL_TREE)
4717 {
4718 error ("invalid default case label in switch statement");
4719 debug_generic_expr (elt);
4720 return true;
4721 }
4722
4723 n = gimple_switch_num_labels (stmt);
4724 for (i = 1; i < n; i++)
4725 {
4726 elt = gimple_switch_label (stmt, i);
4727
4728 if (CASE_CHAIN (elt))
4729 {
4730 error ("invalid %<CASE_CHAIN%>");
4731 debug_generic_expr (elt);
4732 return true;
4733 }
4734 if (! CASE_LOW (elt))
4735 {
4736 error ("invalid case label in switch statement");
4737 debug_generic_expr (elt);
4738 return true;
4739 }
4740 if (CASE_HIGH (elt)
4741 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4742 {
4743 error ("invalid case range in switch statement");
4744 debug_generic_expr (elt);
4745 return true;
4746 }
4747
4748 if (elt_type)
4749 {
4750 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4751 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4752 {
4753 error ("type mismatch for case label in switch statement");
4754 debug_generic_expr (elt);
4755 return true;
4756 }
4757 }
4758 else
4759 {
4760 elt_type = TREE_TYPE (CASE_LOW (elt));
4761 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4762 {
4763 error ("type precision mismatch in switch statement");
4764 return true;
4765 }
4766 }
4767
4768 if (prev_upper_bound)
4769 {
4770 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4771 {
4772 error ("case labels not sorted in switch statement");
4773 return true;
4774 }
4775 }
4776
4777 prev_upper_bound = CASE_HIGH (elt);
4778 if (! prev_upper_bound)
4779 prev_upper_bound = CASE_LOW (elt);
4780 }
4781
4782 return false;
4783 }
4784
4785 /* Verify a gimple debug statement STMT.
4786 Returns true if anything is wrong. */
4787
4788 static bool
4789 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4790 {
4791 /* There isn't much that could be wrong in a gimple debug stmt. A
4792 gimple debug bind stmt, for example, maps a tree (usually a
4793 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4794 member of an aggregate type) to another tree that can be an
4795 arbitrary expression. These stmts expand into debug insns, and
4796 are converted to debug notes by var-tracking.c. */
4797 return false;
4798 }
4799
4800 /* Verify a gimple label statement STMT.
4801 Returns true if anything is wrong. */
4802
4803 static bool
4804 verify_gimple_label (glabel *stmt)
4805 {
4806 tree decl = gimple_label_label (stmt);
4807 int uid;
4808 bool err = false;
4809
4810 if (TREE_CODE (decl) != LABEL_DECL)
4811 return true;
4812 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4813 && DECL_CONTEXT (decl) != current_function_decl)
4814 {
4815 error ("label context is not the current function declaration");
4816 err |= true;
4817 }
4818
4819 uid = LABEL_DECL_UID (decl);
4820 if (cfun->cfg
4821 && (uid == -1
4822 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4823 {
4824 error ("incorrect entry in %<label_to_block_map%>");
4825 err |= true;
4826 }
4827
4828 uid = EH_LANDING_PAD_NR (decl);
4829 if (uid)
4830 {
4831 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4832 if (decl != lp->post_landing_pad)
4833 {
4834 error ("incorrect setting of landing pad number");
4835 err |= true;
4836 }
4837 }
4838
4839 return err;
4840 }
4841
4842 /* Verify a gimple cond statement STMT.
4843 Returns true if anything is wrong. */
4844
4845 static bool
4846 verify_gimple_cond (gcond *stmt)
4847 {
4848 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4849 {
4850 error ("invalid comparison code in gimple cond");
4851 return true;
4852 }
4853 if (!(!gimple_cond_true_label (stmt)
4854 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4855 || !(!gimple_cond_false_label (stmt)
4856 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4857 {
4858 error ("invalid labels in gimple cond");
4859 return true;
4860 }
4861
4862 return verify_gimple_comparison (boolean_type_node,
4863 gimple_cond_lhs (stmt),
4864 gimple_cond_rhs (stmt),
4865 gimple_cond_code (stmt));
4866 }
4867
4868 /* Verify the GIMPLE statement STMT. Returns true if there is an
4869 error, otherwise false. */
4870
4871 static bool
4872 verify_gimple_stmt (gimple *stmt)
4873 {
4874 switch (gimple_code (stmt))
4875 {
4876 case GIMPLE_ASSIGN:
4877 return verify_gimple_assign (as_a <gassign *> (stmt));
4878
4879 case GIMPLE_LABEL:
4880 return verify_gimple_label (as_a <glabel *> (stmt));
4881
4882 case GIMPLE_CALL:
4883 return verify_gimple_call (as_a <gcall *> (stmt));
4884
4885 case GIMPLE_COND:
4886 return verify_gimple_cond (as_a <gcond *> (stmt));
4887
4888 case GIMPLE_GOTO:
4889 return verify_gimple_goto (as_a <ggoto *> (stmt));
4890
4891 case GIMPLE_SWITCH:
4892 return verify_gimple_switch (as_a <gswitch *> (stmt));
4893
4894 case GIMPLE_RETURN:
4895 return verify_gimple_return (as_a <greturn *> (stmt));
4896
4897 case GIMPLE_ASM:
4898 return false;
4899
4900 case GIMPLE_TRANSACTION:
4901 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4902
4903 /* Tuples that do not have tree operands. */
4904 case GIMPLE_NOP:
4905 case GIMPLE_PREDICT:
4906 case GIMPLE_RESX:
4907 case GIMPLE_EH_DISPATCH:
4908 case GIMPLE_EH_MUST_NOT_THROW:
4909 return false;
4910
4911 CASE_GIMPLE_OMP:
4912 /* OpenMP directives are validated by the FE and never operated
4913 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4914 non-gimple expressions when the main index variable has had
4915 its address taken. This does not affect the loop itself
4916 because the header of a GIMPLE_OMP_FOR is merely used to determine
4917 how to set up the parallel iteration. */
4918 return false;
4919
4920 case GIMPLE_DEBUG:
4921 return verify_gimple_debug (stmt);
4922
4923 default:
4924 gcc_unreachable ();
4925 }
4926 }
4927
4928 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4929 and false otherwise. */
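/* E.g. x_3 = PHI <x_1(2), x_2(4)> merges the values of x arriving
   from basic blocks 2 and 4 (schematic SSA names). */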
4930
4931 static bool
4932 verify_gimple_phi (gphi *phi)
4933 {
4934 bool err = false;
4935 unsigned i;
4936 tree phi_result = gimple_phi_result (phi);
4937 bool virtual_p;
4938
4939 if (!phi_result)
4940 {
4941 error ("invalid %<PHI%> result");
4942 return true;
4943 }
4944
4945 virtual_p = virtual_operand_p (phi_result);
4946 if (TREE_CODE (phi_result) != SSA_NAME
4947 || (virtual_p
4948 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4949 {
4950 error ("invalid %<PHI%> result");
4951 err = true;
4952 }
4953
4954 for (i = 0; i < gimple_phi_num_args (phi); i++)
4955 {
4956 tree t = gimple_phi_arg_def (phi, i);
4957
4958 if (!t)
4959 {
4960 error ("missing %<PHI%> def");
4961 err |= true;
4962 continue;
4963 }
4964 /* Addressable variables do have SSA_NAMEs but they
4965 are not considered gimple values. */
4966 else if ((TREE_CODE (t) == SSA_NAME
4967 && virtual_p != virtual_operand_p (t))
4968 || (virtual_p
4969 && (TREE_CODE (t) != SSA_NAME
4970 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4971 || (!virtual_p
4972 && !is_gimple_val (t)))
4973 {
4974 error ("invalid %<PHI%> argument");
4975 debug_generic_expr (t);
4976 err |= true;
4977 }
4978 #ifdef ENABLE_TYPES_CHECKING
4979 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4980 {
4981 error ("incompatible types in %<PHI%> argument %u", i);
4982 debug_generic_stmt (TREE_TYPE (phi_result));
4983 debug_generic_stmt (TREE_TYPE (t));
4984 err |= true;
4985 }
4986 #endif
4987 }
4988
4989 return err;
4990 }
4991
4992 /* Verify the GIMPLE statements inside the sequence STMTS. */
4993
4994 static bool
4995 verify_gimple_in_seq_2 (gimple_seq stmts)
4996 {
4997 gimple_stmt_iterator ittr;
4998 bool err = false;
4999
5000 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5001 {
5002 gimple *stmt = gsi_stmt (ittr);
5003
5004 switch (gimple_code (stmt))
5005 {
5006 case GIMPLE_BIND:
5007 err |= verify_gimple_in_seq_2 (
5008 gimple_bind_body (as_a <gbind *> (stmt)));
5009 break;
5010
5011 case GIMPLE_TRY:
5012 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5013 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5014 break;
5015
5016 case GIMPLE_EH_FILTER:
5017 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5018 break;
5019
5020 case GIMPLE_EH_ELSE:
5021 {
5022 geh_else *eh_else = as_a <geh_else *> (stmt);
5023 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5024 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5025 }
5026 break;
5027
5028 case GIMPLE_CATCH:
5029 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5030 as_a <gcatch *> (stmt)));
5031 break;
5032
5033 case GIMPLE_TRANSACTION:
5034 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5035 break;
5036
5037 default:
5038 {
5039 bool err2 = verify_gimple_stmt (stmt);
5040 if (err2)
5041 debug_gimple_stmt (stmt);
5042 err |= err2;
5043 }
5044 }
5045 }
5046
5047 return err;
5048 }
5049
5050 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5051 is a problem, otherwise false. */
5052
5053 static bool
5054 verify_gimple_transaction (gtransaction *stmt)
5055 {
5056 tree lab;
5057
5058 lab = gimple_transaction_label_norm (stmt);
5059 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5060 return true;
5061 lab = gimple_transaction_label_uninst (stmt);
5062 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5063 return true;
5064 lab = gimple_transaction_label_over (stmt);
5065 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5066 return true;
5067
5068 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5069 }
5070
5071
5072 /* Verify the GIMPLE statements inside the statement list STMTS. */
5073
5074 DEBUG_FUNCTION void
5075 verify_gimple_in_seq (gimple_seq stmts)
5076 {
5077 timevar_push (TV_TREE_STMT_VERIFY);
5078 if (verify_gimple_in_seq_2 (stmts))
5079 internal_error ("%<verify_gimple%> failed");
5080 timevar_pop (TV_TREE_STMT_VERIFY);
5081 }
5082
5083 /* Return true when tree node T can be shared. */
5084
5085 static bool
5086 tree_node_can_be_shared (tree t)
5087 {
5088 if (IS_TYPE_OR_DECL_P (t)
5089 || TREE_CODE (t) == SSA_NAME
5090 || TREE_CODE (t) == IDENTIFIER_NODE
5091 || TREE_CODE (t) == CASE_LABEL_EXPR
5092 || is_gimple_min_invariant (t))
5093 return true;
5094
5095 if (t == error_mark_node)
5096 return true;
5097
5098 return false;
5099 }
5100
5101 /* Called via walk_tree. Verify tree sharing. */
5102
5103 static tree
5104 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5105 {
5106 hash_set<void *> *visited = (hash_set<void *> *) data;
5107
5108 if (tree_node_can_be_shared (*tp))
5109 {
5110 *walk_subtrees = false;
5111 return NULL;
5112 }
5113
5114 if (visited->add (*tp))
5115 return *tp;
5116
5117 return NULL;
5118 }
5119
5120 /* Called via walk_gimple_stmt. Verify tree sharing. */
5121
5122 static tree
5123 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5124 {
5125 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5126 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5127 }
5128
5129 static bool eh_error_found;
5130 bool
5131 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5132 hash_set<gimple *> *visited)
5133 {
5134 if (!visited->contains (stmt))
5135 {
5136 error ("dead statement in EH table");
5137 debug_gimple_stmt (stmt);
5138 eh_error_found = true;
5139 }
5140 return true;
5141 }
5142
5143 /* Verify that the block of location LOC is in BLOCKS. */
5144
5145 static bool
5146 verify_location (hash_set<tree> *blocks, location_t loc)
5147 {
5148 tree block = LOCATION_BLOCK (loc);
5149 if (block != NULL_TREE
5150 && !blocks->contains (block))
5151 {
5152 error ("location references block not in block tree");
5153 return true;
5154 }
5155 if (block != NULL_TREE)
5156 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5157 return false;
5158 }
5159
5160 /* Called via walk_tree. Verify that expressions have no blocks. */
5161
5162 static tree
5163 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5164 {
5165 if (!EXPR_P (*tp))
5166 {
5167 *walk_subtrees = false;
5168 return NULL;
5169 }
5170
5171 location_t loc = EXPR_LOCATION (*tp);
5172 if (LOCATION_BLOCK (loc) != NULL)
5173 return *tp;
5174
5175 return NULL;
5176 }
5177
5178 /* Called via walk_tree. Verify locations of expressions. */
5179
5180 static tree
5181 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5182 {
5183 hash_set<tree> *blocks = (hash_set<tree> *) data;
5184 tree t = *tp;
5185
5186 /* ??? This doesn't really belong here but there's no good place to
5187 stick this remainder of old verify_expr. */
5188 /* ??? This barfs on debug stmts which contain binds to vars with
5189 different function context. */
5190 #if 0
5191 if (VAR_P (t)
5192 || TREE_CODE (t) == PARM_DECL
5193 || TREE_CODE (t) == RESULT_DECL)
5194 {
5195 tree context = decl_function_context (t);
5196 if (context != cfun->decl
5197 && !SCOPE_FILE_SCOPE_P (context)
5198 && !TREE_STATIC (t)
5199 && !DECL_EXTERNAL (t))
5200 {
5201 error ("local declaration from a different function");
5202 return t;
5203 }
5204 }
5205 #endif
5206
5207 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5208 {
5209 tree x = DECL_DEBUG_EXPR (t);
5210 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5211 if (addr)
5212 return addr;
5213 }
5214 if ((VAR_P (t)
5215 || TREE_CODE (t) == PARM_DECL
5216 || TREE_CODE (t) == RESULT_DECL)
5217 && DECL_HAS_VALUE_EXPR_P (t))
5218 {
5219 tree x = DECL_VALUE_EXPR (t);
5220 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5221 if (addr)
5222 return addr;
5223 }
5224
5225 if (!EXPR_P (t))
5226 {
5227 *walk_subtrees = false;
5228 return NULL;
5229 }
5230
5231 location_t loc = EXPR_LOCATION (t);
5232 if (verify_location (blocks, loc))
5233 return t;
5234
5235 return NULL;
5236 }
5237
5238 /* Called via walk_gimple_op. Verify locations of expressions. */
5239
5240 static tree
5241 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5242 {
5243 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5244 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5245 }
5246
5247 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5248
5249 static void
5250 collect_subblocks (hash_set<tree> *blocks, tree block)
5251 {
5252 tree t;
5253 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5254 {
5255 blocks->add (t);
5256 collect_subblocks (blocks, t);
5257 }
5258 }
5259
5260 /* Disable warnings about missing quoting in GCC diagnostics for
5261 the verification errors. Their format strings don't follow GCC
5262 diagnostic conventions, and verification failure ends in an ICE anyway. */
5263 #if __GNUC__ >= 10
5264 # pragma GCC diagnostic push
5265 # pragma GCC diagnostic ignored "-Wformat-diag"
5266 #endif
5267
5268 /* Verify the GIMPLE statements in the CFG of FN. */
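/* Besides the per-statement checks this validates tree node
   sharing, location/block consistency, and that the EH table only
   refers to statements still present in the IL. */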
5269
5270 DEBUG_FUNCTION void
5271 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5272 {
5273 basic_block bb;
5274 bool err = false;
5275
5276 timevar_push (TV_TREE_STMT_VERIFY);
5277 hash_set<void *> visited;
5278 hash_set<gimple *> visited_throwing_stmts;
5279
5280 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5281 hash_set<tree> blocks;
5282 if (DECL_INITIAL (fn->decl))
5283 {
5284 blocks.add (DECL_INITIAL (fn->decl));
5285 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5286 }
5287
5288 FOR_EACH_BB_FN (bb, fn)
5289 {
5290 gimple_stmt_iterator gsi;
5291 edge_iterator ei;
5292 edge e;
5293
5294 for (gphi_iterator gpi = gsi_start_phis (bb);
5295 !gsi_end_p (gpi);
5296 gsi_next (&gpi))
5297 {
5298 gphi *phi = gpi.phi ();
5299 bool err2 = false;
5300 unsigned i;
5301
5302 if (gimple_bb (phi) != bb)
5303 {
5304 error ("gimple_bb (phi) is set to a wrong basic block");
5305 err2 = true;
5306 }
5307
5308 err2 |= verify_gimple_phi (phi);
5309
5310 /* Only PHI arguments have locations. */
5311 if (gimple_location (phi) != UNKNOWN_LOCATION)
5312 {
5313 error ("PHI node with location");
5314 err2 = true;
5315 }
5316
5317 for (i = 0; i < gimple_phi_num_args (phi); i++)
5318 {
5319 tree arg = gimple_phi_arg_def (phi, i);
5320 tree addr = walk_tree (&arg, verify_node_sharing_1,
5321 &visited, NULL);
5322 if (addr)
5323 {
5324 error ("incorrect sharing of tree nodes");
5325 debug_generic_expr (addr);
5326 err2 |= true;
5327 }
5328 location_t loc = gimple_phi_arg_location (phi, i);
5329 if (virtual_operand_p (gimple_phi_result (phi))
5330 && loc != UNKNOWN_LOCATION)
5331 {
5332 error ("virtual PHI with argument locations");
5333 err2 = true;
5334 }
5335 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5336 if (addr)
5337 {
5338 debug_generic_expr (addr);
5339 err2 = true;
5340 }
5341 err2 |= verify_location (&blocks, loc);
5342 }
5343
5344 if (err2)
5345 debug_gimple_stmt (phi);
5346 err |= err2;
5347 }
5348
5349 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5350 {
5351 gimple *stmt = gsi_stmt (gsi);
5352 bool err2 = false;
5353 struct walk_stmt_info wi;
5354 tree addr;
5355 int lp_nr;
5356
5357 if (gimple_bb (stmt) != bb)
5358 {
5359 error ("gimple_bb (stmt) is set to a wrong basic block");
5360 err2 = true;
5361 }
5362
5363 err2 |= verify_gimple_stmt (stmt);
5364 err2 |= verify_location (&blocks, gimple_location (stmt));
5365
5366 memset (&wi, 0, sizeof (wi));
5367 wi.info = (void *) &visited;
5368 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5369 if (addr)
5370 {
5371 error ("incorrect sharing of tree nodes");
5372 debug_generic_expr (addr);
5373 err2 |= true;
5374 }
5375
5376 memset (&wi, 0, sizeof (wi));
5377 wi.info = (void *) &blocks;
5378 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5379 if (addr)
5380 {
5381 debug_generic_expr (addr);
5382 err2 |= true;
5383 }
5384
5385 /* If the statement is marked as part of an EH region, then it is
5386 expected that the statement could throw. Verify that when an
5387 optimization simplifies a statement so that it provably
5388 cannot throw, the other data structures are updated
5389 to match. */
5390 lp_nr = lookup_stmt_eh_lp (stmt);
5391 if (lp_nr != 0)
5392 visited_throwing_stmts.add (stmt);
5393 if (lp_nr > 0)
5394 {
5395 if (!stmt_could_throw_p (cfun, stmt))
5396 {
5397 if (verify_nothrow)
5398 {
5399 error ("statement marked for throw, but doesn%'t");
5400 err2 |= true;
5401 }
5402 }
5403 else if (!gsi_one_before_end_p (gsi))
5404 {
5405 error ("statement marked for throw in middle of block");
5406 err2 |= true;
5407 }
5408 }
5409
5410 if (err2)
5411 debug_gimple_stmt (stmt);
5412 err |= err2;
5413 }
5414
5415 FOR_EACH_EDGE (e, ei, bb->succs)
5416 if (e->goto_locus != UNKNOWN_LOCATION)
5417 err |= verify_location (&blocks, e->goto_locus);
5418 }
5419
5420 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5421 eh_error_found = false;
5422 if (eh_table)
5423 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5424 (&visited_throwing_stmts);
5425
5426 if (err || eh_error_found)
5427 internal_error ("verify_gimple failed");
5428
5429 verify_histograms ();
5430 timevar_pop (TV_TREE_STMT_VERIFY);
5431 }
5432
5433
5434 /* Verify that the flow information is OK. */
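/* Checked per basic block: labels come first and map back to the
   right block, no control-flow statement appears in the middle of
   a block, and the outgoing edge flags match the kind of the last
   statement. */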
5435
5436 static int
5437 gimple_verify_flow_info (void)
5438 {
5439 int err = 0;
5440 basic_block bb;
5441 gimple_stmt_iterator gsi;
5442 gimple *stmt;
5443 edge e;
5444 edge_iterator ei;
5445
5446 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5447 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5448 {
5449 error ("ENTRY_BLOCK has IL associated with it");
5450 err = 1;
5451 }
5452
5453 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5454 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5455 {
5456 error ("EXIT_BLOCK has IL associated with it");
5457 err = 1;
5458 }
5459
5460 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5461 if (e->flags & EDGE_FALLTHRU)
5462 {
5463 error ("fallthru to exit from bb %d", e->src->index);
5464 err = 1;
5465 }
5466
5467 FOR_EACH_BB_FN (bb, cfun)
5468 {
5469 bool found_ctrl_stmt = false;
5470
5471 stmt = NULL;
5472
5473 /* Skip labels at the start of the basic block. */
5474 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5475 {
5476 tree label;
5477 gimple *prev_stmt = stmt;
5478
5479 stmt = gsi_stmt (gsi);
5480
5481 if (gimple_code (stmt) != GIMPLE_LABEL)
5482 break;
5483
5484 label = gimple_label_label (as_a <glabel *> (stmt));
5485 if (prev_stmt && DECL_NONLOCAL (label))
5486 {
5487 error ("nonlocal label ");
5488 print_generic_expr (stderr, label);
5489 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5490 bb->index);
5491 err = 1;
5492 }
5493
5494 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5495 {
5496 error ("EH landing pad label ");
5497 print_generic_expr (stderr, label);
5498 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5499 bb->index);
5500 err = 1;
5501 }
5502
5503 if (label_to_block (cfun, label) != bb)
5504 {
5505 error ("label ");
5506 print_generic_expr (stderr, label);
5507 fprintf (stderr, " to block does not match in bb %d",
5508 bb->index);
5509 err = 1;
5510 }
5511
5512 if (decl_function_context (label) != current_function_decl)
5513 {
5514 error ("label ");
5515 print_generic_expr (stderr, label);
5516 fprintf (stderr, " has incorrect context in bb %d",
5517 bb->index);
5518 err = 1;
5519 }
5520 }
5521
5522 /* Verify that the body of basic block BB is free of control flow. */
5523 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5524 {
5525 gimple *stmt = gsi_stmt (gsi);
5526
5527 if (found_ctrl_stmt)
5528 {
5529 error ("control flow in the middle of basic block %d",
5530 bb->index);
5531 err = 1;
5532 }
5533
5534 if (stmt_ends_bb_p (stmt))
5535 found_ctrl_stmt = true;
5536
5537 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5538 {
5539 error ("label ");
5540 print_generic_expr (stderr, gimple_label_label (label_stmt));
5541 fprintf (stderr, " in the middle of basic block %d", bb->index);
5542 err = 1;
5543 }
5544 }
5545
5546 gsi = gsi_last_nondebug_bb (bb);
5547 if (gsi_end_p (gsi))
5548 continue;
5549
5550 stmt = gsi_stmt (gsi);
5551
5552 if (gimple_code (stmt) == GIMPLE_LABEL)
5553 continue;
5554
5555 err |= verify_eh_edges (stmt);
5556
5557 if (is_ctrl_stmt (stmt))
5558 {
5559 FOR_EACH_EDGE (e, ei, bb->succs)
5560 if (e->flags & EDGE_FALLTHRU)
5561 {
5562 error ("fallthru edge after a control statement in bb %d",
5563 bb->index);
5564 err = 1;
5565 }
5566 }
5567
5568 if (gimple_code (stmt) != GIMPLE_COND)
5569 {
5570 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5571 after anything other than a GIMPLE_COND. */
5572 FOR_EACH_EDGE (e, ei, bb->succs)
5573 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5574 {
5575 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5576 bb->index);
5577 err = 1;
5578 }
5579 }
5580
5581 switch (gimple_code (stmt))
5582 {
5583 case GIMPLE_COND:
5584 {
5585 edge true_edge;
5586 edge false_edge;
5587
5588 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5589
5590 if (!true_edge
5591 || !false_edge
5592 || !(true_edge->flags & EDGE_TRUE_VALUE)
5593 || !(false_edge->flags & EDGE_FALSE_VALUE)
5594 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5595 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5596 || EDGE_COUNT (bb->succs) >= 3)
5597 {
5598 error ("wrong outgoing edge flags at end of bb %d",
5599 bb->index);
5600 err = 1;
5601 }
5602 }
5603 break;
5604
5605 case GIMPLE_GOTO:
5606 if (simple_goto_p (stmt))
5607 {
5608 error ("explicit goto at end of bb %d", bb->index);
5609 err = 1;
5610 }
5611 else
5612 {
5613 /* FIXME. We should double check that the labels in the
5614 destination blocks have their address taken. */
5615 FOR_EACH_EDGE (e, ei, bb->succs)
5616 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5617 | EDGE_FALSE_VALUE))
5618 || !(e->flags & EDGE_ABNORMAL))
5619 {
5620 error ("wrong outgoing edge flags at end of bb %d",
5621 bb->index);
5622 err = 1;
5623 }
5624 }
5625 break;
5626
5627 case GIMPLE_CALL:
5628 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5629 break;
5630 /* fallthru */
5631 case GIMPLE_RETURN:
5632 if (!single_succ_p (bb)
5633 || (single_succ_edge (bb)->flags
5634 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5635 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5636 {
5637 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5638 err = 1;
5639 }
5640 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5641 {
5642 error ("return edge does not point to exit in bb %d",
5643 bb->index);
5644 err = 1;
5645 }
5646 break;
5647
5648 case GIMPLE_SWITCH:
5649 {
5650 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5651 tree prev;
5652 edge e;
5653 size_t i, n;
5654
5655 n = gimple_switch_num_labels (switch_stmt);
5656
5657 /* Mark all the destination basic blocks. */
5658 for (i = 0; i < n; ++i)
5659 {
5660 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5661 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5662 label_bb->aux = (void *)1;
5663 }
5664
5665 /* Verify that the case labels are sorted. */
5666 prev = gimple_switch_label (switch_stmt, 0);
5667 for (i = 1; i < n; ++i)
5668 {
5669 tree c = gimple_switch_label (switch_stmt, i);
5670 if (!CASE_LOW (c))
5671 {
5672 error ("found default case not at the start of "
5673 "case vector");
5674 err = 1;
5675 continue;
5676 }
5677 if (CASE_LOW (prev)
5678 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5679 {
5680 error ("case labels not sorted: ");
5681 print_generic_expr (stderr, prev);
5682 fprintf (stderr," is greater than ");
5683 print_generic_expr (stderr, c);
5684 fprintf (stderr," but comes before it.\n");
5685 err = 1;
5686 }
5687 prev = c;
5688 }
5689 /* VRP will remove the default case if it can prove it will
5690 never be executed. So do not verify there always exists
5691 a default case here. */
5692
5693 FOR_EACH_EDGE (e, ei, bb->succs)
5694 {
5695 if (!e->dest->aux)
5696 {
5697 error ("extra outgoing edge %d->%d",
5698 bb->index, e->dest->index);
5699 err = 1;
5700 }
5701
5702 e->dest->aux = (void *)2;
5703 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5704 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5705 {
5706 error ("wrong outgoing edge flags at end of bb %d",
5707 bb->index);
5708 err = 1;
5709 }
5710 }
5711
5712 /* Check that we have all of them. */
5713 for (i = 0; i < n; ++i)
5714 {
5715 basic_block label_bb = gimple_switch_label_bb (cfun,
5716 switch_stmt, i);
5717
5718 if (label_bb->aux != (void *)2)
5719 {
5720 error ("missing edge %i->%i", bb->index, label_bb->index);
5721 err = 1;
5722 }
5723 }
5724
5725 FOR_EACH_EDGE (e, ei, bb->succs)
5726 e->dest->aux = (void *)0;
5727 }
5728 break;
5729
5730 case GIMPLE_EH_DISPATCH:
5731 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5732 break;
5733
5734 default:
5735 break;
5736 }
5737 }
5738
5739 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5740 verify_dominators (CDI_DOMINATORS);
5741
5742 return err;
5743 }
5744
5745 #if __GNUC__ >= 10
5746 # pragma GCC diagnostic pop
5747 #endif
5748
5749 /* Updates phi nodes after creating a forwarder block joined
5750 by edge FALLTHRU. */
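/* After the split, the original PHI nodes live in the forwarder
   block (the source of FALLTHRU). For each of them we create a
   fresh PHI in the destination, give the old PHI a new result
   name and feed that name into the new PHI along FALLTHRU; the
   arguments for the remaining (redirected) predecessor edges are
   flushed from the edge data afterwards. */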
5751
5752 static void
5753 gimple_make_forwarder_block (edge fallthru)
5754 {
5755 edge e;
5756 edge_iterator ei;
5757 basic_block dummy, bb;
5758 tree var;
5759 gphi_iterator gsi;
5760 bool forward_location_p;
5761
5762 dummy = fallthru->src;
5763 bb = fallthru->dest;
5764
5765 if (single_pred_p (bb))
5766 return;
5767
5768 /* We can forward location info if we have only one predecessor. */
5769 forward_location_p = single_pred_p (dummy);
5770
5771 /* If we redirected a branch we must create new PHI nodes at the
5772 start of BB. */
5773 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5774 {
5775 gphi *phi, *new_phi;
5776
5777 phi = gsi.phi ();
5778 var = gimple_phi_result (phi);
5779 new_phi = create_phi_node (var, bb);
5780 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5781 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5782 forward_location_p
5783 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5784 }
5785
5786 /* Add the arguments we have stored on edges. */
5787 FOR_EACH_EDGE (e, ei, bb->preds)
5788 {
5789 if (e == fallthru)
5790 continue;
5791
5792 flush_pending_stmts (e);
5793 }
5794 }
5795
5796
5797 /* Return a non-special label at the head of basic block BB.
5798 Create one if it doesn't exist. */
5799
5800 tree
5801 gimple_block_label (basic_block bb)
5802 {
5803 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5804 bool first = true;
5805 tree label;
5806 glabel *stmt;
5807
5808 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5809 {
5810 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5811 if (!stmt)
5812 break;
5813 label = gimple_label_label (stmt);
5814 if (!DECL_NONLOCAL (label))
5815 {
5816 if (!first)
5817 gsi_move_before (&i, &s);
5818 return label;
5819 }
5820 }
5821
5822 label = create_artificial_label (UNKNOWN_LOCATION);
5823 stmt = gimple_build_label (label);
5824 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5825 return label;
5826 }
5827
5828
5829 /* Attempt to perform edge redirection by replacing a possibly complex
5830 jump instruction by a goto or by removing the jump completely.
5831 This can apply only if all edges now point to the same block. The
5832 parameters and return values are equivalent to
5833 redirect_edge_and_branch. */
5834
5835 static edge
5836 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5837 {
5838 basic_block src = e->src;
5839 gimple_stmt_iterator i;
5840 gimple *stmt;
5841
5842 /* We can replace or remove a complex jump only when we have exactly
5843 two edges. */
5844 if (EDGE_COUNT (src->succs) != 2
5845 /* Verify that all targets will be TARGET. Specifically, the
5846 edge that is not E must also go to TARGET. */
5847 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5848 return NULL;
5849
5850 i = gsi_last_bb (src);
5851 if (gsi_end_p (i))
5852 return NULL;
5853
5854 stmt = gsi_stmt (i);
5855
5856 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5857 {
5858 gsi_remove (&i, true);
5859 e = ssa_redirect_edge (e, target);
5860 e->flags = EDGE_FALLTHRU;
5861 return e;
5862 }
5863
5864 return NULL;
5865 }
5866
5867
5868 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5869 edge representing the redirected branch. */
5870
5871 static edge
5872 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5873 {
5874 basic_block bb = e->src;
5875 gimple_stmt_iterator gsi;
5876 edge ret;
5877 gimple *stmt;
5878
5879 if (e->flags & EDGE_ABNORMAL)
5880 return NULL;
5881
5882 if (e->dest == dest)
5883 return NULL;
5884
5885 if (e->flags & EDGE_EH)
5886 return redirect_eh_edge (e, dest);
5887
5888 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5889 {
5890 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5891 if (ret)
5892 return ret;
5893 }
5894
5895 gsi = gsi_last_nondebug_bb (bb);
5896 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5897
5898 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5899 {
5900 case GIMPLE_COND:
5901 /* For COND_EXPR, we only need to redirect the edge. */
5902 break;
5903
5904 case GIMPLE_GOTO:
5905 /* No non-abnormal edges should lead from a non-simple goto, and
5906 simple ones should be represented implicitly. */
5907 gcc_unreachable ();
5908
5909 case GIMPLE_SWITCH:
5910 {
5911 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5912 tree label = gimple_block_label (dest);
5913 tree cases = get_cases_for_edge (e, switch_stmt);
5914
5915 /* If we have a list of cases associated with E, then use it
5916 as it's a lot faster than walking the entire case vector. */
5917 if (cases)
5918 {
5919 edge e2 = find_edge (e->src, dest);
5920 tree last, first;
5921
5922 first = cases;
5923 while (cases)
5924 {
5925 last = cases;
5926 CASE_LABEL (cases) = label;
5927 cases = CASE_CHAIN (cases);
5928 }
5929
5930 /* If there was already an edge in the CFG, then we need
5931 to move all the cases associated with E to E2. */
5932 if (e2)
5933 {
5934 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5935
5936 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5937 CASE_CHAIN (cases2) = first;
5938 }
5939 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5940 }
5941 else
5942 {
5943 size_t i, n = gimple_switch_num_labels (switch_stmt);
5944
5945 for (i = 0; i < n; i++)
5946 {
5947 tree elt = gimple_switch_label (switch_stmt, i);
5948 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5949 CASE_LABEL (elt) = label;
5950 }
5951 }
5952 }
5953 break;
5954
5955 case GIMPLE_ASM:
5956 {
5957 gasm *asm_stmt = as_a <gasm *> (stmt);
5958 int i, n = gimple_asm_nlabels (asm_stmt);
5959 tree label = NULL;
5960
5961 for (i = 0; i < n; ++i)
5962 {
5963 tree cons = gimple_asm_label_op (asm_stmt, i);
5964 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5965 {
5966 if (!label)
5967 label = gimple_block_label (dest);
5968 TREE_VALUE (cons) = label;
5969 }
5970 }
5971
5972 /* If we didn't find any label matching the former edge in the
5973 asm labels, we must be redirecting the fallthrough
5974 edge. */
5975 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5976 }
5977 break;
5978
5979 case GIMPLE_RETURN:
5980 gsi_remove (&gsi, true);
5981 e->flags |= EDGE_FALLTHRU;
5982 break;
5983
5984 case GIMPLE_OMP_RETURN:
5985 case GIMPLE_OMP_CONTINUE:
5986 case GIMPLE_OMP_SECTIONS_SWITCH:
5987 case GIMPLE_OMP_FOR:
5988 /* The edges from OMP constructs can be simply redirected. */
5989 break;
5990
5991 case GIMPLE_EH_DISPATCH:
5992 if (!(e->flags & EDGE_FALLTHRU))
5993 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5994 break;
5995
5996 case GIMPLE_TRANSACTION:
5997 if (e->flags & EDGE_TM_ABORT)
5998 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5999 gimple_block_label (dest));
6000 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6001 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6002 gimple_block_label (dest));
6003 else
6004 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6005 gimple_block_label (dest));
6006 break;
6007
6008 default:
6009 /* Otherwise it must be a fallthru edge, and we don't need to
6010 do anything besides redirecting it. */
6011 gcc_assert (e->flags & EDGE_FALLTHRU);
6012 break;
6013 }
6014
6015 /* Update/insert PHI nodes as necessary. */
6016
6017 /* Now update the edges in the CFG. */
6018 e = ssa_redirect_edge (e, dest);
6019
6020 return e;
6021 }
6022
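/* Illustrative sketch (not part of the original source): passes do not
   call the hook above directly; they go through the generic CFG hook,
   as this file itself does later:

     edge e2 = redirect_edge_and_branch (e, new_dest);
     if (e2)
       flush_pending_stmts (e2);

   flush_pending_stmts commits the PHI arguments that ssa_redirect_edge
   queued on the redirected edge.  */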
6023 /* Returns true if it is possible to remove edge E by redirecting
6024 it to the destination of the other edge from E->src. */
6025
6026 static bool
6027 gimple_can_remove_branch_p (const_edge e)
6028 {
6029 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6030 return false;
6031
6032 return true;
6033 }
6034
6035 /* Simple wrapper, as we can always redirect fallthru edges. */
6036
6037 static basic_block
6038 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6039 {
6040 e = gimple_redirect_edge_and_branch (e, dest);
6041 gcc_assert (e);
6042
6043 return NULL;
6044 }
6045
6046
6047 /* Splits basic block BB after statement STMT (but at least after the
6048 labels). If STMT is NULL, BB is split just after the labels. */
6049
6050 static basic_block
6051 gimple_split_block (basic_block bb, void *stmt)
6052 {
6053 gimple_stmt_iterator gsi;
6054 gimple_stmt_iterator gsi_tgt;
6055 gimple_seq list;
6056 basic_block new_bb;
6057 edge e;
6058 edge_iterator ei;
6059
6060 new_bb = create_empty_bb (bb);
6061
6062 /* Redirect the outgoing edges. */
6063 new_bb->succs = bb->succs;
6064 bb->succs = NULL;
6065 FOR_EACH_EDGE (e, ei, new_bb->succs)
6066 e->src = new_bb;
6067
6068 /* Get a stmt iterator pointing to the first stmt to move. */
6069 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6070 gsi = gsi_after_labels (bb);
6071 else
6072 {
6073 gsi = gsi_for_stmt ((gimple *) stmt);
6074 gsi_next (&gsi);
6075 }
6076
6077 /* Move everything from GSI to the new basic block. */
6078 if (gsi_end_p (gsi))
6079 return new_bb;
6080
6081 /* Split the statement list - avoid re-creating new containers as this
6082 brings ugly quadratic memory consumption in the inliner.
6083 (We are still quadratic since we need to update stmt BB pointers,
6084 sadly.) */
6085 gsi_split_seq_before (&gsi, &list);
6086 set_bb_seq (new_bb, list);
6087 for (gsi_tgt = gsi_start (list);
6088 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6089 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6090
6091 return new_bb;
6092 }
6093
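/* Illustrative sketch (not part of the original source): callers reach
   the hook above through the generic split_block, e.g. to split BB
   right after statement STMT:

     edge fall = split_block (bb, stmt);
     basic_block rest = fall->dest;

   where FALL is the fallthru edge from BB to the new block holding
   everything after STMT.  */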
6094
6095 /* Moves basic block BB after block AFTER. */
6096
6097 static bool
6098 gimple_move_block_after (basic_block bb, basic_block after)
6099 {
6100 if (bb->prev_bb == after)
6101 return true;
6102
6103 unlink_block (bb);
6104 link_block (bb, after);
6105
6106 return true;
6107 }
6108
6109
6110 /* Return TRUE if block BB has no executable statements, otherwise return
6111 FALSE. */
6112
6113 static bool
6114 gimple_empty_block_p (basic_block bb)
6115 {
6116 /* BB must have no executable statements. */
6117 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6118 if (phi_nodes (bb))
6119 return false;
6120 while (!gsi_end_p (gsi))
6121 {
6122 gimple *stmt = gsi_stmt (gsi);
6123 if (is_gimple_debug (stmt))
6124 ;
6125 else if (gimple_code (stmt) == GIMPLE_NOP
6126 || gimple_code (stmt) == GIMPLE_PREDICT)
6127 ;
6128 else
6129 return false;
6130 gsi_next (&gsi);
6131 }
6132 return true;
6133 }
6134
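/* For illustration (not part of the original source): a block whose
   body is only

     <label>:
     # DEBUG x => 42

   counts as empty here, since labels, debug binds, GIMPLE_NOP and
   GIMPLE_PREDICT are not executable; a PHI node, however, makes the
   block non-empty.  */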
6135
6136 /* Split a basic block if it ends with a conditional branch and if the
6137 other part of the block is not empty. */
6138
6139 static basic_block
6140 gimple_split_block_before_cond_jump (basic_block bb)
6141 {
6142 gimple *last, *split_point;
6143 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6144 if (gsi_end_p (gsi))
6145 return NULL;
6146 last = gsi_stmt (gsi);
6147 if (gimple_code (last) != GIMPLE_COND
6148 && gimple_code (last) != GIMPLE_SWITCH)
6149 return NULL;
6150 gsi_prev (&gsi);
6151 split_point = gsi_stmt (gsi);
6152 return split_block (bb, split_point)->dest;
6153 }
6154
6155
6156 /* Return true if basic block BB can be duplicated. */
6157
6158 static bool
6159 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6160 {
6161 return true;
6162 }
6163
6164 /* Create a duplicate of the basic block BB. NOTE: This does not
6165 preserve SSA form. */
6166
6167 static basic_block
6168 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6169 {
6170 basic_block new_bb;
6171 gimple_stmt_iterator gsi_tgt;
6172
6173 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6174
6175 /* Copy the PHI nodes. We ignore PHI node arguments here because
6176 the incoming edges have not been set up yet. */
6177 for (gphi_iterator gpi = gsi_start_phis (bb);
6178 !gsi_end_p (gpi);
6179 gsi_next (&gpi))
6180 {
6181 gphi *phi, *copy;
6182 phi = gpi.phi ();
6183 copy = create_phi_node (NULL_TREE, new_bb);
6184 create_new_def_for (gimple_phi_result (phi), copy,
6185 gimple_phi_result_ptr (copy));
6186 gimple_set_uid (copy, gimple_uid (phi));
6187 }
6188
6189 gsi_tgt = gsi_start_bb (new_bb);
6190 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6191 !gsi_end_p (gsi);
6192 gsi_next (&gsi))
6193 {
6194 def_operand_p def_p;
6195 ssa_op_iter op_iter;
6196 tree lhs;
6197 gimple *stmt, *copy;
6198
6199 stmt = gsi_stmt (gsi);
6200 if (gimple_code (stmt) == GIMPLE_LABEL)
6201 continue;
6202
6203 /* Don't duplicate label debug stmts. */
6204 if (gimple_debug_bind_p (stmt)
6205 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6206 == LABEL_DECL)
6207 continue;
6208
6209 /* Create a new copy of STMT and duplicate STMT's virtual
6210 operands. */
6211 copy = gimple_copy (stmt);
6212 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6213
6214 maybe_duplicate_eh_stmt (copy, stmt);
6215 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6216
6217 /* When copying around a stmt writing into a local non-user
6218 aggregate, make sure it won't share a stack slot with other
6219 vars. */
6220 lhs = gimple_get_lhs (stmt);
6221 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6222 {
6223 tree base = get_base_address (lhs);
6224 if (base
6225 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6226 && DECL_IGNORED_P (base)
6227 && !TREE_STATIC (base)
6228 && !DECL_EXTERNAL (base)
6229 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6230 DECL_NONSHAREABLE (base) = 1;
6231 }
6232
6233 /* If requested, remap dependence info of cliques brought in
6234 via inlining. */
6235 if (id)
6236 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6237 {
6238 tree op = gimple_op (copy, i);
6239 if (!op)
6240 continue;
6241 if (TREE_CODE (op) == ADDR_EXPR
6242 || TREE_CODE (op) == WITH_SIZE_EXPR)
6243 op = TREE_OPERAND (op, 0);
6244 while (handled_component_p (op))
6245 op = TREE_OPERAND (op, 0);
6246 if ((TREE_CODE (op) == MEM_REF
6247 || TREE_CODE (op) == TARGET_MEM_REF)
6248 && MR_DEPENDENCE_CLIQUE (op) > 1
6249 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6250 {
6251 if (!id->dependence_map)
6252 id->dependence_map = new hash_map<dependence_hash,
6253 unsigned short>;
6254 bool existed;
6255 unsigned short &newc = id->dependence_map->get_or_insert
6256 (MR_DEPENDENCE_CLIQUE (op), &existed);
6257 if (!existed)
6258 {
6259 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6260 newc = ++cfun->last_clique;
6261 }
6262 MR_DEPENDENCE_CLIQUE (op) = newc;
6263 }
6264 }
6265
6266 /* Create new names for all the definitions created by COPY and
6267 add replacement mappings for each new name. */
6268 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6269 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6270 }
6271
6272 return new_bb;
6273 }
6274
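/* Illustrative sketch (not part of the original source): duplicating a
   region and then repairing the PHI arguments typically follows the
   pattern used by gimple_duplicate_sese_region below:

     initialize_original_copy_tables ();
     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
               loop, split_edge_bb_loc (entry), true);
     add_phi_args_after_copy (region_copy, n_region, NULL);
     free_original_copy_tables ();

   gimple_duplicate_bb itself leaves PHI arguments unset because the
   incoming edges of the copy do not exist yet.  */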
6275 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6276
6277 static void
6278 add_phi_args_after_copy_edge (edge e_copy)
6279 {
6280 basic_block bb, bb_copy = e_copy->src, dest;
6281 edge e;
6282 edge_iterator ei;
6283 gphi *phi, *phi_copy;
6284 tree def;
6285 gphi_iterator psi, psi_copy;
6286
6287 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6288 return;
6289
6290 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6291
6292 if (e_copy->dest->flags & BB_DUPLICATED)
6293 dest = get_bb_original (e_copy->dest);
6294 else
6295 dest = e_copy->dest;
6296
6297 e = find_edge (bb, dest);
6298 if (!e)
6299 {
6300 /* During loop unrolling the target of the latch edge is copied.
6301 In this case we are not looking for the edge to DEST, but for
6302 the edge to the duplicated block whose original was DEST. */
6303 FOR_EACH_EDGE (e, ei, bb->succs)
6304 {
6305 if ((e->dest->flags & BB_DUPLICATED)
6306 && get_bb_original (e->dest) == dest)
6307 break;
6308 }
6309
6310 gcc_assert (e != NULL);
6311 }
6312
6313 for (psi = gsi_start_phis (e->dest),
6314 psi_copy = gsi_start_phis (e_copy->dest);
6315 !gsi_end_p (psi);
6316 gsi_next (&psi), gsi_next (&psi_copy))
6317 {
6318 phi = psi.phi ();
6319 phi_copy = psi_copy.phi ();
6320 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6321 add_phi_arg (phi_copy, def, e_copy,
6322 gimple_phi_arg_location_from_edge (phi, e));
6323 }
6324 }
6325
6326
6327 /* Basic block BB_COPY was created by code duplication. Add phi node
6328 arguments for edges going out of BB_COPY. The blocks that were
6329 duplicated have BB_DUPLICATED set. */
6330
6331 void
6332 add_phi_args_after_copy_bb (basic_block bb_copy)
6333 {
6334 edge e_copy;
6335 edge_iterator ei;
6336
6337 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6338 {
6339 add_phi_args_after_copy_edge (e_copy);
6340 }
6341 }
6342
6343 /* Blocks in REGION_COPY array of length N_REGION were created by
6344 duplication of basic blocks. Add phi node arguments for edges
6345 going from these blocks. If E_COPY is not NULL, also add
6346 phi node arguments for its destination. */
6347
6348 void
6349 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6350 edge e_copy)
6351 {
6352 unsigned i;
6353
6354 for (i = 0; i < n_region; i++)
6355 region_copy[i]->flags |= BB_DUPLICATED;
6356
6357 for (i = 0; i < n_region; i++)
6358 add_phi_args_after_copy_bb (region_copy[i]);
6359 if (e_copy)
6360 add_phi_args_after_copy_edge (e_copy);
6361
6362 for (i = 0; i < n_region; i++)
6363 region_copy[i]->flags &= ~BB_DUPLICATED;
6364 }
6365
6366 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6367 important exit edge EXIT. By important we mean that no SSA name defined
6368 inside the region is live over the other exit edges of the region. All entry
6369 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6370 to the duplicate of the region. Dominance and loop information is
6371 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6372 UPDATE_DOMINANCE is false then we assume that the caller will update the
6373 dominance information after calling this function. The new basic
6374 blocks are stored to REGION_COPY in the same order as they had in REGION,
6375 provided that REGION_COPY is not NULL.
6376 The function returns false if it is unable to copy the region,
6377 true otherwise. */
6378
6379 bool
6380 gimple_duplicate_sese_region (edge entry, edge exit,
6381 basic_block *region, unsigned n_region,
6382 basic_block *region_copy,
6383 bool update_dominance)
6384 {
6385 unsigned i;
6386 bool free_region_copy = false, copying_header = false;
6387 class loop *loop = entry->dest->loop_father;
6388 edge exit_copy;
6389 vec<basic_block> doms = vNULL;
6390 edge redirected;
6391 profile_count total_count = profile_count::uninitialized ();
6392 profile_count entry_count = profile_count::uninitialized ();
6393
6394 if (!can_copy_bbs_p (region, n_region))
6395 return false;
6396
6397 /* Some sanity checking. Note that we do not check for all possible
6398 misuses of the function. That is, if you ask to copy something weird,
6399 it will work, but the state of the structures probably will not be
6400 correct. */
6401 for (i = 0; i < n_region; i++)
6402 {
6403 /* We do not handle subloops, i.e. all the blocks must belong to the
6404 same loop. */
6405 if (region[i]->loop_father != loop)
6406 return false;
6407
6408 if (region[i] != entry->dest
6409 && region[i] == loop->header)
6410 return false;
6411 }
6412
6413 /* In case the function is used for loop header copying (which is the primary
6414 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6415 if (loop->header == entry->dest)
6416 {
6417 copying_header = true;
6418
6419 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6420 return false;
6421
6422 for (i = 0; i < n_region; i++)
6423 if (region[i] != exit->src
6424 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6425 return false;
6426 }
6427
6428 initialize_original_copy_tables ();
6429
6430 if (copying_header)
6431 set_loop_copy (loop, loop_outer (loop));
6432 else
6433 set_loop_copy (loop, loop);
6434
6435 if (!region_copy)
6436 {
6437 region_copy = XNEWVEC (basic_block, n_region);
6438 free_region_copy = true;
6439 }
6440
6441 /* Record blocks outside the region that are dominated by something
6442 inside. */
6443 if (update_dominance)
6444 {
6445 doms.create (0);
6446 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6447 }
6448
6449 if (entry->dest->count.initialized_p ())
6450 {
6451 total_count = entry->dest->count;
6452 entry_count = entry->count ();
6453 /* Fix up corner cases, to avoid division by zero or creation of negative
6454 frequencies. */
6455 if (entry_count > total_count)
6456 entry_count = total_count;
6457 }
6458
6459 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6460 split_edge_bb_loc (entry), update_dominance);
6461 if (total_count.initialized_p () && entry_count.initialized_p ())
6462 {
6463 scale_bbs_frequencies_profile_count (region, n_region,
6464 total_count - entry_count,
6465 total_count);
6466 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6467 total_count);
6468 }
6469
6470 if (copying_header)
6471 {
6472 loop->header = exit->dest;
6473 loop->latch = exit->src;
6474 }
6475
6476 /* Redirect the entry and add the phi node arguments. */
6477 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6478 gcc_assert (redirected != NULL);
6479 flush_pending_stmts (entry);
6480
6481 /* Concerning updating of dominators: We must recount dominators
6482 for entry block and its copy. Anything that is outside of the
6483 region, but was dominated by something inside needs recounting as
6484 well. */
6485 if (update_dominance)
6486 {
6487 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6488 doms.safe_push (get_bb_original (entry->dest));
6489 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6490 doms.release ();
6491 }
6492
6493 /* Add the other PHI node arguments. */
6494 add_phi_args_after_copy (region_copy, n_region, NULL);
6495
6496 if (free_region_copy)
6497 free (region_copy);
6498
6499 free_original_copy_tables ();
6500 return true;
6501 }
6502
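/* Illustrative sketch (not part of the original source): the primary
   client of the function above is loop header copying, roughly

     basic_block region[1] = { loop->header };
     if (gimple_duplicate_sese_region (loop_preheader_edge (loop),
                                       single_exit (loop), region, 1,
                                       NULL, true))
       update_ssa (TODO_update_ssa);

   assuming, purely for this sketch, a single-block header that ends in
   the loop's only exit test.  */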
6503 /* Checks if BB is part of the region defined by the N_REGION blocks in BBS. */
6504 static bool
6505 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6506 {
6507 unsigned int n;
6508
6509 for (n = 0; n < n_region; n++)
6510 {
6511 if (bb == bbs[n])
6512 return true;
6513 }
6514 return false;
6515 }
6516
6517 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6518 are stored to REGION_COPY in the same order in which they appear
6519 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6520 the region, EXIT an exit from it. The condition guarding EXIT
6521 is moved to ENTRY. Returns true if duplication succeeds, false
6522 otherwise.
6523
6524 For example,
6525
6526 some_code;
6527 if (cond)
6528 A;
6529 else
6530 B;
6531
6532 is transformed to
6533
6534 if (cond)
6535 {
6536 some_code;
6537 A;
6538 }
6539 else
6540 {
6541 some_code;
6542 B;
6543 }
6544 */
6545
6546 bool
6547 gimple_duplicate_sese_tail (edge entry, edge exit,
6548 basic_block *region, unsigned n_region,
6549 basic_block *region_copy)
6550 {
6551 unsigned i;
6552 bool free_region_copy = false;
6553 class loop *loop = exit->dest->loop_father;
6554 class loop *orig_loop = entry->dest->loop_father;
6555 basic_block switch_bb, entry_bb, nentry_bb;
6556 vec<basic_block> doms;
6557 profile_count total_count = profile_count::uninitialized (),
6558 exit_count = profile_count::uninitialized ();
6559 edge exits[2], nexits[2], e;
6560 gimple_stmt_iterator gsi;
6561 gimple *cond_stmt;
6562 edge sorig, snew;
6563 basic_block exit_bb;
6564 gphi_iterator psi;
6565 gphi *phi;
6566 tree def;
6567 class loop *target, *aloop, *cloop;
6568
6569 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6570 exits[0] = exit;
6571 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6572
6573 if (!can_copy_bbs_p (region, n_region))
6574 return false;
6575
6576 initialize_original_copy_tables ();
6577 set_loop_copy (orig_loop, loop);
6578
6579 target = loop;
6580 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6581 {
6582 if (bb_part_of_region_p (aloop->header, region, n_region))
6583 {
6584 cloop = duplicate_loop (aloop, target);
6585 duplicate_subloops (aloop, cloop);
6586 }
6587 }
6588
6589 if (!region_copy)
6590 {
6591 region_copy = XNEWVEC (basic_block, n_region);
6592 free_region_copy = true;
6593 }
6594
6595 gcc_assert (!need_ssa_update_p (cfun));
6596
6597 /* Record blocks outside the region that are dominated by something
6598 inside. */
6599 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6600
6601 total_count = exit->src->count;
6602 exit_count = exit->count ();
6603 /* Fix up corner cases, to avoid division by zero or creation of negative
6604 frequencies. */
6605 if (exit_count > total_count)
6606 exit_count = total_count;
6607
6608 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6609 split_edge_bb_loc (exit), true);
6610 if (total_count.initialized_p () && exit_count.initialized_p ())
6611 {
6612 scale_bbs_frequencies_profile_count (region, n_region,
6613 total_count - exit_count,
6614 total_count);
6615 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6616 total_count);
6617 }
6618
6619 /* Create the switch block, and put the exit condition into it. */
6620 entry_bb = entry->dest;
6621 nentry_bb = get_bb_copy (entry_bb);
6622 if (!last_stmt (entry->src)
6623 || !stmt_ends_bb_p (last_stmt (entry->src)))
6624 switch_bb = entry->src;
6625 else
6626 switch_bb = split_edge (entry);
6627 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6628
6629 gsi = gsi_last_bb (switch_bb);
6630 cond_stmt = last_stmt (exit->src);
6631 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6632 cond_stmt = gimple_copy (cond_stmt);
6633
6634 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6635
6636 sorig = single_succ_edge (switch_bb);
6637 sorig->flags = exits[1]->flags;
6638 sorig->probability = exits[1]->probability;
6639 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6640 snew->probability = exits[0]->probability;
6641
6642
6643 /* Register the new edge from SWITCH_BB in loop exit lists. */
6644 rescan_loop_exit (snew, true, false);
6645
6646 /* Add the PHI node arguments. */
6647 add_phi_args_after_copy (region_copy, n_region, snew);
6648
6649 /* Get rid of now superfluous conditions and associated edges (and phi node
6650 arguments). */
6651 exit_bb = exit->dest;
6652
6653 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6654 PENDING_STMT (e) = NULL;
6655
6656 /* The latch of ORIG_LOOP was copied, and so was the backedge
6657 to the original header. We redirect this backedge to EXIT_BB. */
6658 for (i = 0; i < n_region; i++)
6659 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6660 {
6661 gcc_assert (single_succ_edge (region_copy[i]));
6662 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6663 PENDING_STMT (e) = NULL;
6664 for (psi = gsi_start_phis (exit_bb);
6665 !gsi_end_p (psi);
6666 gsi_next (&psi))
6667 {
6668 phi = psi.phi ();
6669 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6670 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6671 }
6672 }
6673 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6674 PENDING_STMT (e) = NULL;
6675
6676 /* Anything that is outside of the region, but was dominated by
6677 something inside, needs its dominance info updated. */
6678 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6679 doms.release ();
6680 /* Update the SSA web. */
6681 update_ssa (TODO_update_ssa);
6682
6683 if (free_region_copy)
6684 free (region_copy);
6685
6686 free_original_copy_tables ();
6687 return true;
6688 }
6689
6690 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6691 adding blocks when the dominator traversal reaches EXIT. This
6692 function silently assumes that ENTRY strictly dominates EXIT. */
6693
6694 void
6695 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6696 vec<basic_block> *bbs_p)
6697 {
6698 basic_block son;
6699
6700 for (son = first_dom_son (CDI_DOMINATORS, entry);
6701 son;
6702 son = next_dom_son (CDI_DOMINATORS, son))
6703 {
6704 bbs_p->safe_push (son);
6705 if (son != exit)
6706 gather_blocks_in_sese_region (son, exit, bbs_p);
6707 }
6708 }
6709
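/* Illustrative sketch (not part of the original source), mirroring the
   use in move_sese_region_to_fn below; ENTRY_BB must be pushed by hand
   because the dominator walk only visits its children:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
*/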
6710 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6711 The duplicates are recorded in VARS_MAP. */
6712
6713 static void
6714 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6715 tree to_context)
6716 {
6717 tree t = *tp, new_t;
6718 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6719
6720 if (DECL_CONTEXT (t) == to_context)
6721 return;
6722
6723 bool existed;
6724 tree &loc = vars_map->get_or_insert (t, &existed);
6725
6726 if (!existed)
6727 {
6728 if (SSA_VAR_P (t))
6729 {
6730 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6731 add_local_decl (f, new_t);
6732 }
6733 else
6734 {
6735 gcc_assert (TREE_CODE (t) == CONST_DECL);
6736 new_t = copy_node (t);
6737 }
6738 DECL_CONTEXT (new_t) = to_context;
6739
6740 loc = new_t;
6741 }
6742 else
6743 new_t = loc;
6744
6745 *tp = new_t;
6746 }
6747
6748
6749 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6750 VARS_MAP maps old ssa names and var_decls to the new ones. */
6751
6752 static tree
6753 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6754 tree to_context)
6755 {
6756 tree new_name;
6757
6758 gcc_assert (!virtual_operand_p (name));
6759
6760 tree *loc = vars_map->get (name);
6761
6762 if (!loc)
6763 {
6764 tree decl = SSA_NAME_VAR (name);
6765 if (decl)
6766 {
6767 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6768 replace_by_duplicate_decl (&decl, vars_map, to_context);
6769 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6770 decl, SSA_NAME_DEF_STMT (name));
6771 }
6772 else
6773 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6774 name, SSA_NAME_DEF_STMT (name));
6775
6776 /* Now that we've used the def stmt to define new_name, make sure it
6777 doesn't define name anymore. */
6778 SSA_NAME_DEF_STMT (name) = NULL;
6779
6780 vars_map->put (name, new_name);
6781 }
6782 else
6783 new_name = *loc;
6784
6785 return new_name;
6786 }
6787
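/* Illustrative note (not part of the original source): together with
   replace_by_duplicate_decl above, this gives move_block_to_fn a
   memoized renaming, e.g.

     hash_map<tree, tree> vars_map;
     tree new_name = replace_ssa_name (old_name, &vars_map, dest_fn_decl);

   where repeated calls with the same OLD_NAME return the same
   replacement; DEST_FN_DECL stands for the FUNCTION_DECL of the
   destination function (a made-up name for this sketch).  */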
6788 struct move_stmt_d
6789 {
6790 tree orig_block;
6791 tree new_block;
6792 tree from_context;
6793 tree to_context;
6794 hash_map<tree, tree> *vars_map;
6795 htab_t new_label_map;
6796 hash_map<void *, void *> *eh_map;
6797 bool remap_decls_p;
6798 };
6799
6800 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6801 contained in *TP if it was ORIG_BLOCK previously, and change the
6802 DECL_CONTEXT of every local variable referenced in *TP. */
6803
6804 static tree
6805 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6806 {
6807 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6808 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6809 tree t = *tp;
6810
6811 if (EXPR_P (t))
6812 {
6813 tree block = TREE_BLOCK (t);
6814 if (block == NULL_TREE)
6815 ;
6816 else if (block == p->orig_block
6817 || p->orig_block == NULL_TREE)
6818 {
6819 /* tree_node_can_be_shared says we can share invariant
6820 addresses but unshare_expr copies them anyway. Make sure
6821 to unshare before adjusting the block in place - we do not
6822 always see a copy here. */
6823 if (TREE_CODE (t) == ADDR_EXPR
6824 && is_gimple_min_invariant (t))
6825 *tp = t = unshare_expr (t);
6826 TREE_SET_BLOCK (t, p->new_block);
6827 }
6828 else if (flag_checking)
6829 {
6830 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6831 block = BLOCK_SUPERCONTEXT (block);
6832 gcc_assert (block == p->orig_block);
6833 }
6834 }
6835 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6836 {
6837 if (TREE_CODE (t) == SSA_NAME)
6838 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6839 else if (TREE_CODE (t) == PARM_DECL
6840 && gimple_in_ssa_p (cfun))
6841 *tp = *(p->vars_map->get (t));
6842 else if (TREE_CODE (t) == LABEL_DECL)
6843 {
6844 if (p->new_label_map)
6845 {
6846 struct tree_map in, *out;
6847 in.base.from = t;
6848 out = (struct tree_map *)
6849 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6850 if (out)
6851 *tp = t = out->to;
6852 }
6853
6854 /* For FORCED_LABELs we can end up with references from other
6855 functions if some SESE regions are outlined. It is UB to
6856 jump in between them, but they could be used just for printing
6857 addresses etc. In that case, DECL_CONTEXT on the label should
6858 be the function containing the glabel stmt with that LABEL_DECL,
6859 rather than whatever function a reference to the label was
6860 last seen in. */
6861 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6862 DECL_CONTEXT (t) = p->to_context;
6863 }
6864 else if (p->remap_decls_p)
6865 {
6866 /* Replace T with its duplicate. T should no longer appear in the
6867 parent function, so this looks wasteful; however, it may appear
6868 in referenced_vars, and more importantly, as virtual operands of
6869 statements, and in alias lists of other variables. It would be
6870 quite difficult to expunge it from all those places. ??? It might
6871 suffice to do this for addressable variables. */
6872 if ((VAR_P (t) && !is_global_var (t))
6873 || TREE_CODE (t) == CONST_DECL)
6874 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6875 }
6876 *walk_subtrees = 0;
6877 }
6878 else if (TYPE_P (t))
6879 *walk_subtrees = 0;
6880
6881 return NULL_TREE;
6882 }
6883
6884 /* Helper for move_stmt_r. Given an EH region number for the source
6885 function, map that to the duplicate EH region number in the dest. */
6886
6887 static int
6888 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6889 {
6890 eh_region old_r, new_r;
6891
6892 old_r = get_eh_region_from_number (old_nr);
6893 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6894
6895 return new_r->index;
6896 }
6897
6898 /* Similar, but operate on INTEGER_CSTs. */
6899
6900 static tree
6901 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6902 {
6903 int old_nr, new_nr;
6904
6905 old_nr = tree_to_shwi (old_t_nr);
6906 new_nr = move_stmt_eh_region_nr (old_nr, p);
6907
6908 return build_int_cst (integer_type_node, new_nr);
6909 }
6910
6911 /* Like move_stmt_op, but for gimple statements.
6912
6913 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6914 contained in the current statement in *GSI_P and change the
6915 DECL_CONTEXT of every local variable referenced in the current
6916 statement. */
6917
6918 static tree
6919 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6920 struct walk_stmt_info *wi)
6921 {
6922 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6923 gimple *stmt = gsi_stmt (*gsi_p);
6924 tree block = gimple_block (stmt);
6925
6926 if (block == p->orig_block
6927 || (p->orig_block == NULL_TREE
6928 && block != NULL_TREE))
6929 gimple_set_block (stmt, p->new_block);
6930
6931 switch (gimple_code (stmt))
6932 {
6933 case GIMPLE_CALL:
6934 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6935 {
6936 tree r, fndecl = gimple_call_fndecl (stmt);
6937 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6938 switch (DECL_FUNCTION_CODE (fndecl))
6939 {
6940 case BUILT_IN_EH_COPY_VALUES:
6941 r = gimple_call_arg (stmt, 1);
6942 r = move_stmt_eh_region_tree_nr (r, p);
6943 gimple_call_set_arg (stmt, 1, r);
6944 /* FALLTHRU */
6945
6946 case BUILT_IN_EH_POINTER:
6947 case BUILT_IN_EH_FILTER:
6948 r = gimple_call_arg (stmt, 0);
6949 r = move_stmt_eh_region_tree_nr (r, p);
6950 gimple_call_set_arg (stmt, 0, r);
6951 break;
6952
6953 default:
6954 break;
6955 }
6956 }
6957 break;
6958
6959 case GIMPLE_RESX:
6960 {
6961 gresx *resx_stmt = as_a <gresx *> (stmt);
6962 int r = gimple_resx_region (resx_stmt);
6963 r = move_stmt_eh_region_nr (r, p);
6964 gimple_resx_set_region (resx_stmt, r);
6965 }
6966 break;
6967
6968 case GIMPLE_EH_DISPATCH:
6969 {
6970 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6971 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6972 r = move_stmt_eh_region_nr (r, p);
6973 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6974 }
6975 break;
6976
6977 case GIMPLE_OMP_RETURN:
6978 case GIMPLE_OMP_CONTINUE:
6979 break;
6980
6981 case GIMPLE_LABEL:
6982 {
6983 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6984 so that such labels can be referenced from other regions.
6985 Make sure to update it when seeing a GIMPLE_LABEL though,
6986 that is the owner of the label. */
6987 walk_gimple_op (stmt, move_stmt_op, wi);
6988 *handled_ops_p = true;
6989 tree label = gimple_label_label (as_a <glabel *> (stmt));
6990 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6991 DECL_CONTEXT (label) = p->to_context;
6992 }
6993 break;
6994
6995 default:
6996 if (is_gimple_omp (stmt))
6997 {
6998 /* Do not remap variables inside OMP directives. Variables
6999 referenced in clauses and directive header belong to the
7000 parent function and should not be moved into the child
7001 function. */
7002 bool save_remap_decls_p = p->remap_decls_p;
7003 p->remap_decls_p = false;
7004 *handled_ops_p = true;
7005
7006 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7007 move_stmt_op, wi);
7008
7009 p->remap_decls_p = save_remap_decls_p;
7010 }
7011 break;
7012 }
7013
7014 return NULL_TREE;
7015 }
7016
7017 /* Move basic block BB from function CFUN to function DEST_FN. The
7018 block is moved out of the original linked list and placed after
7019 block AFTER in the new list. Also, the block is removed from the
7020 original array of blocks and placed in DEST_FN's array of blocks.
7021 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7022 updated to reflect the moved edges.
7023
7024 The local variables are remapped to new instances, VARS_MAP is used
7025 to record the mapping. */
7026
7027 static void
7028 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7029 basic_block after, bool update_edge_count_p,
7030 struct move_stmt_d *d)
7031 {
7032 struct control_flow_graph *cfg;
7033 edge_iterator ei;
7034 edge e;
7035 gimple_stmt_iterator si;
7036 unsigned old_len, new_len;
7037
7038 /* Remove BB from dominance structures. */
7039 delete_from_dominance_info (CDI_DOMINATORS, bb);
7040
7041 /* Move BB from its current loop to the copy in the new function. */
7042 if (current_loops)
7043 {
7044 class loop *new_loop = (class loop *)bb->loop_father->aux;
7045 if (new_loop)
7046 bb->loop_father = new_loop;
7047 }
7048
7049 /* Link BB to the new linked list. */
7050 move_block_after (bb, after);
7051
7052 /* Update the edge count in the corresponding flowgraphs. */
7053 if (update_edge_count_p)
7054 FOR_EACH_EDGE (e, ei, bb->succs)
7055 {
7056 cfun->cfg->x_n_edges--;
7057 dest_cfun->cfg->x_n_edges++;
7058 }
7059
7060 /* Remove BB from the original basic block array. */
7061 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7062 cfun->cfg->x_n_basic_blocks--;
7063
7064 /* Grow DEST_CFUN's basic block array if needed. */
7065 cfg = dest_cfun->cfg;
7066 cfg->x_n_basic_blocks++;
7067 if (bb->index >= cfg->x_last_basic_block)
7068 cfg->x_last_basic_block = bb->index + 1;
7069
7070 old_len = vec_safe_length (cfg->x_basic_block_info);
7071 if ((unsigned) cfg->x_last_basic_block >= old_len)
7072 {
7073 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7074 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7075 }
7076
7077 (*cfg->x_basic_block_info)[bb->index] = bb;
7078
7079 /* Remap the variables in phi nodes. */
7080 for (gphi_iterator psi = gsi_start_phis (bb);
7081 !gsi_end_p (psi); )
7082 {
7083 gphi *phi = psi.phi ();
7084 use_operand_p use;
7085 tree op = PHI_RESULT (phi);
7086 ssa_op_iter oi;
7087 unsigned i;
7088
7089 if (virtual_operand_p (op))
7090 {
7091 /* Remove the phi nodes for virtual operands (alias analysis will be
7092 run for the new function, anyway). But replace all uses that
7093 might be outside of the region we move. */
7094 use_operand_p use_p;
7095 imm_use_iterator iter;
7096 gimple *use_stmt;
7097 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7098 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7099 SET_USE (use_p, SSA_NAME_VAR (op));
7100 remove_phi_node (&psi, true);
7101 continue;
7102 }
7103
7104 SET_PHI_RESULT (phi,
7105 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7106 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7107 {
7108 op = USE_FROM_PTR (use);
7109 if (TREE_CODE (op) == SSA_NAME)
7110 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7111 }
7112
7113 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7114 {
7115 location_t locus = gimple_phi_arg_location (phi, i);
7116 tree block = LOCATION_BLOCK (locus);
7117
7118 if (locus == UNKNOWN_LOCATION)
7119 continue;
7120 if (d->orig_block == NULL_TREE || block == d->orig_block)
7121 {
7122 locus = set_block (locus, d->new_block);
7123 gimple_phi_arg_set_location (phi, i, locus);
7124 }
7125 }
7126
7127 gsi_next (&psi);
7128 }
7129
7130 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7131 {
7132 gimple *stmt = gsi_stmt (si);
7133 struct walk_stmt_info wi;
7134
7135 memset (&wi, 0, sizeof (wi));
7136 wi.info = d;
7137 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7138
7139 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7140 {
7141 tree label = gimple_label_label (label_stmt);
7142 int uid = LABEL_DECL_UID (label);
7143
7144 gcc_assert (uid > -1);
7145
7146 old_len = vec_safe_length (cfg->x_label_to_block_map);
7147 if (old_len <= (unsigned) uid)
7148 {
7149 new_len = 3 * uid / 2 + 1;
7150 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7151 }
7152
7153 (*cfg->x_label_to_block_map)[uid] = bb;
7154 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7155
7156 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7157
7158 if (uid >= dest_cfun->cfg->last_label_uid)
7159 dest_cfun->cfg->last_label_uid = uid + 1;
7160 }
7161
7162 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7163 remove_stmt_from_eh_lp_fn (cfun, stmt);
7164
7165 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7166 gimple_remove_stmt_histograms (cfun, stmt);
7167
7168 /* We cannot leave any operands allocated from the operand caches of
7169 the current function. */
7170 free_stmt_operands (cfun, stmt);
7171 push_cfun (dest_cfun);
7172 update_stmt (stmt);
7173 pop_cfun ();
7174 }
7175
7176 FOR_EACH_EDGE (e, ei, bb->succs)
7177 if (e->goto_locus != UNKNOWN_LOCATION)
7178 {
7179 tree block = LOCATION_BLOCK (e->goto_locus);
7180 if (d->orig_block == NULL_TREE
7181 || block == d->orig_block)
7182 e->goto_locus = set_block (e->goto_locus, d->new_block);
7183 }
7184 }
7185
7186 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7187 the outermost EH region. Use REGION as the incoming base EH region.
7188 If there is no single outermost region, return NULL and set *ALL to
7189 true. */
7190
7191 static eh_region
7192 find_outermost_region_in_block (struct function *src_cfun,
7193 basic_block bb, eh_region region,
7194 bool *all)
7195 {
7196 gimple_stmt_iterator si;
7197
7198 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7199 {
7200 gimple *stmt = gsi_stmt (si);
7201 eh_region stmt_region;
7202 int lp_nr;
7203
7204 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7205 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7206 if (stmt_region)
7207 {
7208 if (region == NULL)
7209 region = stmt_region;
7210 else if (stmt_region != region)
7211 {
7212 region = eh_region_outermost (src_cfun, stmt_region, region);
7213 if (region == NULL)
7214 {
7215 *all = true;
7216 return NULL;
7217 }
7218 }
7219 }
7220 }
7221
7222 return region;
7223 }
7224
7225 static tree
7226 new_label_mapper (tree decl, void *data)
7227 {
7228 htab_t hash = (htab_t) data;
7229 struct tree_map *m;
7230 void **slot;
7231
7232 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7233
7234 m = XNEW (struct tree_map);
7235 m->hash = DECL_UID (decl);
7236 m->base.from = decl;
7237 m->to = create_artificial_label (UNKNOWN_LOCATION);
7238 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7239 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7240 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7241
7242 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7243 gcc_assert (*slot == NULL);
7244
7245 *slot = m;
7246
7247 return m->to;
7248 }
7249
7250 /* Tree walker to replace the decls used inside value expressions by
7251 duplicates. */
7252
7253 static tree
7254 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7255 {
7256 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7257
7258 switch (TREE_CODE (*tp))
7259 {
7260 case VAR_DECL:
7261 case PARM_DECL:
7262 case RESULT_DECL:
7263 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7264 break;
7265 default:
7266 break;
7267 }
7268
7269 if (IS_TYPE_OR_DECL_P (*tp))
7270 *walk_subtrees = false;
7271
7272 return NULL;
7273 }
7274
7275 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7276 subblocks. */
7277
7278 static void
7279 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7280 tree to_context)
7281 {
7282 tree *tp, t;
7283
7284 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7285 {
7286 t = *tp;
7287 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7288 continue;
7289 replace_by_duplicate_decl (&t, vars_map, to_context);
7290 if (t != *tp)
7291 {
7292 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7293 {
7294 tree x = DECL_VALUE_EXPR (*tp);
7295 struct replace_decls_d rd = { vars_map, to_context };
7296 x = unshare_expr (x);
7297 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7298 SET_DECL_VALUE_EXPR (t, x);
7299 DECL_HAS_VALUE_EXPR_P (t) = 1;
7300 }
7301 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7302 *tp = t;
7303 }
7304 }
7305
7306 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7307 replace_block_vars_by_duplicates (block, vars_map, to_context);
7308 }
7309
7310 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7311 from FN1 to FN2. */
7312
7313 static void
7314 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7315 class loop *loop)
7316 {
7317 /* Discard it from the old loop array. */
7318 (*get_loops (fn1))[loop->num] = NULL;
7319
7320 /* Place it in the new loop array, assigning it a new number. */
7321 loop->num = number_of_loops (fn2);
7322 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7323
7324 /* Recurse to children. */
7325 for (loop = loop->inner; loop; loop = loop->next)
7326 fixup_loop_arrays_after_move (fn1, fn2, loop);
7327 }
7328
7329 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7330 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7331
7332 DEBUG_FUNCTION void
7333 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7334 {
7335 basic_block bb;
7336 edge_iterator ei;
7337 edge e;
7338 bitmap bbs = BITMAP_ALLOC (NULL);
7339 int i;
7340
7341 gcc_assert (entry != NULL);
7342 gcc_assert (entry != exit);
7343 gcc_assert (bbs_p != NULL);
7344
7345 gcc_assert (bbs_p->length () > 0);
7346
7347 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7348 bitmap_set_bit (bbs, bb->index);
7349
7350 gcc_assert (bitmap_bit_p (bbs, entry->index));
7351 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7352
7353 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7354 {
7355 if (bb == entry)
7356 {
7357 gcc_assert (single_pred_p (entry));
7358 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7359 }
7360 else
7361 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7362 {
7363 e = ei_edge (ei);
7364 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7365 }
7366
7367 if (bb == exit)
7368 {
7369 gcc_assert (single_succ_p (exit));
7370 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7371 }
7372 else
7373 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7374 {
7375 e = ei_edge (ei);
7376 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7377 }
7378 }
7379
7380 BITMAP_FREE (bbs);
7381 }
7382
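/* Illustrative sketch (not part of the original source), as used from
   move_sese_region_to_fn below:

     if (flag_checking)
       verify_sese (entry_bb, exit_bb, &bbs);

   i.e. the check only runs in checking-enabled builds, after BBS has
   been filled by gather_blocks_in_sese_region.  */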
7383 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7384
7385 bool
7386 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7387 {
7388 bitmap release_names = (bitmap)data;
7389
7390 if (TREE_CODE (from) != SSA_NAME)
7391 return true;
7392
7393 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7394 return true;
7395 }
7396
7397 /* Return the IFN_LOOP_DIST_ALIAS call if present in BB, otherwise NULL. */
7398
7399 static gimple *
7400 find_loop_dist_alias (basic_block bb)
7401 {
7402 gimple *g = last_stmt (bb);
7403 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7404 return NULL;
7405
7406 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7407 gsi_prev (&gsi);
7408 if (gsi_end_p (gsi))
7409 return NULL;
7410
7411 g = gsi_stmt (gsi);
7412 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7413 return g;
7414 return NULL;
7415 }
7416
7417 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7418 to VALUE and update any immediate uses of its LHS. */
7419
7420 void
7421 fold_loop_internal_call (gimple *g, tree value)
7422 {
7423 tree lhs = gimple_call_lhs (g);
7424 use_operand_p use_p;
7425 imm_use_iterator iter;
7426 gimple *use_stmt;
7427 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7428
7429 update_call_from_tree (&gsi, value);
7430 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7431 {
7432 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7433 SET_USE (use_p, value);
7434 update_stmt (use_stmt);
7435 }
7436 }
7437
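/* Illustrative sketch (not part of the original source): the vectorizer
   resolves its loop-versioning guard this way, e.g.

     fold_loop_internal_call (loop_vectorized_call, boolean_true_node);

   which replaces the IFN_LOOP_VECTORIZED call with 'true' and pushes
   the value into the guarding GIMPLE_COND via the immediate uses of
   the call's LHS.  */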
7438 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7439 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7440 single basic block in the original CFG and the new basic block is
7441 returned. DEST_CFUN must not have a CFG yet.
7442
7443 Note that the region need not be a pure SESE region. Blocks inside
7444 the region may contain calls to abort/exit. The only restriction
7445 is that ENTRY_BB should be the only entry point and it must
7446 dominate EXIT_BB.
7447
7448 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7449 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7450 to the new function.
7451
7452 All local variables referenced in the region are assumed to be in
7453 the corresponding BLOCK_VARS and unexpanded variable lists
7454 associated with DEST_CFUN.
7455
7456 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7457 reimplement move_sese_region_to_fn by duplicating the region rather than
7458 moving it. */
7459
7460 basic_block
7461 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7462 basic_block exit_bb, tree orig_block)
7463 {
7464 vec<basic_block> bbs, dom_bbs;
7465 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7466 basic_block after, bb, *entry_pred, *exit_succ, abb;
7467 struct function *saved_cfun = cfun;
7468 int *entry_flag, *exit_flag;
7469 profile_probability *entry_prob, *exit_prob;
7470 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7471 edge e;
7472 edge_iterator ei;
7473 htab_t new_label_map;
7474 hash_map<void *, void *> *eh_map;
7475 class loop *loop = entry_bb->loop_father;
7476 class loop *loop0 = get_loop (saved_cfun, 0);
7477 struct move_stmt_d d;
7478
7479 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7480 region. */
7481 gcc_assert (entry_bb != exit_bb
7482 && (!exit_bb
7483 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7484
7485 /* Collect all the blocks in the region. Manually add ENTRY_BB
7486 because it won't be added by dfs_enumerate_from. */
7487 bbs.create (0);
7488 bbs.safe_push (entry_bb);
7489 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7490
7491 if (flag_checking)
7492 verify_sese (entry_bb, exit_bb, &bbs);
7493
7494 /* The blocks that used to be dominated by something in BBS will now be
7495 dominated by the new block. */
7496 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7497 bbs.address (),
7498 bbs.length ());
7499
7500 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7501 the predecessor edges to ENTRY_BB and the successor edges to
7502 EXIT_BB so that we can re-attach them to the new basic block that
7503 will replace the region. */
7504 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7505 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7506 entry_flag = XNEWVEC (int, num_entry_edges);
7507 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7508 i = 0;
7509 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7510 {
7511 entry_prob[i] = e->probability;
7512 entry_flag[i] = e->flags;
7513 entry_pred[i++] = e->src;
7514 remove_edge (e);
7515 }
7516
7517 if (exit_bb)
7518 {
7519 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7520 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7521 exit_flag = XNEWVEC (int, num_exit_edges);
7522 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7523 i = 0;
7524 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7525 {
7526 exit_prob[i] = e->probability;
7527 exit_flag[i] = e->flags;
7528 exit_succ[i++] = e->dest;
7529 remove_edge (e);
7530 }
7531 }
7532 else
7533 {
7534 num_exit_edges = 0;
7535 exit_succ = NULL;
7536 exit_flag = NULL;
7537 exit_prob = NULL;
7538 }
7539
7540 /* Switch context to the child function to initialize DEST_FN's CFG. */
7541 gcc_assert (dest_cfun->cfg == NULL);
7542 push_cfun (dest_cfun);
7543
7544 init_empty_tree_cfg ();
7545
7546 /* Initialize EH information for the new function. */
7547 eh_map = NULL;
7548 new_label_map = NULL;
7549 if (saved_cfun->eh)
7550 {
7551 eh_region region = NULL;
7552 bool all = false;
7553
7554 FOR_EACH_VEC_ELT (bbs, i, bb)
7555 {
7556 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7557 if (all)
7558 break;
7559 }
7560
7561 init_eh_for_function ();
7562 if (region != NULL || all)
7563 {
7564 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7565 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7566 new_label_mapper, new_label_map);
7567 }
7568 }
7569
7570 /* Initialize an empty loop tree. */
7571 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7572 init_loops_structure (dest_cfun, loops, 1);
7573 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7574 set_loops_for_fn (dest_cfun, loops);
7575
7576 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7577
7578 /* Move the outlined loop tree part. */
7579 num_nodes = bbs.length ();
7580 FOR_EACH_VEC_ELT (bbs, i, bb)
7581 {
7582 if (bb->loop_father->header == bb)
7583 {
7584 class loop *this_loop = bb->loop_father;
7585 class loop *outer = loop_outer (this_loop);
7586 if (outer == loop
7587 /* If the SESE region contains some bbs ending with
7588 a noreturn call, those are considered to belong
7589 to the outermost loop in saved_cfun, rather than
7590 the entry_bb's loop_father. */
7591 || outer == loop0)
7592 {
7593 if (outer != loop)
7594 num_nodes -= this_loop->num_nodes;
7595 flow_loop_tree_node_remove (bb->loop_father);
7596 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7597 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7598 }
7599 }
7600 else if (bb->loop_father == loop0 && loop0 != loop)
7601 num_nodes--;
7602
7603 /* Remove loop exits from the outlined region. */
7604 if (loops_for_fn (saved_cfun)->exits)
7605 FOR_EACH_EDGE (e, ei, bb->succs)
7606 {
7607 struct loops *l = loops_for_fn (saved_cfun);
7608 loop_exit **slot
7609 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7610 NO_INSERT);
7611 if (slot)
7612 l->exits->clear_slot (slot);
7613 }
7614 }
7615
7616 /* Adjust the number of blocks in the tree root of the outlined part. */
7617 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7618
7619 /* Setup a mapping to be used by move_block_to_fn. */
7620 loop->aux = current_loops->tree_root;
7621 loop0->aux = current_loops->tree_root;
7622
7623 /* Fix up orig_loop_num. If the block referenced in it has been moved
7624 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7625 class loop *dloop;
7626 signed char *moved_orig_loop_num = NULL;
7627 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7628 if (dloop->orig_loop_num)
7629 {
7630 if (moved_orig_loop_num == NULL)
7631 moved_orig_loop_num
7632 = XCNEWVEC (signed char, vec_safe_length (larray));
7633 if ((*larray)[dloop->orig_loop_num] != NULL
7634 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7635 {
7636 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7637 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7638 moved_orig_loop_num[dloop->orig_loop_num]++;
7639 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7640 }
7641 else
7642 {
7643 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7644 dloop->orig_loop_num = 0;
7645 }
7646 }
7647 pop_cfun ();
7648
7649 if (moved_orig_loop_num)
7650 {
7651 FOR_EACH_VEC_ELT (bbs, i, bb)
7652 {
7653 gimple *g = find_loop_dist_alias (bb);
7654 if (g == NULL)
7655 continue;
7656
7657 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7658 gcc_assert (orig_loop_num
7659 && (unsigned) orig_loop_num < vec_safe_length (larray));
7660 if (moved_orig_loop_num[orig_loop_num] == 2)
7661 {
7662 /* If we have moved both loops with this orig_loop_num into
7663 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7664 too, update the first argument. */
7665 gcc_assert ((*larray)[orig_loop_num] != NULL
7666 && (get_loop (saved_cfun, orig_loop_num)
7667 == NULL));
7668 tree t = build_int_cst (integer_type_node,
7669 (*larray)[orig_loop_num]->num);
7670 gimple_call_set_arg (g, 0, t);
7671 update_stmt (g);
7672 /* Make sure the following loop will not update it. */
7673 moved_orig_loop_num[orig_loop_num] = 0;
7674 }
7675 else
7676 /* Otherwise at least one of the loops stayed in saved_cfun.
7677 Remove the LOOP_DIST_ALIAS call. */
7678 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7679 }
7680 FOR_EACH_BB_FN (bb, saved_cfun)
7681 {
7682 gimple *g = find_loop_dist_alias (bb);
7683 if (g == NULL)
7684 continue;
7685 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7686 gcc_assert (orig_loop_num
7687 && (unsigned) orig_loop_num < vec_safe_length (larray));
7688 if (moved_orig_loop_num[orig_loop_num])
7689 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least
7690 one of the corresponding loops was moved, remove it. */
7691 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7692 }
7693 XDELETEVEC (moved_orig_loop_num);
7694 }
7695 ggc_free (larray);
7696
7697 /* Move blocks from BBS into DEST_CFUN. */
7698 gcc_assert (bbs.length () >= 2);
7699 after = dest_cfun->cfg->x_entry_block_ptr;
7700 hash_map<tree, tree> vars_map;
7701
7702 memset (&d, 0, sizeof (d));
7703 d.orig_block = orig_block;
7704 d.new_block = DECL_INITIAL (dest_cfun->decl);
7705 d.from_context = cfun->decl;
7706 d.to_context = dest_cfun->decl;
7707 d.vars_map = &vars_map;
7708 d.new_label_map = new_label_map;
7709 d.eh_map = eh_map;
7710 d.remap_decls_p = true;
7711
7712 if (gimple_in_ssa_p (cfun))
7713 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7714 {
7715 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7716 set_ssa_default_def (dest_cfun, arg, narg);
7717 vars_map.put (arg, narg);
7718 }
7719
7720 FOR_EACH_VEC_ELT (bbs, i, bb)
7721 {
7722 /* No need to update edge counts on the last block. It has
7723 already been updated earlier when we detached the region from
7724 the original CFG. */
7725 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7726 after = bb;
7727 }
7728
7729 loop->aux = NULL;
7730 loop0->aux = NULL;
7731 /* Loop sizes are no longer correct; fix them up. */
7732 loop->num_nodes -= num_nodes;
7733 for (class loop *outer = loop_outer (loop);
7734 outer; outer = loop_outer (outer))
7735 outer->num_nodes -= num_nodes;
7736 loop0->num_nodes -= bbs.length () - num_nodes;
7737
7738 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7739 {
7740 class loop *aloop;
7741 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7742 if (aloop != NULL)
7743 {
7744 if (aloop->simduid)
7745 {
7746 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7747 d.to_context);
7748 dest_cfun->has_simduid_loops = true;
7749 }
7750 if (aloop->force_vectorize)
7751 dest_cfun->has_force_vectorize_loops = true;
7752 }
7753 }
7754
7755 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7756 if (orig_block)
7757 {
7758 tree block;
7759 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7760 == NULL_TREE);
7761 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7762 = BLOCK_SUBBLOCKS (orig_block);
7763 for (block = BLOCK_SUBBLOCKS (orig_block);
7764 block; block = BLOCK_CHAIN (block))
7765 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7766 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7767 }
7768
7769 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7770 &vars_map, dest_cfun->decl);
7771
7772 if (new_label_map)
7773 htab_delete (new_label_map);
7774 if (eh_map)
7775 delete eh_map;
7776
7777 if (gimple_in_ssa_p (cfun))
7778 {
7779 /* We need to release ssa-names in a defined order, so first find them,
7780 and then iterate in ascending version order. */
7781 bitmap release_names = BITMAP_ALLOC (NULL);
7782 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7783 bitmap_iterator bi;
7784 unsigned i;
7785 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7786 release_ssa_name (ssa_name (i));
7787 BITMAP_FREE (release_names);
7788 }
7789
7790 /* Rewire the entry and exit blocks. The successor to the entry
7791 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7792 the child function. Similarly, the predecessor of DEST_FN's
7793 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7794 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7795 various CFG manipulation functions get to the right CFG.
7796
7797 FIXME, this is silly. The CFG ought to become a parameter to
7798 these helpers. */
7799 push_cfun (dest_cfun);
7800 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7801 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7802 if (exit_bb)
7803 {
7804 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7805 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7806 }
7807 else
7808 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7809 pop_cfun ();
7810
7811 /* Back in the original function, the SESE region has disappeared,
7812 create a new basic block in its place. */
7813 bb = create_empty_bb (entry_pred[0]);
7814 if (current_loops)
7815 add_bb_to_loop (bb, loop);
7816 for (i = 0; i < num_entry_edges; i++)
7817 {
7818 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7819 e->probability = entry_prob[i];
7820 }
7821
7822 for (i = 0; i < num_exit_edges; i++)
7823 {
7824 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7825 e->probability = exit_prob[i];
7826 }
7827
7828 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7829 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7830 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7831 dom_bbs.release ();
7832
7833 if (exit_bb)
7834 {
7835 free (exit_prob);
7836 free (exit_flag);
7837 free (exit_succ);
7838 }
7839 free (entry_prob);
7840 free (entry_flag);
7841 free (entry_pred);
7842 bbs.release ();
7843
7844 return bb;
7845 }
7846
7847 /* Dump default def DEF to file FILE using FLAGS and indentation
7848 SPC. */
7849
7850 static void
7851 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7852 {
7853 for (int i = 0; i < spc; ++i)
7854 fprintf (file, " ");
7855 dump_ssaname_info_to_file (file, def, spc);
7856
7857 print_generic_expr (file, TREE_TYPE (def), flags);
7858 fprintf (file, " ");
7859 print_generic_expr (file, def, flags);
7860 fprintf (file, " = ");
7861 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7862 fprintf (file, ";\n");
7863 }
7864
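/* For instance, for the default definition of an int parameter i whose
   SSA name is i_1(D), the line printed above looks roughly like

     int i_1(D) = i;

   possibly preceded by extra SSA info emitted by
   dump_ssaname_info_to_file.  */
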
7865 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7866
7867 static void
7868 print_no_sanitize_attr_value (FILE *file, tree value)
7869 {
7870 unsigned int flags = tree_to_uhwi (value);
7871 bool first = true;
7872 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7873 {
7874 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7875 {
7876 if (!first)
7877 fprintf (file, " | ");
7878 fprintf (file, "%s", sanitizer_opts[i].name);
7879 first = false;
7880 }
7881 }
7882 }
7883
7884 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7885 dumpfile.h). */
7886
7887 void
7888 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7889 {
7890 tree arg, var, old_current_fndecl = current_function_decl;
7891 struct function *dsf;
7892 bool ignore_topmost_bind = false, any_var = false;
7893 basic_block bb;
7894 tree chain;
7895 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7896 && decl_is_tm_clone (fndecl));
7897 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7898
7899 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7900 {
7901 fprintf (file, "__attribute__((");
7902
7903 bool first = true;
7904 tree chain;
7905 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7906 first = false, chain = TREE_CHAIN (chain))
7907 {
7908 if (!first)
7909 fprintf (file, ", ");
7910
7911 tree name = get_attribute_name (chain);
7912 print_generic_expr (file, name, dump_flags);
7913 if (TREE_VALUE (chain) != NULL_TREE)
7914 {
7915 fprintf (file, " (");
7916
7917 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7918 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7919 else
7920 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7921 fprintf (file, ")");
7922 }
7923 }
7924
7925 fprintf (file, "))\n");
7926 }
7927
7928 current_function_decl = fndecl;
7929 if (flags & TDF_GIMPLE)
7930 {
7931 static bool hotness_bb_param_printed = false;
7932 if (profile_info != NULL
7933 && !hotness_bb_param_printed)
7934 {
7935 hotness_bb_param_printed = true;
7936 fprintf (file,
7937 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
7938 " */\n", get_hot_bb_threshold ());
7939 }
7940
7941 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7942 dump_flags | TDF_SLIM);
7943 fprintf (file, " __GIMPLE (%s",
7944 (fun->curr_properties & PROP_ssa) ? "ssa"
7945 : (fun->curr_properties & PROP_cfg) ? "cfg"
7946 : "");
7947
7948 if (cfun->cfg)
7949 {
7950 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7951 if (bb->count.initialized_p ())
7952 fprintf (file, ",%s(%d)",
7953 profile_quality_as_string (bb->count.quality ()),
7954 bb->count.value ());
7955 fprintf (file, ")\n%s (", function_name (fun));
7956 }
7957 }
7958 else
7959 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7960
7961 arg = DECL_ARGUMENTS (fndecl);
7962 while (arg)
7963 {
7964 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7965 fprintf (file, " ");
7966 print_generic_expr (file, arg, dump_flags);
7967 if (DECL_CHAIN (arg))
7968 fprintf (file, ", ");
7969 arg = DECL_CHAIN (arg);
7970 }
7971 fprintf (file, ")\n");
7972
7973 dsf = DECL_STRUCT_FUNCTION (fndecl);
7974 if (dsf && (flags & TDF_EH))
7975 dump_eh_tree (file, dsf);
7976
7977 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7978 {
7979 dump_node (fndecl, TDF_SLIM | flags, file);
7980 current_function_decl = old_current_fndecl;
7981 return;
7982 }
7983
7984 /* When GIMPLE is lowered, the variables are no longer available in
7985 BIND_EXPRs, so display them separately. */
7986 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7987 {
7988 unsigned ix;
7989 ignore_topmost_bind = true;
7990
7991 fprintf (file, "{\n");
7992 if (gimple_in_ssa_p (fun)
7993 && (flags & TDF_ALIAS))
7994 {
7995 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7996 arg = DECL_CHAIN (arg))
7997 {
7998 tree def = ssa_default_def (fun, arg);
7999 if (def)
8000 dump_default_def (file, def, 2, flags);
8001 }
8002
8003 tree res = DECL_RESULT (fun->decl);
8004 if (res != NULL_TREE
8005 && DECL_BY_REFERENCE (res))
8006 {
8007 tree def = ssa_default_def (fun, res);
8008 if (def)
8009 dump_default_def (file, def, 2, flags);
8010 }
8011
8012 tree static_chain = fun->static_chain_decl;
8013 if (static_chain != NULL_TREE)
8014 {
8015 tree def = ssa_default_def (fun, static_chain);
8016 if (def)
8017 dump_default_def (file, def, 2, flags);
8018 }
8019 }
8020
8021 if (!vec_safe_is_empty (fun->local_decls))
8022 FOR_EACH_LOCAL_DECL (fun, ix, var)
8023 {
8024 print_generic_decl (file, var, flags);
8025 fprintf (file, "\n");
8026
8027 any_var = true;
8028 }
8029
8030 tree name;
8031
8032 if (gimple_in_ssa_p (cfun))
8033 FOR_EACH_SSA_NAME (ix, name, cfun)
8034 {
8035 if (!SSA_NAME_VAR (name))
8036 {
8037 fprintf (file, " ");
8038 print_generic_expr (file, TREE_TYPE (name), flags);
8039 fprintf (file, " ");
8040 print_generic_expr (file, name, flags);
8041 fprintf (file, ";\n");
8042
8043 any_var = true;
8044 }
8045 }
8046 }
8047
8048 if (fun && fun->decl == fndecl
8049 && fun->cfg
8050 && basic_block_info_for_fn (fun))
8051 {
8052 /* If the CFG has been built, emit a CFG-based dump. */
8053 if (!ignore_topmost_bind)
8054 fprintf (file, "{\n");
8055
8056 if (any_var && n_basic_blocks_for_fn (fun))
8057 fprintf (file, "\n");
8058
8059 FOR_EACH_BB_FN (bb, fun)
8060 dump_bb (file, bb, 2, flags);
8061
8062 fprintf (file, "}\n");
8063 }
8064 else if (fun->curr_properties & PROP_gimple_any)
8065 {
8066 /* The function is now in GIMPLE form but the CFG has not been
8067 built yet. Emit the single sequence of GIMPLE statements
8068 that make up its body. */
8069 gimple_seq body = gimple_body (fndecl);
8070
8071 if (gimple_seq_first_stmt (body)
8072 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8073 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8074 print_gimple_seq (file, body, 0, flags);
8075 else
8076 {
8077 if (!ignore_topmost_bind)
8078 fprintf (file, "{\n");
8079
8080 if (any_var)
8081 fprintf (file, "\n");
8082
8083 print_gimple_seq (file, body, 2, flags);
8084 fprintf (file, "}\n");
8085 }
8086 }
8087 else
8088 {
8089 int indent;
8090
8091 /* Make a tree based dump. */
8092 chain = DECL_SAVED_TREE (fndecl);
8093 if (chain && TREE_CODE (chain) == BIND_EXPR)
8094 {
8095 if (ignore_topmost_bind)
8096 {
8097 chain = BIND_EXPR_BODY (chain);
8098 indent = 2;
8099 }
8100 else
8101 indent = 0;
8102 }
8103 else
8104 {
8105 if (!ignore_topmost_bind)
8106 {
8107 fprintf (file, "{\n");
8108 /* No topmost bind, pretend it's ignored for later. */
8109 ignore_topmost_bind = true;
8110 }
8111 indent = 2;
8112 }
8113
8114 if (any_var)
8115 fprintf (file, "\n");
8116
8117 print_generic_stmt_indented (file, chain, flags, indent);
8118 if (ignore_topmost_bind)
8119 fprintf (file, "}\n");
8120 }
8121
8122 if (flags & TDF_ENUMERATE_LOCALS)
8123 dump_enumerated_decls (file, flags);
8124 fprintf (file, "\n\n");
8125
8126 current_function_decl = old_current_fndecl;
8127 }
8128
8129 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8130
8131 DEBUG_FUNCTION void
8132 debug_function (tree fn, dump_flags_t flags)
8133 {
8134 dump_function_to_file (fn, stderr, flags);
8135 }
8136
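/* A typical use is from the debugger, e.g.

     (gdb) call debug_function (cfun->decl, TDF_SLIM)

   assuming cfun points at the function currently being compiled.  */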
8137
8138 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8139
8140 static void
8141 print_pred_bbs (FILE *file, basic_block bb)
8142 {
8143 edge e;
8144 edge_iterator ei;
8145
8146 FOR_EACH_EDGE (e, ei, bb->preds)
8147 fprintf (file, "bb_%d ", e->src->index);
8148 }
8149
8150
8151 /* Print on FILE the indexes for the successors of basic_block BB. */
8152
8153 static void
8154 print_succ_bbs (FILE *file, basic_block bb)
8155 {
8156 edge e;
8157 edge_iterator ei;
8158
8159 FOR_EACH_EDGE (e, ei, bb->succs)
8160 fprintf (file, "bb_%d ", e->dest->index);
8161 }
8162
8163 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
8164
8165 void
8166 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8167 {
8168 char *s_indent = (char *) alloca ((size_t) indent + 1);
8169 memset ((void *) s_indent, ' ', (size_t) indent);
8170 s_indent[indent] = '\0';
8171
8172 /* Print basic_block's header. */
8173 if (verbosity >= 2)
8174 {
8175 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8176 print_pred_bbs (file, bb);
8177 fprintf (file, "}, succs = {");
8178 print_succ_bbs (file, bb);
8179 fprintf (file, "})\n");
8180 }
8181
8182 /* Print basic_block's body. */
8183 if (verbosity >= 3)
8184 {
8185 fprintf (file, "%s {\n", s_indent);
8186 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8187 fprintf (file, "%s }\n", s_indent);
8188 }
8189 }
8190
8191 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8192
8193 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8194 VERBOSITY level, this outputs the contents of the loop, or just its
8195 structure. */
8196
8197 static void
8198 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8199 {
8200 char *s_indent;
8201 basic_block bb;
8202
8203 if (loop == NULL)
8204 return;
8205
8206 s_indent = (char *) alloca ((size_t) indent + 1);
8207 memset ((void *) s_indent, ' ', (size_t) indent);
8208 s_indent[indent] = '\0';
8209
8210 /* Print loop's header. */
8211 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8212 if (loop->header)
8213 fprintf (file, "header = %d", loop->header->index);
8214 else
8215 {
8216 fprintf (file, "deleted)\n");
8217 return;
8218 }
8219 if (loop->latch)
8220 fprintf (file, ", latch = %d", loop->latch->index);
8221 else
8222 fprintf (file, ", multiple latches");
8223 fprintf (file, ", niter = ");
8224 print_generic_expr (file, loop->nb_iterations);
8225
8226 if (loop->any_upper_bound)
8227 {
8228 fprintf (file, ", upper_bound = ");
8229 print_decu (loop->nb_iterations_upper_bound, file);
8230 }
8231 if (loop->any_likely_upper_bound)
8232 {
8233 fprintf (file, ", likely_upper_bound = ");
8234 print_decu (loop->nb_iterations_likely_upper_bound, file);
8235 }
8236
8237 if (loop->any_estimate)
8238 {
8239 fprintf (file, ", estimate = ");
8240 print_decu (loop->nb_iterations_estimate, file);
8241 }
8242 if (loop->unroll)
8243 fprintf (file, ", unroll = %d", loop->unroll);
8244 fprintf (file, ")\n");
8245
8246 /* Print loop's body. */
8247 if (verbosity >= 1)
8248 {
8249 fprintf (file, "%s{\n", s_indent);
8250 FOR_EACH_BB_FN (bb, cfun)
8251 if (bb->loop_father == loop)
8252 print_loops_bb (file, bb, indent, verbosity);
8253
8254 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8255 fprintf (file, "%s}\n", s_indent);
8256 }
8257 }
8258
8259 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8260 spaces. Depending on the VERBOSITY level, this outputs the contents
8261 of the loop, or just its structure. */
8262
8263 static void
8264 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8265 int verbosity)
8266 {
8267 if (loop == NULL)
8268 return;
8269
8270 print_loop (file, loop, indent, verbosity);
8271 print_loop_and_siblings (file, loop->next, indent, verbosity);
8272 }
8273
8274 /* Follow a CFG edge from the entry point of the function, and on entry
8275 of a loop, pretty print the loop structure on FILE. */
8276
8277 void
8278 print_loops (FILE *file, int verbosity)
8279 {
8280 basic_block bb;
8281
8282 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8283 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8284 if (bb && bb->loop_father)
8285 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8286 }
8287
8288 /* Dump a loop. */
8289
8290 DEBUG_FUNCTION void
8291 debug (class loop &ref)
8292 {
8293 print_loop (stderr, &ref, 0, /*verbosity*/0);
8294 }
8295
8296 DEBUG_FUNCTION void
8297 debug (class loop *ptr)
8298 {
8299 if (ptr)
8300 debug (*ptr);
8301 else
8302 fprintf (stderr, "<nil>\n");
8303 }
8304
8305 /* Dump a loop verbosely. */
8306
8307 DEBUG_FUNCTION void
8308 debug_verbose (class loop &ref)
8309 {
8310 print_loop (stderr, &ref, 0, /*verbosity*/3);
8311 }
8312
8313 DEBUG_FUNCTION void
8314 debug_verbose (class loop *ptr)
8315 {
8316 if (ptr)
9317 debug_verbose (*ptr);
8318 else
8319 fprintf (stderr, "<nil>\n");
8320 }
8321
8322
8323 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8324
8325 DEBUG_FUNCTION void
8326 debug_loops (int verbosity)
8327 {
8328 print_loops (stderr, verbosity);
8329 }
8330
8331 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8332
8333 DEBUG_FUNCTION void
8334 debug_loop (class loop *loop, int verbosity)
8335 {
8336 print_loop (stderr, loop, 0, verbosity);
8337 }
8338
8339 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8340 level. */
8341
8342 DEBUG_FUNCTION void
8343 debug_loop_num (unsigned num, int verbosity)
8344 {
8345 debug_loop (get_loop (cfun, num), verbosity);
8346 }
8347
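/* For example, from the debugger

     (gdb) call debug_loop_num (2, 1)

   prints the structure of loop number 2 of the current function and is
   equivalent to debug_loop (get_loop (cfun, 2), 1).  */
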
8348 /* Return true if BB ends with a call, possibly followed by some
8349 instructions that must stay with the call. Return false
8350 otherwise. */
8351
8352 static bool
8353 gimple_block_ends_with_call_p (basic_block bb)
8354 {
8355 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8356 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8357 }
8358
8359
8360 /* Return true if BB ends with a conditional branch. Return false
8361 otherwise. */
8362
8363 static bool
8364 gimple_block_ends_with_condjump_p (const_basic_block bb)
8365 {
8366 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8367 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8368 }
8369
8370
8371 /* Return true if statement T may terminate execution of BB in ways not
8372 explicitly represented in the CFG. */
8373
8374 bool
8375 stmt_can_terminate_bb_p (gimple *t)
8376 {
8377 tree fndecl = NULL_TREE;
8378 int call_flags = 0;
8379
8380 /* An EH exception not handled internally terminates execution of the whole
8381 function. */
8382 if (stmt_can_throw_external (cfun, t))
8383 return true;
8384
8385 /* NORETURN and LONGJMP calls already have an edge to exit.
8386 CONST and PURE calls do not need one.
8387 We don't currently check for CONST and PURE here, although
8388 it would be a good idea, because those attributes are
8389 figured out from the RTL in mark_constant_function, and
8390 the counter incrementation code from -fprofile-arcs
8391 leads to different results from -fbranch-probabilities. */
8392 if (is_gimple_call (t))
8393 {
8394 fndecl = gimple_call_fndecl (t);
8395 call_flags = gimple_call_flags (t);
8396 }
8397
8398 if (is_gimple_call (t)
8399 && fndecl
8400 && fndecl_built_in_p (fndecl)
8401 && (call_flags & ECF_NOTHROW)
8402 && !(call_flags & ECF_RETURNS_TWICE)
8403 /* fork() doesn't really return twice, but the effect of
8404 wrapping it in __gcov_fork() which calls __gcov_flush()
8405 and clears the counters before forking has the same
8406 effect as returning twice. Force a fake edge. */
8407 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8408 return false;
8409
8410 if (is_gimple_call (t))
8411 {
8412 edge_iterator ei;
8413 edge e;
8414 basic_block bb;
8415
8416 if (call_flags & (ECF_PURE | ECF_CONST)
8417 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8418 return false;
8419
8420 /* A function call may do a longjmp, terminate the program or do other things.
8421 Special-case noreturn calls that have non-abnormal edges out, as in this
8422 case the fact is sufficiently represented by the lack of edges out of T. */
8423 if (!(call_flags & ECF_NORETURN))
8424 return true;
8425
8426 bb = gimple_bb (t);
8427 FOR_EACH_EDGE (e, ei, bb->succs)
8428 if ((e->flags & EDGE_FAKE) == 0)
8429 return true;
8430 }
8431
8432 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8433 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8434 return true;
8435
8436 return false;
8437 }
8438
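/* For example, a nothrow call to a const or pure builtin yields false
   above, while a call to an unknown external function (which may call
   exit or longjmp) yields true, as does a volatile asm.  */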
8439
8440 /* Add fake edges to the function exit for any non-constant and
8441 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8442 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8443 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8444 that were split.
8445
8446 The goal is to expose cases in which entering a basic block does
8447 not imply that all subsequent instructions must be executed. */
8448
8449 static int
8450 gimple_flow_call_edges_add (sbitmap blocks)
8451 {
8452 int i;
8453 int blocks_split = 0;
8454 int last_bb = last_basic_block_for_fn (cfun);
8455 bool check_last_block = false;
8456
8457 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8458 return 0;
8459
8460 if (! blocks)
8461 check_last_block = true;
8462 else
8463 check_last_block = bitmap_bit_p (blocks,
8464 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8465
8466 /* In the last basic block, before epilogue generation, there will be
8467 a fallthru edge to EXIT. Special care is required if the last insn
8468 of the last basic block is a call because make_edge folds duplicate
8469 edges, which would result in the fallthru edge also being marked
8470 fake, which would result in the fallthru edge being removed by
8471 remove_fake_edges, which would result in an invalid CFG.
8472
8473 Moreover, we can't elide the outgoing fake edge, since the block
8474 profiler needs to take this into account in order to solve the minimal
8475 spanning tree in the case that the call doesn't return.
8476
8477 Handle this by adding a dummy instruction in a new last basic block. */
8478 if (check_last_block)
8479 {
8480 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8481 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8482 gimple *t = NULL;
8483
8484 if (!gsi_end_p (gsi))
8485 t = gsi_stmt (gsi);
8486
8487 if (t && stmt_can_terminate_bb_p (t))
8488 {
8489 edge e;
8490
8491 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8492 if (e)
8493 {
8494 gsi_insert_on_edge (e, gimple_build_nop ());
8495 gsi_commit_edge_inserts ();
8496 }
8497 }
8498 }
8499
8500 /* Now add fake edges to the function exit for any non-constant
8501 calls since there is no way that we can determine if they will
8502 return or not... */
8503 for (i = 0; i < last_bb; i++)
8504 {
8505 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8506 gimple_stmt_iterator gsi;
8507 gimple *stmt, *last_stmt;
8508
8509 if (!bb)
8510 continue;
8511
8512 if (blocks && !bitmap_bit_p (blocks, i))
8513 continue;
8514
8515 gsi = gsi_last_nondebug_bb (bb);
8516 if (!gsi_end_p (gsi))
8517 {
8518 last_stmt = gsi_stmt (gsi);
8519 do
8520 {
8521 stmt = gsi_stmt (gsi);
8522 if (stmt_can_terminate_bb_p (stmt))
8523 {
8524 edge e;
8525
8526 /* The handling above of the final block before the
8527 epilogue should be enough to verify that there is
8528 no edge to the exit block in CFG already.
8529 Calling make_edge in such case would cause us to
8530 mark that edge as fake and remove it later. */
8531 if (flag_checking && stmt == last_stmt)
8532 {
8533 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8534 gcc_assert (e == NULL);
8535 }
8536
8537 /* Note that the following may create a new basic block
8538 and renumber the existing basic blocks. */
8539 if (stmt != last_stmt)
8540 {
8541 e = split_block (bb, stmt);
8542 if (e)
8543 blocks_split++;
8544 }
8545 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8546 e->probability = profile_probability::guessed_never ();
8547 }
8548 gsi_prev (&gsi);
8549 }
8550 while (!gsi_end_p (gsi));
8551 }
8552 }
8553
8554 if (blocks_split)
8555 checking_verify_flow_info ();
8556
8557 return blocks_split;
8558 }
8559
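/* As an illustration (block numbers hypothetical), given

     bb_3:  x_1 = foo ();
            y_2 = x_1 + 1;

   where foo may not return, the block is split after the call and a
   fake edge to EXIT is added:

     bb_3:  x_1 = foo ();     succs: bb_4, EXIT (EDGE_FAKE)
     bb_4:  y_2 = x_1 + 1;  */
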
8560 /* Removes edge E and all the blocks dominated by it, and updates dominance
8561 information. The IL in E->src needs to be updated separately.
8562 If dominance info is not available, only the edge E is removed. */
8563
8564 void
8565 remove_edge_and_dominated_blocks (edge e)
8566 {
8567 vec<basic_block> bbs_to_remove = vNULL;
8568 vec<basic_block> bbs_to_fix_dom = vNULL;
8569 edge f;
8570 edge_iterator ei;
8571 bool none_removed = false;
8572 unsigned i;
8573 basic_block bb, dbb;
8574 bitmap_iterator bi;
8575
8576 /* If we are removing a path inside a non-root loop, this may change the
8577 loop ownership of blocks or remove loops entirely. Mark loops for fixup. */
8578 if (current_loops
8579 && loop_outer (e->src->loop_father) != NULL
8580 && e->src->loop_father == e->dest->loop_father)
8581 loops_state_set (LOOPS_NEED_FIXUP);
8582
8583 if (!dom_info_available_p (CDI_DOMINATORS))
8584 {
8585 remove_edge (e);
8586 return;
8587 }
8588
8589 /* No updating is needed for edges to exit. */
8590 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8591 {
8592 if (cfgcleanup_altered_bbs)
8593 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8594 remove_edge (e);
8595 return;
8596 }
8597
8598 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8599 that is not dominated by E->dest, then this set is empty. Otherwise,
8600 all the basic blocks dominated by E->dest are removed.
8601
8602 Also, to DF_IDOM we store the immediate dominators of the blocks in
8603 the dominance frontier of E (i.e., of the successors of the
8604 removed blocks, if there are any, and of E->dest otherwise). */
8605 FOR_EACH_EDGE (f, ei, e->dest->preds)
8606 {
8607 if (f == e)
8608 continue;
8609
8610 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8611 {
8612 none_removed = true;
8613 break;
8614 }
8615 }
8616
8617 auto_bitmap df, df_idom;
8618 if (none_removed)
8619 bitmap_set_bit (df_idom,
8620 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8621 else
8622 {
8623 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8624 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8625 {
8626 FOR_EACH_EDGE (f, ei, bb->succs)
8627 {
8628 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8629 bitmap_set_bit (df, f->dest->index);
8630 }
8631 }
8632 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8633 bitmap_clear_bit (df, bb->index);
8634
8635 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8636 {
8637 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8638 bitmap_set_bit (df_idom,
8639 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8640 }
8641 }
8642
8643 if (cfgcleanup_altered_bbs)
8644 {
8645 /* Record the set of the altered basic blocks. */
8646 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8647 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8648 }
8649
8650 /* Remove E and the cancelled blocks. */
8651 if (none_removed)
8652 remove_edge (e);
8653 else
8654 {
8655 /* Walk backwards so as to get a chance to substitute all
8656 released DEFs into debug stmts. See
8657 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8658 details. */
8659 for (i = bbs_to_remove.length (); i-- > 0; )
8660 delete_basic_block (bbs_to_remove[i]);
8661 }
8662
8663 /* Update the dominance information. The immediate dominator may change only
8664 for blocks whose immediate dominator belongs to DF_IDOM:
8665
8666 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8667 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8668 Z dominates X after the removal. Before removal, there exists a path P
8669 from Y to X that avoids Z. Let F be the last edge on P that is
8670 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8671 dominates W, and because of P, Z does not dominate W), and W belongs to
8672 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8673 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8674 {
8675 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8676 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8677 dbb;
8678 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8679 bbs_to_fix_dom.safe_push (dbb);
8680 }
8681
8682 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8683
8684 bbs_to_remove.release ();
8685 bbs_to_fix_dom.release ();
8686 }
8687
8688 /* Purge dead EH edges from basic block BB. */
8689
8690 bool
8691 gimple_purge_dead_eh_edges (basic_block bb)
8692 {
8693 bool changed = false;
8694 edge e;
8695 edge_iterator ei;
8696 gimple *stmt = last_stmt (bb);
8697
8698 if (stmt && stmt_can_throw_internal (cfun, stmt))
8699 return false;
8700
8701 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8702 {
8703 if (e->flags & EDGE_EH)
8704 {
8705 remove_edge_and_dominated_blocks (e);
8706 changed = true;
8707 }
8708 else
8709 ei_next (&ei);
8710 }
8711
8712 return changed;
8713 }
8714
8715 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8716
8717 bool
8718 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8719 {
8720 bool changed = false;
8721 unsigned i;
8722 bitmap_iterator bi;
8723
8724 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8725 {
8726 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8727
8728 /* Earlier gimple_purge_dead_eh_edges could have removed
8729 this basic block already. */
8730 gcc_assert (bb || changed);
8731 if (bb != NULL)
8732 changed |= gimple_purge_dead_eh_edges (bb);
8733 }
8734
8735 return changed;
8736 }
8737
8738 /* Purge dead abnormal call edges from basic block BB. */
8739
8740 bool
8741 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8742 {
8743 bool changed = false;
8744 edge e;
8745 edge_iterator ei;
8746 gimple *stmt = last_stmt (bb);
8747
8748 if (!cfun->has_nonlocal_label
8749 && !cfun->calls_setjmp)
8750 return false;
8751
8752 if (stmt && stmt_can_make_abnormal_goto (stmt))
8753 return false;
8754
8755 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8756 {
8757 if (e->flags & EDGE_ABNORMAL)
8758 {
8759 if (e->flags & EDGE_FALLTHRU)
8760 e->flags &= ~EDGE_ABNORMAL;
8761 else
8762 remove_edge_and_dominated_blocks (e);
8763 changed = true;
8764 }
8765 else
8766 ei_next (&ei);
8767 }
8768
8769 return changed;
8770 }
8771
8772 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8773
8774 bool
8775 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8776 {
8777 bool changed = false;
8778 unsigned i;
8779 bitmap_iterator bi;
8780
8781 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8782 {
8783 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8784
8785 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8786 this basic block already. */
8787 gcc_assert (bb || changed);
8788 if (bb != NULL)
8789 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8790 }
8791
8792 return changed;
8793 }
8794
8795 /* This function is called whenever a new edge is created or
8796 redirected. */
8797
8798 static void
8799 gimple_execute_on_growing_pred (edge e)
8800 {
8801 basic_block bb = e->dest;
8802
8803 if (!gimple_seq_empty_p (phi_nodes (bb)))
8804 reserve_phi_args_for_new_edge (bb);
8805 }
8806
8807 /* This function is called immediately before edge E is removed from
8808 the edge vector E->dest->preds. */
8809
8810 static void
8811 gimple_execute_on_shrinking_pred (edge e)
8812 {
8813 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8814 remove_phi_args (e);
8815 }
8816
8817 /*---------------------------------------------------------------------------
8818 Helper functions for Loop versioning
8819 ---------------------------------------------------------------------------*/
8820
8821 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8822 of 'first'. Both of them are dominated by 'new_head' basic block. When
8823 'new_head' was created by splitting 'second's incoming edge, it received
8824 phi arguments on that edge from split_edge(). Later, an additional edge
8825 'e' was created to connect 'new_head' and 'first'. Now this routine adds
8826 on edge 'e' the phi args that the 'new_head' to 'second' edge received
8827 as part of the edge splitting. */
8828
8829 static void
8830 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8831 basic_block new_head, edge e)
8832 {
8833 gphi *phi1, *phi2;
8834 gphi_iterator psi1, psi2;
8835 tree def;
8836 edge e2 = find_edge (new_head, second);
8837
8838 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8839 edge, we should always have an edge from NEW_HEAD to SECOND. */
8840 gcc_assert (e2 != NULL);
8841
8842 /* Browse all 'second' basic block phi nodes and add phi args to
8843 edge 'e' for 'first' head. PHI args are always in correct order. */
8844
8845 for (psi2 = gsi_start_phis (second),
8846 psi1 = gsi_start_phis (first);
8847 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8848 gsi_next (&psi2), gsi_next (&psi1))
8849 {
8850 phi1 = psi1.phi ();
8851 phi2 = psi2.phi ();
8852 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8853 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8854 }
8855 }
8856
8857
8858 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8859 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8860 the destination of the ELSE part. */
8861
8862 static void
8863 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8864 basic_block second_head ATTRIBUTE_UNUSED,
8865 basic_block cond_bb, void *cond_e)
8866 {
8867 gimple_stmt_iterator gsi;
8868 gimple *new_cond_expr;
8869 tree cond_expr = (tree) cond_e;
8870 edge e0;
8871
8872 /* Build the new conditional expr. */
8873 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8874 NULL_TREE, NULL_TREE);
8875
8876 /* Add new cond in cond_bb. */
8877 gsi = gsi_last_bb (cond_bb);
8878 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8879
8880 /* Adjust edges appropriately to connect new head with first head
8881 as well as second head. */
8882 e0 = single_succ_edge (cond_bb);
8883 e0->flags &= ~EDGE_FALLTHRU;
8884 e0->flags |= EDGE_FALSE_VALUE;
8885 }
8886
8887
8888 /* Do book-keeping of basic block BB for the profile consistency checker.
8889 Accumulate the size and time estimates in RECORD. */
8890 static void
8891 gimple_account_profile_record (basic_block bb,
8892 struct profile_record *record)
8893 {
8894 gimple_stmt_iterator i;
8895 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8896 {
8897 record->size
8898 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8899 if (bb->count.initialized_p ())
8900 record->time
8901 += estimate_num_insns (gsi_stmt (i),
8902 &eni_time_weights) * bb->count.to_gcov_type ();
8903 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8904 record->time
8905 += estimate_num_insns (gsi_stmt (i),
8906 &eni_time_weights) * bb->count.to_frequency (cfun);
8907 }
8908 }
8909
8910 struct cfg_hooks gimple_cfg_hooks = {
8911 "gimple",
8912 gimple_verify_flow_info,
8913 gimple_dump_bb, /* dump_bb */
8914 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8915 create_bb, /* create_basic_block */
8916 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8917 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8918 gimple_can_remove_branch_p, /* can_remove_branch_p */
8919 remove_bb, /* delete_basic_block */
8920 gimple_split_block, /* split_block */
8921 gimple_move_block_after, /* move_block_after */
8922 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8923 gimple_merge_blocks, /* merge_blocks */
8924 gimple_predict_edge, /* predict_edge */
8925 gimple_predicted_by_p, /* predicted_by_p */
8926 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8927 gimple_duplicate_bb, /* duplicate_block */
8928 gimple_split_edge, /* split_edge */
8929 gimple_make_forwarder_block, /* make_forwarder_block */
8930 NULL, /* tidy_fallthru_edge */
8931 NULL, /* force_nonfallthru */
8932 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8933 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8934 gimple_flow_call_edges_add, /* flow_call_edges_add */
8935 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8936 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8937 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8938 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8939 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8940 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8941 flush_pending_stmts, /* flush_pending_stmts */
8942 gimple_empty_block_p, /* block_empty_p */
8943 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8944 gimple_account_profile_record,
8945 };
8946
8947
8948 /* Split all critical edges. Split some extra (not necessarily critical) edges
8949 if FOR_EDGE_INSERTION_P is true. */
8950
8951 unsigned int
8952 split_critical_edges (bool for_edge_insertion_p /* = false */)
8953 {
8954 basic_block bb;
8955 edge e;
8956 edge_iterator ei;
8957
8958 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8959 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8960 mappings around the calls to split_edge. */
8961 start_recording_case_labels ();
8962 FOR_ALL_BB_FN (bb, cfun)
8963 {
8964 FOR_EACH_EDGE (e, ei, bb->succs)
8965 {
8966 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8967 split_edge (e);
8968 /* PRE inserts statements on edges and expects that
8969 since split_critical_edges was done beforehand, committing edge
8970 insertions will not split more edges. In addition to critical
8971 edges we must split edges that have multiple successors and
8972 end by control flow statements, such as RESX.
8973 Go ahead and split them too. This matches the logic in
8974 gimple_find_edge_insert_loc. */
8975 else if (for_edge_insertion_p
8976 && (!single_pred_p (e->dest)
8977 || !gimple_seq_empty_p (phi_nodes (e->dest))
8978 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8979 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8980 && !(e->flags & EDGE_ABNORMAL))
8981 {
8982 gimple_stmt_iterator gsi;
8983
8984 gsi = gsi_last_bb (e->src);
8985 if (!gsi_end_p (gsi)
8986 && stmt_ends_bb_p (gsi_stmt (gsi))
8987 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8988 && !gimple_call_builtin_p (gsi_stmt (gsi),
8989 BUILT_IN_RETURN)))
8990 split_edge (e);
8991 }
8992 }
8993 }
8994 end_recording_case_labels ();
8995 return 0;
8996 }
8997
8998 namespace {
8999
9000 const pass_data pass_data_split_crit_edges =
9001 {
9002 GIMPLE_PASS, /* type */
9003 "crited", /* name */
9004 OPTGROUP_NONE, /* optinfo_flags */
9005 TV_TREE_SPLIT_EDGES, /* tv_id */
9006 PROP_cfg, /* properties_required */
9007 PROP_no_crit_edges, /* properties_provided */
9008 0, /* properties_destroyed */
9009 0, /* todo_flags_start */
9010 0, /* todo_flags_finish */
9011 };
9012
9013 class pass_split_crit_edges : public gimple_opt_pass
9014 {
9015 public:
9016 pass_split_crit_edges (gcc::context *ctxt)
9017 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9018 {}
9019
9020 /* opt_pass methods: */
9021 virtual unsigned int execute (function *) { return split_critical_edges (); }
9022
9023 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9024 }; // class pass_split_crit_edges
9025
9026 } // anon namespace
9027
9028 gimple_opt_pass *
9029 make_pass_split_crit_edges (gcc::context *ctxt)
9030 {
9031 return new pass_split_crit_edges (ctxt);
9032 }
9033
9034
9035 /* Insert COND expression (which must be a GIMPLE_COND) after STMT
9036 in basic block BB, splitting the block and creating a new
9037 conditionally executed basic block.
9038 Update the profile so the new bb is visited with probability PROB.
9039 Return the created basic block. */
9040 basic_block
9041 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9042 profile_probability prob)
9043 {
9044 edge fall = split_block (bb, stmt);
9045 gimple_stmt_iterator iter = gsi_last_bb (bb);
9046 basic_block new_bb;
9047
9048 /* Insert cond statement. */
9049 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9050 if (gsi_end_p (iter))
9051 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9052 else
9053 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9054
9055 /* Create conditionally executed block. */
9056 new_bb = create_empty_bb (bb);
9057 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9058 e->probability = prob;
9059 new_bb->count = e->count ();
9060 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9061
9062 /* Fix edge for split bb. */
9063 fall->flags = EDGE_FALSE_VALUE;
9064 fall->probability -= e->probability;
9065
9066 /* Update dominance info. */
9067 if (dom_info_available_p (CDI_DOMINATORS))
9068 {
9069 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9070 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9071 }
9072
9073 /* Update loop info. */
9074 if (current_loops)
9075 add_bb_to_loop (new_bb, bb->loop_father);
9076
9077 return new_bb;
9078 }
9079
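/* A minimal usage sketch, assuming VAL is a gimple value available at
   STMT and the guarded code is expected to run rarely:

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond,
                         profile_probability::very_unlikely ());

   Statements inserted into THEN_BB then execute only when VAL != 0.  */
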
9080 /* Build a ternary operation and gimplify it. Emit code before GSI.
9081 Return the gimple_val holding the result. */
9082
9083 tree
9084 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9085 tree type, tree a, tree b, tree c)
9086 {
9087 tree ret;
9088 location_t loc = gimple_location (gsi_stmt (*gsi));
9089
9090 ret = fold_build3_loc (loc, code, type, a, b, c);
9091 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9092 GSI_SAME_STMT);
9093 }
9094
9095 /* Build a binary operation and gimplify it. Emit code before GSI.
9096 Return the gimple_val holding the result. */
9097
9098 tree
9099 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9100 tree type, tree a, tree b)
9101 {
9102 tree ret;
9103
9104 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9105 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9106 GSI_SAME_STMT);
9107 }
9108
9109 /* Build a unary operation and gimplify it. Emit code before GSI.
9110 Return the gimple_val holding the result. */
9111
9112 tree
9113 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9114 tree a)
9115 {
9116 tree ret;
9117
9118 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9119 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9120 GSI_SAME_STMT);
9121 }
9122
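/* For example, to emit code computing (A + B) * C before the statement
   at *GSI, these helpers can be chained; each returns a gimple value
   (an SSA name or a constant) usable as an operand of the next call:

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);  */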
9123
9124 \f
9125 /* Given a basic block B which ends with a conditional and has
9126 precisely two successors, determine which of the edges is taken if
9127 the conditional is true and which is taken if the conditional is
9128 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9129
9130 void
9131 extract_true_false_edges_from_block (basic_block b,
9132 edge *true_edge,
9133 edge *false_edge)
9134 {
9135 edge e = EDGE_SUCC (b, 0);
9136
9137 if (e->flags & EDGE_TRUE_VALUE)
9138 {
9139 *true_edge = e;
9140 *false_edge = EDGE_SUCC (b, 1);
9141 }
9142 else
9143 {
9144 *false_edge = e;
9145 *true_edge = EDGE_SUCC (b, 1);
9146 }
9147 }
9148
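/* A minimal usage sketch, assuming BB ends in a GIMPLE_COND:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   Afterwards true_edge->dest is the block reached when the condition
   holds and false_edge->dest the one reached when it does not.  */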
9149
9150 /* From a controlling predicate in the immediate dominator DOM of
9151 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9152 predicate evaluates to true and false and store them to
9153 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9154 they are non-NULL. Return true if the edges can be determined,
9155 otherwise return false. */
9156
9157 bool
9158 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9159 edge *true_controlled_edge,
9160 edge *false_controlled_edge)
9161 {
9162 basic_block bb = phiblock;
9163 edge true_edge, false_edge, tem;
9164 edge e0 = NULL, e1 = NULL;
9165
9166 /* We have to verify that one edge into the PHI node is dominated
9167 by the true edge of the predicate block and the other edge
9168 dominated by the false edge. This ensures that the PHI argument
9169 we are going to take is completely determined by the path we
9170 take from the predicate block.
9171 We can only use BB dominance checks below if the destination of
9172 the true/false edges are dominated by their edge, thus only
9173 have a single predecessor. */
9174 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9175 tem = EDGE_PRED (bb, 0);
9176 if (tem == true_edge
9177 || (single_pred_p (true_edge->dest)
9178 && (tem->src == true_edge->dest
9179 || dominated_by_p (CDI_DOMINATORS,
9180 tem->src, true_edge->dest))))
9181 e0 = tem;
9182 else if (tem == false_edge
9183 || (single_pred_p (false_edge->dest)
9184 && (tem->src == false_edge->dest
9185 || dominated_by_p (CDI_DOMINATORS,
9186 tem->src, false_edge->dest))))
9187 e1 = tem;
9188 else
9189 return false;
9190 tem = EDGE_PRED (bb, 1);
9191 if (tem == true_edge
9192 || (single_pred_p (true_edge->dest)
9193 && (tem->src == true_edge->dest
9194 || dominated_by_p (CDI_DOMINATORS,
9195 tem->src, true_edge->dest))))
9196 e0 = tem;
9197 else if (tem == false_edge
9198 || (single_pred_p (false_edge->dest)
9199 && (tem->src == false_edge->dest
9200 || dominated_by_p (CDI_DOMINATORS,
9201 tem->src, false_edge->dest))))
9202 e1 = tem;
9203 else
9204 return false;
9205 if (!e0 || !e1)
9206 return false;
9207
9208 if (true_controlled_edge)
9209 *true_controlled_edge = e0;
9210 if (false_controlled_edge)
9211 *false_controlled_edge = e1;
9212
9213 return true;
9214 }
9215
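/* A minimal usage sketch, assuming PHI is a PHI node in PHIBLOCK and
   DOM is its immediate dominator ending in a GIMPLE_COND:

     edge te, fe;
     tree true_val = NULL_TREE, false_val = NULL_TREE;
     if (extract_true_false_controlled_edges (dom, phiblock, &te, &fe))
       {
         true_val = PHI_ARG_DEF (phi, te->dest_idx);
         false_val = PHI_ARG_DEF (phi, fe->dest_idx);
       }

   TRUE_VAL is then the PHI argument chosen when the predicate in DOM
   evaluates to true.  */
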
9216 /* Generate a range test, stored in *LHS and *RHS, that determines whether
9217 INDEX is in the range [LOW, HIGH]. Insert the associated stmts at the end of BB. */
9218
9219 void
9220 generate_range_test (basic_block bb, tree index, tree low, tree high,
9221 tree *lhs, tree *rhs)
9222 {
9223 tree type = TREE_TYPE (index);
9224 tree utype = unsigned_type_for (type);
9225
9226 low = fold_convert (utype, low);
9227 high = fold_convert (utype, high);
9228
9229 gimple_seq seq = NULL;
9230 index = gimple_convert (&seq, utype, index);
9231 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9232 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9233
9234 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9235 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9236 }
9237
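/* The emitted test is the classical unsigned range check: *LHS is
   (utype) INDEX - (utype) LOW and *RHS is (utype) (HIGH - LOW), so
   INDEX is in [LOW, HIGH] iff *LHS <= *RHS in unsigned arithmetic.
   A caller would typically finish with

     tree lhs, rhs;
     generate_range_test (bb, index, low, high, &lhs, &rhs);
     gcond *cmp = gimple_build_cond (LE_EXPR, lhs, rhs,
                                     NULL_TREE, NULL_TREE);

   and wire CMP into BB.  */
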
9238 /* Return the basic block that belongs to label numbered INDEX
9239 of a switch statement. */
9240
9241 basic_block
9242 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9243 {
9244 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9245 }
9246
9247 /* Return the default basic block of a switch statement. */
9248
9249 basic_block
9250 gimple_switch_default_bb (function *ifun, gswitch *gs)
9251 {
9252 return gimple_switch_label_bb (ifun, gs, 0);
9253 }
9254
9255 /* Return the edge that belongs to label numbered INDEX
9256 of a switch statement. */
9257
9258 edge
9259 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9260 {
9261 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9262 }
9263
9264 /* Return the default edge of a switch statement. */
9265
9266 edge
9267 gimple_switch_default_edge (function *ifun, gswitch *gs)
9268 {
9269 return gimple_switch_edge (ifun, gs, 0);
9270 }
9271
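/* For example, to visit every outgoing edge of switch statement GS in
   function IFUN (label index 0 is always the default):

     for (unsigned i = 0; i < gimple_switch_num_labels (gs); ++i)
       {
         edge e = gimple_switch_edge (ifun, gs, i);
         basic_block dest = e->dest;
       }  */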
9272
9273 /* Emit return warnings. */
9274
9275 namespace {
9276
9277 const pass_data pass_data_warn_function_return =
9278 {
9279 GIMPLE_PASS, /* type */
9280 "*warn_function_return", /* name */
9281 OPTGROUP_NONE, /* optinfo_flags */
9282 TV_NONE, /* tv_id */
9283 PROP_cfg, /* properties_required */
9284 0, /* properties_provided */
9285 0, /* properties_destroyed */
9286 0, /* todo_flags_start */
9287 0, /* todo_flags_finish */
9288 };
9289
9290 class pass_warn_function_return : public gimple_opt_pass
9291 {
9292 public:
9293 pass_warn_function_return (gcc::context *ctxt)
9294 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9295 {}
9296
9297 /* opt_pass methods: */
9298 virtual unsigned int execute (function *);
9299
9300 }; // class pass_warn_function_return
9301
9302 unsigned int
9303 pass_warn_function_return::execute (function *fun)
9304 {
9305 location_t location;
9306 gimple *last;
9307 edge e;
9308 edge_iterator ei;
9309
9310 if (!targetm.warn_func_return (fun->decl))
9311 return 0;
9312
9313 /* If we have a path to EXIT, then we do return. */
9314 if (TREE_THIS_VOLATILE (fun->decl)
9315 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9316 {
9317 location = UNKNOWN_LOCATION;
9318 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9319 (e = ei_safe_edge (ei)); )
9320 {
9321 last = last_stmt (e->src);
9322 if ((gimple_code (last) == GIMPLE_RETURN
9323 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9324 && location == UNKNOWN_LOCATION
9325 && ((location = LOCATION_LOCUS (gimple_location (last)))
9326 != UNKNOWN_LOCATION)
9327 && !optimize)
9328 break;
9329 /* When optimizing, replace return stmts in noreturn functions
9330 with a __builtin_unreachable () call. */
9331 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9332 {
9333 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9334 gimple *new_stmt = gimple_build_call (fndecl, 0);
9335 gimple_set_location (new_stmt, gimple_location (last));
9336 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9337 gsi_replace (&gsi, new_stmt, true);
9338 remove_edge (e);
9339 }
9340 else
9341 ei_next (&ei);
9342 }
9343 if (location == UNKNOWN_LOCATION)
9344 location = cfun->function_end_locus;
9345 warning_at (location, 0, "%<noreturn%> function does return");
9346 }
9347
9348 /* If we see "return;" in some basic block, then we do reach the end
9349 without returning a value. */
9350 else if (warn_return_type > 0
9351 && !TREE_NO_WARNING (fun->decl)
9352 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9353 {
9354 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9355 {
9356 gimple *last = last_stmt (e->src);
9357 greturn *return_stmt = dyn_cast <greturn *> (last);
9358 if (return_stmt
9359 && gimple_return_retval (return_stmt) == NULL
9360 && !gimple_no_warning_p (last))
9361 {
9362 location = gimple_location (last);
9363 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9364 location = fun->function_end_locus;
9365 if (warning_at (location, OPT_Wreturn_type,
9366 "control reaches end of non-void function"))
9367 TREE_NO_WARNING (fun->decl) = 1;
9368 break;
9369 }
9370 }
9371 /* The C++ FE turns fallthrough from the end of a non-void function
9372 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9373 Recognize those too. */
9374 basic_block bb;
9375 if (!TREE_NO_WARNING (fun->decl))
9376 FOR_EACH_BB_FN (bb, fun)
9377 if (EDGE_COUNT (bb->succs) == 0)
9378 {
9379 gimple *last = last_stmt (bb);
9380 const enum built_in_function ubsan_missing_ret
9381 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9382 if (last
9383 && ((LOCATION_LOCUS (gimple_location (last))
9384 == BUILTINS_LOCATION
9385 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9386 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9387 {
9388 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9389 gsi_prev_nondebug (&gsi);
9390 gimple *prev = gsi_stmt (gsi);
9391 if (prev == NULL)
9392 location = UNKNOWN_LOCATION;
9393 else
9394 location = gimple_location (prev);
9395 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9396 location = fun->function_end_locus;
9397 if (warning_at (location, OPT_Wreturn_type,
9398 "control reaches end of non-void function"))
9399 TREE_NO_WARNING (fun->decl) = 1;
9400 break;
9401 }
9402 }
9403 }
9404 return 0;
9405 }
9406
9407 } // anon namespace
9408
9409 gimple_opt_pass *
9410 make_pass_warn_function_return (gcc::context *ctxt)
9411 {
9412 return new pass_warn_function_return (ctxt);
9413 }
9414
9415 /* Walk a gimplified function and warn about calls whose return value is
9416 ignored when the callee is declared with attribute((warn_unused_result)).
9417 This is done before inlining, so we don't have to worry about that. */
9418
9419 static void
9420 do_warn_unused_result (gimple_seq seq)
9421 {
9422 tree fdecl, ftype;
9423 gimple_stmt_iterator i;
9424
9425 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9426 {
9427 gimple *g = gsi_stmt (i);
9428
9429 switch (gimple_code (g))
9430 {
9431 case GIMPLE_BIND:
9432 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9433 break;
9434 case GIMPLE_TRY:
9435 do_warn_unused_result (gimple_try_eval (g));
9436 do_warn_unused_result (gimple_try_cleanup (g));
9437 break;
9438 case GIMPLE_CATCH:
9439 do_warn_unused_result (gimple_catch_handler (
9440 as_a <gcatch *> (g)));
9441 break;
9442 case GIMPLE_EH_FILTER:
9443 do_warn_unused_result (gimple_eh_filter_failure (g));
9444 break;
9445
9446 case GIMPLE_CALL:
9447 if (gimple_call_lhs (g))
9448 break;
9449 if (gimple_call_internal_p (g))
9450 break;
9451
9452 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9453 LHS. All calls whose value is ignored should be
9454 represented like this. Look for the attribute. */
9455 fdecl = gimple_call_fndecl (g);
9456 ftype = gimple_call_fntype (g);
9457
9458 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9459 {
9460 location_t loc = gimple_location (g);
9461
9462 if (fdecl)
9463 warning_at (loc, OPT_Wunused_result,
9464 "ignoring return value of %qD "
9465 "declared with attribute %<warn_unused_result%>",
9466 fdecl);
9467 else
9468 warning_at (loc, OPT_Wunused_result,
9469 "ignoring return value of function "
9470 "declared with attribute %<warn_unused_result%>");
9471 }
9472 break;
9473
9474 default:
9475 /* Not a container, not a call, or a call whose value is used. */
9476 break;
9477 }
9478 }
9479 }
9480
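/* For example, given

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   the call to f in g is a GIMPLE_CALL without a LHS, so the walk above
   emits the -Wunused-result warning for it.  */
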
9481 namespace {
9482
9483 const pass_data pass_data_warn_unused_result =
9484 {
9485 GIMPLE_PASS, /* type */
9486 "*warn_unused_result", /* name */
9487 OPTGROUP_NONE, /* optinfo_flags */
9488 TV_NONE, /* tv_id */
9489 PROP_gimple_any, /* properties_required */
9490 0, /* properties_provided */
9491 0, /* properties_destroyed */
9492 0, /* todo_flags_start */
9493 0, /* todo_flags_finish */
9494 };
9495
9496 class pass_warn_unused_result : public gimple_opt_pass
9497 {
9498 public:
9499 pass_warn_unused_result (gcc::context *ctxt)
9500 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9501 {}
9502
9503 /* opt_pass methods: */
9504 virtual bool gate (function *) { return flag_warn_unused_result; }
9505 virtual unsigned int execute (function *)
9506 {
9507 do_warn_unused_result (gimple_body (current_function_decl));
9508 return 0;
9509 }
9510
9511 }; // class pass_warn_unused_result
9512
9513 } // anon namespace
9514
9515 gimple_opt_pass *
9516 make_pass_warn_unused_result (gcc::context *ctxt)
9517 {
9518 return new pass_warn_unused_result (ctxt);
9519 }
9520
9521 /* IPA passes, compilation of earlier functions or inlining
9522 might have changed some properties, such as marking functions nothrow,
9523 pure, const or noreturn.
9524 Remove redundant edges and basic blocks, and create new ones if necessary.
9525
9526 This pass can't be executed as a standalone pass from the pass manager,
9527 because in between inlining and this fixup verify_flow_info would fail. */
9528
9529 unsigned int
9530 execute_fixup_cfg (void)
9531 {
9532 basic_block bb;
9533 gimple_stmt_iterator gsi;
9534 int todo = 0;
9535 cgraph_node *node = cgraph_node::get (current_function_decl);
9536 profile_count num = node->count;
9537 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9538 bool scale = num.initialized_p () && !(num == den);
9539
9540 if (scale)
9541 {
9542 profile_count::adjust_for_ipa_scaling (&num, &den);
9543 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9544 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9545 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9546 }
9547
9548 FOR_EACH_BB_FN (bb, cfun)
9549 {
9550 if (scale)
9551 bb->count = bb->count.apply_scale (num, den);
9552 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9553 {
9554 gimple *stmt = gsi_stmt (gsi);
9555 tree decl = is_gimple_call (stmt)
9556 ? gimple_call_fndecl (stmt)
9557 : NULL;
9558 if (decl)
9559 {
9560 int flags = gimple_call_flags (stmt);
9561 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9562 {
9563 if (gimple_purge_dead_abnormal_call_edges (bb))
9564 todo |= TODO_cleanup_cfg;
9565
9566 if (gimple_in_ssa_p (cfun))
9567 {
9568 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9569 update_stmt (stmt);
9570 }
9571 }
9572
9573 if (flags & ECF_NORETURN
9574 && fixup_noreturn_call (stmt))
9575 todo |= TODO_cleanup_cfg;
9576 }
9577
9578 /* Remove stores to variables we marked write-only.
9579 Keep the access when the store has side effects, i.e. when the
9580 source is volatile. */
9581 if (gimple_store_p (stmt)
9582 && !gimple_has_side_effects (stmt)
9583 && !optimize_debug)
9584 {
9585 tree lhs = get_base_address (gimple_get_lhs (stmt));
9586
9587 if (VAR_P (lhs)
9588 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9589 && varpool_node::get (lhs)->writeonly)
9590 {
9591 unlink_stmt_vdef (stmt);
9592 gsi_remove (&gsi, true);
9593 release_defs (stmt);
9594 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9595 continue;
9596 }
9597 }
	  /* For calls we can simply remove the LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count);
		}
	    }
	}
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

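/* Illustrative scenario (not part of the GCC sources): suppose

     void fatal (const char *msg);

   is only discovered to be noreturn during IPA analysis.  Statements
   and edges after a call to fatal are then dead; fixup_noreturn_call
   above adjusts such calls, and TODO_cleanup_cfg requests removal of
   the now-redundant edges and blocks.  Conversely, inlining a noreturn
   function that does in fact return can leave a block with no
   successors, which the loop above terminates with a call to
   __builtin_unreachable.  */
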
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
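  /* Added note: fixup_cfg is scheduled at several points in the pass
     pipeline, so the pass must be clonable; presumably that is why
     clone is overridden here, unlike in pass_warn_unused_result
     above.  */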
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

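/* Added note: the walkers below are invoked from the GTY-generated
   marking code, edge_def being a user-marked GC type.  The BLOCK
   referenced by goto_locus is extracted and marked explicitly, since a
   location_t encodes the block but is not itself a GC pointer.  */
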
extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
   - build_gimple_cfg
   - make_blocks: calls create_basic_block (seq, bb);
   - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one
     before it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
     EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph, where each of the blocks A, B, C and D
   below has an edge to every block in the subgraph, including itself.
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

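/* A sketch (not part of the upstream file) of one of the graphs listed
   in the TODO at the end of the file: a block that jumps to itself.
   ENTRY -> A -> EXIT, plus a self-loop edge A -> A.  Note that it is
   not yet called from tree_cfg_c_tests below.  */

static void
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  ASSERT_EQ (3, n_basic_blocks_for_fn (fun));

  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* A has two preds (ENTRY and itself) and two succs (itself and EXIT).  */
  ASSERT_EQ (3, n_edges_for_fn (fun));
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  /* The self-loop should not disturb the dominator trees: A is still
     immediately dominated by ENTRY and postdominated by EXIT.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS, bb_a));
  free_dominance_info (CDI_DOMINATORS);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
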
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */

#endif /* CHECKING_P */