/* Control flow functions for trees.
   Copyright (C) 2001-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

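/* Map from source line number to the last discriminator assigned for
   that line; entries are created lazily by next_discriminator_for_locus.  */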
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

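/* Set up an empty CFG for function FN: allocate the basic block and
   label-to-block arrays, register the fixed ENTRY and EXIT blocks,
   and chain them together.  */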
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

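/* Likewise, but for the current function (cfun).  */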
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
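/* For example (schematic GIMPLE; the exact dump syntax of internal
   calls may differ):

     _1 = .ANNOTATE (_cond, annot_expr_ivdep_kind, 0);
     if (_1 != 0) goto <bb body>; else goto <bb exit>;

   is rewritten into the plain copy "_1 = _cond" once LOOP->safelen
   has been updated.  */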

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

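/* Main worker of the CFG construction pass: build the CFG for the
   current function from its gimple body, clean it up, and initialize
   the loop optimizer so loop annotations can be propagated.  */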
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable (), possibly preceded by labels,
   debug stmts and clobbers.  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  H is the
   gimple_seq to seed the new block with (possibly NULL); E is unused
   and must be NULL -- both parameters exist to match the cfg-hooks
   interface.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false; otherwise factor the computed gotos.  */
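/* Schematically, when COMPUTED_GOTO is true the dispatcher block built
   below looks like

     <dispatcher>:
     factored_label:
       goto gotovar;

   and every original "goto *dest;" in BBS is rewritten into
   "gotovar = dest;" followed by a fallthru edge into the dispatcher,
   which in turn gets an abnormal edge to each potential target
   (FOR_BB).  */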

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common source line, allowing for more accurate
   sample-based profiling.  */
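/* For example, in "if (p) x = 0; else x = 1;" all three resulting
   blocks typically share one source line; giving each a distinct
   discriminator lets a sample profiler attribute counts to the right
   block.  */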

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.
   FROM is LOCUS1 already expanded by the caller, so that repeated
   comparisons against the same locus need not expand it again.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB
   (i.e. a GNU "asm goto").  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
1747 if (base_bb == NULL || base_bb == default_bb)
1748 {
1749 i++;
1750 continue;
1751 }
1752
1753 base_high = CASE_HIGH (base_case)
1754 ? CASE_HIGH (base_case)
1755 : CASE_LOW (base_case);
1756 next_index = i + 1;
1757
1758 /* Try to merge case labels. Break out when we reach the end
1759 of the label vector or when we cannot merge the next case
1760 label with the current one. */
1761 while (next_index < old_size)
1762 {
1763 tree merge_case = gimple_switch_label (stmt, next_index);
1764 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1765 wide_int bhp1 = wi::to_wide (base_high) + 1;
1766
1767 /* Merge the cases if they jump to the same place,
1768 and their ranges are consecutive. */
1769 if (merge_bb == base_bb
1770 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1771 {
1772 base_high = CASE_HIGH (merge_case) ?
1773 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1774 CASE_HIGH (base_case) = base_high;
1775 next_index++;
1776 }
1777 else
1778 break;
1779 }
1780
1781 /* Discard cases that have an unreachable destination block. */
1782 if (EDGE_COUNT (base_bb->succs) == 0
1783 && gimple_seq_unreachable_p (bb_seq (base_bb))
1784 /* Don't optimize this if __builtin_unreachable () is the
1785 implicitly added one by the C++ FE too early, before
1786 -Wreturn-type can be diagnosed. We'll optimize it later
1787 during switchconv pass or any other cfg cleanup. */
1788 && (gimple_in_ssa_p (cfun)
1789 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1790 != BUILTINS_LOCATION)))
1791 {
1792 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1793 if (base_edge != NULL)
1794 remove_edge_and_dominated_blocks (base_edge);
1795 i = next_index;
1796 continue;
1797 }
1798
1799 if (new_size < i)
1800 gimple_switch_set_label (stmt, new_size,
1801 gimple_switch_label (stmt, i));
1802 i = next_index;
1803 new_size++;
1804 }
1805
1806 gcc_assert (new_size <= old_size);
1807
1808 if (new_size < old_size)
1809 gimple_switch_set_num_labels (stmt, new_size);
1810
1811 return new_size < old_size;
1812 }
1813
1814 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1815 and scan the sorted vector of cases. Combine the ones jumping to the
1816 same label. */
1817
1818 bool
1819 group_case_labels (void)
1820 {
1821 basic_block bb;
1822 bool changed = false;
1823
1824 FOR_EACH_BB_FN (bb, cfun)
1825 {
1826 gimple *stmt = last_stmt (bb);
1827 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1828 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1829 }
1830
1831 return changed;
1832 }
1833
1834 /* Checks whether we can merge block B into block A. */
1835
1836 static bool
1837 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1838 {
1839 gimple *stmt;
1840
1841 if (!single_succ_p (a))
1842 return false;
1843
1844 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1845 return false;
1846
1847 if (single_succ (a) != b)
1848 return false;
1849
1850 if (!single_pred_p (b))
1851 return false;
1852
1853 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1854 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1855 return false;
1856
1857 /* If A ends by a statement causing exceptions or something similar, we
1858 cannot merge the blocks. */
1859 stmt = last_stmt (a);
1860 if (stmt && stmt_ends_bb_p (stmt))
1861 return false;
1862
1863 /* Do not allow a block with only a non-local label to be merged. */
1864 if (stmt)
1865 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1866 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1867 return false;
1868
1869 /* Examine the labels at the beginning of B. */
1870 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1871 gsi_next (&gsi))
1872 {
1873 tree lab;
1874 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1875 if (!label_stmt)
1876 break;
1877 lab = gimple_label_label (label_stmt);
1878
1879 /* Do not remove user forced labels or for -O0 any user labels. */
1880 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1881 return false;
1882 }
1883
1884 /* Protect simple loop latches. We only want to avoid merging
1885 the latch with the loop header or with a block in another
1886 loop in this case. */
1887 if (current_loops
1888 && b->loop_father->latch == b
1889 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1890 && (b->loop_father->header == a
1891 || b->loop_father != a->loop_father))
1892 return false;
1893
1894 /* It must be possible to eliminate all phi nodes in B. If ssa form
1895 is not up-to-date and a name-mapping is registered, we cannot eliminate
1896 any phis. Symbols marked for renaming are never a problem though. */
1897 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1898 gsi_next (&gsi))
1899 {
1900 gphi *phi = gsi.phi ();
1901 /* Technically only new names matter. */
1902 if (name_registered_for_update_p (PHI_RESULT (phi)))
1903 return false;
1904 }
1905
1906 /* When not optimizing, don't merge if we'd lose goto_locus. */
1907 if (!optimize
1908 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1909 {
1910 location_t goto_locus = single_succ_edge (a)->goto_locus;
1911 gimple_stmt_iterator prev, next;
1912 prev = gsi_last_nondebug_bb (a);
1913 next = gsi_after_labels (b);
1914 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1915 gsi_next_nondebug (&next);
1916 if ((gsi_end_p (prev)
1917 || gimple_location (gsi_stmt (prev)) != goto_locus)
1918 && (gsi_end_p (next)
1919 || gimple_location (gsi_stmt (next)) != goto_locus))
1920 return false;
1921 }
1922
1923 return true;
1924 }
1925
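/* Informally, gimple_can_merge_blocks_p accepts shapes like the
   hypothetical fragment below: A has the single successor B, B has
   the single predecessor A, A does not end in a control statement,
   and B carries no user-visible or non-local labels:

     A: x_1 = y_2 + 1;    -- fallthru -->    B: z_3 = x_1 * 2;

   Any PHI node in B must be removable, which holds here because A is
   B's only predecessor, so each PHI is degenerate.  */
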
1926 /* Replaces all uses of NAME by VAL. */
1927
1928 void
1929 replace_uses_by (tree name, tree val)
1930 {
1931 imm_use_iterator imm_iter;
1932 use_operand_p use;
1933 gimple *stmt;
1934 edge e;
1935
1936 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1937 {
1938 /* Mark the block if we change the last stmt in it. */
1939 if (cfgcleanup_altered_bbs
1940 && stmt_ends_bb_p (stmt))
1941 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1942
1943 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1944 {
1945 replace_exp (use, val);
1946
1947 if (gimple_code (stmt) == GIMPLE_PHI)
1948 {
1949 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1950 PHI_ARG_INDEX_FROM_USE (use));
1951 if (e->flags & EDGE_ABNORMAL
1952 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1953 {
1954 /* This can only occur for virtual operands, since
1955 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1956 would prevent replacement. */
1957 gcc_checking_assert (virtual_operand_p (name));
1958 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1959 }
1960 }
1961 }
1962
1963 if (gimple_code (stmt) != GIMPLE_PHI)
1964 {
1965 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1966 gimple *orig_stmt = stmt;
1967 size_t i;
1968
1969 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1970 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1971 only change something from non-invariant to invariant, and only
1972 when propagating constants. */
1973 if (is_gimple_min_invariant (val))
1974 for (i = 0; i < gimple_num_ops (stmt); i++)
1975 {
1976 tree op = gimple_op (stmt, i);
1977 /* Operands may be empty here. For example, the labels
1978 of a GIMPLE_COND are nulled out following the creation
1979 of the corresponding CFG edges. */
1980 if (op && TREE_CODE (op) == ADDR_EXPR)
1981 recompute_tree_invariant_for_addr_expr (op);
1982 }
1983
1984 if (fold_stmt (&gsi))
1985 stmt = gsi_stmt (gsi);
1986
1987 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1988 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1989
1990 update_stmt (stmt);
1991 }
1992 }
1993
1994 gcc_checking_assert (has_zero_uses (name));
1995
1996 /* Also update the trees stored in loop structures. */
1997 if (current_loops)
1998 {
1999 struct loop *loop;
2000
2001 FOR_EACH_LOOP (loop, 0)
2002 {
2003 substitute_in_loop_info (loop, name, val);
2004 }
2005 }
2006 }
2007
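/* For illustration (hypothetical SSA names): replace_uses_by (x_1, c)
   with constant c rewrites every use of x_1, so

     y_2 = x_1 + 3;    and    if (x_1 != 0) ...

   become

     y_2 = c + 3;      and    if (c != 0) ...

   and each touched statement is refolded on the spot, so such uses
   typically simplify further immediately.  */
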
2008 /* Merge block B into block A. */
2009
2010 static void
2011 gimple_merge_blocks (basic_block a, basic_block b)
2012 {
2013 gimple_stmt_iterator last, gsi;
2014 gphi_iterator psi;
2015
2016 if (dump_file)
2017 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2018
2019 /* Remove all single-valued PHI nodes from block B of the form
2020 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2021 gsi = gsi_last_bb (a);
2022 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2023 {
2024 gimple *phi = gsi_stmt (psi);
2025 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2026 gimple *copy;
2027 bool may_replace_uses = (virtual_operand_p (def)
2028 || may_propagate_copy (def, use));
2029
2030 /* If we maintain loop closed ssa form, do not propagate arguments
2031 of loop exit phi nodes. */
2032 if (current_loops
2033 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2034 && !virtual_operand_p (def)
2035 && TREE_CODE (use) == SSA_NAME
2036 && a->loop_father != b->loop_father)
2037 may_replace_uses = false;
2038
2039 if (!may_replace_uses)
2040 {
2041 gcc_assert (!virtual_operand_p (def));
2042
2043 /* Note that just emitting the copies is fine -- there is no problem
2044 with ordering of phi nodes. This is because A is the single
2045 predecessor of B, therefore results of the phi nodes cannot
2046 appear as arguments of the phi nodes. */
2047 copy = gimple_build_assign (def, use);
2048 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2049 remove_phi_node (&psi, false);
2050 }
2051 else
2052 {
2053 /* If we deal with a PHI for virtual operands, we can simply
2054 propagate these without fussing with folding or updating
2055 the stmt. */
2056 if (virtual_operand_p (def))
2057 {
2058 imm_use_iterator iter;
2059 use_operand_p use_p;
2060 gimple *stmt;
2061
2062 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2063 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2064 SET_USE (use_p, use);
2065
2066 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2067 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2068 }
2069 else
2070 replace_uses_by (def, use);
2071
2072 remove_phi_node (&psi, true);
2073 }
2074 }
2075
2076 /* Ensure that B follows A. */
2077 move_block_after (b, a);
2078
2079 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2080 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2081
2082 /* Remove labels from B and set gimple_bb to A for other statements. */
2083 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2084 {
2085 gimple *stmt = gsi_stmt (gsi);
2086 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2087 {
2088 tree label = gimple_label_label (label_stmt);
2089 int lp_nr;
2090
2091 gsi_remove (&gsi, false);
2092
2093 /* Now that we can thread computed gotos, we might have
2094 a situation where we have a forced label in block B.
2095 However, the label at the start of block B might still be
2096 used in other ways (think about the runtime checking for
2097 Fortran assigned gotos). So we cannot just delete the
2098 label. Instead we move the label to the start of block A. */
2099 if (FORCED_LABEL (label))
2100 {
2101 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2102 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2103 }
2104 /* Other user labels are kept around in the form of a debug stmt. */
2105 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2106 {
2107 gimple *dbg = gimple_build_debug_bind (label,
2108 integer_zero_node,
2109 stmt);
2110 gimple_debug_bind_reset_value (dbg);
2111 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2112 }
2113
2114 lp_nr = EH_LANDING_PAD_NR (label);
2115 if (lp_nr)
2116 {
2117 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2118 lp->post_landing_pad = NULL;
2119 }
2120 }
2121 else
2122 {
2123 gimple_set_bb (stmt, a);
2124 gsi_next (&gsi);
2125 }
2126 }
2127
2128 /* When merging two BBs, if their counts are different, the larger count
2129 is selected as the new bb count. This is to handle inconsistent
2130 profiles. */
2131 if (a->loop_father == b->loop_father)
2132 {
2133 a->count = a->count.merge (b->count);
2134 }
2135
2136 /* Merge the sequences. */
2137 last = gsi_last_bb (a);
2138 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2139 set_bb_seq (b, NULL);
2140
2141 if (cfgcleanup_altered_bbs)
2142 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2143 }
2144
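/* A sketch of the degenerate-PHI handling above, with hypothetical
   names.  When A is the single predecessor of B and B contains

     v_4 = PHI <v_2(A)>

   either all uses of v_4 are rewritten to v_2 (when copy propagation
   is legal), or the explicit copy

     v_4 = v_2;

   is appended to A before the PHI node is removed.  */
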
2145
2146 /* Return the one of the two successors of BB that is not reachable by a
2147 complex edge, if there is one; otherwise return BB. We use
2148 this in optimizations that use post-dominators for their heuristics,
2149 to catch the cases in C++ where function calls are involved. */
2150
2151 basic_block
2152 single_noncomplex_succ (basic_block bb)
2153 {
2154 edge e0, e1;
2155 if (EDGE_COUNT (bb->succs) != 2)
2156 return bb;
2157
2158 e0 = EDGE_SUCC (bb, 0);
2159 e1 = EDGE_SUCC (bb, 1);
2160 if (e0->flags & EDGE_COMPLEX)
2161 return e1->dest;
2162 if (e1->flags & EDGE_COMPLEX)
2163 return e0->dest;
2164
2165 return bb;
2166 }
2167
2168 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2169
2170 void
2171 notice_special_calls (gcall *call)
2172 {
2173 int flags = gimple_call_flags (call);
2174
2175 if (flags & ECF_MAY_BE_ALLOCA)
2176 cfun->calls_alloca = true;
2177 if (flags & ECF_RETURNS_TWICE)
2178 cfun->calls_setjmp = true;
2179 }
2180
2181
2182 /* Clear flags set by notice_special_calls. Used by dead code removal
2183 to update the flags. */
2184
2185 void
2186 clear_special_calls (void)
2187 {
2188 cfun->calls_alloca = false;
2189 cfun->calls_setjmp = false;
2190 }
2191
2192 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2193
2194 static void
2195 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2196 {
2197 /* Since this block is no longer reachable, we can just delete all
2198 of its PHI nodes. */
2199 remove_phi_nodes (bb);
2200
2201 /* Remove edges to BB's successors. */
2202 while (EDGE_COUNT (bb->succs) > 0)
2203 remove_edge (EDGE_SUCC (bb, 0));
2204 }
2205
2206
2207 /* Remove statements of basic block BB. */
2208
2209 static void
2210 remove_bb (basic_block bb)
2211 {
2212 gimple_stmt_iterator i;
2213
2214 if (dump_file)
2215 {
2216 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2217 if (dump_flags & TDF_DETAILS)
2218 {
2219 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2220 fprintf (dump_file, "\n");
2221 }
2222 }
2223
2224 if (current_loops)
2225 {
2226 struct loop *loop = bb->loop_father;
2227
2228 /* If a loop gets removed, clean up the information associated
2229 with it. */
2230 if (loop->latch == bb
2231 || loop->header == bb)
2232 free_numbers_of_iterations_estimates (loop);
2233 }
2234
2235 /* Remove all the instructions in the block. */
2236 if (bb_seq (bb) != NULL)
2237 {
2238 /* Walk backwards so as to get a chance to substitute all
2239 released DEFs into debug stmts. See
2240 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2241 details. */
2242 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2243 {
2244 gimple *stmt = gsi_stmt (i);
2245 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2246 if (label_stmt
2247 && (FORCED_LABEL (gimple_label_label (label_stmt))
2248 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2249 {
2250 basic_block new_bb;
2251 gimple_stmt_iterator new_gsi;
2252
2253 /* A non-reachable non-local label may still be referenced.
2254 But it no longer needs to carry the extra semantics of
2255 non-locality. */
2256 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2257 {
2258 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2259 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2260 }
2261
2262 new_bb = bb->prev_bb;
2263 /* Don't move any labels into ENTRY block. */
2264 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2265 {
2266 new_bb = single_succ (new_bb);
2267 gcc_assert (new_bb != bb);
2268 }
2269 new_gsi = gsi_after_labels (new_bb);
2270 gsi_remove (&i, false);
2271 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2272 }
2273 else
2274 {
2275 /* Release SSA definitions. */
2276 release_defs (stmt);
2277 gsi_remove (&i, true);
2278 }
2279
2280 if (gsi_end_p (i))
2281 i = gsi_last_bb (bb);
2282 else
2283 gsi_prev (&i);
2284 }
2285 }
2286
2287 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2288 bb->il.gimple.seq = NULL;
2289 bb->il.gimple.phi_nodes = NULL;
2290 }
2291
2292
2293 /* Given a basic block BB and a value VAL for use in the final statement
2294 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2295 the edge that will be taken out of the block.
2296 If VAL is NULL_TREE, then the current value of the final statement's
2297 predicate or index is used.
2298 If the value does not match a unique edge, NULL is returned. */
2299
2300 edge
2301 find_taken_edge (basic_block bb, tree val)
2302 {
2303 gimple *stmt;
2304
2305 stmt = last_stmt (bb);
2306
2307 /* Handle ENTRY and EXIT. */
2308 if (!stmt)
2309 return NULL;
2310
2311 if (gimple_code (stmt) == GIMPLE_COND)
2312 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2313
2314 if (gimple_code (stmt) == GIMPLE_SWITCH)
2315 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2316
2317 if (computed_goto_p (stmt))
2318 {
2319 /* Only optimize if the argument is a label; if the argument is
2320 not a label then we cannot construct a proper CFG.
2321
2322 It may be the case that we only need to allow the LABEL_REF to
2323 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2324 appear inside a LABEL_EXPR just to be safe. */
2325 if (val
2326 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2327 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2328 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2329 }
2330
2331 /* Otherwise we only know the taken successor edge if it's unique. */
2332 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2333 }
2334
2335 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2336 statement, determine which of the outgoing edges will be taken out of the
2337 block. Return NULL if either edge may be taken. */
2338
2339 static edge
2340 find_taken_edge_computed_goto (basic_block bb, tree val)
2341 {
2342 basic_block dest;
2343 edge e = NULL;
2344
2345 dest = label_to_block (cfun, val);
2346 if (dest)
2347 e = find_edge (bb, dest);
2348
2349 /* It's possible for find_edge to return NULL here on invalid code
2350 that abuses the labels-as-values extension (e.g. code that attempts to
2351 jump *between* functions via stored labels-as-values; PR 84136).
2352 If so, then we simply return that NULL for the edge.
2353 We don't currently have a way of detecting such invalid code, so we
2354 can't assert that it was the case when a NULL edge occurs here. */
2355
2356 return e;
2357 }
2358
2359 /* Given COND_STMT and a constant value VAL for use as the predicate,
2360 determine which of the two edges will be taken out of
2361 the statement's block. Return NULL if either edge may be taken.
2362 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2363 is used. */
2364
2365 static edge
2366 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2367 {
2368 edge true_edge, false_edge;
2369
2370 if (val == NULL_TREE)
2371 {
2372 /* Use the current value of the predicate. */
2373 if (gimple_cond_true_p (cond_stmt))
2374 val = integer_one_node;
2375 else if (gimple_cond_false_p (cond_stmt))
2376 val = integer_zero_node;
2377 else
2378 return NULL;
2379 }
2380 else if (TREE_CODE (val) != INTEGER_CST)
2381 return NULL;
2382
2383 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2384 &true_edge, &false_edge);
2385
2386 return (integer_zerop (val) ? false_edge : true_edge);
2387 }
2388
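/* For example, given the hypothetical block ending in

     if (0 != 0) goto <bb 3>; else goto <bb 4>;

   the predicate folds to false and the FALSE edge to <bb 4> is
   returned; with a predicate that is not constant, NULL is returned
   because either edge may be taken.  */
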
2389 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2390 which edge will be taken out of the statement's block. Return NULL if any
2391 edge may be taken.
2392 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2393 is used. */
2394
2395 edge
2396 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2397 {
2398 basic_block dest_bb;
2399 edge e;
2400 tree taken_case;
2401
2402 if (gimple_switch_num_labels (switch_stmt) == 1)
2403 taken_case = gimple_switch_default_label (switch_stmt);
2404 else
2405 {
2406 if (val == NULL_TREE)
2407 val = gimple_switch_index (switch_stmt);
2408 if (TREE_CODE (val) != INTEGER_CST)
2409 return NULL;
2410 else
2411 taken_case = find_case_label_for_value (switch_stmt, val);
2412 }
2413 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2414
2415 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2416 gcc_assert (e);
2417 return e;
2418 }
2419
2420
2421 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2422 We can make optimal use here of the fact that the case labels are
2423 sorted: We can do a binary search for a case matching VAL. */
2424
2425 tree
2426 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2427 {
2428 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2429 tree default_case = gimple_switch_default_label (switch_stmt);
2430
2431 for (low = 0, high = n; high - low > 1; )
2432 {
2433 size_t i = (high + low) / 2;
2434 tree t = gimple_switch_label (switch_stmt, i);
2435 int cmp;
2436
2437 /* Cache the result of comparing CASE_LOW and val. */
2438 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2439
2440 if (cmp > 0)
2441 high = i;
2442 else
2443 low = i;
2444
2445 if (CASE_HIGH (t) == NULL)
2446 {
2447 /* A single-valued case label. */
2448 if (cmp == 0)
2449 return t;
2450 }
2451 else
2452 {
2453 /* A case range. We can only handle integer ranges. */
2454 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2455 return t;
2456 }
2457 }
2458
2459 return default_case;
2460 }
2461
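/* A worked example of the binary search, on a hypothetical label
   vector whose entry 0 is the default case:

     [1] case 1    [2] case 3 ... 7    [3] case 10

   For VAL == 5, LOW/HIGH narrow onto entry [2]; the range test
   CASE_LOW <= VAL <= CASE_HIGH succeeds and that label is returned.
   For VAL == 8, no entry matches and the default case is returned.  */
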
2462
2463 /* Dump a basic block on stderr. */
2464
2465 void
2466 gimple_debug_bb (basic_block bb)
2467 {
2468 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2469 }
2470
2471
2472 /* Dump basic block with index N on stderr. */
2473
2474 basic_block
2475 gimple_debug_bb_n (int n)
2476 {
2477 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2478 return BASIC_BLOCK_FOR_FN (cfun, n);
2479 }
2480
2481
2482 /* Dump the CFG on stderr.
2483
2484 FLAGS are the same used by the tree dumping functions
2485 (see TDF_* in dumpfile.h). */
2486
2487 void
2488 gimple_debug_cfg (dump_flags_t flags)
2489 {
2490 gimple_dump_cfg (stderr, flags);
2491 }
2492
2493
2494 /* Dump the program showing basic block boundaries on the given FILE.
2495
2496 FLAGS are the same used by the tree dumping functions (see TDF_* in
2497 tree.h). */
2498
2499 void
2500 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2501 {
2502 if (flags & TDF_DETAILS)
2503 {
2504 dump_function_header (file, current_function_decl, flags);
2505 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2506 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2507 last_basic_block_for_fn (cfun));
2508
2509 brief_dump_cfg (file, flags);
2510 fprintf (file, "\n");
2511 }
2512
2513 if (flags & TDF_STATS)
2514 dump_cfg_stats (file);
2515
2516 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2517 }
2518
2519
2520 /* Dump CFG statistics on FILE. */
2521
2522 void
2523 dump_cfg_stats (FILE *file)
2524 {
2525 static long max_num_merged_labels = 0;
2526 unsigned long size, total = 0;
2527 long num_edges;
2528 basic_block bb;
2529 const char * const fmt_str = "%-30s%-13s%12s\n";
2530 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2531 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2532 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2533 const char *funcname = current_function_name ();
2534
2535 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2536
2537 fprintf (file, "---------------------------------------------------------\n");
2538 fprintf (file, fmt_str, "", " Number of ", "Memory");
2539 fprintf (file, fmt_str, "", " instances ", "used ");
2540 fprintf (file, "---------------------------------------------------------\n");
2541
2542 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2543 total += size;
2544 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2545 SIZE_AMOUNT (size));
2546
2547 num_edges = 0;
2548 FOR_EACH_BB_FN (bb, cfun)
2549 num_edges += EDGE_COUNT (bb->succs);
2550 size = num_edges * sizeof (struct edge_def);
2551 total += size;
2552 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2553
2554 fprintf (file, "---------------------------------------------------------\n");
2555 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2556 SIZE_AMOUNT (total));
2557 fprintf (file, "---------------------------------------------------------\n");
2558 fprintf (file, "\n");
2559
2560 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2561 max_num_merged_labels = cfg_stats.num_merged_labels;
2562
2563 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2564 cfg_stats.num_merged_labels, max_num_merged_labels);
2565
2566 fprintf (file, "\n");
2567 }
2568
2569
2570 /* Dump CFG statistics on stderr. Keep extern so that it's always
2571 linked in the final executable. */
2572
2573 DEBUG_FUNCTION void
2574 debug_cfg_stats (void)
2575 {
2576 dump_cfg_stats (stderr);
2577 }
2578
2579 /*---------------------------------------------------------------------------
2580 Miscellaneous helpers
2581 ---------------------------------------------------------------------------*/
2582
2583 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2584 flow. Transfers of control flow associated with EH are excluded. */
2585
2586 static bool
2587 call_can_make_abnormal_goto (gimple *t)
2588 {
2589 /* If the function has no non-local labels, then a call cannot make an
2590 abnormal transfer of control. */
2591 if (!cfun->has_nonlocal_label
2592 && !cfun->calls_setjmp)
2593 return false;
2594
2595 /* Likewise if the call has no side effects. */
2596 if (!gimple_has_side_effects (t))
2597 return false;
2598
2599 /* Likewise if the called function is leaf. */
2600 if (gimple_call_flags (t) & ECF_LEAF)
2601 return false;
2602
2603 return true;
2604 }
2605
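/* A hedged example: in a function that calls setjmp, a plain call
   such as foo () may return twice via longjmp and thus transfer
   control abnormally, unless foo has no side effects or is declared
   leaf, which is exactly what the early returns above rule out.  */
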
2606
2607 /* Return true if T can make an abnormal transfer of control flow.
2608 Transfers of control flow associated with EH are excluded. */
2609
2610 bool
2611 stmt_can_make_abnormal_goto (gimple *t)
2612 {
2613 if (computed_goto_p (t))
2614 return true;
2615 if (is_gimple_call (t))
2616 return call_can_make_abnormal_goto (t);
2617 return false;
2618 }
2619
2620
2621 /* Return true if T represents a stmt that always transfers control. */
2622
2623 bool
2624 is_ctrl_stmt (gimple *t)
2625 {
2626 switch (gimple_code (t))
2627 {
2628 case GIMPLE_COND:
2629 case GIMPLE_SWITCH:
2630 case GIMPLE_GOTO:
2631 case GIMPLE_RETURN:
2632 case GIMPLE_RESX:
2633 return true;
2634 default:
2635 return false;
2636 }
2637 }
2638
2639
2640 /* Return true if T is a statement that may alter the flow of control
2641 (e.g., a call to a non-returning function). */
2642
2643 bool
2644 is_ctrl_altering_stmt (gimple *t)
2645 {
2646 gcc_assert (t);
2647
2648 switch (gimple_code (t))
2649 {
2650 case GIMPLE_CALL:
2651 /* The per-stmt call flag indicates whether the call could alter
2652 control flow. */
2653 if (gimple_call_ctrl_altering_p (t))
2654 return true;
2655 break;
2656
2657 case GIMPLE_EH_DISPATCH:
2658 /* EH_DISPATCH branches to the individual catch handlers at
2659 this level of a try or allowed-exceptions region. It can
2660 fallthru to the next statement as well. */
2661 return true;
2662
2663 case GIMPLE_ASM:
2664 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2665 return true;
2666 break;
2667
2668 CASE_GIMPLE_OMP:
2669 /* OpenMP directives alter control flow. */
2670 return true;
2671
2672 case GIMPLE_TRANSACTION:
2673 /* A transaction start alters control flow. */
2674 return true;
2675
2676 default:
2677 break;
2678 }
2679
2680 /* If a statement can throw, it alters control flow. */
2681 return stmt_can_throw_internal (cfun, t);
2682 }
2683
2684
2685 /* Return true if T is a simple local goto. */
2686
2687 bool
2688 simple_goto_p (gimple *t)
2689 {
2690 return (gimple_code (t) == GIMPLE_GOTO
2691 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2692 }
2693
2694
2695 /* Return true if STMT should start a new basic block. PREV_STMT is
2696 the statement preceding STMT. It is used when STMT is a label or a
2697 case label. Labels should only start a new basic block if the
2698 previous statement wasn't a label. Otherwise, sequences of labels
2699 would generate unnecessary basic blocks that only contain a single
2700 label. */
2701
2702 static inline bool
2703 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2704 {
2705 if (stmt == NULL)
2706 return false;
2707
2708 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2709 any nondebug stmts in the block. We don't want to start another
2710 block in this case: the debug stmt will already have started the
2711 one STMT would start if we weren't outputting debug stmts. */
2712 if (prev_stmt && is_gimple_debug (prev_stmt))
2713 return false;
2714
2715 /* Labels start a new basic block only if the preceding statement
2716 wasn't a label of the same type. This prevents the creation of
2717 consecutive blocks that have nothing but a single label. */
2718 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2719 {
2720 /* Nonlocal and computed GOTO targets always start a new block. */
2721 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2722 || FORCED_LABEL (gimple_label_label (label_stmt)))
2723 return true;
2724
2725 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2726 {
2727 if (DECL_NONLOCAL (gimple_label_label (plabel))
2728 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2729 return true;
2730
2731 cfg_stats.num_merged_labels++;
2732 return false;
2733 }
2734 else
2735 return true;
2736 }
2737 else if (gimple_code (stmt) == GIMPLE_CALL)
2738 {
2739 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2740 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2741 start a new block. */
2742 return true;
2743 if (gimple_call_internal_p (stmt, IFN_PHI)
2744 && prev_stmt
2745 && gimple_code (prev_stmt) != GIMPLE_LABEL
2746 && (gimple_code (prev_stmt) != GIMPLE_CALL
2747 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2748 /* PHI nodes start a new block unless preceded by a label
2749 or another PHI. */
2750 return true;
2751 }
2752
2753 return false;
2754 }
2755
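/* Illustration with hypothetical labels: in the sequence

     <L1>:
     <L2>:
     x_1 = 1;

   <L2> does not start a new basic block when <L1> is an artificial
   local label and <L2> itself is neither non-local nor forced; the
   merged label is counted in cfg_stats.num_merged_labels.  */
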
2756
2757 /* Return true if T should end a basic block. */
2758
2759 bool
2760 stmt_ends_bb_p (gimple *t)
2761 {
2762 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2763 }
2764
2765 /* Remove block annotations and other data structures. */
2766
2767 void
2768 delete_tree_cfg_annotations (struct function *fn)
2769 {
2770 vec_free (label_to_block_map_for_fn (fn));
2771 }
2772
2773 /* Return the virtual phi in BB. */
2774
2775 gphi *
2776 get_virtual_phi (basic_block bb)
2777 {
2778 for (gphi_iterator gsi = gsi_start_phis (bb);
2779 !gsi_end_p (gsi);
2780 gsi_next (&gsi))
2781 {
2782 gphi *phi = gsi.phi ();
2783
2784 if (virtual_operand_p (PHI_RESULT (phi)))
2785 return phi;
2786 }
2787
2788 return NULL;
2789 }
2790
2791 /* Return the first statement in basic block BB. */
2792
2793 gimple *
2794 first_stmt (basic_block bb)
2795 {
2796 gimple_stmt_iterator i = gsi_start_bb (bb);
2797 gimple *stmt = NULL;
2798
2799 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2800 {
2801 gsi_next (&i);
2802 stmt = NULL;
2803 }
2804 return stmt;
2805 }
2806
2807 /* Return the first non-label statement in basic block BB. */
2808
2809 static gimple *
2810 first_non_label_stmt (basic_block bb)
2811 {
2812 gimple_stmt_iterator i = gsi_start_bb (bb);
2813 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2814 gsi_next (&i);
2815 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2816 }
2817
2818 /* Return the last statement in basic block BB. */
2819
2820 gimple *
2821 last_stmt (basic_block bb)
2822 {
2823 gimple_stmt_iterator i = gsi_last_bb (bb);
2824 gimple *stmt = NULL;
2825
2826 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2827 {
2828 gsi_prev (&i);
2829 stmt = NULL;
2830 }
2831 return stmt;
2832 }
2833
2834 /* Return the last statement of an otherwise empty block. Return NULL
2835 if the block is totally empty, or if it contains more than one
2836 statement. */
2837
2838 gimple *
2839 last_and_only_stmt (basic_block bb)
2840 {
2841 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2842 gimple *last, *prev;
2843
2844 if (gsi_end_p (i))
2845 return NULL;
2846
2847 last = gsi_stmt (i);
2848 gsi_prev_nondebug (&i);
2849 if (gsi_end_p (i))
2850 return last;
2851
2852 /* Empty statements should no longer appear in the instruction stream.
2853 Everything that might have appeared before should be deleted by
2854 remove_useless_stmts, and the optimizers should just gsi_remove
2855 instead of smashing with build_empty_stmt.
2856
2857 Thus the only thing that should appear here in a block containing
2858 one executable statement is a label. */
2859 prev = gsi_stmt (i);
2860 if (gimple_code (prev) == GIMPLE_LABEL)
2861 return last;
2862 else
2863 return NULL;
2864 }
2865
2866 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2867
2868 static void
2869 reinstall_phi_args (edge new_edge, edge old_edge)
2870 {
2871 edge_var_map *vm;
2872 int i;
2873 gphi_iterator phis;
2874
2875 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2876 if (!v)
2877 return;
2878
2879 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2880 v->iterate (i, &vm) && !gsi_end_p (phis);
2881 i++, gsi_next (&phis))
2882 {
2883 gphi *phi = phis.phi ();
2884 tree result = redirect_edge_var_map_result (vm);
2885 tree arg = redirect_edge_var_map_def (vm);
2886
2887 gcc_assert (result == gimple_phi_result (phi));
2888
2889 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2890 }
2891
2892 redirect_edge_var_map_clear (old_edge);
2893 }
2894
2895 /* Returns the basic block after which the new basic block created
2896 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2897 near its "logical" location. This is of most help to humans looking
2898 at debugging dumps. */
2899
2900 basic_block
2901 split_edge_bb_loc (edge edge_in)
2902 {
2903 basic_block dest = edge_in->dest;
2904 basic_block dest_prev = dest->prev_bb;
2905
2906 if (dest_prev)
2907 {
2908 edge e = find_edge (dest_prev, dest);
2909 if (e && !(e->flags & EDGE_COMPLEX))
2910 return edge_in->src;
2911 }
2912 return dest_prev;
2913 }
2914
2915 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2916 Abort on abnormal edges. */
2917
2918 static basic_block
2919 gimple_split_edge (edge edge_in)
2920 {
2921 basic_block new_bb, after_bb, dest;
2922 edge new_edge, e;
2923
2924 /* Abnormal edges cannot be split. */
2925 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2926
2927 dest = edge_in->dest;
2928
2929 after_bb = split_edge_bb_loc (edge_in);
2930
2931 new_bb = create_empty_bb (after_bb);
2932 new_bb->count = edge_in->count ();
2933
2934 e = redirect_edge_and_branch (edge_in, new_bb);
2935 gcc_assert (e == edge_in);
2936
2937 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2938 reinstall_phi_args (new_edge, e);
2939
2940 return new_bb;
2941 }
2942
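/* A sketch of the transform on a hypothetical CFG: splitting the
   critical edge A->C in

     A --> C <-- B

   produces A -> N -> C, where N is an empty block on a fallthru edge
   with A's edge count, and PHI arguments in C queued for the old edge
   are reinstalled on the new edge N->C.  */
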
2943
2944 /* Verify properties of the address expression T whose base should be
2945 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2946
2947 static bool
2948 verify_address (tree t, bool verify_addressable)
2949 {
2950 bool old_constant;
2951 bool old_side_effects;
2952 bool new_constant;
2953 bool new_side_effects;
2954
2955 old_constant = TREE_CONSTANT (t);
2956 old_side_effects = TREE_SIDE_EFFECTS (t);
2957
2958 recompute_tree_invariant_for_addr_expr (t);
2959 new_side_effects = TREE_SIDE_EFFECTS (t);
2960 new_constant = TREE_CONSTANT (t);
2961
2962 if (old_constant != new_constant)
2963 {
2964 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2965 return true;
2966 }
2967 if (old_side_effects != new_side_effects)
2968 {
2969 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2970 return true;
2971 }
2972
2973 tree base = TREE_OPERAND (t, 0);
2974 while (handled_component_p (base))
2975 base = TREE_OPERAND (base, 0);
2976
2977 if (!(VAR_P (base)
2978 || TREE_CODE (base) == PARM_DECL
2979 || TREE_CODE (base) == RESULT_DECL))
2980 return false;
2981
2982 if (DECL_GIMPLE_REG_P (base))
2983 {
2984 error ("%<DECL_GIMPLE_REG_P%> set on a variable with address taken");
2985 return true;
2986 }
2987
2988 if (verify_addressable && !TREE_ADDRESSABLE (base))
2989 {
2990 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
2991 return true;
2992 }
2993
2994 return false;
2995 }
2996
2997
2998 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2999 Returns true if there is an error, otherwise false. */
3000
3001 static bool
3002 verify_types_in_gimple_min_lval (tree expr)
3003 {
3004 tree op;
3005
3006 if (is_gimple_id (expr))
3007 return false;
3008
3009 if (TREE_CODE (expr) != TARGET_MEM_REF
3010 && TREE_CODE (expr) != MEM_REF)
3011 {
3012 error ("invalid expression for min lvalue");
3013 return true;
3014 }
3015
3016 /* TARGET_MEM_REFs are strange beasts. */
3017 if (TREE_CODE (expr) == TARGET_MEM_REF)
3018 return false;
3019
3020 op = TREE_OPERAND (expr, 0);
3021 if (!is_gimple_val (op))
3022 {
3023 error ("invalid operand in indirect reference");
3024 debug_generic_stmt (op);
3025 return true;
3026 }
3027 /* Memory references now generally can involve a value conversion. */
3028
3029 return false;
3030 }
3031
3032 /* Verify if EXPR is a valid GIMPLE reference expression. If
3033 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3034 if there is an error, otherwise false. */
3035
3036 static bool
3037 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3038 {
3039 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3040
3041 if (TREE_CODE (expr) == REALPART_EXPR
3042 || TREE_CODE (expr) == IMAGPART_EXPR
3043 || TREE_CODE (expr) == BIT_FIELD_REF)
3044 {
3045 tree op = TREE_OPERAND (expr, 0);
3046 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3047 {
3048 error ("non-scalar %qs", code_name);
3049 return true;
3050 }
3051
3052 if (TREE_CODE (expr) == BIT_FIELD_REF)
3053 {
3054 tree t1 = TREE_OPERAND (expr, 1);
3055 tree t2 = TREE_OPERAND (expr, 2);
3056 poly_uint64 size, bitpos;
3057 if (!poly_int_tree_p (t1, &size)
3058 || !poly_int_tree_p (t2, &bitpos)
3059 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3060 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3061 {
3062 error ("invalid position or size operand to %qs", code_name);
3063 return true;
3064 }
3065 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3066 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3067 {
3068 error ("integral result type precision does not match "
3069 "field size of %qs", code_name);
3070 return true;
3071 }
3072 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3073 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3074 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3075 size))
3076 {
3077 error ("mode size of non-integral result does not "
3078 "match field size of %qs",
3079 code_name);
3080 return true;
3081 }
3082 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3083 && !type_has_mode_precision_p (TREE_TYPE (op)))
3084 {
3085 error ("%qs of non-mode-precision operand", code_name);
3086 return true;
3087 }
3088 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3089 && maybe_gt (size + bitpos,
3090 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3091 {
3092 error ("position plus size exceeds size of referenced object in "
3093 "%qs", code_name);
3094 return true;
3095 }
3096 }
3097
3098 if ((TREE_CODE (expr) == REALPART_EXPR
3099 || TREE_CODE (expr) == IMAGPART_EXPR)
3100 && !useless_type_conversion_p (TREE_TYPE (expr),
3101 TREE_TYPE (TREE_TYPE (op))))
3102 {
3103 error ("type mismatch in %qs reference", code_name);
3104 debug_generic_stmt (TREE_TYPE (expr));
3105 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3106 return true;
3107 }
3108 expr = op;
3109 }
3110
3111 while (handled_component_p (expr))
3112 {
3113 code_name = get_tree_code_name (TREE_CODE (expr));
3114
3115 if (TREE_CODE (expr) == REALPART_EXPR
3116 || TREE_CODE (expr) == IMAGPART_EXPR
3117 || TREE_CODE (expr) == BIT_FIELD_REF)
3118 {
3119 error ("non-top-level %qs", code_name);
3120 return true;
3121 }
3122
3123 tree op = TREE_OPERAND (expr, 0);
3124
3125 if (TREE_CODE (expr) == ARRAY_REF
3126 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3127 {
3128 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3129 || (TREE_OPERAND (expr, 2)
3130 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3131 || (TREE_OPERAND (expr, 3)
3132 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3133 {
3134 error ("invalid operands to %qs", code_name);
3135 debug_generic_stmt (expr);
3136 return true;
3137 }
3138 }
3139
3140 /* Verify if the reference array element types are compatible. */
3141 if (TREE_CODE (expr) == ARRAY_REF
3142 && !useless_type_conversion_p (TREE_TYPE (expr),
3143 TREE_TYPE (TREE_TYPE (op))))
3144 {
3145 error ("type mismatch in %qs", code_name);
3146 debug_generic_stmt (TREE_TYPE (expr));
3147 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3148 return true;
3149 }
3150 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3151 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3152 TREE_TYPE (TREE_TYPE (op))))
3153 {
3154 error ("type mismatch in %qs", code_name);
3155 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3156 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3157 return true;
3158 }
3159
3160 if (TREE_CODE (expr) == COMPONENT_REF)
3161 {
3162 if (TREE_OPERAND (expr, 2)
3163 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3164 {
3165 error ("invalid %qs offset operator", code_name);
3166 return true;
3167 }
3168 if (!useless_type_conversion_p (TREE_TYPE (expr),
3169 TREE_TYPE (TREE_OPERAND (expr, 1))))
3170 {
3171 error ("type mismatch in %qs", code_name);
3172 debug_generic_stmt (TREE_TYPE (expr));
3173 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3174 return true;
3175 }
3176 }
3177
3178 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3179 {
3180 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3181 that their operand is not an SSA name or an invariant when
3182 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3183 bug). Otherwise there is nothing to verify; gross mismatches at
3184 most invoke undefined behavior. */
3185 if (require_lvalue
3186 && (TREE_CODE (op) == SSA_NAME
3187 || is_gimple_min_invariant (op)))
3188 {
3189 error ("conversion of %qs on the left hand side of %qs",
3190 get_tree_code_name (TREE_CODE (op)), code_name);
3191 debug_generic_stmt (expr);
3192 return true;
3193 }
3194 else if (TREE_CODE (op) == SSA_NAME
3195 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3196 {
3197 error ("conversion of register to a different size in %qs",
3198 code_name);
3199 debug_generic_stmt (expr);
3200 return true;
3201 }
3202 else if (!handled_component_p (op))
3203 return false;
3204 }
3205
3206 expr = op;
3207 }
3208
3209 code_name = get_tree_code_name (TREE_CODE (expr));
3210
3211 if (TREE_CODE (expr) == MEM_REF)
3212 {
3213 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3214 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3215 && verify_address (TREE_OPERAND (expr, 0), false)))
3216 {
3217 error ("invalid address operand in %qs", code_name);
3218 debug_generic_stmt (expr);
3219 return true;
3220 }
3221 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3222 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3223 {
3224 error ("invalid offset operand in %qs", code_name);
3225 debug_generic_stmt (expr);
3226 return true;
3227 }
3228 }
3229 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3230 {
3231 if (!TMR_BASE (expr)
3232 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3233 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3234 && verify_address (TMR_BASE (expr), false)))
3235 {
3236 error ("invalid address operand in %qs", code_name);
3237 return true;
3238 }
3239 if (!TMR_OFFSET (expr)
3240 || !poly_int_tree_p (TMR_OFFSET (expr))
3241 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3242 {
3243 error ("invalid offset operand in %qs", code_name);
3244 debug_generic_stmt (expr);
3245 return true;
3246 }
3247 }
3248 else if (TREE_CODE (expr) == INDIRECT_REF)
3249 {
3250 error ("%qs in gimple IL", code_name);
3251 debug_generic_stmt (expr);
3252 return true;
3253 }
3254
3255 return ((require_lvalue || !is_gimple_min_invariant (expr))
3256 && verify_types_in_gimple_min_lval (expr));
3257 }
3258
3259 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3260 list of pointer-to types that is trivially convertible to DEST. */
3261
3262 static bool
3263 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3264 {
3265 tree src;
3266
3267 if (!TYPE_POINTER_TO (src_obj))
3268 return true;
3269
3270 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3271 if (useless_type_conversion_p (dest, src))
3272 return true;
3273
3274 return false;
3275 }
3276
3277 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3278 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3279
3280 static bool
3281 valid_fixed_convert_types_p (tree type1, tree type2)
3282 {
3283 return (FIXED_POINT_TYPE_P (type1)
3284 && (INTEGRAL_TYPE_P (type2)
3285 || SCALAR_FLOAT_TYPE_P (type2)
3286 || FIXED_POINT_TYPE_P (type2)));
3287 }
3288
3289 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3290 is a problem, otherwise false. */
3291
3292 static bool
3293 verify_gimple_call (gcall *stmt)
3294 {
3295 tree fn = gimple_call_fn (stmt);
3296 tree fntype, fndecl;
3297 unsigned i;
3298
3299 if (gimple_call_internal_p (stmt))
3300 {
3301 if (fn)
3302 {
3303 error ("gimple call has two targets");
3304 debug_generic_stmt (fn);
3305 return true;
3306 }
3307 }
3308 else
3309 {
3310 if (!fn)
3311 {
3312 error ("gimple call has no target");
3313 return true;
3314 }
3315 }
3316
3317 if (fn && !is_gimple_call_addr (fn))
3318 {
3319 error ("invalid function in gimple call");
3320 debug_generic_stmt (fn);
3321 return true;
3322 }
3323
3324 if (fn
3325 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3326 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3327 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3328 {
3329 error ("non-function in gimple call");
3330 return true;
3331 }
3332
3333 fndecl = gimple_call_fndecl (stmt);
3334 if (fndecl
3335 && TREE_CODE (fndecl) == FUNCTION_DECL
3336 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3337 && !DECL_PURE_P (fndecl)
3338 && !TREE_READONLY (fndecl))
3339 {
3340 error ("invalid pure const state for function");
3341 return true;
3342 }
3343
3344 tree lhs = gimple_call_lhs (stmt);
3345 if (lhs
3346 && (!is_gimple_lvalue (lhs)
3347 || verify_types_in_gimple_reference (lhs, true)))
3348 {
3349 error ("invalid LHS in gimple call");
3350 return true;
3351 }
3352
3353 if (gimple_call_ctrl_altering_p (stmt)
3354 && gimple_call_noreturn_p (stmt)
3355 && should_remove_lhs_p (lhs))
3356 {
3357 error ("LHS in %<noreturn%> call");
3358 return true;
3359 }
3360
3361 fntype = gimple_call_fntype (stmt);
3362 if (fntype
3363 && lhs
3364 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3365 /* ??? At least C++ misses conversions at assignments from
3366 void * call results.
3367 For now simply allow arbitrary pointer type conversions. */
3368 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3369 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3370 {
3371 error ("invalid conversion in gimple call");
3372 debug_generic_stmt (TREE_TYPE (lhs));
3373 debug_generic_stmt (TREE_TYPE (fntype));
3374 return true;
3375 }
3376
3377 if (gimple_call_chain (stmt)
3378 && !is_gimple_val (gimple_call_chain (stmt)))
3379 {
3380 error ("invalid static chain in gimple call");
3381 debug_generic_stmt (gimple_call_chain (stmt));
3382 return true;
3383 }
3384
3385 /* If there is a static chain argument, the call should either be
3386 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3387 if (gimple_call_chain (stmt)
3388 && fndecl
3389 && !DECL_STATIC_CHAIN (fndecl))
3390 {
3391 error ("static chain with function that doesn%'t use one");
3392 return true;
3393 }
3394
3395 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3396 {
3397 switch (DECL_FUNCTION_CODE (fndecl))
3398 {
3399 case BUILT_IN_UNREACHABLE:
3400 case BUILT_IN_TRAP:
3401 if (gimple_call_num_args (stmt) > 0)
3402 {
3403 /* Built-in unreachable with parameters might not be caught by
3404 undefined behavior sanitizer. Front-ends do check that users do not
3405 call them that way, but we also produce calls to
3406 __builtin_unreachable internally, for example when IPA figures
3407 out a call cannot happen in a legal program. In such cases,
3408 we must make sure arguments are stripped off. */
3409 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3410 "with arguments");
3411 return true;
3412 }
3413 break;
3414 default:
3415 break;
3416 }
3417 }
3418
3419 /* ??? The C frontend passes unpromoted arguments in case it
3420 didn't see a function declaration before the call. So for now
3421 leave the call arguments mostly unverified. Once we gimplify
3422 unit-at-a-time we have a chance to fix this. */
3423
3424 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3425 {
3426 tree arg = gimple_call_arg (stmt, i);
3427 if ((is_gimple_reg_type (TREE_TYPE (arg))
3428 && !is_gimple_val (arg))
3429 || (!is_gimple_reg_type (TREE_TYPE (arg))
3430 && !is_gimple_lvalue (arg)))
3431 {
3432 error ("invalid argument to gimple call");
3433 debug_generic_expr (arg);
3434 return true;
3435 }
3436 }
3437
3438 return false;
3439 }
3440
3441 /* Verifies the gimple comparison with the result type TYPE and
3442 the operands OP0 and OP1; the comparison code is CODE. */
3443
3444 static bool
3445 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3446 {
3447 tree op0_type = TREE_TYPE (op0);
3448 tree op1_type = TREE_TYPE (op1);
3449
3450 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3451 {
3452 error ("invalid operands in gimple comparison");
3453 return true;
3454 }
3455
3456 /* For comparisons we do not record the operations type, i.e. the
3457 effective type the comparison is carried out in. Instead
3458 we require that either the first operand is trivially
3459 convertible into the second, or the other way around.
3460 Because we special-case pointers to void we allow
3461 comparisons of pointers with the same mode as well. */
3462 if (!useless_type_conversion_p (op0_type, op1_type)
3463 && !useless_type_conversion_p (op1_type, op0_type)
3464 && (!POINTER_TYPE_P (op0_type)
3465 || !POINTER_TYPE_P (op1_type)
3466 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3467 {
3468 error ("mismatching comparison operand types");
3469 debug_generic_expr (op0_type);
3470 debug_generic_expr (op1_type);
3471 return true;
3472 }
3473
3474 /* The resulting type of a comparison may be an effective boolean type. */
3475 if (INTEGRAL_TYPE_P (type)
3476 && (TREE_CODE (type) == BOOLEAN_TYPE
3477 || TYPE_PRECISION (type) == 1))
3478 {
3479 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3480 || TREE_CODE (op1_type) == VECTOR_TYPE)
3481 && code != EQ_EXPR && code != NE_EXPR
3482 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3483 && !VECTOR_INTEGER_TYPE_P (op0_type))
3484 {
3485 error ("unsupported operation or type for vector comparison"
3486 " returning a boolean");
3487 debug_generic_expr (op0_type);
3488 debug_generic_expr (op1_type);
3489 return true;
3490 }
3491 }
3492 /* Or a boolean vector type with the same element count
3493 as the comparison operand types. */
3494 else if (TREE_CODE (type) == VECTOR_TYPE
3495 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3496 {
3497 if (TREE_CODE (op0_type) != VECTOR_TYPE
3498 || TREE_CODE (op1_type) != VECTOR_TYPE)
3499 {
3500 error ("non-vector operands in vector comparison");
3501 debug_generic_expr (op0_type);
3502 debug_generic_expr (op1_type);
3503 return true;
3504 }
3505
3506 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3507 TYPE_VECTOR_SUBPARTS (op0_type)))
3508 {
3509 error ("invalid vector comparison resulting type");
3510 debug_generic_expr (type);
3511 return true;
3512 }
3513 }
3514 else
3515 {
3516 error ("bogus comparison result type");
3517 debug_generic_expr (type);
3518 return true;
3519 }
3520
3521 return false;
3522 }
3523
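/* Illustration with hypothetical names: _1 = x_2 < y_3 is accepted
   when _1 has a boolean (or single-bit integral) type for scalar
   operands, and when _1 is a boolean vector with the same element
   count as x_2 and y_3 for vector operands; an ordinary wide integer
   result for a vector comparison would be rejected above.  */
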
3524 /* Verify a gimple assignment statement STMT with a unary rhs.
3525 Returns true if anything is wrong. */
3526
3527 static bool
3528 verify_gimple_assign_unary (gassign *stmt)
3529 {
3530 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3531 tree lhs = gimple_assign_lhs (stmt);
3532 tree lhs_type = TREE_TYPE (lhs);
3533 tree rhs1 = gimple_assign_rhs1 (stmt);
3534 tree rhs1_type = TREE_TYPE (rhs1);
3535
3536 if (!is_gimple_reg (lhs))
3537 {
3538 error ("non-register as LHS of unary operation");
3539 return true;
3540 }
3541
3542 if (!is_gimple_val (rhs1))
3543 {
3544 error ("invalid operand in unary operation");
3545 return true;
3546 }
3547
3548 const char* const code_name = get_tree_code_name (rhs_code);
3549
3550 /* First handle conversions. */
3551 switch (rhs_code)
3552 {
3553 CASE_CONVERT:
3554 {
3555 /* Allow conversions from pointer type to integral type only if
3556 there is no sign or zero extension involved.
3557 For targets where the precision of ptrofftype doesn't match that
3558 of pointers we need to allow arbitrary conversions to ptrofftype. */
3559 if ((POINTER_TYPE_P (lhs_type)
3560 && INTEGRAL_TYPE_P (rhs1_type))
3561 || (POINTER_TYPE_P (rhs1_type)
3562 && INTEGRAL_TYPE_P (lhs_type)
3563 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3564 || ptrofftype_p (lhs_type))))
3565 return false;
3566
3567 /* Allow conversion from integral to offset type and vice versa. */
3568 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3569 && INTEGRAL_TYPE_P (rhs1_type))
3570 || (INTEGRAL_TYPE_P (lhs_type)
3571 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3572 return false;
3573
3574 /* Otherwise assert we are converting between types of the
3575 same kind. */
3576 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3577 {
3578 error ("invalid types in nop conversion");
3579 debug_generic_expr (lhs_type);
3580 debug_generic_expr (rhs1_type);
3581 return true;
3582 }
3583
3584 return false;
3585 }
3586
3587 case ADDR_SPACE_CONVERT_EXPR:
3588 {
3589 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3590 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3591 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3592 {
3593 error ("invalid types in address space conversion");
3594 debug_generic_expr (lhs_type);
3595 debug_generic_expr (rhs1_type);
3596 return true;
3597 }
3598
3599 return false;
3600 }
3601
3602 case FIXED_CONVERT_EXPR:
3603 {
3604 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3605 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3606 {
3607 error ("invalid types in fixed-point conversion");
3608 debug_generic_expr (lhs_type);
3609 debug_generic_expr (rhs1_type);
3610 return true;
3611 }
3612
3613 return false;
3614 }
3615
3616 case FLOAT_EXPR:
3617 {
3618 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3619 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3620 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3621 {
3622 error ("invalid types in conversion to floating-point");
3623 debug_generic_expr (lhs_type);
3624 debug_generic_expr (rhs1_type);
3625 return true;
3626 }
3627
3628 return false;
3629 }
3630
3631 case FIX_TRUNC_EXPR:
3632 {
3633 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3634 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3635 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3636 {
3637 error ("invalid types in conversion to integer");
3638 debug_generic_expr (lhs_type);
3639 debug_generic_expr (rhs1_type);
3640 return true;
3641 }
3642
3643 return false;
3644 }
3645
3646 case VEC_UNPACK_HI_EXPR:
3647 case VEC_UNPACK_LO_EXPR:
3648 case VEC_UNPACK_FLOAT_HI_EXPR:
3649 case VEC_UNPACK_FLOAT_LO_EXPR:
3650 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3651 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3652 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3653 || TREE_CODE (lhs_type) != VECTOR_TYPE
3654 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3655 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3656 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3657 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3658 || ((rhs_code == VEC_UNPACK_HI_EXPR
3659 || rhs_code == VEC_UNPACK_LO_EXPR)
3660 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3661 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3662 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3663 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3664 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3665 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3666 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3667 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3668 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3669 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3670 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3671 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3672 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3673 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3674 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3675 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3676 {
3677 error ("type mismatch in %qs expression", code_name);
3678 debug_generic_expr (lhs_type);
3679 debug_generic_expr (rhs1_type);
3680 return true;
3681 }
3682
3683 return false;
3684
3685 case NEGATE_EXPR:
3686 case ABS_EXPR:
3687 case BIT_NOT_EXPR:
3688 case PAREN_EXPR:
3689 case CONJ_EXPR:
3690 break;
3691
3692 case ABSU_EXPR:
3693 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3694 || !TYPE_UNSIGNED (lhs_type)
3695 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3696 || TYPE_UNSIGNED (rhs1_type)
3697 || element_precision (lhs_type) != element_precision (rhs1_type))
3698 {
3699 error ("invalid types for %qs", code_name);
3700 debug_generic_expr (lhs_type);
3701 debug_generic_expr (rhs1_type);
3702 return true;
3703 }
3704 return false;
3705
3706 case VEC_DUPLICATE_EXPR:
3707 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3708 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3709 {
3710 error ("%qs should be from a scalar to a like vector", code_name);
3711 debug_generic_expr (lhs_type);
3712 debug_generic_expr (rhs1_type);
3713 return true;
3714 }
3715 return false;
3716
3717 default:
3718 gcc_unreachable ();
3719 }
3720
3721 /* For the remaining codes assert there is no conversion involved. */
3722 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3723 {
3724 error ("non-trivial conversion in unary operation");
3725 debug_generic_expr (lhs_type);
3726 debug_generic_expr (rhs1_type);
3727 return true;
3728 }
3729
3730 return false;
3731 }
3732
3733 /* Verify a gimple assignment statement STMT with a binary rhs.
3734 Returns true if anything is wrong. */
3735
3736 static bool
3737 verify_gimple_assign_binary (gassign *stmt)
3738 {
3739 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3740 tree lhs = gimple_assign_lhs (stmt);
3741 tree lhs_type = TREE_TYPE (lhs);
3742 tree rhs1 = gimple_assign_rhs1 (stmt);
3743 tree rhs1_type = TREE_TYPE (rhs1);
3744 tree rhs2 = gimple_assign_rhs2 (stmt);
3745 tree rhs2_type = TREE_TYPE (rhs2);
3746
3747 if (!is_gimple_reg (lhs))
3748 {
3749 error ("non-register as LHS of binary operation");
3750 return true;
3751 }
3752
3753 if (!is_gimple_val (rhs1)
3754 || !is_gimple_val (rhs2))
3755 {
3756 error ("invalid operands in binary operation");
3757 return true;
3758 }
3759
3760 const char* const code_name = get_tree_code_name (rhs_code);
3761
3762 /* First handle operations that involve different types. */
3763 switch (rhs_code)
3764 {
3765 case COMPLEX_EXPR:
3766 {
3767 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3768 || !(INTEGRAL_TYPE_P (rhs1_type)
3769 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3770 || !(INTEGRAL_TYPE_P (rhs2_type)
3771 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3772 {
3773 error ("type mismatch in %qs", code_name);
3774 debug_generic_expr (lhs_type);
3775 debug_generic_expr (rhs1_type);
3776 debug_generic_expr (rhs2_type);
3777 return true;
3778 }
3779
3780 return false;
3781 }
3782
3783 case LSHIFT_EXPR:
3784 case RSHIFT_EXPR:
3785 case LROTATE_EXPR:
3786 case RROTATE_EXPR:
3787 {
3788 /* Shifts and rotates are ok on integral types, fixed point
3789 types and integer vector types. */
3790 if ((!INTEGRAL_TYPE_P (rhs1_type)
3791 && !FIXED_POINT_TYPE_P (rhs1_type)
3792 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3793 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3794 || (!INTEGRAL_TYPE_P (rhs2_type)
3795 /* Vector shifts of vectors are also ok. */
3796 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3797 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3798 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3799 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3800 || !useless_type_conversion_p (lhs_type, rhs1_type))
3801 {
3802 error ("type mismatch in %qs", code_name);
3803 debug_generic_expr (lhs_type);
3804 debug_generic_expr (rhs1_type);
3805 debug_generic_expr (rhs2_type);
3806 return true;
3807 }
3808
3809 return false;
3810 }
3811
3812 case WIDEN_LSHIFT_EXPR:
3813 {
3814 if (!INTEGRAL_TYPE_P (lhs_type)
3815 || !INTEGRAL_TYPE_P (rhs1_type)
3816 || TREE_CODE (rhs2) != INTEGER_CST
3817 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3818 {
3819 error ("type mismatch in %qs", code_name);
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs1_type);
3822 debug_generic_expr (rhs2_type);
3823 return true;
3824 }
3825
3826 return false;
3827 }
3828
3829 case VEC_WIDEN_LSHIFT_HI_EXPR:
3830 case VEC_WIDEN_LSHIFT_LO_EXPR:
3831 {
3832 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3833 || TREE_CODE (lhs_type) != VECTOR_TYPE
3834 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3835 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3836 || TREE_CODE (rhs2) != INTEGER_CST
3837 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3838 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3839 {
3840 error ("type mismatch in %qs", code_name);
3841 debug_generic_expr (lhs_type);
3842 debug_generic_expr (rhs1_type);
3843 debug_generic_expr (rhs2_type);
3844 return true;
3845 }
3846
3847 return false;
3848 }
3849
3850 case PLUS_EXPR:
3851 case MINUS_EXPR:
3852 {
3853 tree lhs_etype = lhs_type;
3854 tree rhs1_etype = rhs1_type;
3855 tree rhs2_etype = rhs2_type;
3856 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3857 {
3858 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3859 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3860 {
3861 error ("invalid non-vector operands to %qs", code_name);
3862 return true;
3863 }
3864 lhs_etype = TREE_TYPE (lhs_type);
3865 rhs1_etype = TREE_TYPE (rhs1_type);
3866 rhs2_etype = TREE_TYPE (rhs2_type);
3867 }
3868 if (POINTER_TYPE_P (lhs_etype)
3869 || POINTER_TYPE_P (rhs1_etype)
3870 || POINTER_TYPE_P (rhs2_etype))
3871 {
3872 error ("invalid (pointer) operands %qs", code_name);
3873 return true;
3874 }
3875
3876 /* Continue with generic binary expression handling. */
3877 break;
3878 }
3879
3880 case POINTER_PLUS_EXPR:
3881 {
3882 if (!POINTER_TYPE_P (rhs1_type)
3883 || !useless_type_conversion_p (lhs_type, rhs1_type)
3884 || !ptrofftype_p (rhs2_type))
3885 {
3886 error ("type mismatch in %qs", code_name);
3887 debug_generic_stmt (lhs_type);
3888 debug_generic_stmt (rhs1_type);
3889 debug_generic_stmt (rhs2_type);
3890 return true;
3891 }
3892
3893 return false;
3894 }
3895
3896 case POINTER_DIFF_EXPR:
3897 {
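/* For example, the C expression p - q, where p and q point to the
   same type, is lowered to a POINTER_DIFF_EXPR whose result is the
   signed integer type of pointer precision (ptrdiff_t at the source
   level). */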
3898 if (!POINTER_TYPE_P (rhs1_type)
3899 || !POINTER_TYPE_P (rhs2_type)
3900 /* Because we special-case pointers to void, we allow the difference
3901 of arbitrary pointers with the same mode. */
3902 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3903 || TREE_CODE (lhs_type) != INTEGER_TYPE
3904 || TYPE_UNSIGNED (lhs_type)
3905 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3906 {
3907 error ("type mismatch in %qs", code_name);
3908 debug_generic_stmt (lhs_type);
3909 debug_generic_stmt (rhs1_type);
3910 debug_generic_stmt (rhs2_type);
3911 return true;
3912 }
3913
3914 return false;
3915 }
3916
3917 case TRUTH_ANDIF_EXPR:
3918 case TRUTH_ORIF_EXPR:
3919 case TRUTH_AND_EXPR:
3920 case TRUTH_OR_EXPR:
3921 case TRUTH_XOR_EXPR:
3922
3923 gcc_unreachable ();
3924
3925 case LT_EXPR:
3926 case LE_EXPR:
3927 case GT_EXPR:
3928 case GE_EXPR:
3929 case EQ_EXPR:
3930 case NE_EXPR:
3931 case UNORDERED_EXPR:
3932 case ORDERED_EXPR:
3933 case UNLT_EXPR:
3934 case UNLE_EXPR:
3935 case UNGT_EXPR:
3936 case UNGE_EXPR:
3937 case UNEQ_EXPR:
3938 case LTGT_EXPR:
3939 /* Comparisons are also binary, but the result type is not
3940 connected to the operand types. */
3941 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3942
3943 case WIDEN_MULT_EXPR:
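/* E.g. with 32-bit int operands a_1 and b_2, res_3 = a_1 w* b_2
   produces a 64-bit result; the checks below require equal operand
   precisions and a result of at least twice that precision. */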
3944 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3945 return true;
3946 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3947 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3948
3949 case WIDEN_SUM_EXPR:
3950 {
3951 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3952 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3953 && ((!INTEGRAL_TYPE_P (rhs1_type)
3954 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3955 || (!INTEGRAL_TYPE_P (lhs_type)
3956 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3957 || !useless_type_conversion_p (lhs_type, rhs2_type)
3958 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3959 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3960 {
3961 error ("type mismatch in %qs", code_name);
3962 debug_generic_expr (lhs_type);
3963 debug_generic_expr (rhs1_type);
3964 debug_generic_expr (rhs2_type);
3965 return true;
3966 }
3967 return false;
3968 }
3969
3970 case VEC_WIDEN_MULT_HI_EXPR:
3971 case VEC_WIDEN_MULT_LO_EXPR:
3972 case VEC_WIDEN_MULT_EVEN_EXPR:
3973 case VEC_WIDEN_MULT_ODD_EXPR:
3974 {
3975 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3976 || TREE_CODE (lhs_type) != VECTOR_TYPE
3977 || !types_compatible_p (rhs1_type, rhs2_type)
3978 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3979 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3980 {
3981 error ("type mismatch in %qs", code_name);
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 debug_generic_expr (rhs2_type);
3985 return true;
3986 }
3987 return false;
3988 }
3989
3990 case VEC_PACK_TRUNC_EXPR:
3991 /* ??? We currently use VEC_PACK_TRUNC_EXPR simply to concatenate
3992 vector boolean types. */
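/* E.g. two vector(4) <signed-boolean:1> operands can be concatenated
   into a single vector(8) <signed-boolean:1> result; the element type
   is unchanged, so only the subpart counts are checked here. */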
3993 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
3994 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3995 && types_compatible_p (rhs1_type, rhs2_type)
3996 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3997 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
3998 return false;
3999
4000 /* Fallthru. */
4001 case VEC_PACK_SAT_EXPR:
4002 case VEC_PACK_FIX_TRUNC_EXPR:
4003 {
4004 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4005 || TREE_CODE (lhs_type) != VECTOR_TYPE
4006 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4007 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4008 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4009 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4010 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4011 || !types_compatible_p (rhs1_type, rhs2_type)
4012 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4013 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4014 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4015 TYPE_VECTOR_SUBPARTS (lhs_type)))
4016 {
4017 error ("type mismatch in %qs", code_name);
4018 debug_generic_expr (lhs_type);
4019 debug_generic_expr (rhs1_type);
4020 debug_generic_expr (rhs2_type);
4021 return true;
4022 }
4023
4024 return false;
4025 }
4026
4027 case VEC_PACK_FLOAT_EXPR:
4028 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4029 || TREE_CODE (lhs_type) != VECTOR_TYPE
4030 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4031 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4032 || !types_compatible_p (rhs1_type, rhs2_type)
4033 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4034 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4035 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4036 TYPE_VECTOR_SUBPARTS (lhs_type)))
4037 {
4038 error ("type mismatch in %qs", code_name);
4039 debug_generic_expr (lhs_type);
4040 debug_generic_expr (rhs1_type);
4041 debug_generic_expr (rhs2_type);
4042 return true;
4043 }
4044
4045 return false;
4046
4047 case MULT_EXPR:
4048 case MULT_HIGHPART_EXPR:
4049 case TRUNC_DIV_EXPR:
4050 case CEIL_DIV_EXPR:
4051 case FLOOR_DIV_EXPR:
4052 case ROUND_DIV_EXPR:
4053 case TRUNC_MOD_EXPR:
4054 case CEIL_MOD_EXPR:
4055 case FLOOR_MOD_EXPR:
4056 case ROUND_MOD_EXPR:
4057 case RDIV_EXPR:
4058 case EXACT_DIV_EXPR:
4059 case MIN_EXPR:
4060 case MAX_EXPR:
4061 case BIT_IOR_EXPR:
4062 case BIT_XOR_EXPR:
4063 case BIT_AND_EXPR:
4064 /* Continue with generic binary expression handling. */
4065 break;
4066
4067 case VEC_SERIES_EXPR:
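/* VEC_SERIES_EXPR <base_1, step_2> builds the linear series
   { base_1, base_1 + step_2, base_1 + 2 * step_2, ... }, so both
   operands must have the element type of the result vector. */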
4068 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4069 {
4070 error ("type mismatch in %qs", code_name);
4071 debug_generic_expr (rhs1_type);
4072 debug_generic_expr (rhs2_type);
4073 return true;
4074 }
4075 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4076 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4077 {
4078 error ("vector type expected in %qs", code_name);
4079 debug_generic_expr (lhs_type);
4080 return true;
4081 }
4082 return false;
4083
4084 default:
4085 gcc_unreachable ();
4086 }
4087
4088 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4089 || !useless_type_conversion_p (lhs_type, rhs2_type))
4090 {
4091 error ("type mismatch in binary expression");
4092 debug_generic_stmt (lhs_type);
4093 debug_generic_stmt (rhs1_type);
4094 debug_generic_stmt (rhs2_type);
4095 return true;
4096 }
4097
4098 return false;
4099 }
4100
4101 /* Verify a gimple assignment statement STMT with a ternary rhs.
4102 Returns true if anything is wrong. */
4103
4104 static bool
4105 verify_gimple_assign_ternary (gassign *stmt)
4106 {
4107 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4108 tree lhs = gimple_assign_lhs (stmt);
4109 tree lhs_type = TREE_TYPE (lhs);
4110 tree rhs1 = gimple_assign_rhs1 (stmt);
4111 tree rhs1_type = TREE_TYPE (rhs1);
4112 tree rhs2 = gimple_assign_rhs2 (stmt);
4113 tree rhs2_type = TREE_TYPE (rhs2);
4114 tree rhs3 = gimple_assign_rhs3 (stmt);
4115 tree rhs3_type = TREE_TYPE (rhs3);
4116
4117 if (!is_gimple_reg (lhs))
4118 {
4119 error ("non-register as LHS of ternary operation");
4120 return true;
4121 }
4122
4123 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4124 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4125 || !is_gimple_val (rhs2)
4126 || !is_gimple_val (rhs3))
4127 {
4128 error ("invalid operands in ternary operation");
4129 return true;
4130 }
4131
4132 const char* const code_name = get_tree_code_name (rhs_code);
4133
4134 /* First handle operations that involve different types. */
4135 switch (rhs_code)
4136 {
4137 case WIDEN_MULT_PLUS_EXPR:
4138 case WIDEN_MULT_MINUS_EXPR:
4139 if ((!INTEGRAL_TYPE_P (rhs1_type)
4140 && !FIXED_POINT_TYPE_P (rhs1_type))
4141 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4142 || !useless_type_conversion_p (lhs_type, rhs3_type)
4143 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4144 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4145 {
4146 error ("type mismatch in %qs", code_name);
4147 debug_generic_expr (lhs_type);
4148 debug_generic_expr (rhs1_type);
4149 debug_generic_expr (rhs2_type);
4150 debug_generic_expr (rhs3_type);
4151 return true;
4152 }
4153 break;
4154
4155 case VEC_COND_EXPR:
4156 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4157 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4158 TYPE_VECTOR_SUBPARTS (lhs_type)))
4159 {
4160 error ("the first argument of a %qs must be of a "
4161 "boolean vector type of the same number of elements "
4162 "as the result", code_name);
4163 debug_generic_expr (lhs_type);
4164 debug_generic_expr (rhs1_type);
4165 return true;
4166 }
4167 /* Fallthrough. */
4168 case COND_EXPR:
4169 if (!is_gimple_val (rhs1)
4170 && verify_gimple_comparison (TREE_TYPE (rhs1),
4171 TREE_OPERAND (rhs1, 0),
4172 TREE_OPERAND (rhs1, 1),
4173 TREE_CODE (rhs1)))
4174 return true;
4175 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4176 || !useless_type_conversion_p (lhs_type, rhs3_type))
4177 {
4178 error ("type mismatch in %qs", code_name);
4179 debug_generic_expr (lhs_type);
4180 debug_generic_expr (rhs2_type);
4181 debug_generic_expr (rhs3_type);
4182 return true;
4183 }
4184 break;
4185
4186 case VEC_PERM_EXPR:
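/* E.g. v_4 = VEC_PERM_EXPR <a_1, b_2, sel_3> selects elements from
   the virtual concatenation of a_1 and b_2, using each element of
   sel_3, taken modulo twice the element count, as an index. */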
4187 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4188 || !useless_type_conversion_p (lhs_type, rhs2_type))
4189 {
4190 error ("type mismatch in %qs", code_name);
4191 debug_generic_expr (lhs_type);
4192 debug_generic_expr (rhs1_type);
4193 debug_generic_expr (rhs2_type);
4194 debug_generic_expr (rhs3_type);
4195 return true;
4196 }
4197
4198 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4199 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4200 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4201 {
4202 error ("vector types expected in %qs", code_name);
4203 debug_generic_expr (lhs_type);
4204 debug_generic_expr (rhs1_type);
4205 debug_generic_expr (rhs2_type);
4206 debug_generic_expr (rhs3_type);
4207 return true;
4208 }
4209
4210 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4211 TYPE_VECTOR_SUBPARTS (rhs2_type))
4212 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4213 TYPE_VECTOR_SUBPARTS (rhs3_type))
4214 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4215 TYPE_VECTOR_SUBPARTS (lhs_type)))
4216 {
4217 error ("vectors with different element number found in %qs",
4218 code_name);
4219 debug_generic_expr (lhs_type);
4220 debug_generic_expr (rhs1_type);
4221 debug_generic_expr (rhs2_type);
4222 debug_generic_expr (rhs3_type);
4223 return true;
4224 }
4225
4226 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4227 || (TREE_CODE (rhs3) != VECTOR_CST
4228 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4229 (TREE_TYPE (rhs3_type)))
4230 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4231 (TREE_TYPE (rhs1_type))))))
4232 {
4233 error ("invalid mask type in %qs", code_name);
4234 debug_generic_expr (lhs_type);
4235 debug_generic_expr (rhs1_type);
4236 debug_generic_expr (rhs2_type);
4237 debug_generic_expr (rhs3_type);
4238 return true;
4239 }
4240
4241 return false;
4242
4243 case SAD_EXPR:
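/* Sum of absolute differences, e.g. acc_2 = SAD_EXPR <a_1, b_1,
   acc_0>: |a_1[i] - b_1[i]| is accumulated into acc_0, so as for
   DOT_PROD_EXPR the accumulator elements must be at least twice as
   wide as the input elements. */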
4244 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4245 || !useless_type_conversion_p (lhs_type, rhs3_type)
4246 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4247 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4248 {
4249 error ("type mismatch in %qs", code_name);
4250 debug_generic_expr (lhs_type);
4251 debug_generic_expr (rhs1_type);
4252 debug_generic_expr (rhs2_type);
4253 debug_generic_expr (rhs3_type);
4254 return true;
4255 }
4256
4257 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4258 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4259 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4260 {
4261 error ("vector types expected in %qs", code_name);
4262 debug_generic_expr (lhs_type);
4263 debug_generic_expr (rhs1_type);
4264 debug_generic_expr (rhs2_type);
4265 debug_generic_expr (rhs3_type);
4266 return true;
4267 }
4268
4269 return false;
4270
4271 case BIT_INSERT_EXPR:
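/* E.g. v_2 = BIT_INSERT_EXPR <v_1, s_3, 32> yields v_1 with the
   element at bit position 32 replaced by s_3. Three forms are
   accepted: a bit-field insertion into an integer, a single-element
   insertion into a vector, and an aligned sub-vector insertion. */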
4272 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4273 {
4274 error ("type mismatch in %qs", code_name);
4275 debug_generic_expr (lhs_type);
4276 debug_generic_expr (rhs1_type);
4277 return true;
4278 }
4279 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4280 && INTEGRAL_TYPE_P (rhs2_type))
4281 /* Vector element insert. */
4282 || (VECTOR_TYPE_P (rhs1_type)
4283 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4284 /* Aligned sub-vector insert. */
4285 || (VECTOR_TYPE_P (rhs1_type)
4286 && VECTOR_TYPE_P (rhs2_type)
4287 && types_compatible_p (TREE_TYPE (rhs1_type),
4288 TREE_TYPE (rhs2_type))
4289 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4290 TYPE_VECTOR_SUBPARTS (rhs2_type))
4291 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4292 {
4293 error ("not allowed type combination in %qs", code_name);
4294 debug_generic_expr (rhs1_type);
4295 debug_generic_expr (rhs2_type);
4296 return true;
4297 }
4298 if (! tree_fits_uhwi_p (rhs3)
4299 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4300 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4301 {
4302 error ("invalid position or size in %qs", code_name);
4303 return true;
4304 }
4305 if (INTEGRAL_TYPE_P (rhs1_type)
4306 && !type_has_mode_precision_p (rhs1_type))
4307 {
4308 error ("%qs into non-mode-precision operand", code_name);
4309 return true;
4310 }
4311 if (INTEGRAL_TYPE_P (rhs1_type))
4312 {
4313 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4314 if (bitpos >= TYPE_PRECISION (rhs1_type)
4315 || (bitpos + TYPE_PRECISION (rhs2_type)
4316 > TYPE_PRECISION (rhs1_type)))
4317 {
4318 error ("insertion out of range in %qs", code_name);
4319 return true;
4320 }
4321 }
4322 else if (VECTOR_TYPE_P (rhs1_type))
4323 {
4324 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4325 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4326 if (bitpos % bitsize != 0)
4327 {
4328 error ("%qs not at element boundary", code_name);
4329 return true;
4330 }
4331 }
4332 return false;
4333
4334 case DOT_PROD_EXPR:
4335 {
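/* E.g. acc_2 = DOT_PROD_EXPR <a_1, b_1, acc_0> multiplies the
   elements of a_1 and b_1 pairwise, widens the products, and sums
   them into the accumulator, whose elements must therefore be at
   least twice as wide as the input elements. */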
4336 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4337 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4338 && ((!INTEGRAL_TYPE_P (rhs1_type)
4339 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4340 || (!INTEGRAL_TYPE_P (lhs_type)
4341 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4342 || !types_compatible_p (rhs1_type, rhs2_type)
4343 || !useless_type_conversion_p (lhs_type, rhs3_type)
4344 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4345 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4346 {
4347 error ("type mismatch in %qs", code_name);
4348 debug_generic_expr (lhs_type);
4349 debug_generic_expr (rhs1_type);
4350 debug_generic_expr (rhs2_type);
4351 return true;
4352 }
4353 return false;
4354 }
4355
4356 case REALIGN_LOAD_EXPR:
4357 /* FIXME. */
4358 return false;
4359
4360 default:
4361 gcc_unreachable ();
4362 }
4363 return false;
4364 }
4365
4366 /* Verify a gimple assignment statement STMT with a single rhs.
4367 Returns true if anything is wrong. */
4368
4369 static bool
4370 verify_gimple_assign_single (gassign *stmt)
4371 {
4372 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4373 tree lhs = gimple_assign_lhs (stmt);
4374 tree lhs_type = TREE_TYPE (lhs);
4375 tree rhs1 = gimple_assign_rhs1 (stmt);
4376 tree rhs1_type = TREE_TYPE (rhs1);
4377 bool res = false;
4378
4379 const char* const code_name = get_tree_code_name (rhs_code);
4380
4381 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4382 {
4383 error ("non-trivial conversion in %qs", code_name);
4384 debug_generic_expr (lhs_type);
4385 debug_generic_expr (rhs1_type);
4386 return true;
4387 }
4388
4389 if (gimple_clobber_p (stmt)
4390 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4391 {
4392 error ("%qs LHS in clobber statement",
4393 get_tree_code_name (TREE_CODE (lhs)));
4394 debug_generic_expr (lhs);
4395 return true;
4396 }
4397
4398 if (handled_component_p (lhs)
4399 || TREE_CODE (lhs) == MEM_REF
4400 || TREE_CODE (lhs) == TARGET_MEM_REF)
4401 res |= verify_types_in_gimple_reference (lhs, true);
4402
4403 /* Special codes we cannot handle via their class. */
4404 switch (rhs_code)
4405 {
4406 case ADDR_EXPR:
4407 {
4408 tree op = TREE_OPERAND (rhs1, 0);
4409 if (!is_gimple_addressable (op))
4410 {
4411 error ("invalid operand in %qs", code_name);
4412 return true;
4413 }
4414
4415 /* Technically there is no longer a need for matching types, but
4416 gimple hygiene asks for this check. In LTO we can end up
4417 combining incompatible units and thus end up with addresses
4418 of globals that change their type to a common one. */
4419 if (!in_lto_p
4420 && !types_compatible_p (TREE_TYPE (op),
4421 TREE_TYPE (TREE_TYPE (rhs1)))
4422 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4423 TREE_TYPE (op)))
4424 {
4425 error ("type mismatch in %qs", code_name);
4426 debug_generic_stmt (TREE_TYPE (rhs1));
4427 debug_generic_stmt (TREE_TYPE (op));
4428 return true;
4429 }
4430
4431 return (verify_address (rhs1, true)
4432 || verify_types_in_gimple_reference (op, true));
4433 }
4434
4435 /* tcc_reference */
4436 case INDIRECT_REF:
4437 error ("%qs in gimple IL", code_name);
4438 return true;
4439
4440 case COMPONENT_REF:
4441 case BIT_FIELD_REF:
4442 case ARRAY_REF:
4443 case ARRAY_RANGE_REF:
4444 case VIEW_CONVERT_EXPR:
4445 case REALPART_EXPR:
4446 case IMAGPART_EXPR:
4447 case TARGET_MEM_REF:
4448 case MEM_REF:
4449 if (!is_gimple_reg (lhs)
4450 && is_gimple_reg_type (TREE_TYPE (lhs)))
4451 {
4452 error ("invalid RHS for gimple memory store: %qs", code_name);
4453 debug_generic_stmt (lhs);
4454 debug_generic_stmt (rhs1);
4455 return true;
4456 }
4457 return res || verify_types_in_gimple_reference (rhs1, false);
4458
4459 /* tcc_constant */
4460 case SSA_NAME:
4461 case INTEGER_CST:
4462 case REAL_CST:
4463 case FIXED_CST:
4464 case COMPLEX_CST:
4465 case VECTOR_CST:
4466 case STRING_CST:
4467 return res;
4468
4469 /* tcc_declaration */
4470 case CONST_DECL:
4471 return res;
4472 case VAR_DECL:
4473 case PARM_DECL:
4474 if (!is_gimple_reg (lhs)
4475 && !is_gimple_reg (rhs1)
4476 && is_gimple_reg_type (TREE_TYPE (lhs)))
4477 {
4478 error ("invalid RHS for gimple memory store: %qs", code_name);
4479 debug_generic_stmt (lhs);
4480 debug_generic_stmt (rhs1);
4481 return true;
4482 }
4483 return res;
4484
4485 case CONSTRUCTOR:
4486 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4487 {
4488 unsigned int i;
4489 tree elt_i, elt_v, elt_t = NULL_TREE;
4490
4491 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4492 return res;
4493 /* For vector CONSTRUCTORs we require that either it is an empty
4494 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4495 (then the element count must be correct to cover the whole
4496 outer vector and the index must be NULL on all elements), or it
4497 is a CONSTRUCTOR of scalar elements, where as an exception we
4498 allow a smaller number of elements (assuming zero filling) and
4499 consecutive indexes in addition to NULL indexes (such
4500 CONSTRUCTORs can appear in the IL from FEs). */
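/* E.g. for a vector(4) int LHS, both { 1, 2, 3, 4 } (scalar elements
   with NULL indexes) and a CONSTRUCTOR of two vector(2) int values
   are valid, whereas an element with an out-of-position index such
   as [2] = 5 in the first slot is rejected below. */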
4501 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4502 {
4503 if (elt_t == NULL_TREE)
4504 {
4505 elt_t = TREE_TYPE (elt_v);
4506 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4507 {
4508 tree elt_t = TREE_TYPE (elt_v);
4509 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4510 TREE_TYPE (elt_t)))
4511 {
4512 error ("incorrect type of vector %qs elements",
4513 code_name);
4514 debug_generic_stmt (rhs1);
4515 return true;
4516 }
4517 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4518 * TYPE_VECTOR_SUBPARTS (elt_t),
4519 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4520 {
4521 error ("incorrect number of vector %qs elements",
4522 code_name);
4523 debug_generic_stmt (rhs1);
4524 return true;
4525 }
4526 }
4527 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4528 elt_t))
4529 {
4530 error ("incorrect type of vector %qs elements",
4531 code_name);
4532 debug_generic_stmt (rhs1);
4533 return true;
4534 }
4535 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4536 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4537 {
4538 error ("incorrect number of vector %qs elements",
4539 code_name);
4540 debug_generic_stmt (rhs1);
4541 return true;
4542 }
4543 }
4544 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4545 {
4546 error ("incorrect type of vector CONSTRUCTOR elements");
4547 debug_generic_stmt (rhs1);
4548 return true;
4549 }
4550 if (elt_i != NULL_TREE
4551 && (TREE_CODE (elt_t) == VECTOR_TYPE
4552 || TREE_CODE (elt_i) != INTEGER_CST
4553 || compare_tree_int (elt_i, i) != 0))
4554 {
4555 error ("vector %qs with non-NULL element index",
4556 code_name);
4557 debug_generic_stmt (rhs1);
4558 return true;
4559 }
4560 if (!is_gimple_val (elt_v))
4561 {
4562 error ("vector %qs element is not a GIMPLE value",
4563 code_name);
4564 debug_generic_stmt (rhs1);
4565 return true;
4566 }
4567 }
4568 }
4569 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4570 {
4571 error ("non-vector %qs with elements", code_name);
4572 debug_generic_stmt (rhs1);
4573 return true;
4574 }
4575 return res;
4576
4577 case ASSERT_EXPR:
4578 /* FIXME. */
4579 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4580 if (rhs1 == boolean_false_node)
4581 {
4582 error ("%qs with an always-false condition", code_name);
4583 debug_generic_stmt (rhs1);
4584 return true;
4585 }
4586 break;
4587
4588 case OBJ_TYPE_REF:
4589 case WITH_SIZE_EXPR:
4590 /* FIXME. */
4591 return res;
4592
4593 default:;
4594 }
4595
4596 return res;
4597 }
4598
4599 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4600 is a problem, otherwise false. */
4601
4602 static bool
4603 verify_gimple_assign (gassign *stmt)
4604 {
4605 switch (gimple_assign_rhs_class (stmt))
4606 {
4607 case GIMPLE_SINGLE_RHS:
4608 return verify_gimple_assign_single (stmt);
4609
4610 case GIMPLE_UNARY_RHS:
4611 return verify_gimple_assign_unary (stmt);
4612
4613 case GIMPLE_BINARY_RHS:
4614 return verify_gimple_assign_binary (stmt);
4615
4616 case GIMPLE_TERNARY_RHS:
4617 return verify_gimple_assign_ternary (stmt);
4618
4619 default:
4620 gcc_unreachable ();
4621 }
4622 }
4623
4624 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4625 is a problem, otherwise false. */
4626
4627 static bool
4628 verify_gimple_return (greturn *stmt)
4629 {
4630 tree op = gimple_return_retval (stmt);
4631 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4632
4633 /* We cannot test for present return values as we do not fix up missing
4634 return values from the original source. */
4635 if (op == NULL)
4636 return false;
4637
4638 if (!is_gimple_val (op)
4639 && TREE_CODE (op) != RESULT_DECL)
4640 {
4641 error ("invalid operand in return statement");
4642 debug_generic_stmt (op);
4643 return true;
4644 }
4645
4646 if ((TREE_CODE (op) == RESULT_DECL
4647 && DECL_BY_REFERENCE (op))
4648 || (TREE_CODE (op) == SSA_NAME
4649 && SSA_NAME_VAR (op)
4650 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4651 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4652 op = TREE_TYPE (op);
4653
4654 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4655 {
4656 error ("invalid conversion in return statement");
4657 debug_generic_stmt (restype);
4658 debug_generic_stmt (TREE_TYPE (op));
4659 return true;
4660 }
4661
4662 return false;
4663 }
4664
4665
4666 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4667 is a problem, otherwise false. */
4668
4669 static bool
4670 verify_gimple_goto (ggoto *stmt)
4671 {
4672 tree dest = gimple_goto_dest (stmt);
4673
4674 /* ??? We have two canonical forms of direct goto destinations, a
4675 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4676 if (TREE_CODE (dest) != LABEL_DECL
4677 && (!is_gimple_val (dest)
4678 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4679 {
4680 error ("goto destination is neither a label nor a pointer");
4681 return true;
4682 }
4683
4684 return false;
4685 }
4686
4687 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4688 is a problem, otherwise false. */
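/* A well-formed switch appears in dumps as, e.g.,
     switch (i_1) <default: <L0>, case 0: <L1>, case 3 ... 5: <L2>>
   with the default label first and the remaining case labels sorted
   in increasing order with non-overlapping ranges. */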
4689
4690 static bool
4691 verify_gimple_switch (gswitch *stmt)
4692 {
4693 unsigned int i, n;
4694 tree elt, prev_upper_bound = NULL_TREE;
4695 tree index_type, elt_type = NULL_TREE;
4696
4697 if (!is_gimple_val (gimple_switch_index (stmt)))
4698 {
4699 error ("invalid operand to switch statement");
4700 debug_generic_stmt (gimple_switch_index (stmt));
4701 return true;
4702 }
4703
4704 index_type = TREE_TYPE (gimple_switch_index (stmt));
4705 if (! INTEGRAL_TYPE_P (index_type))
4706 {
4707 error ("non-integral type switch statement");
4708 debug_generic_expr (index_type);
4709 return true;
4710 }
4711
4712 elt = gimple_switch_label (stmt, 0);
4713 if (CASE_LOW (elt) != NULL_TREE
4714 || CASE_HIGH (elt) != NULL_TREE
4715 || CASE_CHAIN (elt) != NULL_TREE)
4716 {
4717 error ("invalid default case label in switch statement");
4718 debug_generic_expr (elt);
4719 return true;
4720 }
4721
4722 n = gimple_switch_num_labels (stmt);
4723 for (i = 1; i < n; i++)
4724 {
4725 elt = gimple_switch_label (stmt, i);
4726
4727 if (CASE_CHAIN (elt))
4728 {
4729 error ("invalid %<CASE_CHAIN%>");
4730 debug_generic_expr (elt);
4731 return true;
4732 }
4733 if (! CASE_LOW (elt))
4734 {
4735 error ("invalid case label in switch statement");
4736 debug_generic_expr (elt);
4737 return true;
4738 }
4739 if (CASE_HIGH (elt)
4740 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4741 {
4742 error ("invalid case range in switch statement");
4743 debug_generic_expr (elt);
4744 return true;
4745 }
4746
4747 if (elt_type)
4748 {
4749 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4750 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4751 {
4752 error ("type mismatch for case label in switch statement");
4753 debug_generic_expr (elt);
4754 return true;
4755 }
4756 }
4757 else
4758 {
4759 elt_type = TREE_TYPE (CASE_LOW (elt));
4760 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4761 {
4762 error ("type precision mismatch in switch statement");
4763 return true;
4764 }
4765 }
4766
4767 if (prev_upper_bound)
4768 {
4769 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4770 {
4771 error ("case labels not sorted in switch statement");
4772 return true;
4773 }
4774 }
4775
4776 prev_upper_bound = CASE_HIGH (elt);
4777 if (! prev_upper_bound)
4778 prev_upper_bound = CASE_LOW (elt);
4779 }
4780
4781 return false;
4782 }
4783
4784 /* Verify a gimple debug statement STMT.
4785 Returns true if anything is wrong. */
4786
4787 static bool
4788 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4789 {
4790 /* There isn't much that could be wrong in a gimple debug stmt. A
4791 gimple debug bind stmt, for example, maps a tree (usually a
4792 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4793 member of an aggregate type) to another tree, which can be an
4794 arbitrary expression. These stmts expand into debug insns and
4795 are converted to debug notes by var-tracking.c. */
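/* E.g. a debug bind stmt appears in dumps as # DEBUG x => a_1 + 1,
   binding the user variable x to the value of a_1 + 1 at this
   point. */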
4796 return false;
4797 }
4798
4799 /* Verify a gimple label statement STMT.
4800 Returns true if anything is wrong. */
4801
4802 static bool
4803 verify_gimple_label (glabel *stmt)
4804 {
4805 tree decl = gimple_label_label (stmt);
4806 int uid;
4807 bool err = false;
4808
4809 if (TREE_CODE (decl) != LABEL_DECL)
4810 return true;
4811 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4812 && DECL_CONTEXT (decl) != current_function_decl)
4813 {
4814 error ("label context is not the current function declaration");
4815 err |= true;
4816 }
4817
4818 uid = LABEL_DECL_UID (decl);
4819 if (cfun->cfg
4820 && (uid == -1
4821 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4822 {
4823 error ("incorrect entry in %<label_to_block_map%>");
4824 err |= true;
4825 }
4826
4827 uid = EH_LANDING_PAD_NR (decl);
4828 if (uid)
4829 {
4830 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4831 if (decl != lp->post_landing_pad)
4832 {
4833 error ("incorrect setting of landing pad number");
4834 err |= true;
4835 }
4836 }
4837
4838 return err;
4839 }
4840
4841 /* Verify a gimple cond statement STMT.
4842 Returns true if anything is wrong. */
4843
4844 static bool
4845 verify_gimple_cond (gcond *stmt)
4846 {
4847 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4848 {
4849 error ("invalid comparison code in gimple cond");
4850 return true;
4851 }
4852 if (!(!gimple_cond_true_label (stmt)
4853 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4854 || !(!gimple_cond_false_label (stmt)
4855 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4856 {
4857 error ("invalid labels in gimple cond");
4858 return true;
4859 }
4860
4861 return verify_gimple_comparison (boolean_type_node,
4862 gimple_cond_lhs (stmt),
4863 gimple_cond_rhs (stmt),
4864 gimple_cond_code (stmt));
4865 }
4866
4867 /* Verify the GIMPLE statement STMT. Returns true if there is an
4868 error, otherwise false. */
4869
4870 static bool
4871 verify_gimple_stmt (gimple *stmt)
4872 {
4873 switch (gimple_code (stmt))
4874 {
4875 case GIMPLE_ASSIGN:
4876 return verify_gimple_assign (as_a <gassign *> (stmt));
4877
4878 case GIMPLE_LABEL:
4879 return verify_gimple_label (as_a <glabel *> (stmt));
4880
4881 case GIMPLE_CALL:
4882 return verify_gimple_call (as_a <gcall *> (stmt));
4883
4884 case GIMPLE_COND:
4885 return verify_gimple_cond (as_a <gcond *> (stmt));
4886
4887 case GIMPLE_GOTO:
4888 return verify_gimple_goto (as_a <ggoto *> (stmt));
4889
4890 case GIMPLE_SWITCH:
4891 return verify_gimple_switch (as_a <gswitch *> (stmt));
4892
4893 case GIMPLE_RETURN:
4894 return verify_gimple_return (as_a <greturn *> (stmt));
4895
4896 case GIMPLE_ASM:
4897 return false;
4898
4899 case GIMPLE_TRANSACTION:
4900 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4901
4902 /* Tuples that do not have tree operands. */
4903 case GIMPLE_NOP:
4904 case GIMPLE_PREDICT:
4905 case GIMPLE_RESX:
4906 case GIMPLE_EH_DISPATCH:
4907 case GIMPLE_EH_MUST_NOT_THROW:
4908 return false;
4909
4910 CASE_GIMPLE_OMP:
4911 /* OpenMP directives are validated by the FE and never operated
4912 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4913 non-gimple expressions when the main index variable has had
4914 its address taken. This does not affect the loop itself
4915 because the header of a GIMPLE_OMP_FOR is merely used to determine
4916 how to set up the parallel iteration. */
4917 return false;
4918
4919 case GIMPLE_DEBUG:
4920 return verify_gimple_debug (stmt);
4921
4922 default:
4923 gcc_unreachable ();
4924 }
4925 }
4926
4927 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4928 and false otherwise. */
4929
4930 static bool
4931 verify_gimple_phi (gphi *phi)
4932 {
4933 bool err = false;
4934 unsigned i;
4935 tree phi_result = gimple_phi_result (phi);
4936 bool virtual_p;
4937
4938 if (!phi_result)
4939 {
4940 error ("invalid %<PHI%> result");
4941 return true;
4942 }
4943
4944 virtual_p = virtual_operand_p (phi_result);
4945 if (TREE_CODE (phi_result) != SSA_NAME
4946 || (virtual_p
4947 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4948 {
4949 error ("invalid %<PHI%> result");
4950 err = true;
4951 }
4952
4953 for (i = 0; i < gimple_phi_num_args (phi); i++)
4954 {
4955 tree t = gimple_phi_arg_def (phi, i);
4956
4957 if (!t)
4958 {
4959 error ("missing %<PHI%> def");
4960 err |= true;
4961 continue;
4962 }
4963 /* Addressable variables do have SSA_NAMEs but they
4964 are not considered gimple values. */
4965 else if ((TREE_CODE (t) == SSA_NAME
4966 && virtual_p != virtual_operand_p (t))
4967 || (virtual_p
4968 && (TREE_CODE (t) != SSA_NAME
4969 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4970 || (!virtual_p
4971 && !is_gimple_val (t)))
4972 {
4973 error ("invalid %<PHI%> argument");
4974 debug_generic_expr (t);
4975 err |= true;
4976 }
4977 #ifdef ENABLE_TYPES_CHECKING
4978 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4979 {
4980 error ("incompatible types in %<PHI%> argument %u", i);
4981 debug_generic_stmt (TREE_TYPE (phi_result));
4982 debug_generic_stmt (TREE_TYPE (t));
4983 err |= true;
4984 }
4985 #endif
4986 }
4987
4988 return err;
4989 }
4990
4991 /* Verify the GIMPLE statements inside the sequence STMTS. */
4992
4993 static bool
4994 verify_gimple_in_seq_2 (gimple_seq stmts)
4995 {
4996 gimple_stmt_iterator ittr;
4997 bool err = false;
4998
4999 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5000 {
5001 gimple *stmt = gsi_stmt (ittr);
5002
5003 switch (gimple_code (stmt))
5004 {
5005 case GIMPLE_BIND:
5006 err |= verify_gimple_in_seq_2 (
5007 gimple_bind_body (as_a <gbind *> (stmt)));
5008 break;
5009
5010 case GIMPLE_TRY:
5011 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5012 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5013 break;
5014
5015 case GIMPLE_EH_FILTER:
5016 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5017 break;
5018
5019 case GIMPLE_EH_ELSE:
5020 {
5021 geh_else *eh_else = as_a <geh_else *> (stmt);
5022 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5023 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5024 }
5025 break;
5026
5027 case GIMPLE_CATCH:
5028 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5029 as_a <gcatch *> (stmt)));
5030 break;
5031
5032 case GIMPLE_TRANSACTION:
5033 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5034 break;
5035
5036 default:
5037 {
5038 bool err2 = verify_gimple_stmt (stmt);
5039 if (err2)
5040 debug_gimple_stmt (stmt);
5041 err |= err2;
5042 }
5043 }
5044 }
5045
5046 return err;
5047 }
5048
5049 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5050 is a problem, otherwise false. */
5051
5052 static bool
5053 verify_gimple_transaction (gtransaction *stmt)
5054 {
5055 tree lab;
5056
5057 lab = gimple_transaction_label_norm (stmt);
5058 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5059 return true;
5060 lab = gimple_transaction_label_uninst (stmt);
5061 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5062 return true;
5063 lab = gimple_transaction_label_over (stmt);
5064 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5065 return true;
5066
5067 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5068 }
5069
5070
5071 /* Verify the GIMPLE statements inside the statement list STMTS. */
5072
5073 DEBUG_FUNCTION void
5074 verify_gimple_in_seq (gimple_seq stmts)
5075 {
5076 timevar_push (TV_TREE_STMT_VERIFY);
5077 if (verify_gimple_in_seq_2 (stmts))
5078 internal_error ("%<verify_gimple%> failed");
5079 timevar_pop (TV_TREE_STMT_VERIFY);
5080 }
5081
5082 /* Return true when T can be shared. */
5083
5084 static bool
5085 tree_node_can_be_shared (tree t)
5086 {
5087 if (IS_TYPE_OR_DECL_P (t)
5088 || TREE_CODE (t) == SSA_NAME
5089 || TREE_CODE (t) == IDENTIFIER_NODE
5090 || TREE_CODE (t) == CASE_LABEL_EXPR
5091 || is_gimple_min_invariant (t))
5092 return true;
5093
5094 if (t == error_mark_node)
5095 return true;
5096
5097 return false;
5098 }
5099
5100 /* Called via walk_tree. Verify tree sharing. */
5101
5102 static tree
5103 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5104 {
5105 hash_set<void *> *visited = (hash_set<void *> *) data;
5106
5107 if (tree_node_can_be_shared (*tp))
5108 {
5109 *walk_subtrees = false;
5110 return NULL;
5111 }
5112
5113 if (visited->add (*tp))
5114 return *tp;
5115
5116 return NULL;
5117 }
5118
5119 /* Called via walk_gimple_stmt. Verify tree sharing. */
5120
5121 static tree
5122 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5123 {
5124 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5125 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5126 }
5127
5128 static bool eh_error_found;
5129 bool
5130 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5131 hash_set<gimple *> *visited)
5132 {
5133 if (!visited->contains (stmt))
5134 {
5135 error ("dead statement in EH table");
5136 debug_gimple_stmt (stmt);
5137 eh_error_found = true;
5138 }
5139 return true;
5140 }
5141
5142 /* Verify that the block of location LOC is in BLOCKS. */
5143
5144 static bool
5145 verify_location (hash_set<tree> *blocks, location_t loc)
5146 {
5147 tree block = LOCATION_BLOCK (loc);
5148 if (block != NULL_TREE
5149 && !blocks->contains (block))
5150 {
5151 error ("location references block not in block tree");
5152 return true;
5153 }
5154 if (block != NULL_TREE)
5155 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5156 return false;
5157 }
5158
5159 /* Called via walk_tree. Verify that expressions have no blocks. */
5160
5161 static tree
5162 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5163 {
5164 if (!EXPR_P (*tp))
5165 {
5166 *walk_subtrees = false;
5167 return NULL;
5168 }
5169
5170 location_t loc = EXPR_LOCATION (*tp);
5171 if (LOCATION_BLOCK (loc) != NULL)
5172 return *tp;
5173
5174 return NULL;
5175 }
5176
5177 /* Called via walk_tree. Verify locations of expressions. */
5178
5179 static tree
5180 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5181 {
5182 hash_set<tree> *blocks = (hash_set<tree> *) data;
5183 tree t = *tp;
5184
5185 /* ??? This doesn't really belong here but there's no good place to
5186 stick this remainder of old verify_expr. */
5187 /* ??? This barfs on debug stmts which contain binds to vars with
5188 different function context. */
5189 #if 0
5190 if (VAR_P (t)
5191 || TREE_CODE (t) == PARM_DECL
5192 || TREE_CODE (t) == RESULT_DECL)
5193 {
5194 tree context = decl_function_context (t);
5195 if (context != cfun->decl
5196 && !SCOPE_FILE_SCOPE_P (context)
5197 && !TREE_STATIC (t)
5198 && !DECL_EXTERNAL (t))
5199 {
5200 error ("local declaration from a different function");
5201 return t;
5202 }
5203 }
5204 #endif
5205
5206 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5207 {
5208 tree x = DECL_DEBUG_EXPR (t);
5209 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5210 if (addr)
5211 return addr;
5212 }
5213 if ((VAR_P (t)
5214 || TREE_CODE (t) == PARM_DECL
5215 || TREE_CODE (t) == RESULT_DECL)
5216 && DECL_HAS_VALUE_EXPR_P (t))
5217 {
5218 tree x = DECL_VALUE_EXPR (t);
5219 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5220 if (addr)
5221 return addr;
5222 }
5223
5224 if (!EXPR_P (t))
5225 {
5226 *walk_subtrees = false;
5227 return NULL;
5228 }
5229
5230 location_t loc = EXPR_LOCATION (t);
5231 if (verify_location (blocks, loc))
5232 return t;
5233
5234 return NULL;
5235 }
5236
5237 /* Called via walk_gimple_op. Verify locations of expressions. */
5238
5239 static tree
5240 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5241 {
5242 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5243 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5244 }
5245
5246 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5247
5248 static void
5249 collect_subblocks (hash_set<tree> *blocks, tree block)
5250 {
5251 tree t;
5252 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5253 {
5254 blocks->add (t);
5255 collect_subblocks (blocks, t);
5256 }
5257 }
5258
5259 /* Disable warnings about missing quoting in GCC diagnostics for
5260 the verification errors. Their format strings don't follow
5261 GCC diagnostic conventions and trigger an ICE in the end. */
5262 #if __GNUC__ >= 10
5263 # pragma GCC diagnostic push
5264 # pragma GCC diagnostic ignored "-Wformat-diag"
5265 #endif
5266
5267 /* Verify the GIMPLE statements in the CFG of FN. */
5268
5269 DEBUG_FUNCTION void
5270 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5271 {
5272 basic_block bb;
5273 bool err = false;
5274
5275 timevar_push (TV_TREE_STMT_VERIFY);
5276 hash_set<void *> visited;
5277 hash_set<gimple *> visited_throwing_stmts;
5278
5279 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5280 hash_set<tree> blocks;
5281 if (DECL_INITIAL (fn->decl))
5282 {
5283 blocks.add (DECL_INITIAL (fn->decl));
5284 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5285 }
5286
5287 FOR_EACH_BB_FN (bb, fn)
5288 {
5289 gimple_stmt_iterator gsi;
5290 edge_iterator ei;
5291 edge e;
5292
5293 for (gphi_iterator gpi = gsi_start_phis (bb);
5294 !gsi_end_p (gpi);
5295 gsi_next (&gpi))
5296 {
5297 gphi *phi = gpi.phi ();
5298 bool err2 = false;
5299 unsigned i;
5300
5301 if (gimple_bb (phi) != bb)
5302 {
5303 error ("gimple_bb (phi) is set to a wrong basic block");
5304 err2 = true;
5305 }
5306
5307 err2 |= verify_gimple_phi (phi);
5308
5309 /* Only PHI arguments have locations. */
5310 if (gimple_location (phi) != UNKNOWN_LOCATION)
5311 {
5312 error ("PHI node with location");
5313 err2 = true;
5314 }
5315
5316 for (i = 0; i < gimple_phi_num_args (phi); i++)
5317 {
5318 tree arg = gimple_phi_arg_def (phi, i);
5319 tree addr = walk_tree (&arg, verify_node_sharing_1,
5320 &visited, NULL);
5321 if (addr)
5322 {
5323 error ("incorrect sharing of tree nodes");
5324 debug_generic_expr (addr);
5325 err2 |= true;
5326 }
5327 location_t loc = gimple_phi_arg_location (phi, i);
5328 if (virtual_operand_p (gimple_phi_result (phi))
5329 && loc != UNKNOWN_LOCATION)
5330 {
5331 error ("virtual PHI with argument locations");
5332 err2 = true;
5333 }
5334 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5335 if (addr)
5336 {
5337 debug_generic_expr (addr);
5338 err2 = true;
5339 }
5340 err2 |= verify_location (&blocks, loc);
5341 }
5342
5343 if (err2)
5344 debug_gimple_stmt (phi);
5345 err |= err2;
5346 }
5347
5348 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5349 {
5350 gimple *stmt = gsi_stmt (gsi);
5351 bool err2 = false;
5352 struct walk_stmt_info wi;
5353 tree addr;
5354 int lp_nr;
5355
5356 if (gimple_bb (stmt) != bb)
5357 {
5358 error ("gimple_bb (stmt) is set to a wrong basic block");
5359 err2 = true;
5360 }
5361
5362 err2 |= verify_gimple_stmt (stmt);
5363 err2 |= verify_location (&blocks, gimple_location (stmt));
5364
5365 memset (&wi, 0, sizeof (wi));
5366 wi.info = (void *) &visited;
5367 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5368 if (addr)
5369 {
5370 error ("incorrect sharing of tree nodes");
5371 debug_generic_expr (addr);
5372 err2 |= true;
5373 }
5374
5375 memset (&wi, 0, sizeof (wi));
5376 wi.info = (void *) &blocks;
5377 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5378 if (addr)
5379 {
5380 debug_generic_expr (addr);
5381 err2 |= true;
5382 }
5383
5384 /* If the statement is marked as part of an EH region, then it is
5385 expected that the statement could throw. Verify that when
5386 optimizations simplify a statement such that we can prove it
5387 cannot throw, we also update the other data structures
5388 to match. */
5389 lp_nr = lookup_stmt_eh_lp (stmt);
5390 if (lp_nr != 0)
5391 visited_throwing_stmts.add (stmt);
5392 if (lp_nr > 0)
5393 {
5394 if (!stmt_could_throw_p (cfun, stmt))
5395 {
5396 if (verify_nothrow)
5397 {
5398 error ("statement marked for throw, but doesn%'t");
5399 err2 |= true;
5400 }
5401 }
5402 else if (!gsi_one_before_end_p (gsi))
5403 {
5404 error ("statement marked for throw in middle of block");
5405 err2 |= true;
5406 }
5407 }
5408
5409 if (err2)
5410 debug_gimple_stmt (stmt);
5411 err |= err2;
5412 }
5413
5414 FOR_EACH_EDGE (e, ei, bb->succs)
5415 if (e->goto_locus != UNKNOWN_LOCATION)
5416 err |= verify_location (&blocks, e->goto_locus);
5417 }
5418
5419 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5420 eh_error_found = false;
5421 if (eh_table)
5422 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5423 (&visited_throwing_stmts);
5424
5425 if (err || eh_error_found)
5426 internal_error ("verify_gimple failed");
5427
5428 verify_histograms ();
5429 timevar_pop (TV_TREE_STMT_VERIFY);
5430 }
5431
5432
5433 /* Verifies that the flow information is OK. */
5434
5435 static int
5436 gimple_verify_flow_info (void)
5437 {
5438 int err = 0;
5439 basic_block bb;
5440 gimple_stmt_iterator gsi;
5441 gimple *stmt;
5442 edge e;
5443 edge_iterator ei;
5444
5445 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5446 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5447 {
5448 error ("ENTRY_BLOCK has IL associated with it");
5449 err = 1;
5450 }
5451
5452 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5453 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5454 {
5455 error ("EXIT_BLOCK has IL associated with it");
5456 err = 1;
5457 }
5458
5459 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5460 if (e->flags & EDGE_FALLTHRU)
5461 {
5462 error ("fallthru to exit from bb %d", e->src->index);
5463 err = 1;
5464 }
5465
5466 FOR_EACH_BB_FN (bb, cfun)
5467 {
5468 bool found_ctrl_stmt = false;
5469
5470 stmt = NULL;
5471
5472 /* Skip labels at the start of the basic block. */
5473 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5474 {
5475 tree label;
5476 gimple *prev_stmt = stmt;
5477
5478 stmt = gsi_stmt (gsi);
5479
5480 if (gimple_code (stmt) != GIMPLE_LABEL)
5481 break;
5482
5483 label = gimple_label_label (as_a <glabel *> (stmt));
5484 if (prev_stmt && DECL_NONLOCAL (label))
5485 {
5486 error ("nonlocal label ");
5487 print_generic_expr (stderr, label);
5488 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5489 bb->index);
5490 err = 1;
5491 }
5492
5493 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5494 {
5495 error ("EH landing pad label ");
5496 print_generic_expr (stderr, label);
5497 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5498 bb->index);
5499 err = 1;
5500 }
5501
5502 if (label_to_block (cfun, label) != bb)
5503 {
5504 error ("label ");
5505 print_generic_expr (stderr, label);
5506 fprintf (stderr, " to block does not match in bb %d",
5507 bb->index);
5508 err = 1;
5509 }
5510
5511 if (decl_function_context (label) != current_function_decl)
5512 {
5513 error ("label ");
5514 print_generic_expr (stderr, label);
5515 fprintf (stderr, " has incorrect context in bb %d",
5516 bb->index);
5517 err = 1;
5518 }
5519 }
5520
5521 /* Verify that the body of basic block BB is free of control flow. */
5522 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5523 {
5524 gimple *stmt = gsi_stmt (gsi);
5525
5526 if (found_ctrl_stmt)
5527 {
5528 error ("control flow in the middle of basic block %d",
5529 bb->index);
5530 err = 1;
5531 }
5532
5533 if (stmt_ends_bb_p (stmt))
5534 found_ctrl_stmt = true;
5535
5536 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5537 {
5538 error ("label ");
5539 print_generic_expr (stderr, gimple_label_label (label_stmt));
5540 fprintf (stderr, " in the middle of basic block %d", bb->index);
5541 err = 1;
5542 }
5543 }
5544
5545 gsi = gsi_last_nondebug_bb (bb);
5546 if (gsi_end_p (gsi))
5547 continue;
5548
5549 stmt = gsi_stmt (gsi);
5550
5551 if (gimple_code (stmt) == GIMPLE_LABEL)
5552 continue;
5553
5554 err |= verify_eh_edges (stmt);
5555
5556 if (is_ctrl_stmt (stmt))
5557 {
5558 FOR_EACH_EDGE (e, ei, bb->succs)
5559 if (e->flags & EDGE_FALLTHRU)
5560 {
5561 error ("fallthru edge after a control statement in bb %d",
5562 bb->index);
5563 err = 1;
5564 }
5565 }
5566
5567 if (gimple_code (stmt) != GIMPLE_COND)
5568 {
5569 /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
5570 after anything other than a GIMPLE_COND. */
5571 FOR_EACH_EDGE (e, ei, bb->succs)
5572 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5573 {
5574 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5575 bb->index);
5576 err = 1;
5577 }
5578 }
5579
5580 switch (gimple_code (stmt))
5581 {
5582 case GIMPLE_COND:
5583 {
5584 edge true_edge;
5585 edge false_edge;
5586
5587 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5588
5589 if (!true_edge
5590 || !false_edge
5591 || !(true_edge->flags & EDGE_TRUE_VALUE)
5592 || !(false_edge->flags & EDGE_FALSE_VALUE)
5593 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5594 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5595 || EDGE_COUNT (bb->succs) >= 3)
5596 {
5597 error ("wrong outgoing edge flags at end of bb %d",
5598 bb->index);
5599 err = 1;
5600 }
5601 }
5602 break;
5603
5604 case GIMPLE_GOTO:
5605 if (simple_goto_p (stmt))
5606 {
5607 error ("explicit goto at end of bb %d", bb->index);
5608 err = 1;
5609 }
5610 else
5611 {
5612 /* FIXME. We should double check that the labels in the
5613 destination blocks have their address taken. */
5614 FOR_EACH_EDGE (e, ei, bb->succs)
5615 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5616 | EDGE_FALSE_VALUE))
5617 || !(e->flags & EDGE_ABNORMAL))
5618 {
5619 error ("wrong outgoing edge flags at end of bb %d",
5620 bb->index);
5621 err = 1;
5622 }
5623 }
5624 break;
5625
5626 case GIMPLE_CALL:
5627 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5628 break;
5629 /* fallthru */
5630 case GIMPLE_RETURN:
5631 if (!single_succ_p (bb)
5632 || (single_succ_edge (bb)->flags
5633 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5634 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5635 {
5636 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5637 err = 1;
5638 }
5639 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5640 {
5641 error ("return edge does not point to exit in bb %d",
5642 bb->index);
5643 err = 1;
5644 }
5645 break;
5646
5647 case GIMPLE_SWITCH:
5648 {
5649 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5650 tree prev;
5651 edge e;
5652 size_t i, n;
5653
5654 n = gimple_switch_num_labels (switch_stmt);
5655
5656 /* Mark all the destination basic blocks. */
5657 for (i = 0; i < n; ++i)
5658 {
5659 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5660 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5661 label_bb->aux = (void *)1;
5662 }
5663
5664 /* Verify that the case labels are sorted. */
5665 prev = gimple_switch_label (switch_stmt, 0);
5666 for (i = 1; i < n; ++i)
5667 {
5668 tree c = gimple_switch_label (switch_stmt, i);
5669 if (!CASE_LOW (c))
5670 {
5671 error ("found default case not at the start of "
5672 "case vector");
5673 err = 1;
5674 continue;
5675 }
5676 if (CASE_LOW (prev)
5677 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5678 {
5679 error ("case labels not sorted: ");
5680 print_generic_expr (stderr, prev);
5681 fprintf (stderr," is greater than ");
5682 print_generic_expr (stderr, c);
5683 fprintf (stderr," but comes before it.\n");
5684 err = 1;
5685 }
5686 prev = c;
5687 }
5688 /* VRP will remove the default case if it can prove it will
5689 never be executed. So do not verify here that a default
5690 case always exists. */
5691
5692 FOR_EACH_EDGE (e, ei, bb->succs)
5693 {
5694 if (!e->dest->aux)
5695 {
5696 error ("extra outgoing edge %d->%d",
5697 bb->index, e->dest->index);
5698 err = 1;
5699 }
5700
5701 e->dest->aux = (void *)2;
5702 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5703 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5704 {
5705 error ("wrong outgoing edge flags at end of bb %d",
5706 bb->index);
5707 err = 1;
5708 }
5709 }
5710
5711 /* Check that we have all of them. */
5712 for (i = 0; i < n; ++i)
5713 {
5714 basic_block label_bb = gimple_switch_label_bb (cfun,
5715 switch_stmt, i);
5716
5717 if (label_bb->aux != (void *)2)
5718 {
5719 error ("missing edge %i->%i", bb->index, label_bb->index);
5720 err = 1;
5721 }
5722 }
5723
5724 FOR_EACH_EDGE (e, ei, bb->succs)
5725 e->dest->aux = (void *)0;
5726 }
5727 break;
5728
5729 case GIMPLE_EH_DISPATCH:
5730 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5731 break;
5732
5733 default:
5734 break;
5735 }
5736 }
5737
5738 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5739 verify_dominators (CDI_DOMINATORS);
5740
5741 return err;
5742 }
5743
5744 #if __GNUC__ >= 10
5745 # pragma GCC diagnostic pop
5746 #endif
5747
5748 /* Updates phi nodes after creating a forwarder block joined
5749 by edge FALLTHRU. */
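/* E.g. if DUMMY contains x_1 = PHI <a_2(5), b_3(6)>, that PHI keeps
   its arguments but is given a fresh result x_4, and BB receives the
   new node x_1 = PHI <x_4(fallthru)>; arguments recorded on the other
   edges into BB are flushed below. */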
5750
5751 static void
5752 gimple_make_forwarder_block (edge fallthru)
5753 {
5754 edge e;
5755 edge_iterator ei;
5756 basic_block dummy, bb;
5757 tree var;
5758 gphi_iterator gsi;
5759
5760 dummy = fallthru->src;
5761 bb = fallthru->dest;
5762
5763 if (single_pred_p (bb))
5764 return;
5765
5766 /* If we redirected a branch we must create new PHI nodes at the
5767 start of BB. */
5768 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5769 {
5770 gphi *phi, *new_phi;
5771
5772 phi = gsi.phi ();
5773 var = gimple_phi_result (phi);
5774 new_phi = create_phi_node (var, bb);
5775 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5776 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5777 UNKNOWN_LOCATION);
5778 }
5779
5780 /* Add the arguments we have stored on edges. */
5781 FOR_EACH_EDGE (e, ei, bb->preds)
5782 {
5783 if (e == fallthru)
5784 continue;
5785
5786 flush_pending_stmts (e);
5787 }
5788 }
5789
5790
5791 /* Return a non-special label at the head of basic block BB.
5792 Create one if it doesn't exist. */
5793
5794 tree
5795 gimple_block_label (basic_block bb)
5796 {
5797 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5798 bool first = true;
5799 tree label;
5800 glabel *stmt;
5801
5802 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5803 {
5804 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5805 if (!stmt)
5806 break;
5807 label = gimple_label_label (stmt);
5808 if (!DECL_NONLOCAL (label))
5809 {
5810 if (!first)
5811 gsi_move_before (&i, &s);
5812 return label;
5813 }
5814 }
5815
5816 label = create_artificial_label (UNKNOWN_LOCATION);
5817 stmt = gimple_build_label (label);
5818 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5819 return label;
5820 }
5821
5822
5823 /* Attempt to perform edge redirection by replacing a possibly complex
5824 jump instruction by a goto or by removing the jump completely.
5825 This can apply only if all edges now point to the same block. The
5826 parameters and return values are equivalent to
5827 redirect_edge_and_branch. */
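/* E.g. when both successor edges of a GIMPLE_COND lead to TARGET,
   the condition is dead: the statement is removed and the surviving
   edge becomes a plain fallthru. */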
5828
5829 static edge
5830 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5831 {
5832 basic_block src = e->src;
5833 gimple_stmt_iterator i;
5834 gimple *stmt;
5835
5836 /* We can replace or remove a complex jump only when we have exactly
5837 two edges. */
5838 if (EDGE_COUNT (src->succs) != 2
5839 /* Verify that all targets will be TARGET. Specifically, the
5840 edge that is not E must also go to TARGET. */
5841 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5842 return NULL;
5843
5844 i = gsi_last_bb (src);
5845 if (gsi_end_p (i))
5846 return NULL;
5847
5848 stmt = gsi_stmt (i);
5849
5850 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5851 {
5852 gsi_remove (&i, true);
5853 e = ssa_redirect_edge (e, target);
5854 e->flags = EDGE_FALLTHRU;
5855 return e;
5856 }
5857
5858 return NULL;
5859 }
5860
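/* Illustrative case (a sketch, not from a testcase): if SRC ends in

     if (cond_5 != 0) goto bb4; else goto bb4;

   both successor edges already reach bb4, so when asked to redirect one
   of them to bb4 the GIMPLE_COND is deleted and the two edges collapse
   into a single EDGE_FALLTHRU edge.  */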
5861
5862 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5863 edge representing the redirected branch. */
5864
5865 static edge
5866 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5867 {
5868 basic_block bb = e->src;
5869 gimple_stmt_iterator gsi;
5870 edge ret;
5871 gimple *stmt;
5872
5873 if (e->flags & EDGE_ABNORMAL)
5874 return NULL;
5875
5876 if (e->dest == dest)
5877 return NULL;
5878
5879 if (e->flags & EDGE_EH)
5880 return redirect_eh_edge (e, dest);
5881
5882 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5883 {
5884 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5885 if (ret)
5886 return ret;
5887 }
5888
5889 gsi = gsi_last_nondebug_bb (bb);
5890 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5891
5892 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5893 {
5894 case GIMPLE_COND:
5895 /* For COND_EXPR, we only need to redirect the edge. */
5896 break;
5897
5898 case GIMPLE_GOTO:
5899 /* No non-abnormal edges should lead from a non-simple goto, and
5900 simple ones should be represented implicitly. */
5901 gcc_unreachable ();
5902
5903 case GIMPLE_SWITCH:
5904 {
5905 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5906 tree label = gimple_block_label (dest);
5907 tree cases = get_cases_for_edge (e, switch_stmt);
5908
5909 /* If we have a list of cases associated with E, then use it
5910 as it's a lot faster than walking the entire case vector. */
5911 if (cases)
5912 {
5913 edge e2 = find_edge (e->src, dest);
5914 tree last, first;
5915
5916 first = cases;
5917 while (cases)
5918 {
5919 last = cases;
5920 CASE_LABEL (cases) = label;
5921 cases = CASE_CHAIN (cases);
5922 }
5923
5924 /* If there was already an edge in the CFG, then we need
5925 to move all the cases associated with E to E2. */
5926 if (e2)
5927 {
5928 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5929
5930 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5931 CASE_CHAIN (cases2) = first;
5932 }
5933 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5934 }
5935 else
5936 {
5937 size_t i, n = gimple_switch_num_labels (switch_stmt);
5938
5939 for (i = 0; i < n; i++)
5940 {
5941 tree elt = gimple_switch_label (switch_stmt, i);
5942 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5943 CASE_LABEL (elt) = label;
5944 }
5945 }
5946 }
5947 break;
5948
5949 case GIMPLE_ASM:
5950 {
5951 gasm *asm_stmt = as_a <gasm *> (stmt);
5952 int i, n = gimple_asm_nlabels (asm_stmt);
5953 tree label = NULL;
5954
5955 for (i = 0; i < n; ++i)
5956 {
5957 tree cons = gimple_asm_label_op (asm_stmt, i);
5958 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5959 {
5960 if (!label)
5961 label = gimple_block_label (dest);
5962 TREE_VALUE (cons) = label;
5963 }
5964 }
5965
5966 /* If we didn't find any label matching the former edge in the
5967 asm labels, we must be redirecting the fallthrough
5968 edge. */
5969 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5970 }
5971 break;
5972
5973 case GIMPLE_RETURN:
5974 gsi_remove (&gsi, true);
5975 e->flags |= EDGE_FALLTHRU;
5976 break;
5977
5978 case GIMPLE_OMP_RETURN:
5979 case GIMPLE_OMP_CONTINUE:
5980 case GIMPLE_OMP_SECTIONS_SWITCH:
5981 case GIMPLE_OMP_FOR:
5982 /* The edges from OMP constructs can be simply redirected. */
5983 break;
5984
5985 case GIMPLE_EH_DISPATCH:
5986 if (!(e->flags & EDGE_FALLTHRU))
5987 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5988 break;
5989
5990 case GIMPLE_TRANSACTION:
5991 if (e->flags & EDGE_TM_ABORT)
5992 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5993 gimple_block_label (dest));
5994 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5995 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5996 gimple_block_label (dest));
5997 else
5998 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5999 gimple_block_label (dest));
6000 break;
6001
6002 default:
6003 /* Otherwise it must be a fallthru edge, and we don't need to
6004 do anything besides redirecting it. */
6005 gcc_assert (e->flags & EDGE_FALLTHRU);
6006 break;
6007 }
6008
6009 /* Update/insert PHI nodes as necessary. */
6010
6011 /* Now update the edges in the CFG. */
6012 e = ssa_redirect_edge (e, dest);
6013
6014 return e;
6015 }
6016
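/* A worked example for the GIMPLE_SWITCH case above (invented labels):
   redirecting the edge for "case 1" from the block holding L3 to DEST
   rewrites the CASE_LABEL_EXPRs rather than just the CFG edge:

     switch (x_1) <default: L0, case 1: L3, case 2: L3>
                            ==>
     switch (x_1) <default: L0, case 1: L5, case 2: L5>

   where L5 = gimple_block_label (dest).  When edge_to_cases is
   populated, only the cases chained for E are touched; otherwise the
   whole case vector is scanned.  */
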
6017 /* Returns true if it is possible to remove edge E by redirecting
6018 it to the destination of the other edge from E->src. */
6019
6020 static bool
6021 gimple_can_remove_branch_p (const_edge e)
6022 {
6023 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6024 return false;
6025
6026 return true;
6027 }
6028
6029 /* Simple wrapper, as we can always redirect fallthru edges. */
6030
6031 static basic_block
6032 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6033 {
6034 e = gimple_redirect_edge_and_branch (e, dest);
6035 gcc_assert (e);
6036
6037 return NULL;
6038 }
6039
6040
6041 /* Splits basic block BB after statement STMT (but at least after the
6042 labels). If STMT is NULL, BB is split just after the labels. */
6043
6044 static basic_block
6045 gimple_split_block (basic_block bb, void *stmt)
6046 {
6047 gimple_stmt_iterator gsi;
6048 gimple_stmt_iterator gsi_tgt;
6049 gimple_seq list;
6050 basic_block new_bb;
6051 edge e;
6052 edge_iterator ei;
6053
6054 new_bb = create_empty_bb (bb);
6055
6056 /* Redirect the outgoing edges. */
6057 new_bb->succs = bb->succs;
6058 bb->succs = NULL;
6059 FOR_EACH_EDGE (e, ei, new_bb->succs)
6060 e->src = new_bb;
6061
6062 /* Get a stmt iterator pointing to the first stmt to move. */
6063 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6064 gsi = gsi_after_labels (bb);
6065 else
6066 {
6067 gsi = gsi_for_stmt ((gimple *) stmt);
6068 gsi_next (&gsi);
6069 }
6070
6071 /* Move everything from GSI to the new basic block. */
6072 if (gsi_end_p (gsi))
6073 return new_bb;
6074
6075 /* Split the statement list - avoid re-creating new containers as this
6076 brings ugly quadratic memory consumption in the inliner.
6077 (We are still quadratic since we need to update stmt BB pointers,
6078 sadly.) */
6079 gsi_split_seq_before (&gsi, &list);
6080 set_bb_seq (new_bb, list);
6081 for (gsi_tgt = gsi_start (list);
6082 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6083 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6084
6085 return new_bb;
6086 }
6087
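/* A minimal usage sketch; callers normally reach this through the
   split_block () CFG hook rather than calling it directly:

     edge e = split_block (bb, stmt);
     basic_block rest = e->dest;

   BB keeps its labels and everything up to and including STMT; REST
   receives the remaining statements and inherits BB's old successor
   edges.  */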
6088
6089 /* Moves basic block BB after block AFTER. */
6090
6091 static bool
6092 gimple_move_block_after (basic_block bb, basic_block after)
6093 {
6094 if (bb->prev_bb == after)
6095 return true;
6096
6097 unlink_block (bb);
6098 link_block (bb, after);
6099
6100 return true;
6101 }
6102
6103
6104 /* Return TRUE if block BB has no executable statements, otherwise return
6105 FALSE. */
6106
6107 static bool
6108 gimple_empty_block_p (basic_block bb)
6109 {
6110 /* BB must have no executable statements. */
6111 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6112 if (phi_nodes (bb))
6113 return false;
6114 while (!gsi_end_p (gsi))
6115 {
6116 gimple *stmt = gsi_stmt (gsi);
6117 if (is_gimple_debug (stmt))
6118 ;
6119 else if (gimple_code (stmt) == GIMPLE_NOP
6120 || gimple_code (stmt) == GIMPLE_PREDICT)
6121 ;
6122 else
6123 return false;
6124 gsi_next (&gsi);
6125 }
6126 return true;
6127 }
6128
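/* E.g. (sketch) a block containing only

     <L2>:
     # DEBUG x => 1
     GIMPLE_PREDICT

   counts as empty here: labels, debug binds, GIMPLE_NOP and
   GIMPLE_PREDICT are not executable, while a single PHI node or any
   other statement makes the block non-empty.  */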
6129
6130 /* Split a basic block if it ends with a conditional branch and if the
6131 other part of the block is not empty. */
6132
6133 static basic_block
6134 gimple_split_block_before_cond_jump (basic_block bb)
6135 {
6136 gimple *last, *split_point;
6137 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6138 if (gsi_end_p (gsi))
6139 return NULL;
6140 last = gsi_stmt (gsi);
6141 if (gimple_code (last) != GIMPLE_COND
6142 && gimple_code (last) != GIMPLE_SWITCH)
6143 return NULL;
6144 gsi_prev (&gsi);
6145 split_point = gsi_stmt (gsi);
6146 return split_block (bb, split_point)->dest;
6147 }
6148
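/* E.g. (sketch): a block

     a_1 = b_2 + 1;
     if (a_1 > 0) goto bb3; else goto bb4;

   is split so that the arithmetic stays behind and the GIMPLE_COND ends
   up in the newly created block, which is what is returned.  */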
6149
6150 /* Return true if the basic block BB can be duplicated. */
6151
6152 static bool
6153 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6154 {
6155 return true;
6156 }
6157
6158 /* Create a duplicate of the basic block BB. NOTE: This does not
6159 preserve SSA form. */
6160
6161 static basic_block
6162 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6163 {
6164 basic_block new_bb;
6165 gimple_stmt_iterator gsi_tgt;
6166
6167 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6168
6169 /* Copy the PHI nodes. We ignore PHI node arguments here because
6170 the incoming edges have not been set up yet. */
6171 for (gphi_iterator gpi = gsi_start_phis (bb);
6172 !gsi_end_p (gpi);
6173 gsi_next (&gpi))
6174 {
6175 gphi *phi, *copy;
6176 phi = gpi.phi ();
6177 copy = create_phi_node (NULL_TREE, new_bb);
6178 create_new_def_for (gimple_phi_result (phi), copy,
6179 gimple_phi_result_ptr (copy));
6180 gimple_set_uid (copy, gimple_uid (phi));
6181 }
6182
6183 gsi_tgt = gsi_start_bb (new_bb);
6184 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6185 !gsi_end_p (gsi);
6186 gsi_next (&gsi))
6187 {
6188 def_operand_p def_p;
6189 ssa_op_iter op_iter;
6190 tree lhs;
6191 gimple *stmt, *copy;
6192
6193 stmt = gsi_stmt (gsi);
6194 if (gimple_code (stmt) == GIMPLE_LABEL)
6195 continue;
6196
6197 /* Don't duplicate label debug stmts. */
6198 if (gimple_debug_bind_p (stmt)
6199 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6200 == LABEL_DECL)
6201 continue;
6202
6203 /* Create a new copy of STMT and duplicate STMT's virtual
6204 operands. */
6205 copy = gimple_copy (stmt);
6206 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6207
6208 maybe_duplicate_eh_stmt (copy, stmt);
6209 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6210
6211 /* When copying around a stmt writing into a local non-user
6212 aggregate, make sure it won't share a stack slot with other
6213 vars. */
6214 lhs = gimple_get_lhs (stmt);
6215 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6216 {
6217 tree base = get_base_address (lhs);
6218 if (base
6219 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6220 && DECL_IGNORED_P (base)
6221 && !TREE_STATIC (base)
6222 && !DECL_EXTERNAL (base)
6223 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6224 DECL_NONSHAREABLE (base) = 1;
6225 }
6226
6227 /* If requested, remap dependence info of cliques brought in
6228 via inlining. */
6229 if (id)
6230 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6231 {
6232 tree op = gimple_op (copy, i);
6233 if (!op)
6234 continue;
6235 if (TREE_CODE (op) == ADDR_EXPR
6236 || TREE_CODE (op) == WITH_SIZE_EXPR)
6237 op = TREE_OPERAND (op, 0);
6238 while (handled_component_p (op))
6239 op = TREE_OPERAND (op, 0);
6240 if ((TREE_CODE (op) == MEM_REF
6241 || TREE_CODE (op) == TARGET_MEM_REF)
6242 && MR_DEPENDENCE_CLIQUE (op) > 1
6243 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6244 {
6245 if (!id->dependence_map)
6246 id->dependence_map = new hash_map<dependence_hash,
6247 unsigned short>;
6248 bool existed;
6249 unsigned short &newc = id->dependence_map->get_or_insert
6250 (MR_DEPENDENCE_CLIQUE (op), &existed);
6251 if (!existed)
6252 {
6253 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6254 newc = ++cfun->last_clique;
6255 }
6256 MR_DEPENDENCE_CLIQUE (op) = newc;
6257 }
6258 }
6259
6260 /* Create new names for all the definitions created by COPY and
6261 add replacement mappings for each new name. */
6262 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6263 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6264 }
6265
6266 return new_bb;
6267 }
6268
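/* To make the dependence-clique remapping above concrete (invented
   numbers): suppose a copied statement references a MEM_REF whose
   MR_DEPENDENCE_CLIQUE is 3 and cfun->last_clique is 6.  The first time
   clique 3 is seen during this duplication a fresh clique 7 is
   allocated and recorded in ID->dependence_map; every later occurrence
   of clique 3 among the copies is rewritten to 7 as well, so the copied
   references stay disambiguated against each other exactly like the
   originals.  */
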
6269 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6270
6271 static void
6272 add_phi_args_after_copy_edge (edge e_copy)
6273 {
6274 basic_block bb, bb_copy = e_copy->src, dest;
6275 edge e;
6276 edge_iterator ei;
6277 gphi *phi, *phi_copy;
6278 tree def;
6279 gphi_iterator psi, psi_copy;
6280
6281 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6282 return;
6283
6284 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6285
6286 if (e_copy->dest->flags & BB_DUPLICATED)
6287 dest = get_bb_original (e_copy->dest);
6288 else
6289 dest = e_copy->dest;
6290
6291 e = find_edge (bb, dest);
6292 if (!e)
6293 {
6294 /* During loop unrolling the target of the latch edge is copied.
6295 In this case we are not looking for an edge to DEST, but for
6296 the edge to the duplicated block whose original was DEST. */
6297 FOR_EACH_EDGE (e, ei, bb->succs)
6298 {
6299 if ((e->dest->flags & BB_DUPLICATED)
6300 && get_bb_original (e->dest) == dest)
6301 break;
6302 }
6303
6304 gcc_assert (e != NULL);
6305 }
6306
6307 for (psi = gsi_start_phis (e->dest),
6308 psi_copy = gsi_start_phis (e_copy->dest);
6309 !gsi_end_p (psi);
6310 gsi_next (&psi), gsi_next (&psi_copy))
6311 {
6312 phi = psi.phi ();
6313 phi_copy = psi_copy.phi ();
6314 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6315 add_phi_arg (phi_copy, def, e_copy,
6316 gimple_phi_arg_location_from_edge (phi, e));
6317 }
6318 }
6319
6320
6321 /* Basic block BB_COPY was created by code duplication. Add phi node
6322 arguments for edges going out of BB_COPY. The blocks that were
6323 duplicated have BB_DUPLICATED set. */
6324
6325 void
6326 add_phi_args_after_copy_bb (basic_block bb_copy)
6327 {
6328 edge e_copy;
6329 edge_iterator ei;
6330
6331 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6332 {
6333 add_phi_args_after_copy_edge (e_copy);
6334 }
6335 }
6336
6337 /* Blocks in REGION_COPY array of length N_REGION were created by
6338 duplication of basic blocks. Add phi node arguments for edges
6339 going from these blocks. If E_COPY is not NULL, also add
6340 phi node arguments for its destination. */
6341
6342 void
6343 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6344 edge e_copy)
6345 {
6346 unsigned i;
6347
6348 for (i = 0; i < n_region; i++)
6349 region_copy[i]->flags |= BB_DUPLICATED;
6350
6351 for (i = 0; i < n_region; i++)
6352 add_phi_args_after_copy_bb (region_copy[i]);
6353 if (e_copy)
6354 add_phi_args_after_copy_edge (e_copy);
6355
6356 for (i = 0; i < n_region; i++)
6357 region_copy[i]->flags &= ~BB_DUPLICATED;
6358 }
6359
6360 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6361 important exit edge EXIT. By important we mean that no SSA name defined
6362 inside the region is live over the other exit edges of the region. All entry
6363 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6364 to the duplicate of the region. Dominance and loop information is
6365 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6366 UPDATE_DOMINANCE is false then we assume that the caller will update the
6367 dominance information after calling this function. The new basic
6368 blocks are stored to REGION_COPY in the same order as they had in REGION,
6369 provided that REGION_COPY is not NULL.
6370 The function returns false if it is unable to copy the region,
6371 true otherwise. */
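/* For the primary use, loop header copying, the overall effect is
   roughly (an illustrative sketch):

     while (cond)           if (cond)
       body;         ==>      do
                                body;
                              while (cond);

   ENTRY is the preheader edge, the region is the duplicated header, and
   EXIT becomes the new latch edge as set up below.  */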
6372
6373 bool
6374 gimple_duplicate_sese_region (edge entry, edge exit,
6375 basic_block *region, unsigned n_region,
6376 basic_block *region_copy,
6377 bool update_dominance)
6378 {
6379 unsigned i;
6380 bool free_region_copy = false, copying_header = false;
6381 struct loop *loop = entry->dest->loop_father;
6382 edge exit_copy;
6383 vec<basic_block> doms = vNULL;
6384 edge redirected;
6385 profile_count total_count = profile_count::uninitialized ();
6386 profile_count entry_count = profile_count::uninitialized ();
6387
6388 if (!can_copy_bbs_p (region, n_region))
6389 return false;
6390
6391 /* Some sanity checking. Note that we do not check for all possible
6392 misuses of the function. That is, if you ask to copy something weird,
6393 it will work, but the state of the structures probably will not be
6394 correct. */
6395 for (i = 0; i < n_region; i++)
6396 {
6397 /* We do not handle subloops, i.e. all the blocks must belong to the
6398 same loop. */
6399 if (region[i]->loop_father != loop)
6400 return false;
6401
6402 if (region[i] != entry->dest
6403 && region[i] == loop->header)
6404 return false;
6405 }
6406
6407 /* In case the function is used for loop header copying (which is the primary
6408 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6409 if (loop->header == entry->dest)
6410 {
6411 copying_header = true;
6412
6413 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6414 return false;
6415
6416 for (i = 0; i < n_region; i++)
6417 if (region[i] != exit->src
6418 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6419 return false;
6420 }
6421
6422 initialize_original_copy_tables ();
6423
6424 if (copying_header)
6425 set_loop_copy (loop, loop_outer (loop));
6426 else
6427 set_loop_copy (loop, loop);
6428
6429 if (!region_copy)
6430 {
6431 region_copy = XNEWVEC (basic_block, n_region);
6432 free_region_copy = true;
6433 }
6434
6435 /* Record blocks outside the region that are dominated by something
6436 inside. */
6437 if (update_dominance)
6438 {
6439 doms.create (0);
6440 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6441 }
6442
6443 if (entry->dest->count.initialized_p ())
6444 {
6445 total_count = entry->dest->count;
6446 entry_count = entry->count ();
6447 /* Fix up corner cases, to avoid division by zero or creation of negative
6448 frequencies. */
6449 if (entry_count > total_count)
6450 entry_count = total_count;
6451 }
6452
6453 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6454 split_edge_bb_loc (entry), update_dominance);
6455 if (total_count.initialized_p () && entry_count.initialized_p ())
6456 {
6457 scale_bbs_frequencies_profile_count (region, n_region,
6458 total_count - entry_count,
6459 total_count);
6460 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6461 total_count);
6462 }
6463
6464 if (copying_header)
6465 {
6466 loop->header = exit->dest;
6467 loop->latch = exit->src;
6468 }
6469
6470 /* Redirect the entry and add the phi node arguments. */
6471 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6472 gcc_assert (redirected != NULL);
6473 flush_pending_stmts (entry);
6474
6475 /* Concerning updating of dominators: we must recount dominators
6476 for the entry block and its copy. Anything outside the region
6477 that was dominated by something inside needs recounting as
6478 well. */
6479 if (update_dominance)
6480 {
6481 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6482 doms.safe_push (get_bb_original (entry->dest));
6483 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6484 doms.release ();
6485 }
6486
6487 /* Add the other PHI node arguments. */
6488 add_phi_args_after_copy (region_copy, n_region, NULL);
6489
6490 if (free_region_copy)
6491 free (region_copy);
6492
6493 free_original_copy_tables ();
6494 return true;
6495 }
6496
6497 /* Return true if BB is part of the region defined by the N_REGION blocks in BBS. */
6498 static bool
6499 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6500 {
6501 unsigned int n;
6502
6503 for (n = 0; n < n_region; n++)
6504 {
6505 if (bb == bbs[n])
6506 return true;
6507 }
6508 return false;
6509 }
6510
6511 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6512 are stored to REGION_COPY in the same order as they appear
6513 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6514 the region, EXIT an exit from it. The condition guarding EXIT
6515 is moved to ENTRY. Returns true if duplication succeeds, false
6516 otherwise.
6517
6518 For example,
6519
6520 some_code;
6521 if (cond)
6522 A;
6523 else
6524 B;
6525
6526 is transformed to
6527
6528 if (cond)
6529 {
6530 some_code;
6531 A;
6532 }
6533 else
6534 {
6535 some_code;
6536 B;
6537 }
6538 */
6539
6540 bool
6541 gimple_duplicate_sese_tail (edge entry, edge exit,
6542 basic_block *region, unsigned n_region,
6543 basic_block *region_copy)
6544 {
6545 unsigned i;
6546 bool free_region_copy = false;
6547 struct loop *loop = exit->dest->loop_father;
6548 struct loop *orig_loop = entry->dest->loop_father;
6549 basic_block switch_bb, entry_bb, nentry_bb;
6550 vec<basic_block> doms;
6551 profile_count total_count = profile_count::uninitialized (),
6552 exit_count = profile_count::uninitialized ();
6553 edge exits[2], nexits[2], e;
6554 gimple_stmt_iterator gsi;
6555 gimple *cond_stmt;
6556 edge sorig, snew;
6557 basic_block exit_bb;
6558 gphi_iterator psi;
6559 gphi *phi;
6560 tree def;
6561 struct loop *target, *aloop, *cloop;
6562
6563 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6564 exits[0] = exit;
6565 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6566
6567 if (!can_copy_bbs_p (region, n_region))
6568 return false;
6569
6570 initialize_original_copy_tables ();
6571 set_loop_copy (orig_loop, loop);
6572
6573 target = loop;
6574 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6575 {
6576 if (bb_part_of_region_p (aloop->header, region, n_region))
6577 {
6578 cloop = duplicate_loop (aloop, target);
6579 duplicate_subloops (aloop, cloop);
6580 }
6581 }
6582
6583 if (!region_copy)
6584 {
6585 region_copy = XNEWVEC (basic_block, n_region);
6586 free_region_copy = true;
6587 }
6588
6589 gcc_assert (!need_ssa_update_p (cfun));
6590
6591 /* Record blocks outside the region that are dominated by something
6592 inside. */
6593 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6594
6595 total_count = exit->src->count;
6596 exit_count = exit->count ();
6597 /* Fix up corner cases, to avoid division by zero or creation of negative
6598 frequencies. */
6599 if (exit_count > total_count)
6600 exit_count = total_count;
6601
6602 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6603 split_edge_bb_loc (exit), true);
6604 if (total_count.initialized_p () && exit_count.initialized_p ())
6605 {
6606 scale_bbs_frequencies_profile_count (region, n_region,
6607 total_count - exit_count,
6608 total_count);
6609 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6610 total_count);
6611 }
6612
6613 /* Create the switch block, and put the exit condition into it. */
6614 entry_bb = entry->dest;
6615 nentry_bb = get_bb_copy (entry_bb);
6616 if (!last_stmt (entry->src)
6617 || !stmt_ends_bb_p (last_stmt (entry->src)))
6618 switch_bb = entry->src;
6619 else
6620 switch_bb = split_edge (entry);
6621 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6622
6623 gsi = gsi_last_bb (switch_bb);
6624 cond_stmt = last_stmt (exit->src);
6625 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6626 cond_stmt = gimple_copy (cond_stmt);
6627
6628 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6629
6630 sorig = single_succ_edge (switch_bb);
6631 sorig->flags = exits[1]->flags;
6632 sorig->probability = exits[1]->probability;
6633 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6634 snew->probability = exits[0]->probability;
6635
6637 /* Register the new edge from SWITCH_BB in loop exit lists. */
6638 rescan_loop_exit (snew, true, false);
6639
6640 /* Add the PHI node arguments. */
6641 add_phi_args_after_copy (region_copy, n_region, snew);
6642
6643 /* Get rid of now superfluous conditions and associated edges (and phi node
6644 arguments). */
6645 exit_bb = exit->dest;
6646
6647 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6648 PENDING_STMT (e) = NULL;
6649
6650 /* The latch of ORIG_LOOP was copied, and so was the backedge
6651 to the original header. We redirect this backedge to EXIT_BB. */
6652 for (i = 0; i < n_region; i++)
6653 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6654 {
6655 gcc_assert (single_succ_edge (region_copy[i]));
6656 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6657 PENDING_STMT (e) = NULL;
6658 for (psi = gsi_start_phis (exit_bb);
6659 !gsi_end_p (psi);
6660 gsi_next (&psi))
6661 {
6662 phi = psi.phi ();
6663 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6664 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6665 }
6666 }
6667 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6668 PENDING_STMT (e) = NULL;
6669
6670 /* Anything outside the region that was dominated by something
6671 inside needs its dominance info updated. */
6672 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6673 doms.release ();
6674 /* Update the SSA web. */
6675 update_ssa (TODO_update_ssa);
6676
6677 if (free_region_copy)
6678 free (region_copy);
6679
6680 free_original_copy_tables ();
6681 return true;
6682 }
6683
6684 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6685 adding blocks when the dominator traversal reaches EXIT. This
6686 function silently assumes that ENTRY strictly dominates EXIT. */
6687
6688 void
6689 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6690 vec<basic_block> *bbs_p)
6691 {
6692 basic_block son;
6693
6694 for (son = first_dom_son (CDI_DOMINATORS, entry);
6695 son;
6696 son = next_dom_son (CDI_DOMINATORS, son))
6697 {
6698 bbs_p->safe_push (son);
6699 if (son != exit)
6700 gather_blocks_in_sese_region (son, exit, bbs_p);
6701 }
6702 }
6703
6704 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6705 The duplicates are recorded in VARS_MAP. */
6706
6707 static void
6708 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6709 tree to_context)
6710 {
6711 tree t = *tp, new_t;
6712 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6713
6714 if (DECL_CONTEXT (t) == to_context)
6715 return;
6716
6717 bool existed;
6718 tree &loc = vars_map->get_or_insert (t, &existed);
6719
6720 if (!existed)
6721 {
6722 if (SSA_VAR_P (t))
6723 {
6724 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6725 add_local_decl (f, new_t);
6726 }
6727 else
6728 {
6729 gcc_assert (TREE_CODE (t) == CONST_DECL);
6730 new_t = copy_node (t);
6731 }
6732 DECL_CONTEXT (new_t) = to_context;
6733
6734 loc = new_t;
6735 }
6736 else
6737 new_t = loc;
6738
6739 *tp = new_t;
6740 }
6741
6742
6743 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6744 VARS_MAP maps old ssa names and var_decls to the new ones. */
6745
6746 static tree
6747 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6748 tree to_context)
6749 {
6750 tree new_name;
6751
6752 gcc_assert (!virtual_operand_p (name));
6753
6754 tree *loc = vars_map->get (name);
6755
6756 if (!loc)
6757 {
6758 tree decl = SSA_NAME_VAR (name);
6759 if (decl)
6760 {
6761 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6762 replace_by_duplicate_decl (&decl, vars_map, to_context);
6763 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6764 decl, SSA_NAME_DEF_STMT (name));
6765 }
6766 else
6767 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6768 name, SSA_NAME_DEF_STMT (name));
6769
6770 /* Now that we've used the def stmt to define new_name, make sure it
6771 doesn't define name anymore. */
6772 SSA_NAME_DEF_STMT (name) = NULL;
6773
6774 vars_map->put (name, new_name);
6775 }
6776 else
6777 new_name = *loc;
6778
6779 return new_name;
6780 }
6781
6782 struct move_stmt_d
6783 {
6784 tree orig_block;
6785 tree new_block;
6786 tree from_context;
6787 tree to_context;
6788 hash_map<tree, tree> *vars_map;
6789 htab_t new_label_map;
6790 hash_map<void *, void *> *eh_map;
6791 bool remap_decls_p;
6792 };
6793
6794 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6795 contained in *TP if it was previously ORIG_BLOCK, and change the
6796 DECL_CONTEXT of every local variable referenced in *TP. */
6797
6798 static tree
6799 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6800 {
6801 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6802 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6803 tree t = *tp;
6804
6805 if (EXPR_P (t))
6806 {
6807 tree block = TREE_BLOCK (t);
6808 if (block == NULL_TREE)
6809 ;
6810 else if (block == p->orig_block
6811 || p->orig_block == NULL_TREE)
6812 {
6813 /* tree_node_can_be_shared says we can share invariant
6814 addresses but unshare_expr copies them anyway. Make sure
6815 to unshare before adjusting the block in place - we do not
6816 always see a copy here. */
6817 if (TREE_CODE (t) == ADDR_EXPR
6818 && is_gimple_min_invariant (t))
6819 *tp = t = unshare_expr (t);
6820 TREE_SET_BLOCK (t, p->new_block);
6821 }
6822 else if (flag_checking)
6823 {
6824 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6825 block = BLOCK_SUPERCONTEXT (block);
6826 gcc_assert (block == p->orig_block);
6827 }
6828 }
6829 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6830 {
6831 if (TREE_CODE (t) == SSA_NAME)
6832 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6833 else if (TREE_CODE (t) == PARM_DECL
6834 && gimple_in_ssa_p (cfun))
6835 *tp = *(p->vars_map->get (t));
6836 else if (TREE_CODE (t) == LABEL_DECL)
6837 {
6838 if (p->new_label_map)
6839 {
6840 struct tree_map in, *out;
6841 in.base.from = t;
6842 out = (struct tree_map *)
6843 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6844 if (out)
6845 *tp = t = out->to;
6846 }
6847
6848 /* For FORCED_LABELs we can end up with references from other
6849 functions if some SESE regions are outlined. It is UB to
6850 jump in between them, but they could be used just for printing
6851 addresses etc. In that case, DECL_CONTEXT on the label should
6852 be the function containing the glabel stmt with that LABEL_DECL,
6853 rather than whatever function a reference to the label was last
6854 seen in. */
6855 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6856 DECL_CONTEXT (t) = p->to_context;
6857 }
6858 else if (p->remap_decls_p)
6859 {
6860 /* Replace T with its duplicate. T should no longer appear in the
6861 parent function, so this looks wasteful; however, it may appear
6862 in referenced_vars, and more importantly, as virtual operands of
6863 statements, and in alias lists of other variables. It would be
6864 quite difficult to expunge it from all those places. ??? It might
6865 suffice to do this for addressable variables. */
6866 if ((VAR_P (t) && !is_global_var (t))
6867 || TREE_CODE (t) == CONST_DECL)
6868 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6869 }
6870 *walk_subtrees = 0;
6871 }
6872 else if (TYPE_P (t))
6873 *walk_subtrees = 0;
6874
6875 return NULL_TREE;
6876 }
6877
6878 /* Helper for move_stmt_r. Given an EH region number for the source
6879 function, map that to the duplicate EH region number in the dest. */
6880
6881 static int
6882 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6883 {
6884 eh_region old_r, new_r;
6885
6886 old_r = get_eh_region_from_number (old_nr);
6887 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6888
6889 return new_r->index;
6890 }
6891
6892 /* Similar, but operate on INTEGER_CSTs. */
6893
6894 static tree
6895 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6896 {
6897 int old_nr, new_nr;
6898
6899 old_nr = tree_to_shwi (old_t_nr);
6900 new_nr = move_stmt_eh_region_nr (old_nr, p);
6901
6902 return build_int_cst (integer_type_node, new_nr);
6903 }
6904
6905 /* Like move_stmt_op, but for gimple statements.
6906
6907 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6908 contained in the current statement in *GSI_P and change the
6909 DECL_CONTEXT of every local variable referenced in the current
6910 statement. */
6911
6912 static tree
6913 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6914 struct walk_stmt_info *wi)
6915 {
6916 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6917 gimple *stmt = gsi_stmt (*gsi_p);
6918 tree block = gimple_block (stmt);
6919
6920 if (block == p->orig_block
6921 || (p->orig_block == NULL_TREE
6922 && block != NULL_TREE))
6923 gimple_set_block (stmt, p->new_block);
6924
6925 switch (gimple_code (stmt))
6926 {
6927 case GIMPLE_CALL:
6928 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6929 {
6930 tree r, fndecl = gimple_call_fndecl (stmt);
6931 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6932 switch (DECL_FUNCTION_CODE (fndecl))
6933 {
6934 case BUILT_IN_EH_COPY_VALUES:
6935 r = gimple_call_arg (stmt, 1);
6936 r = move_stmt_eh_region_tree_nr (r, p);
6937 gimple_call_set_arg (stmt, 1, r);
6938 /* FALLTHRU */
6939
6940 case BUILT_IN_EH_POINTER:
6941 case BUILT_IN_EH_FILTER:
6942 r = gimple_call_arg (stmt, 0);
6943 r = move_stmt_eh_region_tree_nr (r, p);
6944 gimple_call_set_arg (stmt, 0, r);
6945 break;
6946
6947 default:
6948 break;
6949 }
6950 }
6951 break;
6952
6953 case GIMPLE_RESX:
6954 {
6955 gresx *resx_stmt = as_a <gresx *> (stmt);
6956 int r = gimple_resx_region (resx_stmt);
6957 r = move_stmt_eh_region_nr (r, p);
6958 gimple_resx_set_region (resx_stmt, r);
6959 }
6960 break;
6961
6962 case GIMPLE_EH_DISPATCH:
6963 {
6964 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6965 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6966 r = move_stmt_eh_region_nr (r, p);
6967 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6968 }
6969 break;
6970
6971 case GIMPLE_OMP_RETURN:
6972 case GIMPLE_OMP_CONTINUE:
6973 break;
6974
6975 case GIMPLE_LABEL:
6976 {
6977 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6978 so that such labels can be referenced from other regions.
6979 Make sure to update it when seeing a GIMPLE_LABEL though,
6980 since that is the owner of the label. */
6981 walk_gimple_op (stmt, move_stmt_op, wi);
6982 *handled_ops_p = true;
6983 tree label = gimple_label_label (as_a <glabel *> (stmt));
6984 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6985 DECL_CONTEXT (label) = p->to_context;
6986 }
6987 break;
6988
6989 default:
6990 if (is_gimple_omp (stmt))
6991 {
6992 /* Do not remap variables inside OMP directives. Variables
6993 referenced in clauses and directive header belong to the
6994 parent function and should not be moved into the child
6995 function. */
6996 bool save_remap_decls_p = p->remap_decls_p;
6997 p->remap_decls_p = false;
6998 *handled_ops_p = true;
6999
7000 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7001 move_stmt_op, wi);
7002
7003 p->remap_decls_p = save_remap_decls_p;
7004 }
7005 break;
7006 }
7007
7008 return NULL_TREE;
7009 }
7010
7011 /* Move basic block BB from function CFUN to function DEST_FN. The
7012 block is moved out of the original linked list and placed after
7013 block AFTER in the new list. Also, the block is removed from the
7014 original array of blocks and placed in DEST_FN's array of blocks.
7015 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7016 updated to reflect the moved edges.
7017
7018 The local variables are remapped to new instances; VARS_MAP is used
7019 to record the mapping. */
7020
7021 static void
7022 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7023 basic_block after, bool update_edge_count_p,
7024 struct move_stmt_d *d)
7025 {
7026 struct control_flow_graph *cfg;
7027 edge_iterator ei;
7028 edge e;
7029 gimple_stmt_iterator si;
7030 unsigned old_len, new_len;
7031
7032 /* Remove BB from dominance structures. */
7033 delete_from_dominance_info (CDI_DOMINATORS, bb);
7034
7035 /* Move BB from its current loop to the copy in the new function. */
7036 if (current_loops)
7037 {
7038 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7039 if (new_loop)
7040 bb->loop_father = new_loop;
7041 }
7042
7043 /* Link BB to the new linked list. */
7044 move_block_after (bb, after);
7045
7046 /* Update the edge count in the corresponding flowgraphs. */
7047 if (update_edge_count_p)
7048 FOR_EACH_EDGE (e, ei, bb->succs)
7049 {
7050 cfun->cfg->x_n_edges--;
7051 dest_cfun->cfg->x_n_edges++;
7052 }
7053
7054 /* Remove BB from the original basic block array. */
7055 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7056 cfun->cfg->x_n_basic_blocks--;
7057
7058 /* Grow DEST_CFUN's basic block array if needed. */
7059 cfg = dest_cfun->cfg;
7060 cfg->x_n_basic_blocks++;
7061 if (bb->index >= cfg->x_last_basic_block)
7062 cfg->x_last_basic_block = bb->index + 1;
7063
7064 old_len = vec_safe_length (cfg->x_basic_block_info);
7065 if ((unsigned) cfg->x_last_basic_block >= old_len)
7066 {
7067 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7068 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7069 }
7070
7071 (*cfg->x_basic_block_info)[bb->index] = bb;
7072
7073 /* Remap the variables in phi nodes. */
7074 for (gphi_iterator psi = gsi_start_phis (bb);
7075 !gsi_end_p (psi); )
7076 {
7077 gphi *phi = psi.phi ();
7078 use_operand_p use;
7079 tree op = PHI_RESULT (phi);
7080 ssa_op_iter oi;
7081 unsigned i;
7082
7083 if (virtual_operand_p (op))
7084 {
7085 /* Remove the phi nodes for virtual operands (alias analysis will be
7086 run for the new function, anyway). */
7087 remove_phi_node (&psi, true);
7088 continue;
7089 }
7090
7091 SET_PHI_RESULT (phi,
7092 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7093 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7094 {
7095 op = USE_FROM_PTR (use);
7096 if (TREE_CODE (op) == SSA_NAME)
7097 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7098 }
7099
7100 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7101 {
7102 location_t locus = gimple_phi_arg_location (phi, i);
7103 tree block = LOCATION_BLOCK (locus);
7104
7105 if (locus == UNKNOWN_LOCATION)
7106 continue;
7107 if (d->orig_block == NULL_TREE || block == d->orig_block)
7108 {
7109 locus = set_block (locus, d->new_block);
7110 gimple_phi_arg_set_location (phi, i, locus);
7111 }
7112 }
7113
7114 gsi_next (&psi);
7115 }
7116
7117 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7118 {
7119 gimple *stmt = gsi_stmt (si);
7120 struct walk_stmt_info wi;
7121
7122 memset (&wi, 0, sizeof (wi));
7123 wi.info = d;
7124 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7125
7126 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7127 {
7128 tree label = gimple_label_label (label_stmt);
7129 int uid = LABEL_DECL_UID (label);
7130
7131 gcc_assert (uid > -1);
7132
7133 old_len = vec_safe_length (cfg->x_label_to_block_map);
7134 if (old_len <= (unsigned) uid)
7135 {
7136 new_len = 3 * uid / 2 + 1;
7137 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7138 }
7139
7140 (*cfg->x_label_to_block_map)[uid] = bb;
7141 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7142
7143 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7144
7145 if (uid >= dest_cfun->cfg->last_label_uid)
7146 dest_cfun->cfg->last_label_uid = uid + 1;
7147 }
7148
7149 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7150 remove_stmt_from_eh_lp_fn (cfun, stmt);
7151
7152 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7153 gimple_remove_stmt_histograms (cfun, stmt);
7154
7155 /* We cannot leave any operands allocated from the operand caches of
7156 the current function. */
7157 free_stmt_operands (cfun, stmt);
7158 push_cfun (dest_cfun);
7159 update_stmt (stmt);
7160 pop_cfun ();
7161 }
7162
7163 FOR_EACH_EDGE (e, ei, bb->succs)
7164 if (e->goto_locus != UNKNOWN_LOCATION)
7165 {
7166 tree block = LOCATION_BLOCK (e->goto_locus);
7167 if (d->orig_block == NULL_TREE
7168 || block == d->orig_block)
7169 e->goto_locus = set_block (e->goto_locus, d->new_block);
7170 }
7171 }
7172
7173 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7174 the outermost EH region. Use REGION as the incoming base EH region.
7175 If there is no single outermost region, return NULL and set *ALL to
7176 true. */
7177
7178 static eh_region
7179 find_outermost_region_in_block (struct function *src_cfun,
7180 basic_block bb, eh_region region,
7181 bool *all)
7182 {
7183 gimple_stmt_iterator si;
7184
7185 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7186 {
7187 gimple *stmt = gsi_stmt (si);
7188 eh_region stmt_region;
7189 int lp_nr;
7190
7191 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7192 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7193 if (stmt_region)
7194 {
7195 if (region == NULL)
7196 region = stmt_region;
7197 else if (stmt_region != region)
7198 {
7199 region = eh_region_outermost (src_cfun, stmt_region, region);
7200 if (region == NULL)
7201 {
7202 *all = true;
7203 return NULL;
7204 }
7205 }
7206 }
7207 }
7208
7209 return region;
7210 }
7211
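/* Callback for duplicate_eh_regions, used from move_sese_region_to_fn
   below: map label DECL of the source function to a fresh artificial
   label in the destination function, recording the pair in the tree_map
   hash table passed as DATA.  */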
7212 static tree
7213 new_label_mapper (tree decl, void *data)
7214 {
7215 htab_t hash = (htab_t) data;
7216 struct tree_map *m;
7217 void **slot;
7218
7219 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7220
7221 m = XNEW (struct tree_map);
7222 m->hash = DECL_UID (decl);
7223 m->base.from = decl;
7224 m->to = create_artificial_label (UNKNOWN_LOCATION);
7225 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7226 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7227 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7228
7229 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7230 gcc_assert (*slot == NULL);
7231
7232 *slot = m;
7233
7234 return m->to;
7235 }
7236
7237 /* Tree walker to replace the decls used inside value expressions by
7238 duplicates. */
7239
7240 static tree
7241 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7242 {
7243 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7244
7245 switch (TREE_CODE (*tp))
7246 {
7247 case VAR_DECL:
7248 case PARM_DECL:
7249 case RESULT_DECL:
7250 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7251 break;
7252 default:
7253 break;
7254 }
7255
7256 if (IS_TYPE_OR_DECL_P (*tp))
7257 *walk_subtrees = false;
7258
7259 return NULL;
7260 }
7261
7262 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7263 subblocks. */
7264
7265 static void
7266 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7267 tree to_context)
7268 {
7269 tree *tp, t;
7270
7271 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7272 {
7273 t = *tp;
7274 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7275 continue;
7276 replace_by_duplicate_decl (&t, vars_map, to_context);
7277 if (t != *tp)
7278 {
7279 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7280 {
7281 tree x = DECL_VALUE_EXPR (*tp);
7282 struct replace_decls_d rd = { vars_map, to_context };
7283 x = unshare_expr (x);
7284 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7285 SET_DECL_VALUE_EXPR (t, x);
7286 DECL_HAS_VALUE_EXPR_P (t) = 1;
7287 }
7288 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7289 *tp = t;
7290 }
7291 }
7292
7293 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7294 replace_block_vars_by_duplicates (block, vars_map, to_context);
7295 }
7296
7297 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7298 from FN1 to FN2. */
7299
7300 static void
7301 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7302 struct loop *loop)
7303 {
7304 /* Discard it from the old loop array. */
7305 (*get_loops (fn1))[loop->num] = NULL;
7306
7307 /* Place it in the new loop array, assigning it a new number. */
7308 loop->num = number_of_loops (fn2);
7309 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7310
7311 /* Recurse to children. */
7312 for (loop = loop->inner; loop; loop = loop->next)
7313 fixup_loop_arrays_after_move (fn1, fn2, loop);
7314 }
7315
7316 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7317 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7318
7319 DEBUG_FUNCTION void
7320 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7321 {
7322 basic_block bb;
7323 edge_iterator ei;
7324 edge e;
7325 bitmap bbs = BITMAP_ALLOC (NULL);
7326 int i;
7327
7328 gcc_assert (entry != NULL);
7329 gcc_assert (entry != exit);
7330 gcc_assert (bbs_p != NULL);
7331
7332 gcc_assert (bbs_p->length () > 0);
7333
7334 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7335 bitmap_set_bit (bbs, bb->index);
7336
7337 gcc_assert (bitmap_bit_p (bbs, entry->index));
7338 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7339
7340 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7341 {
7342 if (bb == entry)
7343 {
7344 gcc_assert (single_pred_p (entry));
7345 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7346 }
7347 else
7348 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7349 {
7350 e = ei_edge (ei);
7351 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7352 }
7353
7354 if (bb == exit)
7355 {
7356 gcc_assert (single_succ_p (exit));
7357 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7358 }
7359 else
7360 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7361 {
7362 e = ei_edge (ei);
7363 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7364 }
7365 }
7366
7367 BITMAP_FREE (bbs);
7368 }
7369
7370 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7371
7372 bool
7373 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7374 {
7375 bitmap release_names = (bitmap)data;
7376
7377 if (TREE_CODE (from) != SSA_NAME)
7378 return true;
7379
7380 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7381 return true;
7382 }
7383
7384 /* Return the LOOP_DIST_ALIAS call if one is present in BB, else NULL. */
7385
7386 static gimple *
7387 find_loop_dist_alias (basic_block bb)
7388 {
7389 gimple *g = last_stmt (bb);
7390 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7391 return NULL;
7392
7393 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7394 gsi_prev (&gsi);
7395 if (gsi_end_p (gsi))
7396 return NULL;
7397
7398 g = gsi_stmt (gsi);
7399 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7400 return g;
7401 return NULL;
7402 }
7403
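/* The shape being matched, as a sketch of the GIMPLE (identifiers
   invented):

     _6 = .LOOP_DIST_ALIAS (3, _5);
     if (_6 != 0) goto <versioned copy>; else goto <original loop>;

   i.e. an internal LOOP_DIST_ALIAS call immediately preceding the
   GIMPLE_COND that selects between the two loop versions.  */
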
7404 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7405 to VALUE and update any immediate uses of its LHS. */
7406
7407 void
7408 fold_loop_internal_call (gimple *g, tree value)
7409 {
7410 tree lhs = gimple_call_lhs (g);
7411 use_operand_p use_p;
7412 imm_use_iterator iter;
7413 gimple *use_stmt;
7414 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7415
7416 update_call_from_tree (&gsi, value);
7417 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7418 {
7419 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7420 SET_USE (use_p, value);
7421 update_stmt (use_stmt);
7422 }
7423 }
7424
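/* E.g. (sketch, continuing the example above): folding with
   VALUE = boolean_false_node turns

     _6 = .LOOP_DIST_ALIAS (3, _5);        _6 = 0;
     if (_6 != 0) goto ...;         ==>    if (0 != 0) goto ...;

   after which CFG cleanup can remove the now-dead loop version.  */
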
7425 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7426 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7427 single basic block in the original CFG and the new basic block is
7428 returned. DEST_CFUN must not have a CFG yet.
7429
7430 Note that the region need not be a pure SESE region. Blocks inside
7431 the region may contain calls to abort/exit. The only restriction
7432 is that ENTRY_BB should be the only entry point and it must
7433 dominate EXIT_BB.
7434
7435 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7436 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7437 to the new function.
7438
7439 All local variables referenced in the region are assumed to be in
7440 the corresponding BLOCK_VARS and unexpanded variable lists
7441 associated with DEST_CFUN.
7442
7443 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7444 reimplement move_sese_region_to_fn by duplicating the region rather than
7445 moving it. */
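/* As a sketch of the canonical consumer, OMP expansion: the body of

     #pragma omp parallel
       { ... }

   forms an SESE region in the parent; its blocks are transplanted into
   the freshly initialized CFG of the outlined child function, and back
   in the parent the region collapses to the single block returned here,
   which the caller then fills with the runtime call.  */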
7446
7447 basic_block
7448 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7449 basic_block exit_bb, tree orig_block)
7450 {
7451 vec<basic_block> bbs, dom_bbs;
7452 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7453 basic_block after, bb, *entry_pred, *exit_succ, abb;
7454 struct function *saved_cfun = cfun;
7455 int *entry_flag, *exit_flag;
7456 profile_probability *entry_prob, *exit_prob;
7457 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7458 edge e;
7459 edge_iterator ei;
7460 htab_t new_label_map;
7461 hash_map<void *, void *> *eh_map;
7462 struct loop *loop = entry_bb->loop_father;
7463 struct loop *loop0 = get_loop (saved_cfun, 0);
7464 struct move_stmt_d d;
7465
7466 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7467 region. */
7468 gcc_assert (entry_bb != exit_bb
7469 && (!exit_bb
7470 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7471
7472 /* Collect all the blocks in the region. Manually add ENTRY_BB
7473 because gather_blocks_in_sese_region will not add it. */
7474 bbs.create (0);
7475 bbs.safe_push (entry_bb);
7476 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7477
7478 if (flag_checking)
7479 verify_sese (entry_bb, exit_bb, &bbs);
7480
7481 /* The blocks that used to be dominated by something in BBS will now be
7482 dominated by the new block. */
7483 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7484 bbs.address (),
7485 bbs.length ());
7486
7487 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7488 the predecessor edges to ENTRY_BB and the successor edges to
7489 EXIT_BB so that we can re-attach them to the new basic block that
7490 will replace the region. */
7491 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7492 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7493 entry_flag = XNEWVEC (int, num_entry_edges);
7494 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7495 i = 0;
7496 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7497 {
7498 entry_prob[i] = e->probability;
7499 entry_flag[i] = e->flags;
7500 entry_pred[i++] = e->src;
7501 remove_edge (e);
7502 }
7503
7504 if (exit_bb)
7505 {
7506 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7507 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7508 exit_flag = XNEWVEC (int, num_exit_edges);
7509 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7510 i = 0;
7511 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7512 {
7513 exit_prob[i] = e->probability;
7514 exit_flag[i] = e->flags;
7515 exit_succ[i++] = e->dest;
7516 remove_edge (e);
7517 }
7518 }
7519 else
7520 {
7521 num_exit_edges = 0;
7522 exit_succ = NULL;
7523 exit_flag = NULL;
7524 exit_prob = NULL;
7525 }
7526
7527 /* Switch context to the child function to initialize DEST_FN's CFG. */
7528 gcc_assert (dest_cfun->cfg == NULL);
7529 push_cfun (dest_cfun);
7530
7531 init_empty_tree_cfg ();
7532
7533 /* Initialize EH information for the new function. */
7534 eh_map = NULL;
7535 new_label_map = NULL;
7536 if (saved_cfun->eh)
7537 {
7538 eh_region region = NULL;
7539 bool all = false;
7540
7541 FOR_EACH_VEC_ELT (bbs, i, bb)
7542 {
7543 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7544 if (all)
7545 break;
7546 }
7547
7548 init_eh_for_function ();
7549 if (region != NULL || all)
7550 {
7551 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7552 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7553 new_label_mapper, new_label_map);
7554 }
7555 }
7556
7557 /* Initialize an empty loop tree. */
7558 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7559 init_loops_structure (dest_cfun, loops, 1);
7560 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7561 set_loops_for_fn (dest_cfun, loops);
7562
7563 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7564
7565 /* Move the outlined loop tree part. */
7566 num_nodes = bbs.length ();
7567 FOR_EACH_VEC_ELT (bbs, i, bb)
7568 {
7569 if (bb->loop_father->header == bb)
7570 {
7571 struct loop *this_loop = bb->loop_father;
7572 struct loop *outer = loop_outer (this_loop);
7573 if (outer == loop
7574 /* If the SESE region contains some bbs ending with
7575 a noreturn call, those are considered to belong
7576 to the outermost loop in saved_cfun, rather than
7577 the entry_bb's loop_father. */
7578 || outer == loop0)
7579 {
7580 if (outer != loop)
7581 num_nodes -= this_loop->num_nodes;
7582 flow_loop_tree_node_remove (bb->loop_father);
7583 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7584 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7585 }
7586 }
7587 else if (bb->loop_father == loop0 && loop0 != loop)
7588 num_nodes--;
7589
7590 /* Remove loop exits from the outlined region. */
7591 if (loops_for_fn (saved_cfun)->exits)
7592 FOR_EACH_EDGE (e, ei, bb->succs)
7593 {
7594 struct loops *l = loops_for_fn (saved_cfun);
7595 loop_exit **slot
7596 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7597 NO_INSERT);
7598 if (slot)
7599 l->exits->clear_slot (slot);
7600 }
7601 }
7602
7603 /* Adjust the number of blocks in the tree root of the outlined part. */
7604 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7605
7606 /* Set up a mapping to be used by move_block_to_fn. */
7607 loop->aux = current_loops->tree_root;
7608 loop0->aux = current_loops->tree_root;
7609
7610 /* Fix up orig_loop_num. If the loop referenced in it has been moved
7611 to dest_cfun, update the orig_loop_num field, otherwise clear it. */
7612 struct loop *dloop;
7613 signed char *moved_orig_loop_num = NULL;
7614 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7615 if (dloop->orig_loop_num)
7616 {
7617 if (moved_orig_loop_num == NULL)
7618 moved_orig_loop_num
7619 = XCNEWVEC (signed char, vec_safe_length (larray));
7620 if ((*larray)[dloop->orig_loop_num] != NULL
7621 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7622 {
7623 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7624 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7625 moved_orig_loop_num[dloop->orig_loop_num]++;
7626 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7627 }
7628 else
7629 {
7630 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7631 dloop->orig_loop_num = 0;
7632 }
7633 }
7634 pop_cfun ();
7635
7636 if (moved_orig_loop_num)
7637 {
7638 FOR_EACH_VEC_ELT (bbs, i, bb)
7639 {
7640 gimple *g = find_loop_dist_alias (bb);
7641 if (g == NULL)
7642 continue;
7643
7644 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7645 gcc_assert (orig_loop_num
7646 && (unsigned) orig_loop_num < vec_safe_length (larray));
7647 if (moved_orig_loop_num[orig_loop_num] == 2)
7648 {
7649 /* If we have moved both loops with this orig_loop_num into
7650 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7651 too, update the first argument. */
7652 gcc_assert ((*larray)[orig_loop_num] != NULL
7653 && (get_loop (saved_cfun, orig_loop_num)
7654 == NULL));
7655 tree t = build_int_cst (integer_type_node,
7656 (*larray)[orig_loop_num]->num);
7657 gimple_call_set_arg (g, 0, t);
7658 update_stmt (g);
7659 /* Make sure the following loop will not update it. */
7660 moved_orig_loop_num[orig_loop_num] = 0;
7661 }
7662 else
7663 /* Otherwise at least one of the loops stayed in saved_cfun.
7664 Remove the LOOP_DIST_ALIAS call. */
7665 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7666 }
7667 FOR_EACH_BB_FN (bb, saved_cfun)
7668 {
7669 gimple *g = find_loop_dist_alias (bb);
7670 if (g == NULL)
7671 continue;
7672 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7673 gcc_assert (orig_loop_num
7674 && (unsigned) orig_loop_num < vec_safe_length (larray));
7675 if (moved_orig_loop_num[orig_loop_num])
7676 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least
7677 one of the corresponding loops was moved, remove it. */
7678 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7679 }
7680 XDELETEVEC (moved_orig_loop_num);
7681 }
7682 ggc_free (larray);
7683
7684 /* Move blocks from BBS into DEST_CFUN. */
7685 gcc_assert (bbs.length () >= 2);
7686 after = dest_cfun->cfg->x_entry_block_ptr;
7687 hash_map<tree, tree> vars_map;
7688
7689 memset (&d, 0, sizeof (d));
7690 d.orig_block = orig_block;
7691 d.new_block = DECL_INITIAL (dest_cfun->decl);
7692 d.from_context = cfun->decl;
7693 d.to_context = dest_cfun->decl;
7694 d.vars_map = &vars_map;
7695 d.new_label_map = new_label_map;
7696 d.eh_map = eh_map;
7697 d.remap_decls_p = true;
7698
7699 if (gimple_in_ssa_p (cfun))
7700 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7701 {
7702 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7703 set_ssa_default_def (dest_cfun, arg, narg);
7704 vars_map.put (arg, narg);
7705 }
7706
7707 FOR_EACH_VEC_ELT (bbs, i, bb)
7708 {
7709 /* No need to update edge counts on the last block. It has
7710 already been updated earlier when we detached the region from
7711 the original CFG. */
7712 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7713 after = bb;
7714 }
7715
7716 loop->aux = NULL;
7717 loop0->aux = NULL;
7718 /* Loop sizes are no longer correct, fix them up. */
7719 loop->num_nodes -= num_nodes;
7720 for (struct loop *outer = loop_outer (loop);
7721 outer; outer = loop_outer (outer))
7722 outer->num_nodes -= num_nodes;
7723 loop0->num_nodes -= bbs.length () - num_nodes;
7724
7725 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7726 {
7727 struct loop *aloop;
7728 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7729 if (aloop != NULL)
7730 {
7731 if (aloop->simduid)
7732 {
7733 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7734 d.to_context);
7735 dest_cfun->has_simduid_loops = true;
7736 }
7737 if (aloop->force_vectorize)
7738 dest_cfun->has_force_vectorize_loops = true;
7739 }
7740 }
7741
7742 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7743 if (orig_block)
7744 {
7745 tree block;
7746 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7747 == NULL_TREE);
7748 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7749 = BLOCK_SUBBLOCKS (orig_block);
7750 for (block = BLOCK_SUBBLOCKS (orig_block);
7751 block; block = BLOCK_CHAIN (block))
7752 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7753 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7754 }
7755
7756 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7757 &vars_map, dest_cfun->decl);
7758
7759 if (new_label_map)
7760 htab_delete (new_label_map);
7761 if (eh_map)
7762 delete eh_map;
7763
7764 if (gimple_in_ssa_p (cfun))
7765 {
7766 /* We need to release SSA names in a defined order, so first find them,
7767 and then iterate in ascending version order. */
7768 bitmap release_names = BITMAP_ALLOC (NULL);
7769 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7770 bitmap_iterator bi;
7771 unsigned i;
7772 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7773 release_ssa_name (ssa_name (i));
7774 BITMAP_FREE (release_names);
7775 }
7776
7777 /* Rewire the entry and exit blocks. The successor to the entry
7778 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7779 the child function. Similarly, the predecessor of DEST_FN's
7780 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7781 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7782 various CFG manipulation functions get to the right CFG.
7783
7784 FIXME, this is silly. The CFG ought to become a parameter to
7785 these helpers. */
7786 push_cfun (dest_cfun);
7787 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7788 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7789 if (exit_bb)
7790 {
7791 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7792 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7793 }
7794 else
7795 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7796 pop_cfun ();
7797
7798 /* Back in the original function, the SESE region has disappeared,
7799 create a new basic block in its place. */
7800 bb = create_empty_bb (entry_pred[0]);
7801 if (current_loops)
7802 add_bb_to_loop (bb, loop);
7803 for (i = 0; i < num_entry_edges; i++)
7804 {
7805 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7806 e->probability = entry_prob[i];
7807 }
7808
7809 for (i = 0; i < num_exit_edges; i++)
7810 {
7811 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7812 e->probability = exit_prob[i];
7813 }
7814
7815 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7816 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7817 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7818 dom_bbs.release ();
7819
7820 if (exit_bb)
7821 {
7822 free (exit_prob);
7823 free (exit_flag);
7824 free (exit_succ);
7825 }
7826 free (entry_prob);
7827 free (entry_flag);
7828 free (entry_pred);
7829 bbs.release ();
7830
7831 return bb;
7832 }
7833
7834 /* Dump default def DEF to file FILE using FLAGS and indentation
7835 SPC. */
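/* For illustration (output shape inferred from the code below, not a
   verbatim dump): for a parameter 'int x' whose default definition is
   the SSA name x_1(D), this prints a line of the form

     int x_1(D) = x;

   preceded by whatever SSA info dump_ssaname_info_to_file emits.  */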
7836
7837 static void
7838 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7839 {
7840 for (int i = 0; i < spc; ++i)
7841 fprintf (file, " ");
7842 dump_ssaname_info_to_file (file, def, spc);
7843
7844 print_generic_expr (file, TREE_TYPE (def), flags);
7845 fprintf (file, " ");
7846 print_generic_expr (file, def, flags);
7847 fprintf (file, " = ");
7848 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7849 fprintf (file, ";\n");
7850 }
7851
7852 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
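/* For illustration (an assumed example, not from the GCC sources): for a
   declaration like

     __attribute__((no_sanitize ("address", "undefined"))) void f (void);

   the attribute value is a bitmask of SANITIZE_* flags, which this
   renders as the matching sanitizer names joined by " | ", e.g.
   something like 'address | undefined', together with any
   sub-sanitizers implied by the mask.  */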
7853
7854 static void
7855 print_no_sanitize_attr_value (FILE *file, tree value)
7856 {
7857 unsigned int flags = tree_to_uhwi (value);
7858 bool first = true;
7859 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7860 {
7861 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7862 {
7863 if (!first)
7864 fprintf (file, " | ");
7865 fprintf (file, "%s", sanitizer_opts[i].name);
7866 first = false;
7867 }
7868 }
7869 }
7870
7871 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7872 dumpfile.h). */
7873
7874 void
7875 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7876 {
7877 tree arg, var, old_current_fndecl = current_function_decl;
7878 struct function *dsf;
7879 bool ignore_topmost_bind = false, any_var = false;
7880 basic_block bb;
7881 tree chain;
7882 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7883 && decl_is_tm_clone (fndecl));
7884 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7885
7886 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7887 {
7888 fprintf (file, "__attribute__((");
7889
7890 bool first = true;
7891 tree chain;
7892 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7893 first = false, chain = TREE_CHAIN (chain))
7894 {
7895 if (!first)
7896 fprintf (file, ", ");
7897
7898 tree name = get_attribute_name (chain);
7899 print_generic_expr (file, name, dump_flags);
7900 if (TREE_VALUE (chain) != NULL_TREE)
7901 {
7902 fprintf (file, " (");
7903
7904 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7905 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7906 else
7907 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7908 fprintf (file, ")");
7909 }
7910 }
7911
7912 fprintf (file, "))\n");
7913 }
7914
7915 current_function_decl = fndecl;
7916 if (flags & TDF_GIMPLE)
7917 {
7918 static bool hotness_bb_param_printed = false;
7919 if (profile_info != NULL
7920 && !hotness_bb_param_printed)
7921 {
7922 hotness_bb_param_printed = true;
7923 fprintf (file,
7924 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
7925 " */\n", get_hot_bb_threshold ());
7926 }
7927
7928 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7929 dump_flags | TDF_SLIM);
7930 fprintf (file, " __GIMPLE (%s",
7931 (fun->curr_properties & PROP_ssa) ? "ssa"
7932 : (fun->curr_properties & PROP_cfg) ? "cfg"
7933 : "");
7934
7935 if (cfun->cfg)
7936 {
7937 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7938 if (bb->count.initialized_p ())
7939 fprintf (file, ",%s(%" PRIu64 ")",
7940 profile_quality_as_string (bb->count.quality ()),
7941 bb->count.value ());
7942 fprintf (file, ")\n%s (", function_name (fun));
7943 }
7944 }
7945 else
7946 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7947
7948 arg = DECL_ARGUMENTS (fndecl);
7949 while (arg)
7950 {
7951 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7952 fprintf (file, " ");
7953 print_generic_expr (file, arg, dump_flags);
7954 if (DECL_CHAIN (arg))
7955 fprintf (file, ", ");
7956 arg = DECL_CHAIN (arg);
7957 }
7958 fprintf (file, ")\n");
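/* For illustration (the shape follows the printing code above; the
   values are made up): with TDF_GIMPLE and an SSA function, the header
   printed so far looks something like

     int __GIMPLE (ssa,guessed_local(1073741824))
     foo (int x)

   while the non-GIMPLE path prints 'foo (int x)', with '[tm-clone] '
   inserted after the name for transactional-memory clones.  */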
7959
7960 dsf = DECL_STRUCT_FUNCTION (fndecl);
7961 if (dsf && (flags & TDF_EH))
7962 dump_eh_tree (file, dsf);
7963
7964 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7965 {
7966 dump_node (fndecl, TDF_SLIM | flags, file);
7967 current_function_decl = old_current_fndecl;
7968 return;
7969 }
7970
7971 /* When GIMPLE is lowered, the variables are no longer available in
7972 BIND_EXPRs, so display them separately. */
7973 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7974 {
7975 unsigned ix;
7976 ignore_topmost_bind = true;
7977
7978 fprintf (file, "{\n");
7979 if (gimple_in_ssa_p (fun)
7980 && (flags & TDF_ALIAS))
7981 {
7982 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7983 arg = DECL_CHAIN (arg))
7984 {
7985 tree def = ssa_default_def (fun, arg);
7986 if (def)
7987 dump_default_def (file, def, 2, flags);
7988 }
7989
7990 tree res = DECL_RESULT (fun->decl);
7991 if (res != NULL_TREE
7992 && DECL_BY_REFERENCE (res))
7993 {
7994 tree def = ssa_default_def (fun, res);
7995 if (def)
7996 dump_default_def (file, def, 2, flags);
7997 }
7998
7999 tree static_chain = fun->static_chain_decl;
8000 if (static_chain != NULL_TREE)
8001 {
8002 tree def = ssa_default_def (fun, static_chain);
8003 if (def)
8004 dump_default_def (file, def, 2, flags);
8005 }
8006 }
8007
8008 if (!vec_safe_is_empty (fun->local_decls))
8009 FOR_EACH_LOCAL_DECL (fun, ix, var)
8010 {
8011 print_generic_decl (file, var, flags);
8012 fprintf (file, "\n");
8013
8014 any_var = true;
8015 }
8016
8017 tree name;
8018
8019 if (gimple_in_ssa_p (cfun))
8020 FOR_EACH_SSA_NAME (ix, name, cfun)
8021 {
8022 if (!SSA_NAME_VAR (name))
8023 {
8024 fprintf (file, " ");
8025 print_generic_expr (file, TREE_TYPE (name), flags);
8026 fprintf (file, " ");
8027 print_generic_expr (file, name, flags);
8028 fprintf (file, ";\n");
8029
8030 any_var = true;
8031 }
8032 }
8033 }
8034
8035 if (fun && fun->decl == fndecl
8036 && fun->cfg
8037 && basic_block_info_for_fn (fun))
8038 {
8039 /* If the CFG has been built, emit a CFG-based dump. */
8040 if (!ignore_topmost_bind)
8041 fprintf (file, "{\n");
8042
8043 if (any_var && n_basic_blocks_for_fn (fun))
8044 fprintf (file, "\n");
8045
8046 FOR_EACH_BB_FN (bb, fun)
8047 dump_bb (file, bb, 2, flags);
8048
8049 fprintf (file, "}\n");
8050 }
8051 else if (fun->curr_properties & PROP_gimple_any)
8052 {
8053 /* The function is now in GIMPLE form but the CFG has not been
8054 built yet. Emit the single sequence of GIMPLE statements
8055 that make up its body. */
8056 gimple_seq body = gimple_body (fndecl);
8057
8058 if (gimple_seq_first_stmt (body)
8059 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8060 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8061 print_gimple_seq (file, body, 0, flags);
8062 else
8063 {
8064 if (!ignore_topmost_bind)
8065 fprintf (file, "{\n");
8066
8067 if (any_var)
8068 fprintf (file, "\n");
8069
8070 print_gimple_seq (file, body, 2, flags);
8071 fprintf (file, "}\n");
8072 }
8073 }
8074 else
8075 {
8076 int indent;
8077
8078 /* Make a tree based dump. */
8079 chain = DECL_SAVED_TREE (fndecl);
8080 if (chain && TREE_CODE (chain) == BIND_EXPR)
8081 {
8082 if (ignore_topmost_bind)
8083 {
8084 chain = BIND_EXPR_BODY (chain);
8085 indent = 2;
8086 }
8087 else
8088 indent = 0;
8089 }
8090 else
8091 {
8092 if (!ignore_topmost_bind)
8093 {
8094 fprintf (file, "{\n");
8095 /* No topmost bind, pretend it's ignored for later. */
8096 ignore_topmost_bind = true;
8097 }
8098 indent = 2;
8099 }
8100
8101 if (any_var)
8102 fprintf (file, "\n");
8103
8104 print_generic_stmt_indented (file, chain, flags, indent);
8105 if (ignore_topmost_bind)
8106 fprintf (file, "}\n");
8107 }
8108
8109 if (flags & TDF_ENUMERATE_LOCALS)
8110 dump_enumerated_decls (file, flags);
8111 fprintf (file, "\n\n");
8112
8113 current_function_decl = old_current_fndecl;
8114 }
8115
8116 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8117
8118 DEBUG_FUNCTION void
8119 debug_function (tree fn, dump_flags_t flags)
8120 {
8121 dump_function_to_file (fn, stderr, flags);
8122 }
8123
8124
8125 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8126
8127 static void
8128 print_pred_bbs (FILE *file, basic_block bb)
8129 {
8130 edge e;
8131 edge_iterator ei;
8132
8133 FOR_EACH_EDGE (e, ei, bb->preds)
8134 fprintf (file, "bb_%d ", e->src->index);
8135 }
8136
8137
8138 /* Print on FILE the indexes for the successors of basic_block BB. */
8139
8140 static void
8141 print_succ_bbs (FILE *file, basic_block bb)
8142 {
8143 edge e;
8144 edge_iterator ei;
8145
8146 FOR_EACH_EDGE (e, ei, bb->succs)
8147 fprintf (file, "bb_%d ", e->dest->index);
8148 }
8149
8150 /* Print to FILE the basic block BB according to the VERBOSITY level. */
8151
8152 void
8153 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8154 {
8155 char *s_indent = (char *) alloca ((size_t) indent + 1);
8156 memset ((void *) s_indent, ' ', (size_t) indent);
8157 s_indent[indent] = '\0';
8158
8159 /* Print basic_block's header. */
8160 if (verbosity >= 2)
8161 {
8162 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8163 print_pred_bbs (file, bb);
8164 fprintf (file, "}, succs = {");
8165 print_succ_bbs (file, bb);
8166 fprintf (file, "})\n");
8167 }
8168
8169 /* Print basic_block's body. */
8170 if (verbosity >= 3)
8171 {
8172 fprintf (file, "%s {\n", s_indent);
8173 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8174 fprintf (file, "%s }\n", s_indent);
8175 }
8176 }
8177
8178 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8179
8180 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8181 VERBOSITY level, this outputs the contents of the loop or just its
8182 structure. */
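/* For illustration (values made up; the shape follows the fprintf calls
   below): a loop with header bb 3, latch bb 5 and a known bound prints a
   header line something like

     loop_1 (header = 3, latch = 5, niter = 99, upper_bound = 99)

   followed, at VERBOSITY >= 1, by the loop body between braces.  */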
8183
8184 static void
8185 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8186 {
8187 char *s_indent;
8188 basic_block bb;
8189
8190 if (loop == NULL)
8191 return;
8192
8193 s_indent = (char *) alloca ((size_t) indent + 1);
8194 memset ((void *) s_indent, ' ', (size_t) indent);
8195 s_indent[indent] = '\0';
8196
8197 /* Print loop's header. */
8198 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8199 if (loop->header)
8200 fprintf (file, "header = %d", loop->header->index);
8201 else
8202 {
8203 fprintf (file, "deleted)\n");
8204 return;
8205 }
8206 if (loop->latch)
8207 fprintf (file, ", latch = %d", loop->latch->index);
8208 else
8209 fprintf (file, ", multiple latches");
8210 fprintf (file, ", niter = ");
8211 print_generic_expr (file, loop->nb_iterations);
8212
8213 if (loop->any_upper_bound)
8214 {
8215 fprintf (file, ", upper_bound = ");
8216 print_decu (loop->nb_iterations_upper_bound, file);
8217 }
8218 if (loop->any_likely_upper_bound)
8219 {
8220 fprintf (file, ", likely_upper_bound = ");
8221 print_decu (loop->nb_iterations_likely_upper_bound, file);
8222 }
8223
8224 if (loop->any_estimate)
8225 {
8226 fprintf (file, ", estimate = ");
8227 print_decu (loop->nb_iterations_estimate, file);
8228 }
8229 if (loop->unroll)
8230 fprintf (file, ", unroll = %d", loop->unroll);
8231 fprintf (file, ")\n");
8232
8233 /* Print loop's body. */
8234 if (verbosity >= 1)
8235 {
8236 fprintf (file, "%s{\n", s_indent);
8237 FOR_EACH_BB_FN (bb, cfun)
8238 if (bb->loop_father == loop)
8239 print_loops_bb (file, bb, indent, verbosity);
8240
8241 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8242 fprintf (file, "%s}\n", s_indent);
8243 }
8244 }
8245
8246 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8247 spaces. Depending on the VERBOSITY level, this outputs the contents
8248 of the loop or just its structure. */
8249
8250 static void
8251 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8252 int verbosity)
8253 {
8254 if (loop == NULL)
8255 return;
8256
8257 print_loop (file, loop, indent, verbosity);
8258 print_loop_and_siblings (file, loop->next, indent, verbosity);
8259 }
8260
8261 /* Follow a CFG edge from the entry point of the program, and on entry
8262 to a loop, pretty print the loop structure on FILE. */
8263
8264 void
8265 print_loops (FILE *file, int verbosity)
8266 {
8267 basic_block bb;
8268
8269 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8270 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8271 if (bb && bb->loop_father)
8272 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8273 }
8274
8275 /* Dump a loop. */
8276
8277 DEBUG_FUNCTION void
8278 debug (struct loop &ref)
8279 {
8280 print_loop (stderr, &ref, 0, /*verbosity*/0);
8281 }
8282
8283 DEBUG_FUNCTION void
8284 debug (struct loop *ptr)
8285 {
8286 if (ptr)
8287 debug (*ptr);
8288 else
8289 fprintf (stderr, "<nil>\n");
8290 }
8291
8292 /* Dump a loop verbosely. */
8293
8294 DEBUG_FUNCTION void
8295 debug_verbose (struct loop &ref)
8296 {
8297 print_loop (stderr, &ref, 0, /*verbosity*/3);
8298 }
8299
8300 DEBUG_FUNCTION void
8301 debug_verbose (struct loop *ptr)
8302 {
8303 if (ptr)
8304 debug_verbose (*ptr);
8305 else
8306 fprintf (stderr, "<nil>\n");
8307 }
8308
8309
8310 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
8311
8312 DEBUG_FUNCTION void
8313 debug_loops (int verbosity)
8314 {
8315 print_loops (stderr, verbosity);
8316 }
8317
8318 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8319
8320 DEBUG_FUNCTION void
8321 debug_loop (struct loop *loop, int verbosity)
8322 {
8323 print_loop (stderr, loop, 0, verbosity);
8324 }
8325
8326 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8327 level. */
8328
8329 DEBUG_FUNCTION void
8330 debug_loop_num (unsigned num, int verbosity)
8331 {
8332 debug_loop (get_loop (cfun, num), verbosity);
8333 }
8334
8335 /* Return true if BB ends with a call, possibly followed by some
8336 instructions that must stay with the call. Return false
8337 otherwise. */
8338
8339 static bool
8340 gimple_block_ends_with_call_p (basic_block bb)
8341 {
8342 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8343 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8344 }
8345
8346
8347 /* Return true if BB ends with a conditional branch. Return false
8348 otherwise. */
8349
8350 static bool
8351 gimple_block_ends_with_condjump_p (const_basic_block bb)
8352 {
8353 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8354 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8355 }
8356
8357
8358 /* Return true if statement T may terminate execution of BB in ways not
8359 explicitly represented in the CFG. */
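/* For illustration (an assumed example, not from the GCC sources): given

     extern void g (void);
     int f (void) { g (); return 1; }

   the call to g () might throw externally, longjmp or call exit, so it
   may terminate execution of f's body in a way the single fall-through
   path in the CFG does not show; this predicate returns true for it.  */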
8360
8361 bool
8362 stmt_can_terminate_bb_p (gimple *t)
8363 {
8364 tree fndecl = NULL_TREE;
8365 int call_flags = 0;
8366
8367 /* An EH exception not handled internally terminates execution of the
8368 whole function. */
8369 if (stmt_can_throw_external (cfun, t))
8370 return true;
8371
8372 /* NORETURN and LONGJMP calls already have an edge to exit.
8373 CONST and PURE calls do not need one.
8374 We don't currently check for CONST and PURE here, although
8375 it would be a good idea, because those attributes are
8376 figured out from the RTL in mark_constant_function, and
8377 the counter incrementation code from -fprofile-arcs
8378 leads to different results from -fbranch-probabilities. */
8379 if (is_gimple_call (t))
8380 {
8381 fndecl = gimple_call_fndecl (t);
8382 call_flags = gimple_call_flags (t);
8383 }
8384
8385 if (is_gimple_call (t)
8386 && fndecl
8387 && fndecl_built_in_p (fndecl)
8388 && (call_flags & ECF_NOTHROW)
8389 && !(call_flags & ECF_RETURNS_TWICE)
8390 /* fork () doesn't really return twice, but wrapping it in
8391 __gcov_fork (), which calls __gcov_flush () and clears the
8392 counters before forking, has the same effect as returning
8393 twice. Force a fake edge. */
8394 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8395 return false;
8396
8397 if (is_gimple_call (t))
8398 {
8399 edge_iterator ei;
8400 edge e;
8401 basic_block bb;
8402
8403 if (call_flags & (ECF_PURE | ECF_CONST)
8404 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8405 return false;
8406
8407 /* A function call may do a longjmp, terminate the program or do other things.
8408 Special-case noreturn calls that have non-abnormal edges out, as in this
8409 case the fact is sufficiently represented by the lack of edges out of T. */
8410 if (!(call_flags & ECF_NORETURN))
8411 return true;
8412
8413 bb = gimple_bb (t);
8414 FOR_EACH_EDGE (e, ei, bb->succs)
8415 if ((e->flags & EDGE_FAKE) == 0)
8416 return true;
8417 }
8418
8419 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8420 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8421 return true;
8422
8423 return false;
8424 }
8425
8426
8427 /* Add fake edges to the function exit for any non-constant and
8428 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8429 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8430 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8431 that were split.
8432
8433 The goal is to expose cases in which entering a basic block does
8434 not imply that all subsequent instructions must be executed. */
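/* For illustration (an assumed example, not from the GCC sources): in

     extern void log_and_maybe_exit (int);
     int f (int x) { log_and_maybe_exit (x); return x + 1; }

   nothing proves that log_and_maybe_exit () returns, so a fake
   (EDGE_FAKE) edge from its block to EXIT keeps the block profiler's
   spanning-tree computation correct even if the call never returns.  */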
8435
8436 static int
8437 gimple_flow_call_edges_add (sbitmap blocks)
8438 {
8439 int i;
8440 int blocks_split = 0;
8441 int last_bb = last_basic_block_for_fn (cfun);
8442 bool check_last_block = false;
8443
8444 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8445 return 0;
8446
8447 if (! blocks)
8448 check_last_block = true;
8449 else
8450 check_last_block = bitmap_bit_p (blocks,
8451 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8452
8453 /* In the last basic block, before epilogue generation, there will be
8454 a fallthru edge to EXIT. Special care is required if the last stmt
8455 of the last basic block is a call because make_edge folds duplicate
8456 edges, which would result in the fallthru edge also being marked
8457 fake, which would result in the fallthru edge being removed by
8458 remove_fake_edges, which would result in an invalid CFG.
8459
8460 Moreover, we can't elide the outgoing fake edge, since the block
8461 profiler needs to take this into account in order to solve the minimal
8462 spanning tree in the case that the call doesn't return.
8463
8464 Handle this by adding a dummy instruction in a new last basic block. */
8465 if (check_last_block)
8466 {
8467 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8468 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8469 gimple *t = NULL;
8470
8471 if (!gsi_end_p (gsi))
8472 t = gsi_stmt (gsi);
8473
8474 if (t && stmt_can_terminate_bb_p (t))
8475 {
8476 edge e;
8477
8478 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8479 if (e)
8480 {
8481 gsi_insert_on_edge (e, gimple_build_nop ());
8482 gsi_commit_edge_inserts ();
8483 }
8484 }
8485 }
8486
8487 /* Now add fake edges to the function exit for any non-constant
8488 calls since there is no way that we can determine if they will
8489 return or not... */
8490 for (i = 0; i < last_bb; i++)
8491 {
8492 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8493 gimple_stmt_iterator gsi;
8494 gimple *stmt, *last_stmt;
8495
8496 if (!bb)
8497 continue;
8498
8499 if (blocks && !bitmap_bit_p (blocks, i))
8500 continue;
8501
8502 gsi = gsi_last_nondebug_bb (bb);
8503 if (!gsi_end_p (gsi))
8504 {
8505 last_stmt = gsi_stmt (gsi);
8506 do
8507 {
8508 stmt = gsi_stmt (gsi);
8509 if (stmt_can_terminate_bb_p (stmt))
8510 {
8511 edge e;
8512
8513 /* The handling above of the final block before the
8514 epilogue should be enough to verify that there is
8515 no edge to the exit block in the CFG already.
8516 Calling make_edge in such a case would cause us to
8517 mark that edge as fake and remove it later. */
8518 if (flag_checking && stmt == last_stmt)
8519 {
8520 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8521 gcc_assert (e == NULL);
8522 }
8523
8524 /* Note that the following may create a new basic block
8525 and renumber the existing basic blocks. */
8526 if (stmt != last_stmt)
8527 {
8528 e = split_block (bb, stmt);
8529 if (e)
8530 blocks_split++;
8531 }
8532 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8533 e->probability = profile_probability::guessed_never ();
8534 }
8535 gsi_prev (&gsi);
8536 }
8537 while (!gsi_end_p (gsi));
8538 }
8539 }
8540
8541 if (blocks_split)
8542 checking_verify_flow_info ();
8543
8544 return blocks_split;
8545 }
8546
8547 /* Removes edge E and all the blocks dominated by it, and updates dominance
8548 information. The IL in E->src needs to be updated separately.
8549 If dominance info is not available, only the edge E is removed. */
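/* For illustration (an assumed CFG, not from the GCC sources): with edges
   A->B, A->C and B->C, removing A->B leaves B with no other predecessor;
   B is the only block dominated by B, so B is deleted, and C keeps A as
   its immediate dominator.  If C were instead reachable only through B,
   both B and C would be removed.  */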
8550
8551 void
8552 remove_edge_and_dominated_blocks (edge e)
8553 {
8554 vec<basic_block> bbs_to_remove = vNULL;
8555 vec<basic_block> bbs_to_fix_dom = vNULL;
8556 edge f;
8557 edge_iterator ei;
8558 bool none_removed = false;
8559 unsigned i;
8560 basic_block bb, dbb;
8561 bitmap_iterator bi;
8562
8563 /* If we are removing a path inside a non-root loop, that may change
8564 loop ownership of blocks or remove loops. Mark loops for fixup. */
8565 if (current_loops
8566 && loop_outer (e->src->loop_father) != NULL
8567 && e->src->loop_father == e->dest->loop_father)
8568 loops_state_set (LOOPS_NEED_FIXUP);
8569
8570 if (!dom_info_available_p (CDI_DOMINATORS))
8571 {
8572 remove_edge (e);
8573 return;
8574 }
8575
8576 /* No updating is needed for edges to exit. */
8577 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8578 {
8579 if (cfgcleanup_altered_bbs)
8580 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8581 remove_edge (e);
8582 return;
8583 }
8584
8585 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8586 that is not dominated by E->dest, then this set is empty. Otherwise,
8587 all the basic blocks dominated by E->dest are removed.
8588
8589 Also, to DF_IDOM we store the immediate dominators of the blocks in
8590 the dominance frontier of E (i.e., of the successors of the
8591 removed blocks, if there are any, and of E->dest otherwise). */
8592 FOR_EACH_EDGE (f, ei, e->dest->preds)
8593 {
8594 if (f == e)
8595 continue;
8596
8597 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8598 {
8599 none_removed = true;
8600 break;
8601 }
8602 }
8603
8604 auto_bitmap df, df_idom;
8605 if (none_removed)
8606 bitmap_set_bit (df_idom,
8607 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8608 else
8609 {
8610 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8611 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8612 {
8613 FOR_EACH_EDGE (f, ei, bb->succs)
8614 {
8615 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8616 bitmap_set_bit (df, f->dest->index);
8617 }
8618 }
8619 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8620 bitmap_clear_bit (df, bb->index);
8621
8622 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8623 {
8624 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8625 bitmap_set_bit (df_idom,
8626 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8627 }
8628 }
8629
8630 if (cfgcleanup_altered_bbs)
8631 {
8632 /* Record the set of the altered basic blocks. */
8633 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8634 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8635 }
8636
8637 /* Remove E and the cancelled blocks. */
8638 if (none_removed)
8639 remove_edge (e);
8640 else
8641 {
8642 /* Walk backwards so as to get a chance to substitute all
8643 released DEFs into debug stmts. See
8644 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8645 details. */
8646 for (i = bbs_to_remove.length (); i-- > 0; )
8647 delete_basic_block (bbs_to_remove[i]);
8648 }
8649
8650 /* Update the dominance information. The immediate dominator may change only
8651 for blocks whose immediate dominator belongs to DF_IDOM:
8652
8653 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8654 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8655 Z dominates X after the removal. Before removal, there exists a path P
8656 from Y to X that avoids Z. Let F be the last edge on P that is
8657 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8658 dominates W, and because of P, Z does not dominate W), and W belongs to
8659 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8660 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8661 {
8662 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8663 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8664 dbb;
8665 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8666 bbs_to_fix_dom.safe_push (dbb);
8667 }
8668
8669 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8670
8671 bbs_to_remove.release ();
8672 bbs_to_fix_dom.release ();
8673 }
8674
8675 /* Purge dead EH edges from basic block BB. */
8676
8677 bool
8678 gimple_purge_dead_eh_edges (basic_block bb)
8679 {
8680 bool changed = false;
8681 edge e;
8682 edge_iterator ei;
8683 gimple *stmt = last_stmt (bb);
8684
8685 if (stmt && stmt_can_throw_internal (cfun, stmt))
8686 return false;
8687
8688 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8689 {
8690 if (e->flags & EDGE_EH)
8691 {
8692 remove_edge_and_dominated_blocks (e);
8693 changed = true;
8694 }
8695 else
8696 ei_next (&ei);
8697 }
8698
8699 return changed;
8700 }
8701
8702 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8703
8704 bool
8705 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8706 {
8707 bool changed = false;
8708 unsigned i;
8709 bitmap_iterator bi;
8710
8711 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8712 {
8713 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8714
8715 /* Earlier gimple_purge_dead_eh_edges could have removed
8716 this basic block already. */
8717 gcc_assert (bb || changed);
8718 if (bb != NULL)
8719 changed |= gimple_purge_dead_eh_edges (bb);
8720 }
8721
8722 return changed;
8723 }
8724
8725 /* Purge dead abnormal call edges from basic block BB. */
8726
8727 bool
8728 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8729 {
8730 bool changed = false;
8731 edge e;
8732 edge_iterator ei;
8733 gimple *stmt = last_stmt (bb);
8734
8735 if (!cfun->has_nonlocal_label
8736 && !cfun->calls_setjmp)
8737 return false;
8738
8739 if (stmt && stmt_can_make_abnormal_goto (stmt))
8740 return false;
8741
8742 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8743 {
8744 if (e->flags & EDGE_ABNORMAL)
8745 {
8746 if (e->flags & EDGE_FALLTHRU)
8747 e->flags &= ~EDGE_ABNORMAL;
8748 else
8749 remove_edge_and_dominated_blocks (e);
8750 changed = true;
8751 }
8752 else
8753 ei_next (&ei);
8754 }
8755
8756 return changed;
8757 }
8758
8759 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8760
8761 bool
8762 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8763 {
8764 bool changed = false;
8765 unsigned i;
8766 bitmap_iterator bi;
8767
8768 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8769 {
8770 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8771
8772 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8773 this basic block already. */
8774 gcc_assert (bb || changed);
8775 if (bb != NULL)
8776 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8777 }
8778
8779 return changed;
8780 }
8781
8782 /* This function is called whenever a new edge is created or
8783 redirected. */
8784
8785 static void
8786 gimple_execute_on_growing_pred (edge e)
8787 {
8788 basic_block bb = e->dest;
8789
8790 if (!gimple_seq_empty_p (phi_nodes (bb)))
8791 reserve_phi_args_for_new_edge (bb);
8792 }
8793
8794 /* This function is called immediately before edge E is removed from
8795 the edge vector E->dest->preds. */
8796
8797 static void
8798 gimple_execute_on_shrinking_pred (edge e)
8799 {
8800 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8801 remove_phi_args (e);
8802 }
8803
8804 /*---------------------------------------------------------------------------
8805 Helper functions for Loop versioning
8806 ---------------------------------------------------------------------------*/
8807
8808 /* Adjust PHI nodes for 'first' basic block. 'second' basic block is a copy
8809 of 'first'. Both of them are dominated by 'new_head' basic block. When
8810 'new_head' was created by splitting 'second's incoming edge, the edge from
8811 'new_head' to 'second' received PHI arguments by split_edge(). Later, an
8812 additional edge 'e' was created to connect 'new_head' and 'first'. This
8813 routine now adds to edge 'e' the same PHI args that the 'new_head' to
8814 'second' edge received as part of the edge splitting. */
8815
8816 static void
8817 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8818 basic_block new_head, edge e)
8819 {
8820 gphi *phi1, *phi2;
8821 gphi_iterator psi1, psi2;
8822 tree def;
8823 edge e2 = find_edge (new_head, second);
8824
8825 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8826 edge, we should always have an edge from NEW_HEAD to SECOND. */
8827 gcc_assert (e2 != NULL);
8828
8829 /* Browse all 'second' basic block phi nodes and add phi args to
8830 edge 'e' for 'first' head. PHI args are always in correct order. */
8831
8832 for (psi2 = gsi_start_phis (second),
8833 psi1 = gsi_start_phis (first);
8834 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8835 gsi_next (&psi2), gsi_next (&psi1))
8836 {
8837 phi1 = psi1.phi ();
8838 phi2 = psi2.phi ();
8839 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8840 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8841 }
8842 }
8843
8844
8845 /* Add an if-else statement to COND_BB with condition COND_EXPR.
8846 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8847 the destination of the ELSE part. */
8848
8849 static void
8850 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8851 basic_block second_head ATTRIBUTE_UNUSED,
8852 basic_block cond_bb, void *cond_e)
8853 {
8854 gimple_stmt_iterator gsi;
8855 gimple *new_cond_expr;
8856 tree cond_expr = (tree) cond_e;
8857 edge e0;
8858
8859 /* Build new conditional expr */
8860 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8861 NULL_TREE, NULL_TREE);
8862
8863 /* Add new cond in cond_bb. */
8864 gsi = gsi_last_bb (cond_bb);
8865 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8866
8867 /* Adjust edges appropriately to connect new head with first head
8868 as well as second head. */
8869 e0 = single_succ_edge (cond_bb);
8870 e0->flags &= ~EDGE_FALLTHRU;
8871 e0->flags |= EDGE_FALSE_VALUE;
8872 }
8873
8874
8875 /* Do book-keeping of basic block BB for the profile consistency checker.
8876 Store the counts in RECORD. */
8877 static void
8878 gimple_account_profile_record (basic_block bb,
8879 struct profile_record *record)
8880 {
8881 gimple_stmt_iterator i;
8882 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8883 {
8884 record->size
8885 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8886 if (bb->count.initialized_p ())
8887 record->time
8888 += estimate_num_insns (gsi_stmt (i),
8889 &eni_time_weights) * bb->count.to_gcov_type ();
8890 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8891 record->time
8892 += estimate_num_insns (gsi_stmt (i),
8893 &eni_time_weights) * bb->count.to_frequency (cfun);
8894 }
8895 }
8896
8897 struct cfg_hooks gimple_cfg_hooks = {
8898 "gimple",
8899 gimple_verify_flow_info,
8900 gimple_dump_bb, /* dump_bb */
8901 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8902 create_bb, /* create_basic_block */
8903 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8904 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8905 gimple_can_remove_branch_p, /* can_remove_branch_p */
8906 remove_bb, /* delete_basic_block */
8907 gimple_split_block, /* split_block */
8908 gimple_move_block_after, /* move_block_after */
8909 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8910 gimple_merge_blocks, /* merge_blocks */
8911 gimple_predict_edge, /* predict_edge */
8912 gimple_predicted_by_p, /* predicted_by_p */
8913 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8914 gimple_duplicate_bb, /* duplicate_block */
8915 gimple_split_edge, /* split_edge */
8916 gimple_make_forwarder_block, /* make_forwarder_block */
8917 NULL, /* tidy_fallthru_edge */
8918 NULL, /* force_nonfallthru */
8919 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8920 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8921 gimple_flow_call_edges_add, /* flow_call_edges_add */
8922 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8923 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8924 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8925 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8926 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8927 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8928 flush_pending_stmts, /* flush_pending_stmts */
8929 gimple_empty_block_p, /* block_empty_p */
8930 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8931 gimple_account_profile_record,
8932 };
8933
8934
8935 /* Split all critical edges. Split some extra (not necessarily critical) edges
8936 if FOR_EDGE_INSERTION_P is true. */
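/* For illustration (an assumed example, not from the GCC sources): in

     if (a)
       x = 1;
     y = x;

   the edge from the condition block straight to the join block is
   critical: its source has two successors and its destination has two
   predecessors, so a statement can only be inserted on that edge after
   splitting it into a new empty block.  */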
8937
8938 unsigned int
8939 split_critical_edges (bool for_edge_insertion_p /* = false */)
8940 {
8941 basic_block bb;
8942 edge e;
8943 edge_iterator ei;
8944
8945 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8946 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8947 mappings around the calls to split_edge. */
8948 start_recording_case_labels ();
8949 FOR_ALL_BB_FN (bb, cfun)
8950 {
8951 FOR_EACH_EDGE (e, ei, bb->succs)
8952 {
8953 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8954 split_edge (e);
8955 /* PRE inserts statements into edges and expects that, since
8956 split_critical_edges was run beforehand, committing edge
8957 insertions will not split more edges. In addition to critical
8958 edges we must therefore split edges whose destination cannot
8959 receive the insertion and whose source block ends in a control
8960 flow statement, such as RESX. Go ahead and split them too.
8961 This matches the logic in gimple_find_edge_insert_loc. */
8962 else if (for_edge_insertion_p
8963 && (!single_pred_p (e->dest)
8964 || !gimple_seq_empty_p (phi_nodes (e->dest))
8965 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8966 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8967 && !(e->flags & EDGE_ABNORMAL))
8968 {
8969 gimple_stmt_iterator gsi;
8970
8971 gsi = gsi_last_bb (e->src);
8972 if (!gsi_end_p (gsi)
8973 && stmt_ends_bb_p (gsi_stmt (gsi))
8974 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8975 && !gimple_call_builtin_p (gsi_stmt (gsi),
8976 BUILT_IN_RETURN)))
8977 split_edge (e);
8978 }
8979 }
8980 }
8981 end_recording_case_labels ();
8982 return 0;
8983 }
8984
8985 namespace {
8986
8987 const pass_data pass_data_split_crit_edges =
8988 {
8989 GIMPLE_PASS, /* type */
8990 "crited", /* name */
8991 OPTGROUP_NONE, /* optinfo_flags */
8992 TV_TREE_SPLIT_EDGES, /* tv_id */
8993 PROP_cfg, /* properties_required */
8994 PROP_no_crit_edges, /* properties_provided */
8995 0, /* properties_destroyed */
8996 0, /* todo_flags_start */
8997 0, /* todo_flags_finish */
8998 };
8999
9000 class pass_split_crit_edges : public gimple_opt_pass
9001 {
9002 public:
9003 pass_split_crit_edges (gcc::context *ctxt)
9004 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9005 {}
9006
9007 /* opt_pass methods: */
9008 virtual unsigned int execute (function *) { return split_critical_edges (); }
9009
9010 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9011 }; // class pass_split_crit_edges
9012
9013 } // anon namespace
9014
9015 gimple_opt_pass *
9016 make_pass_split_crit_edges (gcc::context *ctxt)
9017 {
9018 return new pass_split_crit_edges (ctxt);
9019 }
9020
9021
9022 /* Insert COND expression, which is a GIMPLE_COND, after STMT
9023 in basic block BB, splitting the block as appropriate and
9024 creating a new conditionally executed basic block.
9025 Update the profile so the new bb is visited with probability PROB.
9026 Return the created basic block. */
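/* For illustration (an assumed use, not from the GCC sources; 'a' and
   'b' stand for SSA-name trees already available at STMT):

     gcond *cmp = gimple_build_cond (LT_EXPR, a, b, NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cmp,
			 profile_probability::very_unlikely ());

   This splits BB after STMT, makes the condition 'a < b' end the first
   half, and returns the empty block executed only when it holds.  */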
9027 basic_block
9028 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9029 profile_probability prob)
9030 {
9031 edge fall = split_block (bb, stmt);
9032 gimple_stmt_iterator iter = gsi_last_bb (bb);
9033 basic_block new_bb;
9034
9035 /* Insert cond statement. */
9036 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9037 if (gsi_end_p (iter))
9038 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9039 else
9040 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9041
9042 /* Create conditionally executed block. */
9043 new_bb = create_empty_bb (bb);
9044 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9045 e->probability = prob;
9046 new_bb->count = e->count ();
9047 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9048
9049 /* Fix edge for split bb. */
9050 fall->flags = EDGE_FALSE_VALUE;
9051 fall->probability -= e->probability;
9052
9053 /* Update dominance info. */
9054 if (dom_info_available_p (CDI_DOMINATORS))
9055 {
9056 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9057 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9058 }
9059
9060 /* Update loop info. */
9061 if (current_loops)
9062 add_bb_to_loop (new_bb, bb->loop_father);
9063
9064 return new_bb;
9065 }
9066
9067 /* Build a ternary operation and gimplify it. Emit code before GSI.
9068 Return the gimple_val holding the result. */
9069
9070 tree
9071 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9072 tree type, tree a, tree b, tree c)
9073 {
9074 tree ret;
9075 location_t loc = gimple_location (gsi_stmt (*gsi));
9076
9077 ret = fold_build3_loc (loc, code, type, a, b, c);
9078 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9079 GSI_SAME_STMT);
9080 }
9081
9082 /* Build a binary operation and gimplify it. Emit code before GSI.
9083 Return the gimple_val holding the result. */
9084
9085 tree
9086 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9087 tree type, tree a, tree b)
9088 {
9089 tree ret;
9090
9091 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9092 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9093 GSI_SAME_STMT);
9094 }
9095
9096 /* Build a unary operation and gimplify it. Emit code before GSI.
9097 Return the gimple_val holding the result. */
9098
9099 tree
9100 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9101 tree a)
9102 {
9103 tree ret;
9104
9105 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9106 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9107 GSI_SAME_STMT);
9108 }
9109
9110
9111 \f
9112 /* Given a basic block B which ends with a conditional and has
9113 precisely two successors, determine which of the edges is taken if
9114 the conditional is true and which is taken if the conditional is
9115 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9116
9117 void
9118 extract_true_false_edges_from_block (basic_block b,
9119 edge *true_edge,
9120 edge *false_edge)
9121 {
9122 edge e = EDGE_SUCC (b, 0);
9123
9124 if (e->flags & EDGE_TRUE_VALUE)
9125 {
9126 *true_edge = e;
9127 *false_edge = EDGE_SUCC (b, 1);
9128 }
9129 else
9130 {
9131 *false_edge = e;
9132 *true_edge = EDGE_SUCC (b, 1);
9133 }
9134 }
9135
9136
9137 /* From a controlling predicate in the immediate dominator DOM of
9138 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9139 predicate evaluates to true and false and store them to
9140 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9141 they are non-NULL. Return true if the edges can be determined,
9142 otherwise return false. */
9143
9144 bool
9145 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9146 edge *true_controlled_edge,
9147 edge *false_controlled_edge)
9148 {
9149 basic_block bb = phiblock;
9150 edge true_edge, false_edge, tem;
9151 edge e0 = NULL, e1 = NULL;
9152
9153 /* We have to verify that one edge into the PHI node is dominated
9154 by the true edge of the predicate block and the other edge
9155 dominated by the false edge. This ensures that the PHI argument
9156 we are going to take is completely determined by the path we
9157 take from the predicate block.
9158 We can only use BB dominance checks below if the destination of
9159 the true/false edges are dominated by their edge, thus only
9160 have a single predecessor. */
9161 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9162 tem = EDGE_PRED (bb, 0);
9163 if (tem == true_edge
9164 || (single_pred_p (true_edge->dest)
9165 && (tem->src == true_edge->dest
9166 || dominated_by_p (CDI_DOMINATORS,
9167 tem->src, true_edge->dest))))
9168 e0 = tem;
9169 else if (tem == false_edge
9170 || (single_pred_p (false_edge->dest)
9171 && (tem->src == false_edge->dest
9172 || dominated_by_p (CDI_DOMINATORS,
9173 tem->src, false_edge->dest))))
9174 e1 = tem;
9175 else
9176 return false;
9177 tem = EDGE_PRED (bb, 1);
9178 if (tem == true_edge
9179 || (single_pred_p (true_edge->dest)
9180 && (tem->src == true_edge->dest
9181 || dominated_by_p (CDI_DOMINATORS,
9182 tem->src, true_edge->dest))))
9183 e0 = tem;
9184 else if (tem == false_edge
9185 || (single_pred_p (false_edge->dest)
9186 && (tem->src == false_edge->dest
9187 || dominated_by_p (CDI_DOMINATORS,
9188 tem->src, false_edge->dest))))
9189 e1 = tem;
9190 else
9191 return false;
9192 if (!e0 || !e1)
9193 return false;
9194
9195 if (true_controlled_edge)
9196 *true_controlled_edge = e0;
9197 if (false_controlled_edge)
9198 *false_controlled_edge = e1;
9199
9200 return true;
9201 }
9202
9203 /* Generate the operands *LHS and *RHS of a range test 'LHS code RHS'
9204 that determines whether INDEX is in the range [LOW, HIGH]. Insert
9205 any associated statements before the last statement of BB. */
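/* For illustration: with LOW == 3 and HIGH == 7, the test
   'INDEX >= 3 && INDEX <= 7' becomes the single unsigned comparison

     (unsigned) INDEX - 3 <= 4

   because values of INDEX below 3 wrap around to large unsigned
   numbers and therefore fail the comparison.  */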
9205
9206 void
9207 generate_range_test (basic_block bb, tree index, tree low, tree high,
9208 tree *lhs, tree *rhs)
9209 {
9210 tree type = TREE_TYPE (index);
9211 tree utype = unsigned_type_for (type);
9212
9213 low = fold_convert (utype, low);
9214 high = fold_convert (utype, high);
9215
9216 gimple_seq seq = NULL;
9217 index = gimple_convert (&seq, utype, index);
9218 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9219 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9220
9221 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9222 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9223 }
9224
9225 /* Return the basic block that belongs to label numbered INDEX
9226 of a switch statement. */
9227
9228 basic_block
9229 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9230 {
9231 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9232 }
9233
9234 /* Return the default basic block of a switch statement. */
9235
9236 basic_block
9237 gimple_switch_default_bb (function *ifun, gswitch *gs)
9238 {
9239 return gimple_switch_label_bb (ifun, gs, 0);
9240 }
9241
9242 /* Return the edge that belongs to label numbered INDEX
9243 of a switch statement. */
9244
9245 edge
9246 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9247 {
9248 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9249 }
9250
9251 /* Return the default edge of a switch statement. */
9252
9253 edge
9254 gimple_switch_default_edge (function *ifun, gswitch *gs)
9255 {
9256 return gimple_switch_edge (ifun, gs, 0);
9257 }
9258
9259
9260 /* Emit return warnings. */
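/* For illustration (assumed examples, not from the GCC sources): this
   pass warns that a 'noreturn' function does return for

     __attribute__((noreturn)) void f (void) { }

   and, under -Wreturn-type, that control reaches the end of a non-void
   function for

     int g (int x) { if (x) return 1; }  */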
9261
9262 namespace {
9263
9264 const pass_data pass_data_warn_function_return =
9265 {
9266 GIMPLE_PASS, /* type */
9267 "*warn_function_return", /* name */
9268 OPTGROUP_NONE, /* optinfo_flags */
9269 TV_NONE, /* tv_id */
9270 PROP_cfg, /* properties_required */
9271 0, /* properties_provided */
9272 0, /* properties_destroyed */
9273 0, /* todo_flags_start */
9274 0, /* todo_flags_finish */
9275 };
9276
9277 class pass_warn_function_return : public gimple_opt_pass
9278 {
9279 public:
9280 pass_warn_function_return (gcc::context *ctxt)
9281 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9282 {}
9283
9284 /* opt_pass methods: */
9285 virtual unsigned int execute (function *);
9286
9287 }; // class pass_warn_function_return
9288
9289 unsigned int
9290 pass_warn_function_return::execute (function *fun)
9291 {
9292 location_t location;
9293 gimple *last;
9294 edge e;
9295 edge_iterator ei;
9296
9297 if (!targetm.warn_func_return (fun->decl))
9298 return 0;
9299
9300 /* If we have a path to EXIT, then we do return. */
9301 if (TREE_THIS_VOLATILE (fun->decl)
9302 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9303 {
9304 location = UNKNOWN_LOCATION;
9305 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9306 (e = ei_safe_edge (ei)); )
9307 {
9308 last = last_stmt (e->src);
9309 if ((gimple_code (last) == GIMPLE_RETURN
9310 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9311 && location == UNKNOWN_LOCATION
9312 && ((location = LOCATION_LOCUS (gimple_location (last)))
9313 != UNKNOWN_LOCATION)
9314 && !optimize)
9315 break;
9316 /* When optimizing, replace return stmts in noreturn functions
9317 with a __builtin_unreachable () call. */
9318 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9319 {
9320 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9321 gimple *new_stmt = gimple_build_call (fndecl, 0);
9322 gimple_set_location (new_stmt, gimple_location (last));
9323 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9324 gsi_replace (&gsi, new_stmt, true);
9325 remove_edge (e);
9326 }
9327 else
9328 ei_next (&ei);
9329 }
9330 if (location == UNKNOWN_LOCATION)
9331 location = cfun->function_end_locus;
9332 warning_at (location, 0, "%<noreturn%> function does return");
9333 }
9334
9335 /* If we see "return;" in some basic block, then we do reach the end
9336 without returning a value. */
9337 else if (warn_return_type > 0
9338 && !TREE_NO_WARNING (fun->decl)
9339 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9340 {
9341 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9342 {
9343 gimple *last = last_stmt (e->src);
9344 greturn *return_stmt = dyn_cast <greturn *> (last);
9345 if (return_stmt
9346 && gimple_return_retval (return_stmt) == NULL
9347 && !gimple_no_warning_p (last))
9348 {
9349 location = gimple_location (last);
9350 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9351 location = fun->function_end_locus;
9352 if (warning_at (location, OPT_Wreturn_type,
9353 "control reaches end of non-void function"))
9354 TREE_NO_WARNING (fun->decl) = 1;
9355 break;
9356 }
9357 }
9358 /* The C++ FE turns fallthrough from the end of a non-void function
9359 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9360 Recognize those too. */
9361 basic_block bb;
9362 if (!TREE_NO_WARNING (fun->decl))
9363 FOR_EACH_BB_FN (bb, fun)
9364 if (EDGE_COUNT (bb->succs) == 0)
9365 {
9366 gimple *last = last_stmt (bb);
9367 const enum built_in_function ubsan_missing_ret
9368 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9369 if (last
9370 && ((LOCATION_LOCUS (gimple_location (last))
9371 == BUILTINS_LOCATION
9372 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9373 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9374 {
9375 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9376 gsi_prev_nondebug (&gsi);
9377 gimple *prev = gsi_stmt (gsi);
9378 if (prev == NULL)
9379 location = UNKNOWN_LOCATION;
9380 else
9381 location = gimple_location (prev);
9382 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9383 location = fun->function_end_locus;
9384 if (warning_at (location, OPT_Wreturn_type,
9385 "control reaches end of non-void function"))
9386 TREE_NO_WARNING (fun->decl) = 1;
9387 break;
9388 }
9389 }
9390 }
9391 return 0;
9392 }
9393
9394 } // anon namespace
9395
9396 gimple_opt_pass *
9397 make_pass_warn_function_return (gcc::context *ctxt)
9398 {
9399 return new pass_warn_function_return (ctxt);
9400 }
9401
9402 /* Walk a gimplified function and warn about calls whose return value is
9403 ignored although the called function's type carries
9404 attribute ((warn_unused_result)). This is done before
9405 inlining, so we don't have to worry about that. */
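/* For illustration (an assumed example, not from the GCC sources):

     __attribute__((warn_unused_result)) int reserve (int n);
     void f (void) { reserve (16); }

   triggers -Wunused-result, since the naked call to reserve () discards
   a return value the attribute says must be used.  */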
9405
9406 static void
9407 do_warn_unused_result (gimple_seq seq)
9408 {
9409 tree fdecl, ftype;
9410 gimple_stmt_iterator i;
9411
9412 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9413 {
9414 gimple *g = gsi_stmt (i);
9415
9416 switch (gimple_code (g))
9417 {
9418 case GIMPLE_BIND:
9419 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9420 break;
9421 case GIMPLE_TRY:
9422 do_warn_unused_result (gimple_try_eval (g));
9423 do_warn_unused_result (gimple_try_cleanup (g));
9424 break;
9425 case GIMPLE_CATCH:
9426 do_warn_unused_result (gimple_catch_handler (
9427 as_a <gcatch *> (g)));
9428 break;
9429 case GIMPLE_EH_FILTER:
9430 do_warn_unused_result (gimple_eh_filter_failure (g));
9431 break;
9432
9433 case GIMPLE_CALL:
9434 if (gimple_call_lhs (g))
9435 break;
9436 if (gimple_call_internal_p (g))
9437 break;
9438
9439 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9440 LHS. All calls whose value is ignored should be
9441 represented like this. Look for the attribute. */
9442 fdecl = gimple_call_fndecl (g);
9443 ftype = gimple_call_fntype (g);
9444
9445 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9446 {
9447 location_t loc = gimple_location (g);
9448
9449 if (fdecl)
9450 warning_at (loc, OPT_Wunused_result,
9451 "ignoring return value of %qD "
9452 "declared with attribute %<warn_unused_result%>",
9453 fdecl);
9454 else
9455 warning_at (loc, OPT_Wunused_result,
9456 "ignoring return value of function "
9457 "declared with attribute %<warn_unused_result%>");
9458 }
9459 break;
9460
9461 default:
9462 /* Not a container, not a call, or a call whose value is used. */
9463 break;
9464 }
9465 }
9466 }
9467
9468 namespace {
9469
9470 const pass_data pass_data_warn_unused_result =
9471 {
9472 GIMPLE_PASS, /* type */
9473 "*warn_unused_result", /* name */
9474 OPTGROUP_NONE, /* optinfo_flags */
9475 TV_NONE, /* tv_id */
9476 PROP_gimple_any, /* properties_required */
9477 0, /* properties_provided */
9478 0, /* properties_destroyed */
9479 0, /* todo_flags_start */
9480 0, /* todo_flags_finish */
9481 };
9482
9483 class pass_warn_unused_result : public gimple_opt_pass
9484 {
9485 public:
9486 pass_warn_unused_result (gcc::context *ctxt)
9487 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9488 {}
9489
9490 /* opt_pass methods: */
9491 virtual bool gate (function *) { return flag_warn_unused_result; }
9492 virtual unsigned int execute (function *)
9493 {
9494 do_warn_unused_result (gimple_body (current_function_decl));
9495 return 0;
9496 }
9497
9498 }; // class pass_warn_unused_result
9499
9500 } // anon namespace
9501
9502 gimple_opt_pass *
9503 make_pass_warn_unused_result (gcc::context *ctxt)
9504 {
9505 return new pass_warn_unused_result (ctxt);
9506 }
9507
9508 /* IPA passes, compilation of earlier functions or inlining
9509 might have changed some properties, such as having marked functions
9510 nothrow, pure, const or noreturn.
9511 Remove redundant edges and basic blocks, and create new ones if necessary.
9512
9513 This pass can't be executed as a standalone pass from the pass manager,
9514 because between inlining and this fixup verify_flow_info would fail. */
9515
9516 unsigned int
9517 execute_fixup_cfg (void)
9518 {
9519 basic_block bb;
9520 gimple_stmt_iterator gsi;
9521 int todo = 0;
9522 cgraph_node *node = cgraph_node::get (current_function_decl);
9523 profile_count num = node->count;
9524 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9525 bool scale = num.initialized_p () && !(num == den);
9526
9527 if (scale)
9528 {
9529 profile_count::adjust_for_ipa_scaling (&num, &den);
9530 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9531 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9532 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9533 }
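/* For example (illustrative numbers, not from a real profile): if the
   IPA count NUM is 50 and the local entry count DEN was 100, each block
   count below is scaled by 50/100, roughly halving the whole profile. */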
9534
9535 FOR_EACH_BB_FN (bb, cfun)
9536 {
9537 if (scale)
9538 bb->count = bb->count.apply_scale (num, den);
9539 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9540 {
9541 gimple *stmt = gsi_stmt (gsi);
9542 tree decl = is_gimple_call (stmt)
9543 ? gimple_call_fndecl (stmt)
9544 : NULL;
9545 if (decl)
9546 {
9547 int flags = gimple_call_flags (stmt);
9548 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9549 {
9550 if (gimple_purge_dead_abnormal_call_edges (bb))
9551 todo |= TODO_cleanup_cfg;
9552
9553 if (gimple_in_ssa_p (cfun))
9554 {
9555 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9556 update_stmt (stmt);
9557 }
9558 }
9559
9560 if (flags & ECF_NORETURN
9561 && fixup_noreturn_call (stmt))
9562 todo |= TODO_cleanup_cfg;
9563 }
9564
9565 /* Remove stores to variables we marked write-only.
9566 Keep the access when the store has a side effect, e.g. when the
9567 source is volatile. */
9568 if (gimple_store_p (stmt)
9569 && !gimple_has_side_effects (stmt))
9570 {
9571 tree lhs = get_base_address (gimple_get_lhs (stmt));
9572
9573 if (VAR_P (lhs)
9574 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9575 && varpool_node::get (lhs)->writeonly)
9576 {
9577 unlink_stmt_vdef (stmt);
9578 gsi_remove (&gsi, true);
9579 release_defs (stmt);
9580 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9581 continue;
9582 }
9583 }
9584 /* For calls we can simply remove the LHS when it is known
9585 to be write-only. */
9586 if (is_gimple_call (stmt)
9587 && gimple_get_lhs (stmt))
9588 {
9589 tree lhs = get_base_address (gimple_get_lhs (stmt));
9590
9591 if (VAR_P (lhs)
9592 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9593 && varpool_node::get (lhs)->writeonly)
9594 {
9595 gimple_call_set_lhs (stmt, NULL);
9596 update_stmt (stmt);
9597 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9598 }
9599 }
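
          /* Illustrative example (hypothetical variable): given

               static int cache;   <- varpool marked write-only
               cache = x_1;        <- plain store: removed entirely
               cache = foo ();     <- call: only the LHS is dropped

             the value is never read, so the stores are dead.  */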

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count);
                }
            }
        }
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
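
/* Editorial sketch: the returned flags are consumed by the pass manager
   (execute_todo in passes.c), which performs the requested CFG cleanup
   and SSA update once the pass finishes, roughly:

     unsigned int todo = execute_fixup_cfg ();
     execute_todo (todo);

   This is why the function accumulates work in TODO rather than doing
   it eagerly.  */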

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
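  /* Editorial note: fixup_cfg is scheduled at several points in
     passes.def, and any pass inserted more than once must override
     clone (the default implementation aborts).  */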
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
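
/* Editorial note: e->insns is a union whose live member depends on the
   current IR, hence the current_ir_type () dispatch above; the BLOCK is
   reachable only through goto_locus, so it must be marked explicitly.  */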

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
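
/* Editorial note: this operator variant is the form the PCH writer uses
   to relocate pointers; OP is applied in place to each pointer field,
   with COOKIE passed through from the caller.  */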

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
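
/* Typical usage, mirroring the tests below (hypothetical test name):

     tree fndecl = push_fndecl ("cfg_test_foo");
     function *fun = DECL_STRUCT_FUNCTION (fndecl);
     ...create blocks and edges in FUN...
     pop_cfun ();
*/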

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple CFG of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
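  /* That is: ENTRY and EXIT, plus the three blocks created above.  */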
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.  Each BB in our simple chain
     should be dominated by the one before it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
     EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C, in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where each of A, B, C, D below has an edge
   to every other node in the subgraph, including itself).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
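  /* Worked count: with n == 4 that is 2 + 16 == 18 edges: one from
     ENTRY, one to EXIT, plus every ordered pair within the subgraph,
     self-loops included.  */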
  /* The first one is linked to ENTRY/EXIT as well as to itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
     - loop
     - nested loops
     - switch statement (a block with many out-edges)
     - something that jumps to itself
     - etc  */

#endif /* CHECKING_P */