/* Control flow functions for trees.
   Copyright (C) 2001-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
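
/* An illustrative sketch (not anything the compiler consumes): given a
   switch such as

     switch (x)
       {
       case 0:
       case 1:
         foo ();
         break;
       default:
         bar ();
         break;
       }

   the single outgoing edge reaching the block for "foo ()" maps in this
   table to a chain of two CASE_LABEL_EXPRs (case 1 -> case 0), linked
   through their CASE_CHAIN fields.  */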

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
static void lower_phi_internal_fn ();

/* Initialize the CFG data structures of function FN to the empty
   flowgraph holding just the fixed ENTRY and EXIT blocks.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

/* Likewise for the current function.  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
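
/* A minimal sketch of the result: for a function body such as

     if (a)
       b = 1;
     return b;

   build_gimple_cfg produces, besides ENTRY and EXIT, one block ending
   in the GIMPLE_COND, one block holding the assignment, and one block
   with the GIMPLE_RETURN, wired together by the EDGE_TRUE_VALUE,
   EDGE_FALSE_VALUE and fallthru edges created in make_edges below.  */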

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_unroll_kind:
          loop->unroll
            = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
          cfun->has_unroll = true;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        case annot_expr_parallel_kind:
          loop->can_be_parallel = true;
          loop->safelen = INT_MAX;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
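
/* For instance (a sketch), a loop written in the source as

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   arrives here with an IFN_ANNOTATE call of kind annot_expr_ivdep_kind
   immediately before the loop's GIMPLE_COND; the code above consumes
   that call and records the annotation by setting loop->safelen to
   INT_MAX.  */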

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_unroll_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
            case annot_expr_parallel_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
        {
          stmt = gsi_stmt (gsi);
          if (! gimple_call_internal_p (stmt, IFN_PHI))
            break;

          lhs = gimple_call_lhs (stmt);
          phi_node = create_phi_node (lhs, bb);

          /* Add arguments to the PHI node.  */
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              if (TREE_CODE (arg) == LABEL_DECL)
                pred = label_to_block (cfun, arg);
              else
                {
                  edge e = find_edge (pred, bb);
                  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
                }
            }

          gsi_remove (&gsi, true);
        }
    }
}
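
/* A sketch of the input this handles (the exact GIMPLE FE syntax may
   differ slightly): a PHI written in the textual GIMPLE front end as

     x_1 = __PHI (__BB2: 0, __BB3: x_2);

   is parsed as an IFN_PHI call whose arguments alternate between a
   LABEL_DECL naming a predecessor block and the value flowing in on
   the edge from it; the loop above turns that call into a real gphi
   once the edges exist.  */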

/* Build the CFG for the current function's body and run the initial
   CFG cleanups on it.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
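
/* For example, the C fragment

     void *p = &&lab;
     goto *p;
    lab:;

   yields a GIMPLE_GOTO whose destination is the pointer value P rather
   than a LABEL_DECL, so computed_goto_p returns true for it, while a
   plain "goto lab;" keeps its LABEL_DECL destination and returns
   false.  */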

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
         optimize away those calls, but rather turn them into
         __ubsan_handle_builtin_unreachable () or __builtin_trap ()
         later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}
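
/* For example, a block whose sequence is just

     <L1>:
     x = {CLOBBER};
     __builtin_unreachable ();

   satisfies this predicate: only labels, debug stmts and clobbers may
   appear before the trailing __builtin_unreachable () call.  */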

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
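
/* For instance, a call to a noreturn function such as abort () has
   GF_CALL_CTRL_ALTERING set here; stmt_ends_bb_p then reports it as a
   block terminator, so make_blocks_1 below ends the basic block right
   after the call.  */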


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
         stmt is before nondebug stmts.  Once stmt reaches a nondebug
         nonlabel, prev_stmt will be set to it, so that
         stmt_starts_bb_p will know to start a new block if a label is
         found.  However, if stmt was a label after debug stmts only,
         keep the label in prev_stmt even if we find further debug
         stmts, for there may be other labels after them, and they
         should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
        prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
          prev_stmt = NULL;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
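
/* A sketch of the LHS-temporary rewrite above: a returns_twice call

     x = setjmp (env);

   becomes

     tmp = setjmp (env);
     x = tmp;

   so the old value of X is still available on the abnormal edge out of
   the call and the abnormal SSA names get non-overlapping
   life-ranges.  */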

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* If this is the first label we encounter (latest in SEQ)
             before nondebug stmts, record its position.  */
          if (is_a <glabel *> (stmt))
            {
              if (gsi_end_p (label))
                label = i;
              continue;
            }

          /* Without a recorded label position to move debug stmts to,
             there's nothing to do.  */
          if (gsi_end_p (label))
            continue;

          /* Move the debug stmt at I after LABEL.  */
          if (is_gimple_debug (stmt))
            {
              gcc_assert (gimple_debug_nonbind_marker_p (stmt));
              /* As STMT is removed, I advances to the stmt after
                 STMT, so the gsi_prev in the for "increment"
                 expression gets us to the stmt we're to visit after
                 STMT.  LABEL, however, would advance to the moved
                 stmt if we passed it to gsi_move_after, so pass it a
                 copy instead, so as to keep LABEL pointing to the
                 LABEL.  */
              gimple_stmt_iterator copy = label;
              gsi_move_after (&i, &copy);
              continue;
            }

          /* There aren't any (more?) debug stmts before label, so
             there isn't anything else to move after it.  */
          label = gsi_none ();
        }
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
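
/* An illustration of the reordering above, in dump-like notation:

     # DEBUG BEGIN_STMT
     lab:
     x = 1;

   becomes

     lab:
     # DEBUG BEGIN_STMT
     x = 1;

   i.e. the nonbind marker is moved after the label it preceded; moving
   the label instead would assign it a label id and risk
   -fcompare-debug differences.  */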

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
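
/* A sketch of the computed-goto factoring done above: each block that
   ended in "goto *p;" is rewritten as

     gotovar = p;
     (falls through to the dispatcher)

   and a single new dispatcher block

     <factored_computed_goto_label>:
     goto *gotovar;

   performs the actual jump, so abnormal edges to every FORCED_LABEL
   target are needed only from the dispatcher instead of from every
   computed-goto site.  */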

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (cfun, label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as
   described in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count ().initialized_p ())
            cnt += e->count ();
          else
            all = false;
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
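
/* For example, when an entire loop sits on one source line:

     for (i = 0; i < n; i++) sum += a[i];

   the header, body and latch blocks all share that line, so they are
   given distinct discriminators, letting a sample-based profiler
   attribute counts to the right block instead of conflating them.  */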

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.
   FROM is the cached expanded location of LOCUS1.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, &locus_e,
                                     gimple_location (first)))
              || (last && same_line_p (locus, &locus_e,
                                       gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator
                  = next_discriminator_for_locus (locus_e.line);
              else
                e->dest->discriminator
                  = next_discriminator_for_locus (locus_e.line);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */
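
/* For instance, if a block begins with

     <artificial_lab>:
     user_lab:
     x = 1;

   step 1 picks USER_LAB as the block's leading label (user-defined
   labels are preferred), step 2 rewrites any goto/switch/EH references
   to ARTIFICIAL_LAB so they use USER_LAB, and step 3 removes
   ARTIFICIAL_LAB, which is now artificial and unreferenced.  */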
1566
1567 void
1568 cleanup_dead_labels (void)
1569 {
1570 basic_block bb;
1571 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1572
1573 /* Find a suitable label for each block. We use the first user-defined
1574 label if there is one, or otherwise just the first label we see. */
1575 FOR_EACH_BB_FN (bb, cfun)
1576 {
1577 gimple_stmt_iterator i;
1578
1579 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1580 {
1581 tree label;
1582 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1583
1584 if (!label_stmt)
1585 break;
1586
1587 label = gimple_label_label (label_stmt);
1588
1589 /* If we have not yet seen a label for the current block,
1590 remember this one and see if there are more labels. */
1591 if (!label_for_bb[bb->index].label)
1592 {
1593 label_for_bb[bb->index].label = label;
1594 continue;
1595 }
1596
1597 /* If we did see a label for the current block already, but it
1598 is an artificially created label, replace it if the current
1599 label is a user defined label. */
1600 if (!DECL_ARTIFICIAL (label)
1601 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1602 {
1603 label_for_bb[bb->index].label = label;
1604 break;
1605 }
1606 }
1607 }
1608
1609 /* Now redirect all jumps/branches to the selected label.
1610 First do so for each block ending in a control statement. */
1611 FOR_EACH_BB_FN (bb, cfun)
1612 {
1613 gimple *stmt = last_stmt (bb);
1614 tree label, new_label;
1615
1616 if (!stmt)
1617 continue;
1618
1619 switch (gimple_code (stmt))
1620 {
1621 case GIMPLE_COND:
1622 {
1623 gcond *cond_stmt = as_a <gcond *> (stmt);
1624 label = gimple_cond_true_label (cond_stmt);
1625 if (label)
1626 {
1627 new_label = main_block_label (label);
1628 if (new_label != label)
1629 gimple_cond_set_true_label (cond_stmt, new_label);
1630 }
1631
1632 label = gimple_cond_false_label (cond_stmt);
1633 if (label)
1634 {
1635 new_label = main_block_label (label);
1636 if (new_label != label)
1637 gimple_cond_set_false_label (cond_stmt, new_label);
1638 }
1639 }
1640 break;
1641
1642 case GIMPLE_SWITCH:
1643 {
1644 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1645 size_t i, n = gimple_switch_num_labels (switch_stmt);
1646
1647 /* Replace all destination labels. */
1648 for (i = 0; i < n; ++i)
1649 {
1650 tree case_label = gimple_switch_label (switch_stmt, i);
1651 label = CASE_LABEL (case_label);
1652 new_label = main_block_label (label);
1653 if (new_label != label)
1654 CASE_LABEL (case_label) = new_label;
1655 }
1656 break;
1657 }
1658
1659 case GIMPLE_ASM:
1660 {
1661 gasm *asm_stmt = as_a <gasm *> (stmt);
1662 int i, n = gimple_asm_nlabels (asm_stmt);
1663
1664 for (i = 0; i < n; ++i)
1665 {
1666 tree cons = gimple_asm_label_op (asm_stmt, i);
1667 tree label = main_block_label (TREE_VALUE (cons));
1668 TREE_VALUE (cons) = label;
1669 }
1670 break;
1671 }
1672
1673 /* We have to handle gotos until they're removed, and we don't
1674 remove them until after we've created the CFG edges. */
1675 case GIMPLE_GOTO:
1676 if (!computed_goto_p (stmt))
1677 {
1678 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1679 label = gimple_goto_dest (goto_stmt);
1680 new_label = main_block_label (label);
1681 if (new_label != label)
1682 gimple_goto_set_dest (goto_stmt, new_label);
1683 }
1684 break;
1685
1686 case GIMPLE_TRANSACTION:
1687 {
1688 gtransaction *txn = as_a <gtransaction *> (stmt);
1689
1690 label = gimple_transaction_label_norm (txn);
1691 if (label)
1692 {
1693 new_label = main_block_label (label);
1694 if (new_label != label)
1695 gimple_transaction_set_label_norm (txn, new_label);
1696 }
1697
1698 label = gimple_transaction_label_uninst (txn);
1699 if (label)
1700 {
1701 new_label = main_block_label (label);
1702 if (new_label != label)
1703 gimple_transaction_set_label_uninst (txn, new_label);
1704 }
1705
1706 label = gimple_transaction_label_over (txn);
1707 if (label)
1708 {
1709 new_label = main_block_label (label);
1710 if (new_label != label)
1711 gimple_transaction_set_label_over (txn, new_label);
1712 }
1713 }
1714 break;
1715
1716 default:
1717 break;
1718 }
1719 }
1720
1721 /* Do the same for the exception region tree labels. */
1722 cleanup_dead_labels_eh ();
1723
1724 /* Finally, purge dead labels. All user-defined labels and labels that
1725 can be the target of non-local gotos and labels which have their
1726 address taken are preserved. */
1727 FOR_EACH_BB_FN (bb, cfun)
1728 {
1729 gimple_stmt_iterator i;
1730 tree label_for_this_bb = label_for_bb[bb->index].label;
1731
1732 if (!label_for_this_bb)
1733 continue;
1734
1735 /* If the main label of the block is unused, we may still remove it. */
1736 if (!label_for_bb[bb->index].used)
1737 label_for_this_bb = NULL;
1738
1739 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1740 {
1741 tree label;
1742 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1743
1744 if (!label_stmt)
1745 break;
1746
1747 label = gimple_label_label (label_stmt);
1748
1749 if (label == label_for_this_bb
1750 || !DECL_ARTIFICIAL (label)
1751 || DECL_NONLOCAL (label)
1752 || FORCED_LABEL (label))
1753 gsi_next (&i);
1754 else
1755 gsi_remove (&i, true);
1756 }
1757 }
1758
1759 free (label_for_bb);
1760 }
1761
1762 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1763 the ones jumping to the same label.
1764 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1765
1766 bool
1767 group_case_labels_stmt (gswitch *stmt)
1768 {
1769 int old_size = gimple_switch_num_labels (stmt);
1770 int i, next_index, new_size;
1771 basic_block default_bb = NULL;
1772
1773 default_bb = gimple_switch_default_bb (cfun, stmt);
1774
1775 /* Look for possible opportunities to merge cases. */
1776 new_size = i = 1;
1777 while (i < old_size)
1778 {
1779 tree base_case, base_high;
1780 basic_block base_bb;
1781
1782 base_case = gimple_switch_label (stmt, i);
1783
1784 gcc_assert (base_case);
1785 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1786
1787 /* Discard cases that have the same destination as the default case or
1788 whose destiniation blocks have already been removed as unreachable. */
1789 if (base_bb == NULL || base_bb == default_bb)
1790 {
1791 i++;
1792 continue;
1793 }
1794
1795 base_high = CASE_HIGH (base_case)
1796 ? CASE_HIGH (base_case)
1797 : CASE_LOW (base_case);
1798 next_index = i + 1;
1799
1800 /* Try to merge case labels. Break out when we reach the end
1801 of the label vector or when we cannot merge the next case
1802 label with the current one. */
1803 while (next_index < old_size)
1804 {
1805 tree merge_case = gimple_switch_label (stmt, next_index);
1806 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1807 wide_int bhp1 = wi::to_wide (base_high) + 1;
1808
1809 /* Merge the cases if they jump to the same place,
1810 and their ranges are consecutive. */
1811 if (merge_bb == base_bb
1812 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1813 {
1814 base_high = CASE_HIGH (merge_case) ?
1815 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1816 CASE_HIGH (base_case) = base_high;
1817 next_index++;
1818 }
1819 else
1820 break;
1821 }
1822
1823 /* Discard cases that have an unreachable destination block. */
1824 if (EDGE_COUNT (base_bb->succs) == 0
1825 && gimple_seq_unreachable_p (bb_seq (base_bb))
1826 /* Don't optimize this if __builtin_unreachable () is the
1827 implicitly added one by the C++ FE too early, before
1828 -Wreturn-type can be diagnosed. We'll optimize it later
1829 during switchconv pass or any other cfg cleanup. */
1830 && (gimple_in_ssa_p (cfun)
1831 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1832 != BUILTINS_LOCATION)))
1833 {
1834 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1835 if (base_edge != NULL)
1836 remove_edge_and_dominated_blocks (base_edge);
1837 i = next_index;
1838 continue;
1839 }
1840
1841 if (new_size < i)
1842 gimple_switch_set_label (stmt, new_size,
1843 gimple_switch_label (stmt, i));
1844 i = next_index;
1845 new_size++;
1846 }
1847
1848 gcc_assert (new_size <= old_size);
1849
1850 if (new_size < old_size)
1851 gimple_switch_set_num_labels (stmt, new_size);
1852
1853 return new_size < old_size;
1854 }
1855
1856 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1857 and scan the sorted vector of cases. Combine the ones jumping to the
1858 same label. */
1859
1860 bool
1861 group_case_labels (void)
1862 {
1863 basic_block bb;
1864 bool changed = false;
1865
1866 FOR_EACH_BB_FN (bb, cfun)
1867 {
1868 gimple *stmt = last_stmt (bb);
1869 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1870 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1871 }
1872
1873 return changed;
1874 }
1875
1876 /* Checks whether we can merge block B into block A. */
1877
1878 static bool
1879 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1880 {
1881 gimple *stmt;
1882
1883 if (!single_succ_p (a))
1884 return false;
1885
1886 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1887 return false;
1888
1889 if (single_succ (a) != b)
1890 return false;
1891
1892 if (!single_pred_p (b))
1893 return false;
1894
1895 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1896 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1897 return false;
1898
1899 /* If A ends with a statement causing exceptions or something similar, we
1900 cannot merge the blocks. */
1901 stmt = last_stmt (a);
1902 if (stmt && stmt_ends_bb_p (stmt))
1903 return false;
1904
1905 /* Do not allow a block with only a non-local label to be merged. */
1906 if (stmt)
1907 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1908 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1909 return false;
1910
1911 /* Examine the labels at the beginning of B. */
1912 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1913 gsi_next (&gsi))
1914 {
1915 tree lab;
1916 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1917 if (!label_stmt)
1918 break;
1919 lab = gimple_label_label (label_stmt);
1920
1921 /* Do not remove user-forced labels or, at -O0, any user labels. */
1922 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1923 return false;
1924 }
1925
1926 /* Protect simple loop latches. We only want to avoid merging
1927 the latch with the loop header or with a block in another
1928 loop in this case. */
1929 if (current_loops
1930 && b->loop_father->latch == b
1931 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1932 && (b->loop_father->header == a
1933 || b->loop_father != a->loop_father))
1934 return false;
1935
1936 /* It must be possible to eliminate all phi nodes in B. If ssa form
1937 is not up-to-date and a name-mapping is registered, we cannot eliminate
1938 any phis. Symbols marked for renaming are never a problem though. */
1939 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1940 gsi_next (&gsi))
1941 {
1942 gphi *phi = gsi.phi ();
1943 /* Technically only new names matter. */
1944 if (name_registered_for_update_p (PHI_RESULT (phi)))
1945 return false;
1946 }
1947
1948 /* When not optimizing, don't merge if we'd lose goto_locus. */
1949 if (!optimize
1950 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1951 {
1952 location_t goto_locus = single_succ_edge (a)->goto_locus;
1953 gimple_stmt_iterator prev, next;
1954 prev = gsi_last_nondebug_bb (a);
1955 next = gsi_after_labels (b);
1956 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1957 gsi_next_nondebug (&next);
1958 if ((gsi_end_p (prev)
1959 || gimple_location (gsi_stmt (prev)) != goto_locus)
1960 && (gsi_end_p (next)
1961 || gimple_location (gsi_stmt (next)) != goto_locus))
1962 return false;
1963 }
1964
1965 return true;
1966 }
1967
1968 /* Replaces all uses of NAME by VAL. */
1969
1970 void
1971 replace_uses_by (tree name, tree val)
1972 {
1973 imm_use_iterator imm_iter;
1974 use_operand_p use;
1975 gimple *stmt;
1976 edge e;
1977
1978 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1979 {
1980 /* Mark the block if we change the last stmt in it. */
1981 if (cfgcleanup_altered_bbs
1982 && stmt_ends_bb_p (stmt))
1983 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1984
1985 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1986 {
1987 replace_exp (use, val);
1988
1989 if (gimple_code (stmt) == GIMPLE_PHI)
1990 {
1991 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1992 PHI_ARG_INDEX_FROM_USE (use));
1993 if (e->flags & EDGE_ABNORMAL
1994 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1995 {
1996 /* This can only occur for virtual operands, since
1997 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1998 would prevent replacement. */
1999 gcc_checking_assert (virtual_operand_p (name));
2000 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2001 }
2002 }
2003 }
2004
2005 if (gimple_code (stmt) != GIMPLE_PHI)
2006 {
2007 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2008 gimple *orig_stmt = stmt;
2009 size_t i;
2010
2011 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2012 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2013 only change something from non-invariant to invariant, and only
2014 when propagating constants. */
2015 if (is_gimple_min_invariant (val))
2016 for (i = 0; i < gimple_num_ops (stmt); i++)
2017 {
2018 tree op = gimple_op (stmt, i);
2019 /* Operands may be empty here. For example, the labels
2020 of a GIMPLE_COND are nulled out following the creation
2021 of the corresponding CFG edges. */
2022 if (op && TREE_CODE (op) == ADDR_EXPR)
2023 recompute_tree_invariant_for_addr_expr (op);
2024 }
2025
2026 if (fold_stmt (&gsi))
2027 stmt = gsi_stmt (gsi);
2028
2029 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2030 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2031
2032 update_stmt (stmt);
2033 }
2034 }
2035
2036 gcc_checking_assert (has_zero_uses (name));
2037
2038 /* Also update the trees stored in loop structures. */
2039 if (current_loops)
2040 {
2041 struct loop *loop;
2042
2043 FOR_EACH_LOOP (loop, 0)
2044 {
2045 substitute_in_loop_info (loop, name, val);
2046 }
2047 }
2048 }
2049
2050 /* Merge block B into block A. */
2051
2052 static void
2053 gimple_merge_blocks (basic_block a, basic_block b)
2054 {
2055 gimple_stmt_iterator last, gsi;
2056 gphi_iterator psi;
2057
2058 if (dump_file)
2059 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2060
2061 /* Remove all single-valued PHI nodes from block B of the form
2062 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
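  /* E.g. a degenerate "x_2 = PHI <x_1(A)>" in B is removed and every use
     of x_2 is rewritten to use x_1 directly (an illustrative sketch;
     x_1 and x_2 are hypothetical SSA names).  */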
2063 gsi = gsi_last_bb (a);
2064 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2065 {
2066 gimple *phi = gsi_stmt (psi);
2067 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2068 gimple *copy;
2069 bool may_replace_uses = (virtual_operand_p (def)
2070 || may_propagate_copy (def, use));
2071
2072 /* If we maintain loop-closed SSA form, do not propagate arguments
2073 of loop exit phi nodes. */
2074 if (current_loops
2075 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2076 && !virtual_operand_p (def)
2077 && TREE_CODE (use) == SSA_NAME
2078 && a->loop_father != b->loop_father)
2079 may_replace_uses = false;
2080
2081 if (!may_replace_uses)
2082 {
2083 gcc_assert (!virtual_operand_p (def));
2084
2085 /* Note that just emitting the copies is fine -- there is no problem
2086 with ordering of phi nodes. This is because A is the single
2087 predecessor of B, therefore results of the phi nodes cannot
2088 appear as arguments of the phi nodes. */
2089 copy = gimple_build_assign (def, use);
2090 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2091 remove_phi_node (&psi, false);
2092 }
2093 else
2094 {
2095 /* If we deal with a PHI for virtual operands, we can simply
2096 propagate these without fussing with folding or updating
2097 the stmt. */
2098 if (virtual_operand_p (def))
2099 {
2100 imm_use_iterator iter;
2101 use_operand_p use_p;
2102 gimple *stmt;
2103
2104 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2105 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2106 SET_USE (use_p, use);
2107
2108 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2109 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2110 }
2111 else
2112 replace_uses_by (def, use);
2113
2114 remove_phi_node (&psi, true);
2115 }
2116 }
2117
2118 /* Ensure that B follows A. */
2119 move_block_after (b, a);
2120
2121 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2122 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2123
2124 /* Remove labels from B and set gimple_bb to A for other statements. */
2125 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2126 {
2127 gimple *stmt = gsi_stmt (gsi);
2128 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2129 {
2130 tree label = gimple_label_label (label_stmt);
2131 int lp_nr;
2132
2133 gsi_remove (&gsi, false);
2134
2135 /* Now that we can thread computed gotos, we might have
2136 a situation where we have a forced label in block B.
2137 However, the label at the start of block B might still be
2138 used in other ways (think about the runtime checking for
2139 Fortran assigned gotos). So we cannot just delete the
2140 label. Instead we move the label to the start of block A. */
2141 if (FORCED_LABEL (label))
2142 {
2143 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2144 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2145 }
2146 /* Other user labels are kept around in the form of a debug stmt. */
2147 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2148 {
2149 gimple *dbg = gimple_build_debug_bind (label,
2150 integer_zero_node,
2151 stmt);
2152 gimple_debug_bind_reset_value (dbg);
2153 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2154 }
2155
2156 lp_nr = EH_LANDING_PAD_NR (label);
2157 if (lp_nr)
2158 {
2159 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2160 lp->post_landing_pad = NULL;
2161 }
2162 }
2163 else
2164 {
2165 gimple_set_bb (stmt, a);
2166 gsi_next (&gsi);
2167 }
2168 }
2169
2170 /* When merging two BBs, if their counts are different, the larger count
2171 is selected as the new bb count. This is to handle inconsistent
2172 profiles. */
2173 if (a->loop_father == b->loop_father)
2174 {
2175 a->count = a->count.merge (b->count);
2176 }
2177
2178 /* Merge the sequences. */
2179 last = gsi_last_bb (a);
2180 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2181 set_bb_seq (b, NULL);
2182
2183 if (cfgcleanup_altered_bbs)
2184 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2185 }
2186
2187
2188 /* Return the one of the two successors of BB that is not reachable by a
2189 complex edge, if there is one. Otherwise, return BB. We use
2190 this in optimizations that use post-dominators for their heuristics,
2191 to catch the cases in C++ where function calls are involved. */
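/* E.g. a block ending in a call that may throw has a normal successor
   and an EH successor; the EH edge is EDGE_COMPLEX, so this returns
   the normal destination (an illustrative case).  */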
2192
2193 basic_block
2194 single_noncomplex_succ (basic_block bb)
2195 {
2196 edge e0, e1;
2197 if (EDGE_COUNT (bb->succs) != 2)
2198 return bb;
2199
2200 e0 = EDGE_SUCC (bb, 0);
2201 e1 = EDGE_SUCC (bb, 1);
2202 if (e0->flags & EDGE_COMPLEX)
2203 return e1->dest;
2204 if (e1->flags & EDGE_COMPLEX)
2205 return e0->dest;
2206
2207 return bb;
2208 }
2209
2210 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2211
2212 void
2213 notice_special_calls (gcall *call)
2214 {
2215 int flags = gimple_call_flags (call);
2216
2217 if (flags & ECF_MAY_BE_ALLOCA)
2218 cfun->calls_alloca = true;
2219 if (flags & ECF_RETURNS_TWICE)
2220 cfun->calls_setjmp = true;
2221 }
2222
2223
2224 /* Clear flags set by notice_special_calls. Used by dead code removal
2225 to update the flags. */
2226
2227 void
2228 clear_special_calls (void)
2229 {
2230 cfun->calls_alloca = false;
2231 cfun->calls_setjmp = false;
2232 }
2233
2234 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2235
2236 static void
2237 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2238 {
2239 /* Since this block is no longer reachable, we can just delete all
2240 of its PHI nodes. */
2241 remove_phi_nodes (bb);
2242
2243 /* Remove edges to BB's successors. */
2244 while (EDGE_COUNT (bb->succs) > 0)
2245 remove_edge (EDGE_SUCC (bb, 0));
2246 }
2247
2248
2249 /* Remove statements of basic block BB. */
2250
2251 static void
2252 remove_bb (basic_block bb)
2253 {
2254 gimple_stmt_iterator i;
2255
2256 if (dump_file)
2257 {
2258 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2259 if (dump_flags & TDF_DETAILS)
2260 {
2261 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2262 fprintf (dump_file, "\n");
2263 }
2264 }
2265
2266 if (current_loops)
2267 {
2268 struct loop *loop = bb->loop_father;
2269
2270 /* If a loop gets removed, clean up the information associated
2271 with it. */
2272 if (loop->latch == bb
2273 || loop->header == bb)
2274 free_numbers_of_iterations_estimates (loop);
2275 }
2276
2277 /* Remove all the instructions in the block. */
2278 if (bb_seq (bb) != NULL)
2279 {
2280 /* Walk backwards so as to get a chance to substitute all
2281 released DEFs into debug stmts. See
2282 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2283 details. */
2284 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2285 {
2286 gimple *stmt = gsi_stmt (i);
2287 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2288 if (label_stmt
2289 && (FORCED_LABEL (gimple_label_label (label_stmt))
2290 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2291 {
2292 basic_block new_bb;
2293 gimple_stmt_iterator new_gsi;
2294
2295 /* A non-reachable non-local label may still be referenced.
2296 But it no longer needs to carry the extra semantics of
2297 non-locality. */
2298 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2299 {
2300 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2301 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2302 }
2303
2304 new_bb = bb->prev_bb;
2305 /* Don't move any labels into ENTRY block. */
2306 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2307 {
2308 new_bb = single_succ (new_bb);
2309 gcc_assert (new_bb != bb);
2310 }
2311 new_gsi = gsi_start_bb (new_bb);
2312 gsi_remove (&i, false);
2313 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2314 }
2315 else
2316 {
2317 /* Release SSA definitions. */
2318 release_defs (stmt);
2319 gsi_remove (&i, true);
2320 }
2321
2322 if (gsi_end_p (i))
2323 i = gsi_last_bb (bb);
2324 else
2325 gsi_prev (&i);
2326 }
2327 }
2328
2329 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2330 bb->il.gimple.seq = NULL;
2331 bb->il.gimple.phi_nodes = NULL;
2332 }
2333
2334
2335 /* Given a basic block BB and a value VAL for use in the final statement
2336 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2337 the edge that will be taken out of the block.
2338 If VAL is NULL_TREE, then the current value of the final statement's
2339 predicate or index is used.
2340 If the value does not match a unique edge, NULL is returned. */
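/* A hypothetical usage sketch (not part of this file): if BB ends in
   "if (x_1 > 0)" and a pass has proven the predicate true, then

     edge e = find_taken_edge (bb, integer_one_node);

   returns the true edge, and the pass may then remove the other
   outgoing edge as dead.  */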
2341
2342 edge
2343 find_taken_edge (basic_block bb, tree val)
2344 {
2345 gimple *stmt;
2346
2347 stmt = last_stmt (bb);
2348
2349 /* Handle ENTRY and EXIT. */
2350 if (!stmt)
2351 return NULL;
2352
2353 if (gimple_code (stmt) == GIMPLE_COND)
2354 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2355
2356 if (gimple_code (stmt) == GIMPLE_SWITCH)
2357 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2358
2359 if (computed_goto_p (stmt))
2360 {
2361 /* Only optimize if the argument is a label; if the argument is
2362 not a label then we cannot construct a proper CFG.
2363
2364 It may be the case that we only need to allow the LABEL_REF to
2365 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2366 appear inside a LABEL_EXPR just to be safe. */
2367 if (val
2368 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2369 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2370 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2371 }
2372
2373 /* Otherwise we only know the taken successor edge if it's unique. */
2374 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2375 }
2376
2377 /* Given a constant value VAL and the basic block BB that ends in a computed
2378 GOTO statement, determine which of the outgoing edges will be taken out of the
2379 block. Return NULL if either edge may be taken. */
2380
2381 static edge
2382 find_taken_edge_computed_goto (basic_block bb, tree val)
2383 {
2384 basic_block dest;
2385 edge e = NULL;
2386
2387 dest = label_to_block (cfun, val);
2388 if (dest)
2389 e = find_edge (bb, dest);
2390
2391 /* It's possible for find_edge to return NULL here on invalid code
2392 that abuses the labels-as-values extension (e.g. code that attempts to
2393 jump *between* functions via stored labels-as-values; PR 84136).
2394 If so, then we simply return that NULL for the edge.
2395 We don't currently have a way of detecting such invalid code, so we
2396 can't assert that it was the case when a NULL edge occurs here. */
2397
2398 return e;
2399 }
2400
2401 /* Given COND_STMT and a constant value VAL for use as the predicate,
2402 determine which of the two edges will be taken out of
2403 the statement's block. Return NULL if either edge may be taken.
2404 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2405 is used. */
2406
2407 static edge
2408 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2409 {
2410 edge true_edge, false_edge;
2411
2412 if (val == NULL_TREE)
2413 {
2414 /* Use the current value of the predicate. */
2415 if (gimple_cond_true_p (cond_stmt))
2416 val = integer_one_node;
2417 else if (gimple_cond_false_p (cond_stmt))
2418 val = integer_zero_node;
2419 else
2420 return NULL;
2421 }
2422 else if (TREE_CODE (val) != INTEGER_CST)
2423 return NULL;
2424
2425 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2426 &true_edge, &false_edge);
2427
2428 return (integer_zerop (val) ? false_edge : true_edge);
2429 }
2430
2431 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2432 which edge will be taken out of the statement's block. Return NULL if any
2433 edge may be taken.
2434 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2435 is used. */
2436
2437 edge
2438 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2439 {
2440 basic_block dest_bb;
2441 edge e;
2442 tree taken_case;
2443
2444 if (gimple_switch_num_labels (switch_stmt) == 1)
2445 taken_case = gimple_switch_default_label (switch_stmt);
2446 else
2447 {
2448 if (val == NULL_TREE)
2449 val = gimple_switch_index (switch_stmt);
2450 if (TREE_CODE (val) != INTEGER_CST)
2451 return NULL;
2452 else
2453 taken_case = find_case_label_for_value (switch_stmt, val);
2454 }
2455 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2456
2457 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2458 gcc_assert (e);
2459 return e;
2460 }
2461
2462
2463 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2464 We can make optimal use here of the fact that the case labels are
2465 sorted: We can do a binary search for a case matching VAL. */
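/* For example, with a hypothetical case vector (default at index 0)

     index:     1   2   3    4
     CASE_LOW:  2   5   9   14   (index 3 also has CASE_HIGH == 12)

   a search for VAL == 10 narrows [low, high) to i == 3, where
   CASE_LOW <= VAL <= CASE_HIGH, so that range case is returned;
   VAL == 13 matches no case and falls back to the default.  */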
2466
2467 tree
2468 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2469 {
2470 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2471 tree default_case = gimple_switch_default_label (switch_stmt);
2472
2473 for (low = 0, high = n; high - low > 1; )
2474 {
2475 size_t i = (high + low) / 2;
2476 tree t = gimple_switch_label (switch_stmt, i);
2477 int cmp;
2478
2479 /* Cache the result of comparing CASE_LOW and val. */
2480 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2481
2482 if (cmp > 0)
2483 high = i;
2484 else
2485 low = i;
2486
2487 if (CASE_HIGH (t) == NULL)
2488 {
2489 /* A single-valued case label. */
2490 if (cmp == 0)
2491 return t;
2492 }
2493 else
2494 {
2495 /* A case range. We can only handle integer ranges. */
2496 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2497 return t;
2498 }
2499 }
2500
2501 return default_case;
2502 }
2503
2504
2505 /* Dump a basic block on stderr. */
2506
2507 void
2508 gimple_debug_bb (basic_block bb)
2509 {
2510 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2511 }
2512
2513
2514 /* Dump basic block with index N on stderr. */
2515
2516 basic_block
2517 gimple_debug_bb_n (int n)
2518 {
2519 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2520 return BASIC_BLOCK_FOR_FN (cfun, n);
2521 }
2522
2523
2524 /* Dump the CFG on stderr.
2525
2526 FLAGS are the same as those used by the tree dumping functions
2527 (see TDF_* in dumpfile.h). */
2528
2529 void
2530 gimple_debug_cfg (dump_flags_t flags)
2531 {
2532 gimple_dump_cfg (stderr, flags);
2533 }
2534
2535
2536 /* Dump the program showing basic block boundaries on the given FILE.
2537
2538 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2539 tree.h). */
2540
2541 void
2542 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2543 {
2544 if (flags & TDF_DETAILS)
2545 {
2546 dump_function_header (file, current_function_decl, flags);
2547 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2548 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2549 last_basic_block_for_fn (cfun));
2550
2551 brief_dump_cfg (file, flags);
2552 fprintf (file, "\n");
2553 }
2554
2555 if (flags & TDF_STATS)
2556 dump_cfg_stats (file);
2557
2558 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2559 }
2560
2561
2562 /* Dump CFG statistics on FILE. */
2563
2564 void
2565 dump_cfg_stats (FILE *file)
2566 {
2567 static long max_num_merged_labels = 0;
2568 unsigned long size, total = 0;
2569 long num_edges;
2570 basic_block bb;
2571 const char * const fmt_str = "%-30s%-13s%12s\n";
2572 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2573 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2574 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2575 const char *funcname = current_function_name ();
2576
2577 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2578
2579 fprintf (file, "---------------------------------------------------------\n");
2580 fprintf (file, fmt_str, "", " Number of ", "Memory");
2581 fprintf (file, fmt_str, "", " instances ", "used ");
2582 fprintf (file, "---------------------------------------------------------\n");
2583
2584 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2585 total += size;
2586 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2587 SIZE_AMOUNT (size));
2588
2589 num_edges = 0;
2590 FOR_EACH_BB_FN (bb, cfun)
2591 num_edges += EDGE_COUNT (bb->succs);
2592 size = num_edges * sizeof (struct edge_def);
2593 total += size;
2594 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2595
2596 fprintf (file, "---------------------------------------------------------\n");
2597 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2598 SIZE_AMOUNT (total));
2599 fprintf (file, "---------------------------------------------------------\n");
2600 fprintf (file, "\n");
2601
2602 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2603 max_num_merged_labels = cfg_stats.num_merged_labels;
2604
2605 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2606 cfg_stats.num_merged_labels, max_num_merged_labels);
2607
2608 fprintf (file, "\n");
2609 }
2610
2611
2612 /* Dump CFG statistics on stderr. Keep extern so that it's always
2613 linked in the final executable. */
2614
2615 DEBUG_FUNCTION void
2616 debug_cfg_stats (void)
2617 {
2618 dump_cfg_stats (stderr);
2619 }
2620
2621 /*---------------------------------------------------------------------------
2622 Miscellaneous helpers
2623 ---------------------------------------------------------------------------*/
2624
2625 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2626 flow. Transfers of control flow associated with EH are excluded. */
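/* E.g. in a function containing "if (setjmp (env)) ..." (a hypothetical
   example), any later call with side effects that is not known to be
   leaf may return control to the setjmp receiver via longjmp, so such
   a call needs an abnormal edge.  */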
2627
2628 static bool
2629 call_can_make_abnormal_goto (gimple *t)
2630 {
2631 /* If the function has no non-local labels, then a call cannot make an
2632 abnormal transfer of control. */
2633 if (!cfun->has_nonlocal_label
2634 && !cfun->calls_setjmp)
2635 return false;
2636
2637 /* Likewise if the call has no side effects. */
2638 if (!gimple_has_side_effects (t))
2639 return false;
2640
2641 /* Likewise if the called function is leaf. */
2642 if (gimple_call_flags (t) & ECF_LEAF)
2643 return false;
2644
2645 return true;
2646 }
2647
2648
2649 /* Return true if T can make an abnormal transfer of control flow.
2650 Transfers of control flow associated with EH are excluded. */
2651
2652 bool
2653 stmt_can_make_abnormal_goto (gimple *t)
2654 {
2655 if (computed_goto_p (t))
2656 return true;
2657 if (is_gimple_call (t))
2658 return call_can_make_abnormal_goto (t);
2659 return false;
2660 }
2661
2662
2663 /* Return true if T represents a stmt that always transfers control. */
2664
2665 bool
2666 is_ctrl_stmt (gimple *t)
2667 {
2668 switch (gimple_code (t))
2669 {
2670 case GIMPLE_COND:
2671 case GIMPLE_SWITCH:
2672 case GIMPLE_GOTO:
2673 case GIMPLE_RETURN:
2674 case GIMPLE_RESX:
2675 return true;
2676 default:
2677 return false;
2678 }
2679 }
2680
2681
2682 /* Return true if T is a statement that may alter the flow of control
2683 (e.g., a call to a non-returning function). */
2684
2685 bool
2686 is_ctrl_altering_stmt (gimple *t)
2687 {
2688 gcc_assert (t);
2689
2690 switch (gimple_code (t))
2691 {
2692 case GIMPLE_CALL:
2693 /* Per stmt call flag indicates whether the call could alter
2694 control flow. */
2695 if (gimple_call_ctrl_altering_p (t))
2696 return true;
2697 break;
2698
2699 case GIMPLE_EH_DISPATCH:
2700 /* EH_DISPATCH branches to the individual catch handlers at
2701 this level of a try or allowed-exceptions region. It can
2702 fallthru to the next statement as well. */
2703 return true;
2704
2705 case GIMPLE_ASM:
2706 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2707 return true;
2708 break;
2709
2710 CASE_GIMPLE_OMP:
2711 /* OpenMP directives alter control flow. */
2712 return true;
2713
2714 case GIMPLE_TRANSACTION:
2715 /* A transaction start alters control flow. */
2716 return true;
2717
2718 default:
2719 break;
2720 }
2721
2722 /* If a statement can throw, it alters control flow. */
2723 return stmt_can_throw_internal (cfun, t);
2724 }
2725
2726
2727 /* Return true if T is a simple local goto. */
2728
2729 bool
2730 simple_goto_p (gimple *t)
2731 {
2732 return (gimple_code (t) == GIMPLE_GOTO
2733 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2734 }
2735
2736
2737 /* Return true if STMT should start a new basic block. PREV_STMT is
2738 the statement preceding STMT. It is used when STMT is a label or a
2739 case label. Labels should only start a new basic block if their
2740 previous statement wasn't a label. Otherwise, a sequence of labels
2741 would generate unnecessary basic blocks that only contain a single
2742 label. */
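/* E.g. in the hypothetical sequence

     L1:
     L2:
     x = 1;

   only L1 starts a basic block; L2 answers false here and is counted
   in cfg_stats.num_merged_labels.  */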
2743
2744 static inline bool
2745 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2746 {
2747 if (stmt == NULL)
2748 return false;
2749
2750 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2751 any nondebug stmts in the block. We don't want to start another
2752 block in this case: the debug stmt will already have started the
2753 one STMT would start if we weren't outputting debug stmts. */
2754 if (prev_stmt && is_gimple_debug (prev_stmt))
2755 return false;
2756
2757 /* Labels start a new basic block only if the preceding statement
2758 wasn't a label of the same type. This prevents the creation of
2759 consecutive blocks that have nothing but a single label. */
2760 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2761 {
2762 /* Nonlocal and computed GOTO targets always start a new block. */
2763 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2764 || FORCED_LABEL (gimple_label_label (label_stmt)))
2765 return true;
2766
2767 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2768 {
2769 if (DECL_NONLOCAL (gimple_label_label (
2770 as_a <glabel *> (prev_stmt))))
2771 return true;
2772
2773 cfg_stats.num_merged_labels++;
2774 return false;
2775 }
2776 else
2777 return true;
2778 }
2779 else if (gimple_code (stmt) == GIMPLE_CALL)
2780 {
2781 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2782 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2783 start a new block. */
2784 return true;
2785 if (gimple_call_internal_p (stmt, IFN_PHI)
2786 && prev_stmt
2787 && gimple_code (prev_stmt) != GIMPLE_LABEL
2788 && (gimple_code (prev_stmt) != GIMPLE_CALL
2789 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2790 /* PHI nodes start a new block unless preceded by a label
2791 or another PHI. */
2792 return true;
2793 }
2794
2795 return false;
2796 }
2797
2798
2799 /* Return true if T should end a basic block. */
2800
2801 bool
2802 stmt_ends_bb_p (gimple *t)
2803 {
2804 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2805 }
2806
2807 /* Remove block annotations and other data structures. */
2808
2809 void
2810 delete_tree_cfg_annotations (struct function *fn)
2811 {
2812 vec_free (label_to_block_map_for_fn (fn));
2813 }
2814
2815 /* Return the virtual phi in BB. */
2816
2817 gphi *
2818 get_virtual_phi (basic_block bb)
2819 {
2820 for (gphi_iterator gsi = gsi_start_phis (bb);
2821 !gsi_end_p (gsi);
2822 gsi_next (&gsi))
2823 {
2824 gphi *phi = gsi.phi ();
2825
2826 if (virtual_operand_p (PHI_RESULT (phi)))
2827 return phi;
2828 }
2829
2830 return NULL;
2831 }
2832
2833 /* Return the first statement in basic block BB. */
2834
2835 gimple *
2836 first_stmt (basic_block bb)
2837 {
2838 gimple_stmt_iterator i = gsi_start_bb (bb);
2839 gimple *stmt = NULL;
2840
2841 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2842 {
2843 gsi_next (&i);
2844 stmt = NULL;
2845 }
2846 return stmt;
2847 }
2848
2849 /* Return the first non-label statement in basic block BB. */
2850
2851 static gimple *
2852 first_non_label_stmt (basic_block bb)
2853 {
2854 gimple_stmt_iterator i = gsi_start_bb (bb);
2855 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2856 gsi_next (&i);
2857 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2858 }
2859
2860 /* Return the last statement in basic block BB. */
2861
2862 gimple *
2863 last_stmt (basic_block bb)
2864 {
2865 gimple_stmt_iterator i = gsi_last_bb (bb);
2866 gimple *stmt = NULL;
2867
2868 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2869 {
2870 gsi_prev (&i);
2871 stmt = NULL;
2872 }
2873 return stmt;
2874 }
2875
2876 /* Return the last statement of an otherwise empty block. Return NULL
2877 if the block is totally empty, or if it contains more than one
2878 statement. */
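/* E.g. for a block containing just "L1: return;" this returns the
   GIMPLE_RETURN; for a block with two executable statements it
   returns NULL (illustrative cases).  */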
2879
2880 gimple *
2881 last_and_only_stmt (basic_block bb)
2882 {
2883 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2884 gimple *last, *prev;
2885
2886 if (gsi_end_p (i))
2887 return NULL;
2888
2889 last = gsi_stmt (i);
2890 gsi_prev_nondebug (&i);
2891 if (gsi_end_p (i))
2892 return last;
2893
2894 /* Empty statements should no longer appear in the instruction stream.
2895 Everything that might have appeared before should be deleted by
2896 remove_useless_stmts, and the optimizers should just gsi_remove
2897 instead of smashing with build_empty_stmt.
2898
2899 Thus the only thing that should appear here in a block containing
2900 one executable statement is a label. */
2901 prev = gsi_stmt (i);
2902 if (gimple_code (prev) == GIMPLE_LABEL)
2903 return last;
2904 else
2905 return NULL;
2906 }
2907
2908 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2909
2910 static void
2911 reinstall_phi_args (edge new_edge, edge old_edge)
2912 {
2913 edge_var_map *vm;
2914 int i;
2915 gphi_iterator phis;
2916
2917 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2918 if (!v)
2919 return;
2920
2921 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2922 v->iterate (i, &vm) && !gsi_end_p (phis);
2923 i++, gsi_next (&phis))
2924 {
2925 gphi *phi = phis.phi ();
2926 tree result = redirect_edge_var_map_result (vm);
2927 tree arg = redirect_edge_var_map_def (vm);
2928
2929 gcc_assert (result == gimple_phi_result (phi));
2930
2931 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2932 }
2933
2934 redirect_edge_var_map_clear (old_edge);
2935 }
2936
2937 /* Returns the basic block after which the new basic block created
2938 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2939 near its "logical" location. This is of most help to humans looking
2940 at debugging dumps. */
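/* E.g. if DEST's previous block has a normal (non-complex) edge into
   DEST, the new block is placed after EDGE_IN->src; otherwise it is
   placed right before DEST, after DEST->prev_bb (a restatement of the
   code below).  */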
2941
2942 basic_block
2943 split_edge_bb_loc (edge edge_in)
2944 {
2945 basic_block dest = edge_in->dest;
2946 basic_block dest_prev = dest->prev_bb;
2947
2948 if (dest_prev)
2949 {
2950 edge e = find_edge (dest_prev, dest);
2951 if (e && !(e->flags & EDGE_COMPLEX))
2952 return edge_in->src;
2953 }
2954 return dest_prev;
2955 }
2956
2957 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2958 Abort on abnormal edges. */
2959
2960 static basic_block
2961 gimple_split_edge (edge edge_in)
2962 {
2963 basic_block new_bb, after_bb, dest;
2964 edge new_edge, e;
2965
2966 /* Abnormal edges cannot be split. */
2967 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2968
2969 dest = edge_in->dest;
2970
2971 after_bb = split_edge_bb_loc (edge_in);
2972
2973 new_bb = create_empty_bb (after_bb);
2974 new_bb->count = edge_in->count ();
2975
2976 e = redirect_edge_and_branch (edge_in, new_bb);
2977 gcc_assert (e == edge_in);
2978
2979 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2980 reinstall_phi_args (new_edge, e);
2981
2982 return new_bb;
2983 }
2984
2985
2986 /* Verify properties of the address expression T whose base should be
2987 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2988
2989 static bool
2990 verify_address (tree t, bool verify_addressable)
2991 {
2992 bool old_constant;
2993 bool old_side_effects;
2994 bool new_constant;
2995 bool new_side_effects;
2996
2997 old_constant = TREE_CONSTANT (t);
2998 old_side_effects = TREE_SIDE_EFFECTS (t);
2999
3000 recompute_tree_invariant_for_addr_expr (t);
3001 new_side_effects = TREE_SIDE_EFFECTS (t);
3002 new_constant = TREE_CONSTANT (t);
3003
3004 if (old_constant != new_constant)
3005 {
3006 error ("constant not recomputed when ADDR_EXPR changed");
3007 return true;
3008 }
3009 if (old_side_effects != new_side_effects)
3010 {
3011 error ("side effects not recomputed when ADDR_EXPR changed");
3012 return true;
3013 }
3014
3015 tree base = TREE_OPERAND (t, 0);
3016 while (handled_component_p (base))
3017 base = TREE_OPERAND (base, 0);
3018
3019 if (!(VAR_P (base)
3020 || TREE_CODE (base) == PARM_DECL
3021 || TREE_CODE (base) == RESULT_DECL))
3022 return false;
3023
3024 if (DECL_GIMPLE_REG_P (base))
3025 {
3026 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3027 return true;
3028 }
3029
3030 if (verify_addressable && !TREE_ADDRESSABLE (base))
3031 {
3032 error ("address taken, but ADDRESSABLE bit not set");
3033 return true;
3034 }
3035
3036 return false;
3037 }
3038
3039
3040 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3041 Returns true if there is an error, otherwise false. */
3042
3043 static bool
3044 verify_types_in_gimple_min_lval (tree expr)
3045 {
3046 tree op;
3047
3048 if (is_gimple_id (expr))
3049 return false;
3050
3051 if (TREE_CODE (expr) != TARGET_MEM_REF
3052 && TREE_CODE (expr) != MEM_REF)
3053 {
3054 error ("invalid expression for min lvalue");
3055 return true;
3056 }
3057
3058 /* TARGET_MEM_REFs are strange beasts. */
3059 if (TREE_CODE (expr) == TARGET_MEM_REF)
3060 return false;
3061
3062 op = TREE_OPERAND (expr, 0);
3063 if (!is_gimple_val (op))
3064 {
3065 error ("invalid operand in indirect reference");
3066 debug_generic_stmt (op);
3067 return true;
3068 }
3069 /* Memory references now generally can involve a value conversion. */
3070
3071 return false;
3072 }
3073
3074 /* Verify if EXPR is a valid GIMPLE reference expression. If
3075 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3076 if there is an error, otherwise false. */
3077
3078 static bool
3079 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3080 {
3081 if (TREE_CODE (expr) == REALPART_EXPR
3082 || TREE_CODE (expr) == IMAGPART_EXPR
3083 || TREE_CODE (expr) == BIT_FIELD_REF)
3084 {
3085 tree op = TREE_OPERAND (expr, 0);
3086 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3087 {
3088 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3089 return true;
3090 }
3091
3092 if (TREE_CODE (expr) == BIT_FIELD_REF)
3093 {
3094 tree t1 = TREE_OPERAND (expr, 1);
3095 tree t2 = TREE_OPERAND (expr, 2);
3096 poly_uint64 size, bitpos;
3097 if (!poly_int_tree_p (t1, &size)
3098 || !poly_int_tree_p (t2, &bitpos)
3099 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3100 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3101 {
3102 error ("invalid position or size operand to BIT_FIELD_REF");
3103 return true;
3104 }
3105 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3106 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3107 {
3108 error ("integral result type precision does not match "
3109 "field size of BIT_FIELD_REF");
3110 return true;
3111 }
3112 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3113 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3114 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3115 size))
3116 {
3117 error ("mode size of non-integral result does not "
3118 "match field size of BIT_FIELD_REF");
3119 return true;
3120 }
3121 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3122 && !type_has_mode_precision_p (TREE_TYPE (op)))
3123 {
3124 error ("BIT_FIELD_REF of non-mode-precision operand");
3125 return true;
3126 }
3127 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3128 && maybe_gt (size + bitpos,
3129 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3130 {
3131 error ("position plus size exceeds size of referenced object in "
3132 "BIT_FIELD_REF");
3133 return true;
3134 }
3135 }
3136
3137 if ((TREE_CODE (expr) == REALPART_EXPR
3138 || TREE_CODE (expr) == IMAGPART_EXPR)
3139 && !useless_type_conversion_p (TREE_TYPE (expr),
3140 TREE_TYPE (TREE_TYPE (op))))
3141 {
3142 error ("type mismatch in real/imagpart reference");
3143 debug_generic_stmt (TREE_TYPE (expr));
3144 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3145 return true;
3146 }
3147 expr = op;
3148 }
3149
3150 while (handled_component_p (expr))
3151 {
3152 if (TREE_CODE (expr) == REALPART_EXPR
3153 || TREE_CODE (expr) == IMAGPART_EXPR
3154 || TREE_CODE (expr) == BIT_FIELD_REF)
3155 {
3156 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3157 return true;
3158 }
3159
3160 tree op = TREE_OPERAND (expr, 0);
3161
3162 if (TREE_CODE (expr) == ARRAY_REF
3163 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3164 {
3165 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3166 || (TREE_OPERAND (expr, 2)
3167 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3168 || (TREE_OPERAND (expr, 3)
3169 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3170 {
3171 error ("invalid operands to array reference");
3172 debug_generic_stmt (expr);
3173 return true;
3174 }
3175 }
3176
3177 /* Verify if the reference array element types are compatible. */
3178 if (TREE_CODE (expr) == ARRAY_REF
3179 && !useless_type_conversion_p (TREE_TYPE (expr),
3180 TREE_TYPE (TREE_TYPE (op))))
3181 {
3182 error ("type mismatch in array reference");
3183 debug_generic_stmt (TREE_TYPE (expr));
3184 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3185 return true;
3186 }
3187 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3188 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3189 TREE_TYPE (TREE_TYPE (op))))
3190 {
3191 error ("type mismatch in array range reference");
3192 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3193 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3194 return true;
3195 }
3196
3197 if (TREE_CODE (expr) == COMPONENT_REF)
3198 {
3199 if (TREE_OPERAND (expr, 2)
3200 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3201 {
3202 error ("invalid COMPONENT_REF offset operator");
3203 return true;
3204 }
3205 if (!useless_type_conversion_p (TREE_TYPE (expr),
3206 TREE_TYPE (TREE_OPERAND (expr, 1))))
3207 {
3208 error ("type mismatch in component reference");
3209 debug_generic_stmt (TREE_TYPE (expr));
3210 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3211 return true;
3212 }
3213 }
3214
3215 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3216 {
3217 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3218 that their operand is not an SSA name or an invariant when
3219 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3220 bug). Otherwise there is nothing to verify, gross mismatches at
3221 most invoke undefined behavior. */
3222 if (require_lvalue
3223 && (TREE_CODE (op) == SSA_NAME
3224 || is_gimple_min_invariant (op)))
3225 {
3226 error ("conversion of an SSA_NAME on the left hand side");
3227 debug_generic_stmt (expr);
3228 return true;
3229 }
3230 else if (TREE_CODE (op) == SSA_NAME
3231 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3232 {
3233 error ("conversion of register to a different size");
3234 debug_generic_stmt (expr);
3235 return true;
3236 }
3237 else if (!handled_component_p (op))
3238 return false;
3239 }
3240
3241 expr = op;
3242 }
3243
3244 if (TREE_CODE (expr) == MEM_REF)
3245 {
3246 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3247 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3248 && verify_address (TREE_OPERAND (expr, 0), false)))
3249 {
3250 error ("invalid address operand in MEM_REF");
3251 debug_generic_stmt (expr);
3252 return true;
3253 }
3254 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3255 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3256 {
3257 error ("invalid offset operand in MEM_REF");
3258 debug_generic_stmt (expr);
3259 return true;
3260 }
3261 }
3262 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3263 {
3264 if (!TMR_BASE (expr)
3265 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3266 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3267 && verify_address (TMR_BASE (expr), false)))
3268 {
3269 error ("invalid address operand in TARGET_MEM_REF");
3270 return true;
3271 }
3272 if (!TMR_OFFSET (expr)
3273 || !poly_int_tree_p (TMR_OFFSET (expr))
3274 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3275 {
3276 error ("invalid offset operand in TARGET_MEM_REF");
3277 debug_generic_stmt (expr);
3278 return true;
3279 }
3280 }
3281 else if (TREE_CODE (expr) == INDIRECT_REF)
3282 {
3283 error ("INDIRECT_REF in gimple IL");
3284 debug_generic_stmt (expr);
3285 return true;
3286 }
3287
3288 return ((require_lvalue || !is_gimple_min_invariant (expr))
3289 && verify_types_in_gimple_min_lval (expr));
3290 }
3291
3292 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3293 list of pointer-to types that is trivially convertible to DEST. */
3294
3295 static bool
3296 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3297 {
3298 tree src;
3299
3300 if (!TYPE_POINTER_TO (src_obj))
3301 return true;
3302
3303 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3304 if (useless_type_conversion_p (dest, src))
3305 return true;
3306
3307 return false;
3308 }
3309
3310 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3311 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3312
3313 static bool
3314 valid_fixed_convert_types_p (tree type1, tree type2)
3315 {
3316 return (FIXED_POINT_TYPE_P (type1)
3317 && (INTEGRAL_TYPE_P (type2)
3318 || SCALAR_FLOAT_TYPE_P (type2)
3319 || FIXED_POINT_TYPE_P (type2)));
3320 }
3321
3322 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3323 is a problem, otherwise false. */
3324
3325 static bool
3326 verify_gimple_call (gcall *stmt)
3327 {
3328 tree fn = gimple_call_fn (stmt);
3329 tree fntype, fndecl;
3330 unsigned i;
3331
3332 if (gimple_call_internal_p (stmt))
3333 {
3334 if (fn)
3335 {
3336 error ("gimple call has two targets");
3337 debug_generic_stmt (fn);
3338 return true;
3339 }
3340 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3341 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3342 {
3343 return false;
3344 }
3345 }
3346 else
3347 {
3348 if (!fn)
3349 {
3350 error ("gimple call has no target");
3351 return true;
3352 }
3353 }
3354
3355 if (fn && !is_gimple_call_addr (fn))
3356 {
3357 error ("invalid function in gimple call");
3358 debug_generic_stmt (fn);
3359 return true;
3360 }
3361
3362 if (fn
3363 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3364 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3365 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3366 {
3367 error ("non-function in gimple call");
3368 return true;
3369 }
3370
3371 fndecl = gimple_call_fndecl (stmt);
3372 if (fndecl
3373 && TREE_CODE (fndecl) == FUNCTION_DECL
3374 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3375 && !DECL_PURE_P (fndecl)
3376 && !TREE_READONLY (fndecl))
3377 {
3378 error ("invalid pure const state for function");
3379 return true;
3380 }
3381
3382 tree lhs = gimple_call_lhs (stmt);
3383 if (lhs
3384 && (!is_gimple_lvalue (lhs)
3385 || verify_types_in_gimple_reference (lhs, true)))
3386 {
3387 error ("invalid LHS in gimple call");
3388 return true;
3389 }
3390
3391 if (gimple_call_ctrl_altering_p (stmt)
3392 && gimple_call_noreturn_p (stmt)
3393 && should_remove_lhs_p (lhs))
3394 {
3395 error ("LHS in noreturn call");
3396 return true;
3397 }
3398
3399 fntype = gimple_call_fntype (stmt);
3400 if (fntype
3401 && lhs
3402 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3403 /* ??? At least C++ misses conversions at assignments from
3404 void * call results.
3405 For now simply allow arbitrary pointer type conversions. */
3406 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3407 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3408 {
3409 error ("invalid conversion in gimple call");
3410 debug_generic_stmt (TREE_TYPE (lhs));
3411 debug_generic_stmt (TREE_TYPE (fntype));
3412 return true;
3413 }
3414
3415 if (gimple_call_chain (stmt)
3416 && !is_gimple_val (gimple_call_chain (stmt)))
3417 {
3418 error ("invalid static chain in gimple call");
3419 debug_generic_stmt (gimple_call_chain (stmt));
3420 return true;
3421 }
3422
3423 /* If there is a static chain argument, the call should either be
3424 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3425 if (gimple_call_chain (stmt)
3426 && fndecl
3427 && !DECL_STATIC_CHAIN (fndecl))
3428 {
3429 error ("static chain with function that doesn%'t use one");
3430 return true;
3431 }
3432
3433 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3434 {
3435 switch (DECL_FUNCTION_CODE (fndecl))
3436 {
3437 case BUILT_IN_UNREACHABLE:
3438 case BUILT_IN_TRAP:
3439 if (gimple_call_num_args (stmt) > 0)
3440 {
3441 /* Built-in unreachable with parameters might not be caught by the
3442 undefined behavior sanitizer. Front ends do check that users do
3443 not call them that way, but we also produce calls to
3444 __builtin_unreachable internally, for example when IPA figures
3445 out that a call cannot happen in a legal program. In such cases,
3446 we must make sure the arguments are stripped off. */
3447 error ("__builtin_unreachable or __builtin_trap call with "
3448 "arguments");
3449 return true;
3450 }
3451 break;
3452 default:
3453 break;
3454 }
3455 }
3456
3457 /* ??? The C frontend passes unpromoted arguments in case it
3458 didn't see a function declaration before the call. So for now
3459 leave the call arguments mostly unverified. Once we gimplify
3460 unit-at-a-time we have a chance to fix this. */
3461
3462 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3463 {
3464 tree arg = gimple_call_arg (stmt, i);
3465 if ((is_gimple_reg_type (TREE_TYPE (arg))
3466 && !is_gimple_val (arg))
3467 || (!is_gimple_reg_type (TREE_TYPE (arg))
3468 && !is_gimple_lvalue (arg)))
3469 {
3470 error ("invalid argument to gimple call");
3471 debug_generic_expr (arg);
3472 return true;
3473 }
3474 }
3475
3476 return false;
3477 }
3478
3479 /* Verifies the gimple comparison with the result type TYPE and
3480 the operands OP0 and OP1; the comparison code is CODE. */
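/* E.g. "_1 = x != y" with x of type "int" and y of type "unsigned int"
   is rejected with "mismatching comparison operand types", since
   neither operand type trivially converts to the other (a hypothetical
   case; _1, x and y are illustrative names).  */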
3481
3482 static bool
3483 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3484 {
3485 tree op0_type = TREE_TYPE (op0);
3486 tree op1_type = TREE_TYPE (op1);
3487
3488 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3489 {
3490 error ("invalid operands in gimple comparison");
3491 return true;
3492 }
3493
3494 /* For comparisons we do not have the operations type as the
3495 effective type the comparison is carried out in. Instead
3496 we require that either the first operand is trivially
3497 convertible into the second, or the other way around.
3498 Because we special-case pointers to void we allow
3499 comparisons of pointers with the same mode as well. */
3500 if (!useless_type_conversion_p (op0_type, op1_type)
3501 && !useless_type_conversion_p (op1_type, op0_type)
3502 && (!POINTER_TYPE_P (op0_type)
3503 || !POINTER_TYPE_P (op1_type)
3504 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3505 {
3506 error ("mismatching comparison operand types");
3507 debug_generic_expr (op0_type);
3508 debug_generic_expr (op1_type);
3509 return true;
3510 }
3511
3512 /* The resulting type of a comparison may be an effective boolean type. */
3513 if (INTEGRAL_TYPE_P (type)
3514 && (TREE_CODE (type) == BOOLEAN_TYPE
3515 || TYPE_PRECISION (type) == 1))
3516 {
3517 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3518 || TREE_CODE (op1_type) == VECTOR_TYPE)
3519 && code != EQ_EXPR && code != NE_EXPR
3520 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3521 && !VECTOR_INTEGER_TYPE_P (op0_type))
3522 {
3523 error ("unsupported operation or type for vector comparison"
3524 " returning a boolean");
3525 debug_generic_expr (op0_type);
3526 debug_generic_expr (op1_type);
3527 return true;
3528 }
3529 }
3530 /* Or a boolean vector type with the same element count
3531 as the comparison operand types. */
3532 else if (TREE_CODE (type) == VECTOR_TYPE
3533 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3534 {
3535 if (TREE_CODE (op0_type) != VECTOR_TYPE
3536 || TREE_CODE (op1_type) != VECTOR_TYPE)
3537 {
3538 error ("non-vector operands in vector comparison");
3539 debug_generic_expr (op0_type);
3540 debug_generic_expr (op1_type);
3541 return true;
3542 }
3543
3544 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3545 TYPE_VECTOR_SUBPARTS (op0_type)))
3546 {
3547 error ("invalid vector comparison resulting type");
3548 debug_generic_expr (type);
3549 return true;
3550 }
3551 }
3552 else
3553 {
3554 error ("bogus comparison result type");
3555 debug_generic_expr (type);
3556 return true;
3557 }
3558
3559 return false;
3560 }
3561
3562 /* Verify a gimple assignment statement STMT with an unary rhs.
3563 Returns true if anything is wrong. */
3564
3565 static bool
3566 verify_gimple_assign_unary (gassign *stmt)
3567 {
3568 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3569 tree lhs = gimple_assign_lhs (stmt);
3570 tree lhs_type = TREE_TYPE (lhs);
3571 tree rhs1 = gimple_assign_rhs1 (stmt);
3572 tree rhs1_type = TREE_TYPE (rhs1);
3573
3574 if (!is_gimple_reg (lhs))
3575 {
3576 error ("non-register as LHS of unary operation");
3577 return true;
3578 }
3579
3580 if (!is_gimple_val (rhs1))
3581 {
3582 error ("invalid operand in unary operation");
3583 return true;
3584 }
3585
3586 /* First handle conversions. */
3587 switch (rhs_code)
3588 {
3589 CASE_CONVERT:
3590 {
3591 /* Allow conversions from pointer type to integral type only if
3592 there is no sign or zero extension involved.
3593 For targets where the precision of ptrofftype doesn't match that
3594 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3595 if ((POINTER_TYPE_P (lhs_type)
3596 && INTEGRAL_TYPE_P (rhs1_type))
3597 || (POINTER_TYPE_P (rhs1_type)
3598 && INTEGRAL_TYPE_P (lhs_type)
3599 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3600 || ptrofftype_p (lhs_type))))
3601 return false;
3602
3603 /* Allow conversion from integral to offset type and vice versa. */
3604 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3605 && INTEGRAL_TYPE_P (rhs1_type))
3606 || (INTEGRAL_TYPE_P (lhs_type)
3607 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3608 return false;
3609
3610 /* Otherwise assert we are converting between types of the
3611 same kind. */
3612 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3613 {
3614 error ("invalid types in nop conversion");
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 return true;
3618 }
3619
3620 return false;
3621 }
3622
3623 case ADDR_SPACE_CONVERT_EXPR:
3624 {
3625 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3626 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3627 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3628 {
3629 error ("invalid types in address space conversion");
3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3632 return true;
3633 }
3634
3635 return false;
3636 }
3637
3638 case FIXED_CONVERT_EXPR:
3639 {
3640 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3641 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3642 {
3643 error ("invalid types in fixed-point conversion");
3644 debug_generic_expr (lhs_type);
3645 debug_generic_expr (rhs1_type);
3646 return true;
3647 }
3648
3649 return false;
3650 }
3651
3652 case FLOAT_EXPR:
3653 {
3654 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3655 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3656 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3657 {
3658 error ("invalid types in conversion to floating point");
3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 return true;
3662 }
3663
3664 return false;
3665 }
3666
3667 case FIX_TRUNC_EXPR:
3668 {
3669 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3670 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3671 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3672 {
3673 error ("invalid types in conversion to integer");
3674 debug_generic_expr (lhs_type);
3675 debug_generic_expr (rhs1_type);
3676 return true;
3677 }
3678
3679 return false;
3680 }
3681
3682 case VEC_UNPACK_HI_EXPR:
3683 case VEC_UNPACK_LO_EXPR:
3684 case VEC_UNPACK_FLOAT_HI_EXPR:
3685 case VEC_UNPACK_FLOAT_LO_EXPR:
3686 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3687 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3688 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3689 || TREE_CODE (lhs_type) != VECTOR_TYPE
3690 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3691 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3692 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3693 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3694 || ((rhs_code == VEC_UNPACK_HI_EXPR
3695 || rhs_code == VEC_UNPACK_LO_EXPR)
3696 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3697 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3698 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3699 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3700 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3701 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3702 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3703 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3704 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3705 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3706 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3707 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3708 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3709 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3710 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3711 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3712 {
3713 error ("type mismatch in vector unpack expression");
3714 debug_generic_expr (lhs_type);
3715 debug_generic_expr (rhs1_type);
3716 return true;
3717 }
3718
3719 return false;
3720
3721 case NEGATE_EXPR:
3722 case ABS_EXPR:
3723 case BIT_NOT_EXPR:
3724 case PAREN_EXPR:
3725 case CONJ_EXPR:
3726 break;
3727
3728 case ABSU_EXPR:
3729 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3730 || !TYPE_UNSIGNED (lhs_type)
3731 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3732 || TYPE_UNSIGNED (rhs1_type)
3733 || element_precision (lhs_type) != element_precision (rhs1_type))
3734 {
3735 error ("invalid types for ABSU_EXPR");
3736 debug_generic_expr (lhs_type);
3737 debug_generic_expr (rhs1_type);
3738 return true;
3739 }
3740 return false;
3741
3742 case VEC_DUPLICATE_EXPR:
3743 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3744 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3745 {
3746 error ("vec_duplicate should be from a scalar to a like vector");
3747 debug_generic_expr (lhs_type);
3748 debug_generic_expr (rhs1_type);
3749 return true;
3750 }
3751 return false;
3752
3753 default:
3754 gcc_unreachable ();
3755 }
3756
3757 /* For the remaining codes assert there is no conversion involved. */
3758 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3759 {
3760 error ("non-trivial conversion in unary operation");
3761 debug_generic_expr (lhs_type);
3762 debug_generic_expr (rhs1_type);
3763 return true;
3764 }
3765
3766 return false;
3767 }
3768
3769 /* Verify a gimple assignment statement STMT with a binary rhs.
3770 Returns true if anything is wrong. */
3771
3772 static bool
3773 verify_gimple_assign_binary (gassign *stmt)
3774 {
3775 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3776 tree lhs = gimple_assign_lhs (stmt);
3777 tree lhs_type = TREE_TYPE (lhs);
3778 tree rhs1 = gimple_assign_rhs1 (stmt);
3779 tree rhs1_type = TREE_TYPE (rhs1);
3780 tree rhs2 = gimple_assign_rhs2 (stmt);
3781 tree rhs2_type = TREE_TYPE (rhs2);
3782
3783 if (!is_gimple_reg (lhs))
3784 {
3785 error ("non-register as LHS of binary operation");
3786 return true;
3787 }
3788
3789 if (!is_gimple_val (rhs1)
3790 || !is_gimple_val (rhs2))
3791 {
3792 error ("invalid operands in binary operation");
3793 return true;
3794 }
3795
3796 /* First handle operations that involve different types. */
3797 switch (rhs_code)
3798 {
3799 case COMPLEX_EXPR:
3800 {
3801 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3802 || !(INTEGRAL_TYPE_P (rhs1_type)
3803 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3804 || !(INTEGRAL_TYPE_P (rhs2_type)
3805 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3806 {
3807 error ("type mismatch in complex expression");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 debug_generic_expr (rhs2_type);
3811 return true;
3812 }
3813
3814 return false;
3815 }
3816
3817 case LSHIFT_EXPR:
3818 case RSHIFT_EXPR:
3819 case LROTATE_EXPR:
3820 case RROTATE_EXPR:
3821 {
3822 /* Shifts and rotates are ok on integral types, fixed point
3823 types and integer vector types. */
3824 if ((!INTEGRAL_TYPE_P (rhs1_type)
3825 && !FIXED_POINT_TYPE_P (rhs1_type)
3826 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3827 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3828 || (!INTEGRAL_TYPE_P (rhs2_type)
3829 /* Vector shifts of vectors are also ok. */
3830 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3831 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3832 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3833 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3834 || !useless_type_conversion_p (lhs_type, rhs1_type))
3835 {
3836 error ("type mismatch in shift expression");
3837 debug_generic_expr (lhs_type);
3838 debug_generic_expr (rhs1_type);
3839 debug_generic_expr (rhs2_type);
3840 return true;
3841 }
3842
3843 return false;
3844 }
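/* For instance (hypothetical GIMPLE), with integral y_2 and an
   integral shift count,

     x_1 = y_2 << 3;

   passes the checks above, while a float-typed shift count or an lhs
   type differing from rhs1's type would be rejected.  */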
3845
3846 case WIDEN_LSHIFT_EXPR:
3847 {
3848 if (!INTEGRAL_TYPE_P (lhs_type)
3849 || !INTEGRAL_TYPE_P (rhs1_type)
3850 || TREE_CODE (rhs2) != INTEGER_CST
3851 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3852 {
3853 error ("type mismatch in widening vector shift expression");
3854 debug_generic_expr (lhs_type);
3855 debug_generic_expr (rhs1_type);
3856 debug_generic_expr (rhs2_type);
3857 return true;
3858 }
3859
3860 return false;
3861 }
3862
3863 case VEC_WIDEN_LSHIFT_HI_EXPR:
3864 case VEC_WIDEN_LSHIFT_LO_EXPR:
3865 {
3866 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3867 || TREE_CODE (lhs_type) != VECTOR_TYPE
3868 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3869 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3870 || TREE_CODE (rhs2) != INTEGER_CST
3871 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3872 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3873 {
3874 error ("type mismatch in widening vector shift expression");
3875 debug_generic_expr (lhs_type);
3876 debug_generic_expr (rhs1_type);
3877 debug_generic_expr (rhs2_type);
3878 return true;
3879 }
3880
3881 return false;
3882 }
3883
3884 case PLUS_EXPR:
3885 case MINUS_EXPR:
3886 {
3887 tree lhs_etype = lhs_type;
3888 tree rhs1_etype = rhs1_type;
3889 tree rhs2_etype = rhs2_type;
3890 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3891 {
3892 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3893 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3894 {
3895 error ("invalid non-vector operands to vector valued plus");
3896 return true;
3897 }
3898 lhs_etype = TREE_TYPE (lhs_type);
3899 rhs1_etype = TREE_TYPE (rhs1_type);
3900 rhs2_etype = TREE_TYPE (rhs2_type);
3901 }
3902 if (POINTER_TYPE_P (lhs_etype)
3903 || POINTER_TYPE_P (rhs1_etype)
3904 || POINTER_TYPE_P (rhs2_etype))
3905 {
3906 error ("invalid (pointer) operands to plus/minus");
3907 return true;
3908 }
3909
3910 /* Continue with generic binary expression handling. */
3911 break;
3912 }
3913
3914 case POINTER_PLUS_EXPR:
3915 {
3916 if (!POINTER_TYPE_P (rhs1_type)
3917 || !useless_type_conversion_p (lhs_type, rhs1_type)
3918 || !ptrofftype_p (rhs2_type))
3919 {
3920 error ("type mismatch in pointer plus expression");
3921 debug_generic_stmt (lhs_type);
3922 debug_generic_stmt (rhs1_type);
3923 debug_generic_stmt (rhs2_type);
3924 return true;
3925 }
3926
3927 return false;
3928 }
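/* The canonical GIMPLE form of pointer arithmetic is (illustrative)

     ptr_2 = ptr_1 p+ off_3;

   where ptr_2 and ptr_1 share the same pointer type and off_3 has an
   integer type with the precision and signedness of sizetype, which
   is what ptrofftype_p tests.  */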
3929
3930 case POINTER_DIFF_EXPR:
3931 {
3932 if (!POINTER_TYPE_P (rhs1_type)
3933 || !POINTER_TYPE_P (rhs2_type)
3934 /* Because we special-case pointers to void we allow difference
3935 of arbitrary pointers with the same mode. */
3936 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3937 || TREE_CODE (lhs_type) != INTEGER_TYPE
3938 || TYPE_UNSIGNED (lhs_type)
3939 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3940 {
3941 error ("type mismatch in pointer diff expression");
3942 debug_generic_stmt (lhs_type);
3943 debug_generic_stmt (rhs1_type);
3944 debug_generic_stmt (rhs2_type);
3945 return true;
3946 }
3947
3948 return false;
3949 }
3950
3951 case TRUTH_ANDIF_EXPR:
3952 case TRUTH_ORIF_EXPR:
3953 case TRUTH_AND_EXPR:
3954 case TRUTH_OR_EXPR:
3955 case TRUTH_XOR_EXPR:
3956
3957 gcc_unreachable ();
3958
3959 case LT_EXPR:
3960 case LE_EXPR:
3961 case GT_EXPR:
3962 case GE_EXPR:
3963 case EQ_EXPR:
3964 case NE_EXPR:
3965 case UNORDERED_EXPR:
3966 case ORDERED_EXPR:
3967 case UNLT_EXPR:
3968 case UNLE_EXPR:
3969 case UNGT_EXPR:
3970 case UNGE_EXPR:
3971 case UNEQ_EXPR:
3972 case LTGT_EXPR:
3973 /* Comparisons are also binary, but the result type is not
3974 connected to the operand types. */
3975 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3976
3977 case WIDEN_MULT_EXPR:
3978 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3979 return true;
3980 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3981 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3982
3983 case WIDEN_SUM_EXPR:
3984 {
3985 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3986 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3987 && ((!INTEGRAL_TYPE_P (rhs1_type)
3988 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3989 || (!INTEGRAL_TYPE_P (lhs_type)
3990 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3991 || !useless_type_conversion_p (lhs_type, rhs2_type)
3992 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3993 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3994 {
3995 error ("type mismatch in widening sum reduction");
3996 debug_generic_expr (lhs_type);
3997 debug_generic_expr (rhs1_type);
3998 debug_generic_expr (rhs2_type);
3999 return true;
4000 }
4001 return false;
4002 }
4003
4004 case VEC_WIDEN_MULT_HI_EXPR:
4005 case VEC_WIDEN_MULT_LO_EXPR:
4006 case VEC_WIDEN_MULT_EVEN_EXPR:
4007 case VEC_WIDEN_MULT_ODD_EXPR:
4008 {
4009 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4010 || TREE_CODE (lhs_type) != VECTOR_TYPE
4011 || !types_compatible_p (rhs1_type, rhs2_type)
4012 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4013 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4014 {
4015 error ("type mismatch in vector widening multiplication");
4016 debug_generic_expr (lhs_type);
4017 debug_generic_expr (rhs1_type);
4018 debug_generic_expr (rhs2_type);
4019 return true;
4020 }
4021 return false;
4022 }
4023
4024 case VEC_PACK_TRUNC_EXPR:
4025 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4026 vector boolean types. */
4027 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4028 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4029 && types_compatible_p (rhs1_type, rhs2_type)
4030 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4031 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4032 return false;
4033
4034 /* Fallthru. */
4035 case VEC_PACK_SAT_EXPR:
4036 case VEC_PACK_FIX_TRUNC_EXPR:
4037 {
4038 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4039 || TREE_CODE (lhs_type) != VECTOR_TYPE
4040 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4041 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4042 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4043 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4044 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4045 || !types_compatible_p (rhs1_type, rhs2_type)
4046 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4047 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4048 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4049 TYPE_VECTOR_SUBPARTS (lhs_type)))
4050 {
4051 error ("type mismatch in vector pack expression");
4052 debug_generic_expr (lhs_type);
4053 debug_generic_expr (rhs1_type);
4054 debug_generic_expr (rhs2_type);
4055 return true;
4056 }
4057
4058 return false;
4059 }
4060
4061 case VEC_PACK_FLOAT_EXPR:
4062 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4063 || TREE_CODE (lhs_type) != VECTOR_TYPE
4064 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4065 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4066 || !types_compatible_p (rhs1_type, rhs2_type)
4067 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4068 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4069 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4070 TYPE_VECTOR_SUBPARTS (lhs_type)))
4071 {
4072 error ("type mismatch in vector pack expression");
4073 debug_generic_expr (lhs_type);
4074 debug_generic_expr (rhs1_type);
4075 debug_generic_expr (rhs2_type);
4076 return true;
4077 }
4078
4079 return false;
4080
4081 case MULT_EXPR:
4082 case MULT_HIGHPART_EXPR:
4083 case TRUNC_DIV_EXPR:
4084 case CEIL_DIV_EXPR:
4085 case FLOOR_DIV_EXPR:
4086 case ROUND_DIV_EXPR:
4087 case TRUNC_MOD_EXPR:
4088 case CEIL_MOD_EXPR:
4089 case FLOOR_MOD_EXPR:
4090 case ROUND_MOD_EXPR:
4091 case RDIV_EXPR:
4092 case EXACT_DIV_EXPR:
4093 case MIN_EXPR:
4094 case MAX_EXPR:
4095 case BIT_IOR_EXPR:
4096 case BIT_XOR_EXPR:
4097 case BIT_AND_EXPR:
4098 /* Continue with generic binary expression handling. */
4099 break;
4100
4101 case VEC_SERIES_EXPR:
4102 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4103 {
4104 error ("type mismatch in series expression");
4105 debug_generic_expr (rhs1_type);
4106 debug_generic_expr (rhs2_type);
4107 return true;
4108 }
4109 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4110 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4111 {
4112 error ("vector type expected in series expression");
4113 debug_generic_expr (lhs_type);
4114 return true;
4115 }
4116 return false;
4117
4118 default:
4119 gcc_unreachable ();
4120 }
4121
4122 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4123 || !useless_type_conversion_p (lhs_type, rhs2_type))
4124 {
4125 error ("type mismatch in binary expression");
4126 debug_generic_stmt (lhs_type);
4127 debug_generic_stmt (rhs1_type);
4128 debug_generic_stmt (rhs2_type);
4129 return true;
4130 }
4131
4132 return false;
4133 }
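/* As an illustration of the final check above, a statement such as
   (hypothetical GIMPLE)

     c_1 = a_2 & b_3;

   requires c_1, a_2 and b_3 to have trivially convertible types;
   GIMPLE permits no implicit promotions in binary operations.  */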
4134
4135 /* Verify a gimple assignment statement STMT with a ternary rhs.
4136 Returns true if anything is wrong. */
4137
4138 static bool
4139 verify_gimple_assign_ternary (gassign *stmt)
4140 {
4141 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4142 tree lhs = gimple_assign_lhs (stmt);
4143 tree lhs_type = TREE_TYPE (lhs);
4144 tree rhs1 = gimple_assign_rhs1 (stmt);
4145 tree rhs1_type = TREE_TYPE (rhs1);
4146 tree rhs2 = gimple_assign_rhs2 (stmt);
4147 tree rhs2_type = TREE_TYPE (rhs2);
4148 tree rhs3 = gimple_assign_rhs3 (stmt);
4149 tree rhs3_type = TREE_TYPE (rhs3);
4150
4151 if (!is_gimple_reg (lhs))
4152 {
4153 error ("non-register as LHS of ternary operation");
4154 return true;
4155 }
4156
4157 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4158 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4159 || !is_gimple_val (rhs2)
4160 || !is_gimple_val (rhs3))
4161 {
4162 error ("invalid operands in ternary operation");
4163 return true;
4164 }
4165
4166 /* First handle operations that involve different types. */
4167 switch (rhs_code)
4168 {
4169 case WIDEN_MULT_PLUS_EXPR:
4170 case WIDEN_MULT_MINUS_EXPR:
4171 if ((!INTEGRAL_TYPE_P (rhs1_type)
4172 && !FIXED_POINT_TYPE_P (rhs1_type))
4173 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4174 || !useless_type_conversion_p (lhs_type, rhs3_type)
4175 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4176 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4177 {
4178 error ("type mismatch in widening multiply-accumulate expression");
4179 debug_generic_expr (lhs_type);
4180 debug_generic_expr (rhs1_type);
4181 debug_generic_expr (rhs2_type);
4182 debug_generic_expr (rhs3_type);
4183 return true;
4184 }
4185 break;
4186
4187 case VEC_COND_EXPR:
4188 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4189 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4190 TYPE_VECTOR_SUBPARTS (lhs_type)))
4191 {
4192 error ("the first argument of a VEC_COND_EXPR must be of a "
4193 "boolean vector type of the same number of elements "
4194 "as the result");
4195 debug_generic_expr (lhs_type);
4196 debug_generic_expr (rhs1_type);
4197 return true;
4198 }
4199 /* Fallthrough. */
4200 case COND_EXPR:
4201 if (!is_gimple_val (rhs1)
4202 && verify_gimple_comparison (TREE_TYPE (rhs1),
4203 TREE_OPERAND (rhs1, 0),
4204 TREE_OPERAND (rhs1, 1),
4205 TREE_CODE (rhs1)))
4206 return true;
4207 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4208 || !useless_type_conversion_p (lhs_type, rhs3_type))
4209 {
4210 error ("type mismatch in conditional expression");
4211 debug_generic_expr (lhs_type);
4212 debug_generic_expr (rhs2_type);
4213 debug_generic_expr (rhs3_type);
4214 return true;
4215 }
4216 break;
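/* E.g. both of these forms satisfy the checks above (hypothetical
   GIMPLE):

     x_1 = a_2 < b_3 ? c_4 : d_5;          <-- COND_EXPR
     v_1 = VEC_COND_EXPR <m_2, v_3, v_4>;  <-- VEC_COND_EXPR

   provided the value operands trivially convert to the lhs type and,
   for VEC_COND_EXPR, m_2 is a boolean vector with as many elements
   as the result.  */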
4217
4218 case VEC_PERM_EXPR:
4219 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4220 || !useless_type_conversion_p (lhs_type, rhs2_type))
4221 {
4222 error ("type mismatch in vector permute expression");
4223 debug_generic_expr (lhs_type);
4224 debug_generic_expr (rhs1_type);
4225 debug_generic_expr (rhs2_type);
4226 debug_generic_expr (rhs3_type);
4227 return true;
4228 }
4229
4230 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4231 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4232 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4233 {
4234 error ("vector types expected in vector permute expression");
4235 debug_generic_expr (lhs_type);
4236 debug_generic_expr (rhs1_type);
4237 debug_generic_expr (rhs2_type);
4238 debug_generic_expr (rhs3_type);
4239 return true;
4240 }
4241
4242 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4243 TYPE_VECTOR_SUBPARTS (rhs2_type))
4244 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4245 TYPE_VECTOR_SUBPARTS (rhs3_type))
4246 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4247 TYPE_VECTOR_SUBPARTS (lhs_type)))
4248 {
4249 error ("vectors with different element number found "
4250 "in vector permute expression");
4251 debug_generic_expr (lhs_type);
4252 debug_generic_expr (rhs1_type);
4253 debug_generic_expr (rhs2_type);
4254 debug_generic_expr (rhs3_type);
4255 return true;
4256 }
4257
4258 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4259 || (TREE_CODE (rhs3) != VECTOR_CST
4260 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4261 (TREE_TYPE (rhs3_type)))
4262 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4263 (TREE_TYPE (rhs1_type))))))
4264 {
4265 error ("invalid mask type in vector permute expression");
4266 debug_generic_expr (lhs_type);
4267 debug_generic_expr (rhs1_type);
4268 debug_generic_expr (rhs2_type);
4269 debug_generic_expr (rhs3_type);
4270 return true;
4271 }
4272
4273 return false;
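/* Illustrative: v_1 = VEC_PERM_EXPR <a_2, b_3, sel_4>; is valid when
   a_2, b_3 and v_1 share a type, all four vectors have the same
   number of elements, and sel_4 is an integer vector whose element
   width matches the data elements (a VECTOR_CST selector is exempt
   from the width check).  */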
4274
4275 case SAD_EXPR:
4276 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4277 || !useless_type_conversion_p (lhs_type, rhs3_type)
4278 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4279 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4280 {
4281 error ("type mismatch in sad expression");
4282 debug_generic_expr (lhs_type);
4283 debug_generic_expr (rhs1_type);
4284 debug_generic_expr (rhs2_type);
4285 debug_generic_expr (rhs3_type);
4286 return true;
4287 }
4288
4289 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4290 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4291 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4292 {
4293 error ("vector types expected in sad expression");
4294 debug_generic_expr (lhs_type);
4295 debug_generic_expr (rhs1_type);
4296 debug_generic_expr (rhs2_type);
4297 debug_generic_expr (rhs3_type);
4298 return true;
4299 }
4300
4301 return false;
4302
4303 case BIT_INSERT_EXPR:
4304 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4305 {
4306 error ("type mismatch in BIT_INSERT_EXPR");
4307 debug_generic_expr (lhs_type);
4308 debug_generic_expr (rhs1_type);
4309 return true;
4310 }
4311 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4312 && INTEGRAL_TYPE_P (rhs2_type))
4313 || (VECTOR_TYPE_P (rhs1_type)
4314 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4315 {
4316 error ("not allowed type combination in BIT_INSERT_EXPR");
4317 debug_generic_expr (rhs1_type);
4318 debug_generic_expr (rhs2_type);
4319 return true;
4320 }
4321 if (! tree_fits_uhwi_p (rhs3)
4322 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4323 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4324 {
4325 error ("invalid position or size in BIT_INSERT_EXPR");
4326 return true;
4327 }
4328 if (INTEGRAL_TYPE_P (rhs1_type)
4329 && !type_has_mode_precision_p (rhs1_type))
4330 {
4331 error ("BIT_INSERT_EXPR into non-mode-precision operand");
4332 return true;
4333 }
4334 if (INTEGRAL_TYPE_P (rhs1_type))
4335 {
4336 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4337 if (bitpos >= TYPE_PRECISION (rhs1_type)
4338 || (bitpos + TYPE_PRECISION (rhs2_type)
4339 > TYPE_PRECISION (rhs1_type)))
4340 {
4341 error ("insertion out of range in BIT_INSERT_EXPR");
4342 return true;
4343 }
4344 }
4345 else if (VECTOR_TYPE_P (rhs1_type))
4346 {
4347 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4348 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4349 if (bitpos % bitsize != 0)
4350 {
4351 error ("vector insertion not at element boundary");
4352 return true;
4353 }
4354 }
4355 return false;
4356
4357 case DOT_PROD_EXPR:
4358 {
4359 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4360 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4361 && ((!INTEGRAL_TYPE_P (rhs1_type)
4362 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4363 || (!INTEGRAL_TYPE_P (lhs_type)
4364 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4365 || !types_compatible_p (rhs1_type, rhs2_type)
4366 || !useless_type_conversion_p (lhs_type, rhs3_type)
4367 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4368 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4369 {
4370 error ("type mismatch in dot product reduction");
4371 debug_generic_expr (lhs_type);
4372 debug_generic_expr (rhs1_type);
4373 debug_generic_expr (rhs2_type);
4374 return true;
4375 }
4376 return false;
4377 }
4378
4379 case REALIGN_LOAD_EXPR:
4380 /* FIXME. */
4381 return false;
4382
4383 default:
4384 gcc_unreachable ();
4385 }
4386 return false;
4387 }
4388
4389 /* Verify a gimple assignment statement STMT with a single rhs.
4390 Returns true if anything is wrong. */
4391
4392 static bool
4393 verify_gimple_assign_single (gassign *stmt)
4394 {
4395 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4396 tree lhs = gimple_assign_lhs (stmt);
4397 tree lhs_type = TREE_TYPE (lhs);
4398 tree rhs1 = gimple_assign_rhs1 (stmt);
4399 tree rhs1_type = TREE_TYPE (rhs1);
4400 bool res = false;
4401
4402 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4403 {
4404 error ("non-trivial conversion at assignment");
4405 debug_generic_expr (lhs_type);
4406 debug_generic_expr (rhs1_type);
4407 return true;
4408 }
4409
4410 if (gimple_clobber_p (stmt)
4411 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4412 {
4413 error ("non-decl/MEM_REF LHS in clobber statement");
4414 debug_generic_expr (lhs);
4415 return true;
4416 }
4417
4418 if (handled_component_p (lhs)
4419 || TREE_CODE (lhs) == MEM_REF
4420 || TREE_CODE (lhs) == TARGET_MEM_REF)
4421 res |= verify_types_in_gimple_reference (lhs, true);
4422
4423 /* Special codes we cannot handle via their class. */
4424 switch (rhs_code)
4425 {
4426 case ADDR_EXPR:
4427 {
4428 tree op = TREE_OPERAND (rhs1, 0);
4429 if (!is_gimple_addressable (op))
4430 {
4431 error ("invalid operand in unary expression");
4432 return true;
4433 }
4434
4435 /* Technically there is no longer a need for matching types, but
4436 gimple hygiene asks for this check. In LTO we can end up
4437 combining incompatible units and thus end up with addresses
4438 of globals that change their type to a common one. */
4439 if (!in_lto_p
4440 && !types_compatible_p (TREE_TYPE (op),
4441 TREE_TYPE (TREE_TYPE (rhs1)))
4442 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4443 TREE_TYPE (op)))
4444 {
4445 error ("type mismatch in address expression");
4446 debug_generic_stmt (TREE_TYPE (rhs1));
4447 debug_generic_stmt (TREE_TYPE (op));
4448 return true;
4449 }
4450
4451 return (verify_address (rhs1, true)
4452 || verify_types_in_gimple_reference (op, true));
4453 }
4454
4455 /* tcc_reference */
4456 case INDIRECT_REF:
4457 error ("INDIRECT_REF in gimple IL");
4458 return true;
4459
4460 case COMPONENT_REF:
4461 case BIT_FIELD_REF:
4462 case ARRAY_REF:
4463 case ARRAY_RANGE_REF:
4464 case VIEW_CONVERT_EXPR:
4465 case REALPART_EXPR:
4466 case IMAGPART_EXPR:
4467 case TARGET_MEM_REF:
4468 case MEM_REF:
4469 if (!is_gimple_reg (lhs)
4470 && is_gimple_reg_type (TREE_TYPE (lhs)))
4471 {
4472 error ("invalid rhs for gimple memory store");
4473 debug_generic_stmt (lhs);
4474 debug_generic_stmt (rhs1);
4475 return true;
4476 }
4477 return res || verify_types_in_gimple_reference (rhs1, false);
4478
4479 /* tcc_constant */
4480 case SSA_NAME:
4481 case INTEGER_CST:
4482 case REAL_CST:
4483 case FIXED_CST:
4484 case COMPLEX_CST:
4485 case VECTOR_CST:
4486 case STRING_CST:
4487 return res;
4488
4489 /* tcc_declaration */
4490 case CONST_DECL:
4491 return res;
4492 case VAR_DECL:
4493 case PARM_DECL:
4494 if (!is_gimple_reg (lhs)
4495 && !is_gimple_reg (rhs1)
4496 && is_gimple_reg_type (TREE_TYPE (lhs)))
4497 {
4498 error ("invalid rhs for gimple memory store");
4499 debug_generic_stmt (lhs);
4500 debug_generic_stmt (rhs1);
4501 return true;
4502 }
4503 return res;
4504
4505 case CONSTRUCTOR:
4506 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4507 {
4508 unsigned int i;
4509 tree elt_i, elt_v, elt_t = NULL_TREE;
4510
4511 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4512 return res;
4513 /* For vector CONSTRUCTORs we require that either it is an empty
4514 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4515 (then the element count must be correct to cover the whole
4516 outer vector and the index must be NULL on all elements), or it
4517 is a CONSTRUCTOR of scalar elements, where as an exception we
4518 allow a smaller number of elements (assuming zero filling) and
4519 consecutive indexes instead of NULL indexes (such
4520 CONSTRUCTORs can appear in the IL from FEs). */
4521 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4522 {
4523 if (elt_t == NULL_TREE)
4524 {
4525 elt_t = TREE_TYPE (elt_v);
4526 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4527 {
4528 tree elt_t = TREE_TYPE (elt_v);
4529 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4530 TREE_TYPE (elt_t)))
4531 {
4532 error ("incorrect type of vector CONSTRUCTOR"
4533 " elements");
4534 debug_generic_stmt (rhs1);
4535 return true;
4536 }
4537 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4538 * TYPE_VECTOR_SUBPARTS (elt_t),
4539 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4540 {
4541 error ("incorrect number of vector CONSTRUCTOR"
4542 " elements");
4543 debug_generic_stmt (rhs1);
4544 return true;
4545 }
4546 }
4547 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4548 elt_t))
4549 {
4550 error ("incorrect type of vector CONSTRUCTOR elements");
4551 debug_generic_stmt (rhs1);
4552 return true;
4553 }
4554 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4555 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4556 {
4557 error ("incorrect number of vector CONSTRUCTOR elements");
4558 debug_generic_stmt (rhs1);
4559 return true;
4560 }
4561 }
4562 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4563 {
4564 error ("incorrect type of vector CONSTRUCTOR elements");
4565 debug_generic_stmt (rhs1);
4566 return true;
4567 }
4568 if (elt_i != NULL_TREE
4569 && (TREE_CODE (elt_t) == VECTOR_TYPE
4570 || TREE_CODE (elt_i) != INTEGER_CST
4571 || compare_tree_int (elt_i, i) != 0))
4572 {
4573 error ("vector CONSTRUCTOR with non-NULL element index");
4574 debug_generic_stmt (rhs1);
4575 return true;
4576 }
4577 if (!is_gimple_val (elt_v))
4578 {
4579 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4580 debug_generic_stmt (rhs1);
4581 return true;
4582 }
4583 }
4584 }
4585 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4586 {
4587 error ("non-vector CONSTRUCTOR with elements");
4588 debug_generic_stmt (rhs1);
4589 return true;
4590 }
4591 return res;
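/* A vector CONSTRUCTOR accepted by the checks above looks like
   (hypothetical GIMPLE)

     v_5 = {a_1, b_2, c_3, d_4};

   with NULL (or consecutive constant) element indexes and each
   element a GIMPLE value of the vector's element type.  */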
4592
4593 case ASSERT_EXPR:
4594 /* FIXME. */
4595 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4596 if (rhs1 == boolean_false_node)
4597 {
4598 error ("ASSERT_EXPR with an always-false condition");
4599 debug_generic_stmt (rhs1);
4600 return true;
4601 }
4602 break;
4603
4604 case OBJ_TYPE_REF:
4605 case WITH_SIZE_EXPR:
4606 /* FIXME. */
4607 return res;
4608
4609 default:;
4610 }
4611
4612 return res;
4613 }
4614
4615 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4616 is a problem, otherwise false. */
4617
4618 static bool
4619 verify_gimple_assign (gassign *stmt)
4620 {
4621 switch (gimple_assign_rhs_class (stmt))
4622 {
4623 case GIMPLE_SINGLE_RHS:
4624 return verify_gimple_assign_single (stmt);
4625
4626 case GIMPLE_UNARY_RHS:
4627 return verify_gimple_assign_unary (stmt);
4628
4629 case GIMPLE_BINARY_RHS:
4630 return verify_gimple_assign_binary (stmt);
4631
4632 case GIMPLE_TERNARY_RHS:
4633 return verify_gimple_assign_ternary (stmt);
4634
4635 default:
4636 gcc_unreachable ();
4637 }
4638 }
4639
4640 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4641 is a problem, otherwise false. */
4642
4643 static bool
4644 verify_gimple_return (greturn *stmt)
4645 {
4646 tree op = gimple_return_retval (stmt);
4647 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4648
4649 /* We cannot test for present return values as we do not fix up missing
4650 return values from the original source. */
4651 if (op == NULL)
4652 return false;
4653
4654 if (!is_gimple_val (op)
4655 && TREE_CODE (op) != RESULT_DECL)
4656 {
4657 error ("invalid operand in return statement");
4658 debug_generic_stmt (op);
4659 return true;
4660 }
4661
4662 if ((TREE_CODE (op) == RESULT_DECL
4663 && DECL_BY_REFERENCE (op))
4664 || (TREE_CODE (op) == SSA_NAME
4665 && SSA_NAME_VAR (op)
4666 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4667 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4668 op = TREE_TYPE (op);
4669
4670 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4671 {
4672 error ("invalid conversion in return statement");
4673 debug_generic_stmt (restype);
4674 debug_generic_stmt (TREE_TYPE (op));
4675 return true;
4676 }
4677
4678 return false;
4679 }
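/* For example, in a function declared to return long, the statement

     return i_1;

   is only valid GIMPLE when the type of i_1 trivially converts to
   long; any widening from, say, int must be performed by a separate
   conversion statement beforehand (illustrative).  */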
4680
4681
4682 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4683 is a problem, otherwise false. */
4684
4685 static bool
4686 verify_gimple_goto (ggoto *stmt)
4687 {
4688 tree dest = gimple_goto_dest (stmt);
4689
4690 /* ??? We have two canonical forms of direct goto destinations, a
4691 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4692 if (TREE_CODE (dest) != LABEL_DECL
4693 && (!is_gimple_val (dest)
4694 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4695 {
4696 error ("goto destination is neither a label nor a pointer");
4697 return true;
4698 }
4699
4700 return false;
4701 }
4702
4703 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4704 is a problem, otherwise false. */
4705
4706 static bool
4707 verify_gimple_switch (gswitch *stmt)
4708 {
4709 unsigned int i, n;
4710 tree elt, prev_upper_bound = NULL_TREE;
4711 tree index_type, elt_type = NULL_TREE;
4712
4713 if (!is_gimple_val (gimple_switch_index (stmt)))
4714 {
4715 error ("invalid operand to switch statement");
4716 debug_generic_stmt (gimple_switch_index (stmt));
4717 return true;
4718 }
4719
4720 index_type = TREE_TYPE (gimple_switch_index (stmt));
4721 if (! INTEGRAL_TYPE_P (index_type))
4722 {
4723 error ("non-integral type switch statement");
4724 debug_generic_expr (index_type);
4725 return true;
4726 }
4727
4728 elt = gimple_switch_label (stmt, 0);
4729 if (CASE_LOW (elt) != NULL_TREE
4730 || CASE_HIGH (elt) != NULL_TREE
4731 || CASE_CHAIN (elt) != NULL_TREE)
4732 {
4733 error ("invalid default case label in switch statement");
4734 debug_generic_expr (elt);
4735 return true;
4736 }
4737
4738 n = gimple_switch_num_labels (stmt);
4739 for (i = 1; i < n; i++)
4740 {
4741 elt = gimple_switch_label (stmt, i);
4742
4743 if (CASE_CHAIN (elt))
4744 {
4745 error ("invalid CASE_CHAIN");
4746 debug_generic_expr (elt);
4747 return true;
4748 }
4749 if (! CASE_LOW (elt))
4750 {
4751 error ("invalid case label in switch statement");
4752 debug_generic_expr (elt);
4753 return true;
4754 }
4755 if (CASE_HIGH (elt)
4756 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4757 {
4758 error ("invalid case range in switch statement");
4759 debug_generic_expr (elt);
4760 return true;
4761 }
4762
4763 if (elt_type)
4764 {
4765 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4766 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4767 {
4768 error ("type mismatch for case label in switch statement");
4769 debug_generic_expr (elt);
4770 return true;
4771 }
4772 }
4773 else
4774 {
4775 elt_type = TREE_TYPE (CASE_LOW (elt));
4776 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4777 {
4778 error ("type precision mismatch in switch statement");
4779 return true;
4780 }
4781 }
4782
4783 if (prev_upper_bound)
4784 {
4785 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4786 {
4787 error ("case labels not sorted in switch statement");
4788 return true;
4789 }
4790 }
4791
4792 prev_upper_bound = CASE_HIGH (elt);
4793 if (! prev_upper_bound)
4794 prev_upper_bound = CASE_LOW (elt);
4795 }
4796
4797 return false;
4798 }
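/* In dump form a switch accepted by the checks above looks like
   (illustrative)

     switch (i_1) <default: <L0>, case 1: <L1>, case 3 ... 7: <L2>>

   where the default label comes first with NULL CASE_LOW/CASE_HIGH,
   every other label has CASE_LOW set, ranges satisfy
   CASE_LOW < CASE_HIGH, and the labels are sorted ascending.  */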
4799
4800 /* Verify a gimple debug statement STMT.
4801 Returns true if anything is wrong. */
4802
4803 static bool
4804 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4805 {
4806 /* There isn't much that could be wrong in a gimple debug stmt. A
4807 gimple debug bind stmt, for example, maps a tree (usually
4808 a VAR_DECL or a PARM_DECL, but possibly some scalarized
4809 component or member of an aggregate type) to another tree that
4810 can be an arbitrary expression. These stmts expand into debug
4811 insns, and are converted to debug notes by var-tracking.c. */
4812 return false;
4813 }
4814
4815 /* Verify a gimple label statement STMT.
4816 Returns true if anything is wrong. */
4817
4818 static bool
4819 verify_gimple_label (glabel *stmt)
4820 {
4821 tree decl = gimple_label_label (stmt);
4822 int uid;
4823 bool err = false;
4824
4825 if (TREE_CODE (decl) != LABEL_DECL)
4826 return true;
4827 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4828 && DECL_CONTEXT (decl) != current_function_decl)
4829 {
4830 error ("label's context is not the current function decl");
4831 err |= true;
4832 }
4833
4834 uid = LABEL_DECL_UID (decl);
4835 if (cfun->cfg
4836 && (uid == -1
4837 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4838 {
4839 error ("incorrect entry in label_to_block_map");
4840 err |= true;
4841 }
4842
4843 uid = EH_LANDING_PAD_NR (decl);
4844 if (uid)
4845 {
4846 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4847 if (decl != lp->post_landing_pad)
4848 {
4849 error ("incorrect setting of landing pad number");
4850 err |= true;
4851 }
4852 }
4853
4854 return err;
4855 }
4856
4857 /* Verify a gimple cond statement STMT.
4858 Returns true if anything is wrong. */
4859
4860 static bool
4861 verify_gimple_cond (gcond *stmt)
4862 {
4863 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4864 {
4865 error ("invalid comparison code in gimple cond");
4866 return true;
4867 }
4868 if (!(!gimple_cond_true_label (stmt)
4869 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4870 || !(!gimple_cond_false_label (stmt)
4871 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4872 {
4873 error ("invalid labels in gimple cond");
4874 return true;
4875 }
4876
4877 return verify_gimple_comparison (boolean_type_node,
4878 gimple_cond_lhs (stmt),
4879 gimple_cond_rhs (stmt),
4880 gimple_cond_code (stmt));
4881 }
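/* E.g. (illustrative)

     if (a_1 < b_2) goto <L1>; else goto <L2>;

   is verified by checking the comparison of a_1 and b_2 as if it
   produced a boolean_type_node value.  */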
4882
4883 /* Verify the GIMPLE statement STMT. Returns true if there is an
4884 error, otherwise false. */
4885
4886 static bool
4887 verify_gimple_stmt (gimple *stmt)
4888 {
4889 switch (gimple_code (stmt))
4890 {
4891 case GIMPLE_ASSIGN:
4892 return verify_gimple_assign (as_a <gassign *> (stmt));
4893
4894 case GIMPLE_LABEL:
4895 return verify_gimple_label (as_a <glabel *> (stmt));
4896
4897 case GIMPLE_CALL:
4898 return verify_gimple_call (as_a <gcall *> (stmt));
4899
4900 case GIMPLE_COND:
4901 return verify_gimple_cond (as_a <gcond *> (stmt));
4902
4903 case GIMPLE_GOTO:
4904 return verify_gimple_goto (as_a <ggoto *> (stmt));
4905
4906 case GIMPLE_SWITCH:
4907 return verify_gimple_switch (as_a <gswitch *> (stmt));
4908
4909 case GIMPLE_RETURN:
4910 return verify_gimple_return (as_a <greturn *> (stmt));
4911
4912 case GIMPLE_ASM:
4913 return false;
4914
4915 case GIMPLE_TRANSACTION:
4916 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4917
4918 /* Tuples that do not have tree operands. */
4919 case GIMPLE_NOP:
4920 case GIMPLE_PREDICT:
4921 case GIMPLE_RESX:
4922 case GIMPLE_EH_DISPATCH:
4923 case GIMPLE_EH_MUST_NOT_THROW:
4924 return false;
4925
4926 CASE_GIMPLE_OMP:
4927 /* OpenMP directives are validated by the FE and never operated
4928 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4929 non-gimple expressions when the main index variable has had
4930 its address taken. This does not affect the loop itself
4931 because the header of a GIMPLE_OMP_FOR is merely used to determine
4932 how to set up the parallel iteration. */
4933 return false;
4934
4935 case GIMPLE_DEBUG:
4936 return verify_gimple_debug (stmt);
4937
4938 default:
4939 gcc_unreachable ();
4940 }
4941 }
4942
4943 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4944 and false otherwise. */
4945
4946 static bool
4947 verify_gimple_phi (gphi *phi)
4948 {
4949 bool err = false;
4950 unsigned i;
4951 tree phi_result = gimple_phi_result (phi);
4952 bool virtual_p;
4953
4954 if (!phi_result)
4955 {
4956 error ("invalid PHI result");
4957 return true;
4958 }
4959
4960 virtual_p = virtual_operand_p (phi_result);
4961 if (TREE_CODE (phi_result) != SSA_NAME
4962 || (virtual_p
4963 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4964 {
4965 error ("invalid PHI result");
4966 err = true;
4967 }
4968
4969 for (i = 0; i < gimple_phi_num_args (phi); i++)
4970 {
4971 tree t = gimple_phi_arg_def (phi, i);
4972
4973 if (!t)
4974 {
4975 error ("missing PHI def");
4976 err |= true;
4977 continue;
4978 }
4979 /* Addressable variables do have SSA_NAMEs but they
4980 are not considered gimple values. */
4981 else if ((TREE_CODE (t) == SSA_NAME
4982 && virtual_p != virtual_operand_p (t))
4983 || (virtual_p
4984 && (TREE_CODE (t) != SSA_NAME
4985 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4986 || (!virtual_p
4987 && !is_gimple_val (t)))
4988 {
4989 error ("invalid PHI argument");
4990 debug_generic_expr (t);
4991 err |= true;
4992 }
4993 #ifdef ENABLE_TYPES_CHECKING
4994 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4995 {
4996 error ("incompatible types in PHI argument %u", i);
4997 debug_generic_stmt (TREE_TYPE (phi_result));
4998 debug_generic_stmt (TREE_TYPE (t));
4999 err |= true;
5000 }
5001 #endif
5002 }
5003
5004 return err;
5005 }
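/* A PHI accepted by the checks above looks like (illustrative)

     x_4 = PHI <x_1(2), x_2(3)>

   The result is an SSA name, every argument is an SSA name or other
   GIMPLE value of a compatible type, and virtual and real operands
   are never mixed within one PHI.  */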
5006
5007 /* Verify the GIMPLE statements inside the sequence STMTS. */
5008
5009 static bool
5010 verify_gimple_in_seq_2 (gimple_seq stmts)
5011 {
5012 gimple_stmt_iterator ittr;
5013 bool err = false;
5014
5015 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5016 {
5017 gimple *stmt = gsi_stmt (ittr);
5018
5019 switch (gimple_code (stmt))
5020 {
5021 case GIMPLE_BIND:
5022 err |= verify_gimple_in_seq_2 (
5023 gimple_bind_body (as_a <gbind *> (stmt)));
5024 break;
5025
5026 case GIMPLE_TRY:
5027 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5028 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5029 break;
5030
5031 case GIMPLE_EH_FILTER:
5032 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5033 break;
5034
5035 case GIMPLE_EH_ELSE:
5036 {
5037 geh_else *eh_else = as_a <geh_else *> (stmt);
5038 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5039 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5040 }
5041 break;
5042
5043 case GIMPLE_CATCH:
5044 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5045 as_a <gcatch *> (stmt)));
5046 break;
5047
5048 case GIMPLE_TRANSACTION:
5049 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5050 break;
5051
5052 default:
5053 {
5054 bool err2 = verify_gimple_stmt (stmt);
5055 if (err2)
5056 debug_gimple_stmt (stmt);
5057 err |= err2;
5058 }
5059 }
5060 }
5061
5062 return err;
5063 }
5064
5065 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5066 is a problem, otherwise false. */
5067
5068 static bool
5069 verify_gimple_transaction (gtransaction *stmt)
5070 {
5071 tree lab;
5072
5073 lab = gimple_transaction_label_norm (stmt);
5074 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5075 return true;
5076 lab = gimple_transaction_label_uninst (stmt);
5077 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5078 return true;
5079 lab = gimple_transaction_label_over (stmt);
5080 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5081 return true;
5082
5083 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5084 }
5085
5086
5087 /* Verify the GIMPLE statements inside the statement list STMTS. */
5088
5089 DEBUG_FUNCTION void
5090 verify_gimple_in_seq (gimple_seq stmts)
5091 {
5092 timevar_push (TV_TREE_STMT_VERIFY);
5093 if (verify_gimple_in_seq_2 (stmts))
5094 internal_error ("verify_gimple failed");
5095 timevar_pop (TV_TREE_STMT_VERIFY);
5096 }
5097
5098 /* Return true when T can be shared. */
5099
5100 static bool
5101 tree_node_can_be_shared (tree t)
5102 {
5103 if (IS_TYPE_OR_DECL_P (t)
5104 || TREE_CODE (t) == SSA_NAME
5105 || TREE_CODE (t) == IDENTIFIER_NODE
5106 || TREE_CODE (t) == CASE_LABEL_EXPR
5107 || is_gimple_min_invariant (t))
5108 return true;
5109
5110 if (t == error_mark_node)
5111 return true;
5112
5113 return false;
5114 }
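/* E.g. an SSA name a_1 or a type node may appear in any number of
   statements, while an expression node such as a MEM_REF must be
   unshared; the walk_tree callbacks below report a non-shareable
   node visited twice as "incorrect sharing of tree nodes".  */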
5115
5116 /* Called via walk_tree. Verify tree sharing. */
5117
5118 static tree
5119 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5120 {
5121 hash_set<void *> *visited = (hash_set<void *> *) data;
5122
5123 if (tree_node_can_be_shared (*tp))
5124 {
5125 *walk_subtrees = false;
5126 return NULL;
5127 }
5128
5129 if (visited->add (*tp))
5130 return *tp;
5131
5132 return NULL;
5133 }
5134
5135 /* Called via walk_gimple_stmt. Verify tree sharing. */
5136
5137 static tree
5138 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5139 {
5140 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5141 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5142 }
5143
5144 static bool eh_error_found;
5145 bool
5146 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5147 hash_set<gimple *> *visited)
5148 {
5149 if (!visited->contains (stmt))
5150 {
5151 error ("dead STMT in EH table");
5152 debug_gimple_stmt (stmt);
5153 eh_error_found = true;
5154 }
5155 return true;
5156 }
5157
5158 /* Verify that the block of location LOC is in BLOCKS. */
5159
5160 static bool
5161 verify_location (hash_set<tree> *blocks, location_t loc)
5162 {
5163 tree block = LOCATION_BLOCK (loc);
5164 if (block != NULL_TREE
5165 && !blocks->contains (block))
5166 {
5167 error ("location references block not in block tree");
5168 return true;
5169 }
5170 if (block != NULL_TREE)
5171 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5172 return false;
5173 }
5174
5175 /* Called via walk_tree. Verify that expressions have no blocks. */
5176
5177 static tree
5178 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5179 {
5180 if (!EXPR_P (*tp))
5181 {
5182 *walk_subtrees = false;
5183 return NULL;
5184 }
5185
5186 location_t loc = EXPR_LOCATION (*tp);
5187 if (LOCATION_BLOCK (loc) != NULL)
5188 return *tp;
5189
5190 return NULL;
5191 }
5192
5193 /* Called via walk_tree. Verify locations of expressions. */
5194
5195 static tree
5196 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5197 {
5198 hash_set<tree> *blocks = (hash_set<tree> *) data;
5199 tree t = *tp;
5200
5201 /* ??? This doesn't really belong here but there's no good place to
5202 stick this remainder of old verify_expr. */
5203 /* ??? This barfs on debug stmts which contain binds to vars with
5204 different function context. */
5205 #if 0
5206 if (VAR_P (t)
5207 || TREE_CODE (t) == PARM_DECL
5208 || TREE_CODE (t) == RESULT_DECL)
5209 {
5210 tree context = decl_function_context (t);
5211 if (context != cfun->decl
5212 && !SCOPE_FILE_SCOPE_P (context)
5213 && !TREE_STATIC (t)
5214 && !DECL_EXTERNAL (t))
5215 {
5216 error ("local declaration from a different function");
5217 return t;
5218 }
5219 }
5220 #endif
5221
5222 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5223 {
5224 tree x = DECL_DEBUG_EXPR (t);
5225 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5226 if (addr)
5227 return addr;
5228 }
5229 if ((VAR_P (t)
5230 || TREE_CODE (t) == PARM_DECL
5231 || TREE_CODE (t) == RESULT_DECL)
5232 && DECL_HAS_VALUE_EXPR_P (t))
5233 {
5234 tree x = DECL_VALUE_EXPR (t);
5235 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5236 if (addr)
5237 return addr;
5238 }
5239
5240 if (!EXPR_P (t))
5241 {
5242 *walk_subtrees = false;
5243 return NULL;
5244 }
5245
5246 location_t loc = EXPR_LOCATION (t);
5247 if (verify_location (blocks, loc))
5248 return t;
5249
5250 return NULL;
5251 }
5252
5253 /* Called via walk_gimple_op. Verify locations of expressions. */
5254
5255 static tree
5256 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5257 {
5258 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5259 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5260 }
5261
5262 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5263
5264 static void
5265 collect_subblocks (hash_set<tree> *blocks, tree block)
5266 {
5267 tree t;
5268 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5269 {
5270 blocks->add (t);
5271 collect_subblocks (blocks, t);
5272 }
5273 }
5274
5275 /* Verify the GIMPLE statements in the CFG of FN. */
5276
5277 DEBUG_FUNCTION void
5278 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5279 {
5280 basic_block bb;
5281 bool err = false;
5282
5283 timevar_push (TV_TREE_STMT_VERIFY);
5284 hash_set<void *> visited;
5285 hash_set<gimple *> visited_throwing_stmts;
5286
5287 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5288 hash_set<tree> blocks;
5289 if (DECL_INITIAL (fn->decl))
5290 {
5291 blocks.add (DECL_INITIAL (fn->decl));
5292 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5293 }
5294
5295 FOR_EACH_BB_FN (bb, fn)
5296 {
5297 gimple_stmt_iterator gsi;
5298 edge_iterator ei;
5299 edge e;
5300
5301 for (gphi_iterator gpi = gsi_start_phis (bb);
5302 !gsi_end_p (gpi);
5303 gsi_next (&gpi))
5304 {
5305 gphi *phi = gpi.phi ();
5306 bool err2 = false;
5307 unsigned i;
5308
5309 if (gimple_bb (phi) != bb)
5310 {
5311 error ("gimple_bb (phi) is set to a wrong basic block");
5312 err2 = true;
5313 }
5314
5315 err2 |= verify_gimple_phi (phi);
5316
5317 /* Only PHI arguments have locations. */
5318 if (gimple_location (phi) != UNKNOWN_LOCATION)
5319 {
5320 error ("PHI node with location");
5321 err2 = true;
5322 }
5323
5324 for (i = 0; i < gimple_phi_num_args (phi); i++)
5325 {
5326 tree arg = gimple_phi_arg_def (phi, i);
5327 tree addr = walk_tree (&arg, verify_node_sharing_1,
5328 &visited, NULL);
5329 if (addr)
5330 {
5331 error ("incorrect sharing of tree nodes");
5332 debug_generic_expr (addr);
5333 err2 |= true;
5334 }
5335 location_t loc = gimple_phi_arg_location (phi, i);
5336 if (virtual_operand_p (gimple_phi_result (phi))
5337 && loc != UNKNOWN_LOCATION)
5338 {
5339 error ("virtual PHI with argument locations");
5340 err2 = true;
5341 }
5342 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5343 if (addr)
5344 {
5345 debug_generic_expr (addr);
5346 err2 = true;
5347 }
5348 err2 |= verify_location (&blocks, loc);
5349 }
5350
5351 if (err2)
5352 debug_gimple_stmt (phi);
5353 err |= err2;
5354 }
5355
5356 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5357 {
5358 gimple *stmt = gsi_stmt (gsi);
5359 bool err2 = false;
5360 struct walk_stmt_info wi;
5361 tree addr;
5362 int lp_nr;
5363
5364 if (gimple_bb (stmt) != bb)
5365 {
5366 error ("gimple_bb (stmt) is set to a wrong basic block");
5367 err2 = true;
5368 }
5369
5370 err2 |= verify_gimple_stmt (stmt);
5371 err2 |= verify_location (&blocks, gimple_location (stmt));
5372
5373 memset (&wi, 0, sizeof (wi));
5374 wi.info = (void *) &visited;
5375 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5376 if (addr)
5377 {
5378 error ("incorrect sharing of tree nodes");
5379 debug_generic_expr (addr);
5380 err2 |= true;
5381 }
5382
5383 memset (&wi, 0, sizeof (wi));
5384 wi.info = (void *) &blocks;
5385 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5386 if (addr)
5387 {
5388 debug_generic_expr (addr);
5389 err2 |= true;
5390 }
5391
5392 /* If the statement is marked as part of an EH region, then it is
5393 expected that the statement could throw. Verify that when an
5394 optimization simplifies a statement such that we prove it
5395 cannot throw, we also update the other data structures
5396 to match. */
5397 lp_nr = lookup_stmt_eh_lp (stmt);
5398 if (lp_nr != 0)
5399 visited_throwing_stmts.add (stmt);
5400 if (lp_nr > 0)
5401 {
5402 if (!stmt_could_throw_p (cfun, stmt))
5403 {
5404 if (verify_nothrow)
5405 {
5406 error ("statement marked for throw, but doesn%'t");
5407 err2 |= true;
5408 }
5409 }
5410 else if (!gsi_one_before_end_p (gsi))
5411 {
5412 error ("statement marked for throw in middle of block");
5413 err2 |= true;
5414 }
5415 }
5416
5417 if (err2)
5418 debug_gimple_stmt (stmt);
5419 err |= err2;
5420 }
5421
5422 FOR_EACH_EDGE (e, ei, bb->succs)
5423 if (e->goto_locus != UNKNOWN_LOCATION)
5424 err |= verify_location (&blocks, e->goto_locus);
5425 }
5426
5427 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5428 eh_error_found = false;
5429 if (eh_table)
5430 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5431 (&visited_throwing_stmts);
5432
5433 if (err || eh_error_found)
5434 internal_error ("verify_gimple failed");
5435
5436 verify_histograms ();
5437 timevar_pop (TV_TREE_STMT_VERIFY);
5438 }
5439
5440
5441 /* Verifies that the flow information is OK. */
5442
5443 static int
5444 gimple_verify_flow_info (void)
5445 {
5446 int err = 0;
5447 basic_block bb;
5448 gimple_stmt_iterator gsi;
5449 gimple *stmt;
5450 edge e;
5451 edge_iterator ei;
5452
5453 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5454 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5455 {
5456 error ("ENTRY_BLOCK has IL associated with it");
5457 err = 1;
5458 }
5459
5460 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5461 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5462 {
5463 error ("EXIT_BLOCK has IL associated with it");
5464 err = 1;
5465 }
5466
5467 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5468 if (e->flags & EDGE_FALLTHRU)
5469 {
5470 error ("fallthru to exit from bb %d", e->src->index);
5471 err = 1;
5472 }
5473
5474 FOR_EACH_BB_FN (bb, cfun)
5475 {
5476 bool found_ctrl_stmt = false;
5477
5478 stmt = NULL;
5479
5480 /* Skip labels at the start of the basic block. */
5481 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5482 {
5483 tree label;
5484 gimple *prev_stmt = stmt;
5485
5486 stmt = gsi_stmt (gsi);
5487
5488 if (gimple_code (stmt) != GIMPLE_LABEL)
5489 break;
5490
5491 label = gimple_label_label (as_a <glabel *> (stmt));
5492 if (prev_stmt && DECL_NONLOCAL (label))
5493 {
5494 error ("nonlocal label ");
5495 print_generic_expr (stderr, label);
5496 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5497 bb->index);
5498 err = 1;
5499 }
5500
5501 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5502 {
5503 error ("EH landing pad label ");
5504 print_generic_expr (stderr, label);
5505 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5506 bb->index);
5507 err = 1;
5508 }
5509
5510 if (label_to_block (cfun, label) != bb)
5511 {
5512 error ("label ");
5513 print_generic_expr (stderr, label);
5514 fprintf (stderr, " to block does not match in bb %d",
5515 bb->index);
5516 err = 1;
5517 }
5518
5519 if (decl_function_context (label) != current_function_decl)
5520 {
5521 error ("label ");
5522 print_generic_expr (stderr, label);
5523 fprintf (stderr, " has incorrect context in bb %d",
5524 bb->index);
5525 err = 1;
5526 }
5527 }
5528
5529 /* Verify that the body of basic block BB is free of control flow. */
5530 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5531 {
5532 gimple *stmt = gsi_stmt (gsi);
5533
5534 if (found_ctrl_stmt)
5535 {
5536 error ("control flow in the middle of basic block %d",
5537 bb->index);
5538 err = 1;
5539 }
5540
5541 if (stmt_ends_bb_p (stmt))
5542 found_ctrl_stmt = true;
5543
5544 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5545 {
5546 error ("label ");
5547 print_generic_expr (stderr, gimple_label_label (label_stmt));
5548 fprintf (stderr, " in the middle of basic block %d", bb->index);
5549 err = 1;
5550 }
5551 }
5552
5553 gsi = gsi_last_nondebug_bb (bb);
5554 if (gsi_end_p (gsi))
5555 continue;
5556
5557 stmt = gsi_stmt (gsi);
5558
5559 if (gimple_code (stmt) == GIMPLE_LABEL)
5560 continue;
5561
5562 err |= verify_eh_edges (stmt);
5563
5564 if (is_ctrl_stmt (stmt))
5565 {
5566 FOR_EACH_EDGE (e, ei, bb->succs)
5567 if (e->flags & EDGE_FALLTHRU)
5568 {
5569 error ("fallthru edge after a control statement in bb %d",
5570 bb->index);
5571 err = 1;
5572 }
5573 }
5574
5575 if (gimple_code (stmt) != GIMPLE_COND)
5576 {
5577 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5578 after anything other than a GIMPLE_COND statement. */
5579 FOR_EACH_EDGE (e, ei, bb->succs)
5580 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5581 {
5582 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5583 bb->index);
5584 err = 1;
5585 }
5586 }
5587
5588 switch (gimple_code (stmt))
5589 {
5590 case GIMPLE_COND:
5591 {
5592 edge true_edge;
5593 edge false_edge;
5594
5595 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5596
5597 if (!true_edge
5598 || !false_edge
5599 || !(true_edge->flags & EDGE_TRUE_VALUE)
5600 || !(false_edge->flags & EDGE_FALSE_VALUE)
5601 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5602 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5603 || EDGE_COUNT (bb->succs) >= 3)
5604 {
5605 error ("wrong outgoing edge flags at end of bb %d",
5606 bb->index);
5607 err = 1;
5608 }
5609 }
5610 break;
5611
5612 case GIMPLE_GOTO:
5613 if (simple_goto_p (stmt))
5614 {
5615 error ("explicit goto at end of bb %d", bb->index);
5616 err = 1;
5617 }
5618 else
5619 {
5620 /* FIXME. We should double check that the labels in the
5621 destination blocks have their address taken. */
5622 FOR_EACH_EDGE (e, ei, bb->succs)
5623 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5624 | EDGE_FALSE_VALUE))
5625 || !(e->flags & EDGE_ABNORMAL))
5626 {
5627 error ("wrong outgoing edge flags at end of bb %d",
5628 bb->index);
5629 err = 1;
5630 }
5631 }
5632 break;
5633
5634 case GIMPLE_CALL:
5635 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5636 break;
5637 /* fallthru */
5638 case GIMPLE_RETURN:
5639 if (!single_succ_p (bb)
5640 || (single_succ_edge (bb)->flags
5641 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5642 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5643 {
5644 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5645 err = 1;
5646 }
5647 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5648 {
5649 error ("return edge does not point to exit in bb %d",
5650 bb->index);
5651 err = 1;
5652 }
5653 break;
5654
5655 case GIMPLE_SWITCH:
5656 {
5657 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5658 tree prev;
5659 edge e;
5660 size_t i, n;
5661
5662 n = gimple_switch_num_labels (switch_stmt);
5663
5664 /* Mark all the destination basic blocks. */
5665 for (i = 0; i < n; ++i)
5666 {
5667 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5668 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5669 label_bb->aux = (void *)1;
5670 }
5671
5672 /* Verify that the case labels are sorted. */
5673 prev = gimple_switch_label (switch_stmt, 0);
5674 for (i = 1; i < n; ++i)
5675 {
5676 tree c = gimple_switch_label (switch_stmt, i);
5677 if (!CASE_LOW (c))
5678 {
5679 error ("found default case not at the start of "
5680 "case vector");
5681 err = 1;
5682 continue;
5683 }
5684 if (CASE_LOW (prev)
5685 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5686 {
5687 error ("case labels not sorted: ");
5688 print_generic_expr (stderr, prev);
5689 fprintf (stderr," is greater than ");
5690 print_generic_expr (stderr, c);
5691 fprintf (stderr," but comes before it.\n");
5692 err = 1;
5693 }
5694 prev = c;
5695 }
5696 /* VRP will remove the default case if it can prove it will
5697 never be executed. So do not verify there always exists
5698 a default case here. */
5699
5700 FOR_EACH_EDGE (e, ei, bb->succs)
5701 {
5702 if (!e->dest->aux)
5703 {
5704 error ("extra outgoing edge %d->%d",
5705 bb->index, e->dest->index);
5706 err = 1;
5707 }
5708
5709 e->dest->aux = (void *)2;
5710 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5711 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5712 {
5713 error ("wrong outgoing edge flags at end of bb %d",
5714 bb->index);
5715 err = 1;
5716 }
5717 }
5718
5719 /* Check that we have all of them. */
5720 for (i = 0; i < n; ++i)
5721 {
5722 basic_block label_bb = gimple_switch_label_bb (cfun,
5723 switch_stmt, i);
5724
5725 if (label_bb->aux != (void *)2)
5726 {
5727 error ("missing edge %i->%i", bb->index, label_bb->index);
5728 err = 1;
5729 }
5730 }
5731
5732 FOR_EACH_EDGE (e, ei, bb->succs)
5733 e->dest->aux = (void *)0;
5734 }
5735 break;
5736
5737 case GIMPLE_EH_DISPATCH:
5738 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5739 break;
5740
5741 default:
5742 break;
5743 }
5744 }
5745
5746 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5747 verify_dominators (CDI_DOMINATORS);
5748
5749 return err;
5750 }
5751
5752
5753 /* Update PHI nodes after creating a forwarder block joined
5754 by edge FALLTHRU. */
5755
5756 static void
5757 gimple_make_forwarder_block (edge fallthru)
5758 {
5759 edge e;
5760 edge_iterator ei;
5761 basic_block dummy, bb;
5762 tree var;
5763 gphi_iterator gsi;
5764
5765 dummy = fallthru->src;
5766 bb = fallthru->dest;
5767
5768 if (single_pred_p (bb))
5769 return;
5770
5771 /* If we redirected a branch we must create new PHI nodes at the
5772 start of BB. */
5773 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5774 {
5775 gphi *phi, *new_phi;
5776
5777 phi = gsi.phi ();
5778 var = gimple_phi_result (phi);
5779 new_phi = create_phi_node (var, bb);
5780 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5781 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5782 UNKNOWN_LOCATION);
5783 }
5784
5785 /* Add the arguments we have stored on edges. */
5786 FOR_EACH_EDGE (e, ei, bb->preds)
5787 {
5788 if (e == fallthru)
5789 continue;
5790
5791 flush_pending_stmts (e);
5792 }
5793 }
5794
5795
5796 /* Return a non-special label at the head of basic block BB.
5797 Create one if it doesn't exist. */
5798
5799 tree
5800 gimple_block_label (basic_block bb)
5801 {
5802 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5803 bool first = true;
5804 tree label;
5805 glabel *stmt;
5806
5807 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5808 {
5809 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5810 if (!stmt)
5811 break;
5812 label = gimple_label_label (stmt);
5813 if (!DECL_NONLOCAL (label))
5814 {
5815 if (!first)
5816 gsi_move_before (&i, &s);
5817 return label;
5818 }
5819 }
5820
5821 label = create_artificial_label (UNKNOWN_LOCATION);
5822 stmt = gimple_build_label (label);
5823 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5824 return label;
5825 }
5826
5827
5828 /* Attempt to perform edge redirection by replacing a possibly complex
5829 jump instruction by a goto or by removing the jump completely.
5830 This can apply only if all edges now point to the same block. The
5831 parameters and return values are equivalent to
5832 redirect_edge_and_branch. */
5833
5834 static edge
5835 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5836 {
5837 basic_block src = e->src;
5838 gimple_stmt_iterator i;
5839 gimple *stmt;
5840
5841 /* We can replace or remove a complex jump only when we have exactly
5842 two edges. */
5843 if (EDGE_COUNT (src->succs) != 2
5844 /* Verify that all targets will be TARGET. Specifically, the
5845 edge that is not E must also go to TARGET. */
5846 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5847 return NULL;
5848
5849 i = gsi_last_bb (src);
5850 if (gsi_end_p (i))
5851 return NULL;
5852
5853 stmt = gsi_stmt (i);
5854
5855 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5856 {
5857 gsi_remove (&i, true);
5858 e = ssa_redirect_edge (e, target);
5859 e->flags = EDGE_FALLTHRU;
5860 return e;
5861 }
5862
5863 return NULL;
5864 }
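
/* For example, a degenerate condition such as

     if (x_1 > 0) goto <bb 3>; else goto <bb 3>;

   has both successors pointing at the same block, so the GIMPLE_COND
   is deleted and the surviving edge becomes a plain fallthru to bb 3
   (illustrative block and SSA numbers). */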
5865
5866
5867 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5868 edge representing the redirected branch. */
5869
5870 static edge
5871 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5872 {
5873 basic_block bb = e->src;
5874 gimple_stmt_iterator gsi;
5875 edge ret;
5876 gimple *stmt;
5877
5878 if (e->flags & EDGE_ABNORMAL)
5879 return NULL;
5880
5881 if (e->dest == dest)
5882 return NULL;
5883
5884 if (e->flags & EDGE_EH)
5885 return redirect_eh_edge (e, dest);
5886
5887 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5888 {
5889 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5890 if (ret)
5891 return ret;
5892 }
5893
5894 gsi = gsi_last_nondebug_bb (bb);
5895 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5896
5897 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5898 {
5899 case GIMPLE_COND:
5900 /* For GIMPLE_COND, we only need to redirect the edge. */
5901 break;
5902
5903 case GIMPLE_GOTO:
5904 /* No non-abnormal edges should lead from a non-simple goto, and
5905 simple ones should be represented implicitly. */
5906 gcc_unreachable ();
5907
5908 case GIMPLE_SWITCH:
5909 {
5910 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5911 tree label = gimple_block_label (dest);
5912 tree cases = get_cases_for_edge (e, switch_stmt);
5913
5914 /* If we have a list of cases associated with E, then use it
5915 as it's a lot faster than walking the entire case vector. */
5916 if (cases)
5917 {
5918 edge e2 = find_edge (e->src, dest);
5919 tree last, first;
5920
5921 first = cases;
5922 while (cases)
5923 {
5924 last = cases;
5925 CASE_LABEL (cases) = label;
5926 cases = CASE_CHAIN (cases);
5927 }
5928
5929 /* If there was already an edge in the CFG, then we need
5930 to move all the cases associated with E to E2. */
5931 if (e2)
5932 {
5933 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5934
5935 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5936 CASE_CHAIN (cases2) = first;
5937 }
5938 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5939 }
5940 else
5941 {
5942 size_t i, n = gimple_switch_num_labels (switch_stmt);
5943
5944 for (i = 0; i < n; i++)
5945 {
5946 tree elt = gimple_switch_label (switch_stmt, i);
5947 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5948 CASE_LABEL (elt) = label;
5949 }
5950 }
5951 }
5952 break;
5953
5954 case GIMPLE_ASM:
5955 {
5956 gasm *asm_stmt = as_a <gasm *> (stmt);
5957 int i, n = gimple_asm_nlabels (asm_stmt);
5958 tree label = NULL;
5959
5960 for (i = 0; i < n; ++i)
5961 {
5962 tree cons = gimple_asm_label_op (asm_stmt, i);
5963 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5964 {
5965 if (!label)
5966 label = gimple_block_label (dest);
5967 TREE_VALUE (cons) = label;
5968 }
5969 }
5970
5971 /* If we didn't find any label matching the former edge in the
5972 asm labels, we must be redirecting the fallthrough
5973 edge. */
5974 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5975 }
5976 break;
5977
5978 case GIMPLE_RETURN:
5979 gsi_remove (&gsi, true);
5980 e->flags |= EDGE_FALLTHRU;
5981 break;
5982
5983 case GIMPLE_OMP_RETURN:
5984 case GIMPLE_OMP_CONTINUE:
5985 case GIMPLE_OMP_SECTIONS_SWITCH:
5986 case GIMPLE_OMP_FOR:
5987 /* The edges from OMP constructs can be simply redirected. */
5988 break;
5989
5990 case GIMPLE_EH_DISPATCH:
5991 if (!(e->flags & EDGE_FALLTHRU))
5992 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5993 break;
5994
5995 case GIMPLE_TRANSACTION:
5996 if (e->flags & EDGE_TM_ABORT)
5997 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5998 gimple_block_label (dest));
5999 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6000 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6001 gimple_block_label (dest));
6002 else
6003 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6004 gimple_block_label (dest));
6005 break;
6006
6007 default:
6008 /* Otherwise it must be a fallthru edge, and we don't need to
6009 do anything besides redirecting it. */
6010 gcc_assert (e->flags & EDGE_FALLTHRU);
6011 break;
6012 }
6013
6014 /* Update/insert PHI nodes as necessary. */
6015
6016 /* Now update the edges in the CFG. */
6017 e = ssa_redirect_edge (e, dest);
6018
6019 return e;
6020 }
6021
6022 /* Returns true if it is possible to remove edge E by redirecting
6023 it to the destination of the other edge from E->src. */
6024
6025 static bool
6026 gimple_can_remove_branch_p (const_edge e)
6027 {
6028 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6029 return false;
6030
6031 return true;
6032 }
6033
6034 /* Simple wrapper, as we can always redirect fallthru edges. */
6035
6036 static basic_block
6037 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6038 {
6039 e = gimple_redirect_edge_and_branch (e, dest);
6040 gcc_assert (e);
6041
6042 return NULL;
6043 }
6044
6045
6046 /* Splits basic block BB after statement STMT (but at least after the
6047 labels). If STMT is NULL, BB is split just after the labels. */
6048
6049 static basic_block
6050 gimple_split_block (basic_block bb, void *stmt)
6051 {
6052 gimple_stmt_iterator gsi;
6053 gimple_stmt_iterator gsi_tgt;
6054 gimple_seq list;
6055 basic_block new_bb;
6056 edge e;
6057 edge_iterator ei;
6058
6059 new_bb = create_empty_bb (bb);
6060
6061 /* Redirect the outgoing edges. */
6062 new_bb->succs = bb->succs;
6063 bb->succs = NULL;
6064 FOR_EACH_EDGE (e, ei, new_bb->succs)
6065 e->src = new_bb;
6066
6067 /* Get a stmt iterator pointing to the first stmt to move. */
6068 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6069 gsi = gsi_after_labels (bb);
6070 else
6071 {
6072 gsi = gsi_for_stmt ((gimple *) stmt);
6073 gsi_next (&gsi);
6074 }
6075
6076 /* Move everything from GSI to the new basic block. */
6077 if (gsi_end_p (gsi))
6078 return new_bb;
6079
6080 /* Split the statement list - avoid re-creating new containers as this
6081 brings ugly quadratic memory consumption in the inliner.
6082 (We are still quadratic since we need to update stmt BB pointers,
6083 sadly.) */
6084 gsi_split_seq_before (&gsi, &list);
6085 set_bb_seq (new_bb, list);
6086 for (gsi_tgt = gsi_start (list);
6087 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6088 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6089
6090 return new_bb;
6091 }
6092
6093
6094 /* Moves basic block BB after block AFTER. */
6095
6096 static bool
6097 gimple_move_block_after (basic_block bb, basic_block after)
6098 {
6099 if (bb->prev_bb == after)
6100 return true;
6101
6102 unlink_block (bb);
6103 link_block (bb, after);
6104
6105 return true;
6106 }
6107
6108
6109 /* Return TRUE if block BB has no executable statements, otherwise return
6110 FALSE. */
6111
6112 static bool
6113 gimple_empty_block_p (basic_block bb)
6114 {
6115 /* BB must have no executable statements. */
6116 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6117 if (phi_nodes (bb))
6118 return false;
6119 while (!gsi_end_p (gsi))
6120 {
6121 gimple *stmt = gsi_stmt (gsi);
6122 if (is_gimple_debug (stmt))
6123 ;
6124 else if (gimple_code (stmt) == GIMPLE_NOP
6125 || gimple_code (stmt) == GIMPLE_PREDICT)
6126 ;
6127 else
6128 return false;
6129 gsi_next (&gsi);
6130 }
6131 return true;
6132 }
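
/* For instance, a block containing only

     <L1>:
     # DEBUG x => 42
     // predicted unlikely by ... predictor

   (a label, a debug bind and a GIMPLE_PREDICT, shown roughly in dump
   syntax) counts as empty, whereas any PHI node or real statement
   makes it non-empty. */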
6133
6134
6135 /* Split a basic block if it ends with a conditional branch and if the
6136 other part of the block is not empty. */
6137
6138 static basic_block
6139 gimple_split_block_before_cond_jump (basic_block bb)
6140 {
6141 gimple *last, *split_point;
6142 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6143 if (gsi_end_p (gsi))
6144 return NULL;
6145 last = gsi_stmt (gsi);
6146 if (gimple_code (last) != GIMPLE_COND
6147 && gimple_code (last) != GIMPLE_SWITCH)
6148 return NULL;
6149 gsi_prev (&gsi);
6150 split_point = gsi_stmt (gsi);
6151 return split_block (bb, split_point)->dest;
6152 }
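
/* Illustration with hypothetical block numbers: a block

     <bb 2>: a_1 = b_2 + 1; c_3 = a_1 * 2; if (c_3 > 0) ...

   is split just before the GIMPLE_COND, and the returned block holds
   only the branch:

     <bb 2>: a_1 = b_2 + 1; c_3 = a_1 * 2;
     <bb 4>: if (c_3 > 0) ...  */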
6153
6154
6155 /* Return true if basic block BB can be duplicated. */
6156
6157 static bool
6158 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6159 {
6160 return true;
6161 }
6162
6163 /* Create a duplicate of the basic block BB. NOTE: This does not
6164 preserve SSA form. */
6165
6166 static basic_block
6167 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6168 {
6169 basic_block new_bb;
6170 gimple_stmt_iterator gsi_tgt;
6171
6172 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6173
6174 /* Copy the PHI nodes. We ignore PHI node arguments here because
6175 the incoming edges have not been set up yet. */
6176 for (gphi_iterator gpi = gsi_start_phis (bb);
6177 !gsi_end_p (gpi);
6178 gsi_next (&gpi))
6179 {
6180 gphi *phi, *copy;
6181 phi = gpi.phi ();
6182 copy = create_phi_node (NULL_TREE, new_bb);
6183 create_new_def_for (gimple_phi_result (phi), copy,
6184 gimple_phi_result_ptr (copy));
6185 gimple_set_uid (copy, gimple_uid (phi));
6186 }
6187
6188 gsi_tgt = gsi_start_bb (new_bb);
6189 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6190 !gsi_end_p (gsi);
6191 gsi_next (&gsi))
6192 {
6193 def_operand_p def_p;
6194 ssa_op_iter op_iter;
6195 tree lhs;
6196 gimple *stmt, *copy;
6197
6198 stmt = gsi_stmt (gsi);
6199 if (gimple_code (stmt) == GIMPLE_LABEL)
6200 continue;
6201
6202 /* Don't duplicate label debug stmts. */
6203 if (gimple_debug_bind_p (stmt)
6204 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6205 == LABEL_DECL)
6206 continue;
6207
6208 /* Create a new copy of STMT and duplicate STMT's virtual
6209 operands. */
6210 copy = gimple_copy (stmt);
6211 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6212
6213 maybe_duplicate_eh_stmt (copy, stmt);
6214 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6215
6216 /* When copying around a stmt writing into a local non-user
6217 aggregate, make sure it won't share a stack slot with other
6218 vars. */
6219 lhs = gimple_get_lhs (stmt);
6220 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6221 {
6222 tree base = get_base_address (lhs);
6223 if (base
6224 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6225 && DECL_IGNORED_P (base)
6226 && !TREE_STATIC (base)
6227 && !DECL_EXTERNAL (base)
6228 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6229 DECL_NONSHAREABLE (base) = 1;
6230 }
6231
6232 /* If requested, remap dependence info of cliques brought in
6233 via inlining. */
6234 if (id)
6235 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6236 {
6237 tree op = gimple_op (copy, i);
6238 if (!op)
6239 continue;
6240 if (TREE_CODE (op) == ADDR_EXPR
6241 || TREE_CODE (op) == WITH_SIZE_EXPR)
6242 op = TREE_OPERAND (op, 0);
6243 while (handled_component_p (op))
6244 op = TREE_OPERAND (op, 0);
6245 if ((TREE_CODE (op) == MEM_REF
6246 || TREE_CODE (op) == TARGET_MEM_REF)
6247 && MR_DEPENDENCE_CLIQUE (op) > 1
6248 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6249 {
6250 if (!id->dependence_map)
6251 id->dependence_map = new hash_map<dependence_hash,
6252 unsigned short>;
6253 bool existed;
6254 unsigned short &newc = id->dependence_map->get_or_insert
6255 (MR_DEPENDENCE_CLIQUE (op), &existed);
6256 if (!existed)
6257 {
6258 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6259 newc = ++cfun->last_clique;
6260 }
6261 MR_DEPENDENCE_CLIQUE (op) = newc;
6262 }
6263 }
6264
6265 /* Create new names for all the definitions created by COPY and
6266 add replacement mappings for each new name. */
6267 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6268 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6269 }
6270
6271 return new_bb;
6272 }
6273
6274 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6275
6276 static void
6277 add_phi_args_after_copy_edge (edge e_copy)
6278 {
6279 basic_block bb, bb_copy = e_copy->src, dest;
6280 edge e;
6281 edge_iterator ei;
6282 gphi *phi, *phi_copy;
6283 tree def;
6284 gphi_iterator psi, psi_copy;
6285
6286 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6287 return;
6288
6289 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6290
6291 if (e_copy->dest->flags & BB_DUPLICATED)
6292 dest = get_bb_original (e_copy->dest);
6293 else
6294 dest = e_copy->dest;
6295
6296 e = find_edge (bb, dest);
6297 if (!e)
6298 {
6299 /* During loop unrolling the target of the latch edge is copied.
6300 In this case we are not looking for the edge to DEST, but for
6301 the edge to the duplicated block whose original was DEST. */
6302 FOR_EACH_EDGE (e, ei, bb->succs)
6303 {
6304 if ((e->dest->flags & BB_DUPLICATED)
6305 && get_bb_original (e->dest) == dest)
6306 break;
6307 }
6308
6309 gcc_assert (e != NULL);
6310 }
6311
6312 for (psi = gsi_start_phis (e->dest),
6313 psi_copy = gsi_start_phis (e_copy->dest);
6314 !gsi_end_p (psi);
6315 gsi_next (&psi), gsi_next (&psi_copy))
6316 {
6317 phi = psi.phi ();
6318 phi_copy = psi_copy.phi ();
6319 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6320 add_phi_arg (phi_copy, def, e_copy,
6321 gimple_phi_arg_location_from_edge (phi, e));
6322 }
6323 }
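
/* Illustration with hypothetical names: if the original edge E
   supplied

     x_1 = PHI <..., x_4(E), ...>

   in its destination, then the corresponding PHI in E_COPY's
   destination receives the same argument x_4 (with E's source
   location) for E_COPY. */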
6324
6325
6326 /* Basic block BB_COPY was created by code duplication. Add phi node
6327 arguments for edges going out of BB_COPY. The blocks that were
6328 duplicated have BB_DUPLICATED set. */
6329
6330 void
6331 add_phi_args_after_copy_bb (basic_block bb_copy)
6332 {
6333 edge e_copy;
6334 edge_iterator ei;
6335
6336 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6337 {
6338 add_phi_args_after_copy_edge (e_copy);
6339 }
6340 }
6341
6342 /* Blocks in REGION_COPY array of length N_REGION were created by
6343 duplication of basic blocks. Add phi node arguments for edges
6344 going from these blocks. If E_COPY is not NULL, also add
6345 phi node arguments for its destination. */
6346
6347 void
6348 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6349 edge e_copy)
6350 {
6351 unsigned i;
6352
6353 for (i = 0; i < n_region; i++)
6354 region_copy[i]->flags |= BB_DUPLICATED;
6355
6356 for (i = 0; i < n_region; i++)
6357 add_phi_args_after_copy_bb (region_copy[i]);
6358 if (e_copy)
6359 add_phi_args_after_copy_edge (e_copy);
6360
6361 for (i = 0; i < n_region; i++)
6362 region_copy[i]->flags &= ~BB_DUPLICATED;
6363 }
6364
6365 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6366 important exit edge EXIT. By important we mean that no SSA name defined
6367 inside the region is live over the other exit edges of the region. All entry
6368 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6369 to the duplicate of the region. Dominance and loop information is
6370 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6371 UPDATE_DOMINANCE is false then we assume that the caller will update the
6372 dominance information after calling this function. The new basic
6373 blocks are stored to REGION_COPY in the same order as they had in REGION,
6374 provided that REGION_COPY is not NULL.
6375 The function returns false if it is unable to copy the region,
6376 true otherwise. */
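
/* In the primary, loop-header-copying use the effect is (a sketch
   only; profitability is decided by the calling pass):

     while (cond)
       body;

   becomes

     if (cond)
       do
         body;
       while (cond);

   i.e. the region consisting of the header is duplicated, ENTRY is
   redirected to the copy, the copied exit test becomes the guard, and
   the original header turns into the latch test. */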
6377
6378 bool
6379 gimple_duplicate_sese_region (edge entry, edge exit,
6380 basic_block *region, unsigned n_region,
6381 basic_block *region_copy,
6382 bool update_dominance)
6383 {
6384 unsigned i;
6385 bool free_region_copy = false, copying_header = false;
6386 struct loop *loop = entry->dest->loop_father;
6387 edge exit_copy;
6388 vec<basic_block> doms = vNULL;
6389 edge redirected;
6390 profile_count total_count = profile_count::uninitialized ();
6391 profile_count entry_count = profile_count::uninitialized ();
6392
6393 if (!can_copy_bbs_p (region, n_region))
6394 return false;
6395
6396 /* Some sanity checking. Note that we do not check for all possible
6397 misuses of the functions. That is, if you ask to copy something weird,
6398 it will work, but the state of the structures probably will not be
6399 correct. */
6400 for (i = 0; i < n_region; i++)
6401 {
6402 /* We do not handle subloops, i.e. all the blocks must belong to the
6403 same loop. */
6404 if (region[i]->loop_father != loop)
6405 return false;
6406
6407 if (region[i] != entry->dest
6408 && region[i] == loop->header)
6409 return false;
6410 }
6411
6412 /* In case the function is used for loop header copying (which is the primary
6413 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6414 if (loop->header == entry->dest)
6415 {
6416 copying_header = true;
6417
6418 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6419 return false;
6420
6421 for (i = 0; i < n_region; i++)
6422 if (region[i] != exit->src
6423 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6424 return false;
6425 }
6426
6427 initialize_original_copy_tables ();
6428
6429 if (copying_header)
6430 set_loop_copy (loop, loop_outer (loop));
6431 else
6432 set_loop_copy (loop, loop);
6433
6434 if (!region_copy)
6435 {
6436 region_copy = XNEWVEC (basic_block, n_region);
6437 free_region_copy = true;
6438 }
6439
6440 /* Record blocks outside the region that are dominated by something
6441 inside. */
6442 if (update_dominance)
6443 {
6444 doms.create (0);
6445 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6446 }
6447
6448 if (entry->dest->count.initialized_p ())
6449 {
6450 total_count = entry->dest->count;
6451 entry_count = entry->count ();
6452 /* Fix up corner cases, to avoid division by zero or creation of negative
6453 frequencies. */
6454 if (entry_count > total_count)
6455 entry_count = total_count;
6456 }
6457
6458 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6459 split_edge_bb_loc (entry), update_dominance);
6460 if (total_count.initialized_p () && entry_count.initialized_p ())
6461 {
6462 scale_bbs_frequencies_profile_count (region, n_region,
6463 total_count - entry_count,
6464 total_count);
6465 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6466 total_count);
6467 }
6468
6469 if (copying_header)
6470 {
6471 loop->header = exit->dest;
6472 loop->latch = exit->src;
6473 }
6474
6475 /* Redirect the entry and add the phi node arguments. */
6476 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6477 gcc_assert (redirected != NULL);
6478 flush_pending_stmts (entry);
6479
6480 /* Concerning updating of dominators: We must recount dominators
6481 for entry block and its copy. Anything that is outside of the
6482 region, but was dominated by something inside needs recounting as
6483 well. */
6484 if (update_dominance)
6485 {
6486 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6487 doms.safe_push (get_bb_original (entry->dest));
6488 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6489 doms.release ();
6490 }
6491
6492 /* Add the other PHI node arguments. */
6493 add_phi_args_after_copy (region_copy, n_region, NULL);
6494
6495 if (free_region_copy)
6496 free (region_copy);
6497
6498 free_original_copy_tables ();
6499 return true;
6500 }
6501
6502 /* Checks if BB is part of the region defined by N_REGION BBS. */
6503 static bool
6504 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6505 {
6506 unsigned int n;
6507
6508 for (n = 0; n < n_region; n++)
6509 {
6510 if (bb == bbs[n])
6511 return true;
6512 }
6513 return false;
6514 }
6515
6516 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6517 are stored to REGION_COPY in the same order as they appear
6518 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6519 the region, EXIT an exit from it. The condition guarding EXIT
6520 is moved to ENTRY. Returns true if duplication succeeds, false
6521 otherwise.
6522
6523 For example,
6524
6525 some_code;
6526 if (cond)
6527 A;
6528 else
6529 B;
6530
6531 is transformed to
6532
6533 if (cond)
6534 {
6535 some_code;
6536 A;
6537 }
6538 else
6539 {
6540 some_code;
6541 B;
6542 }
6543 */
6544
6545 bool
6546 gimple_duplicate_sese_tail (edge entry, edge exit,
6547 basic_block *region, unsigned n_region,
6548 basic_block *region_copy)
6549 {
6550 unsigned i;
6551 bool free_region_copy = false;
6552 struct loop *loop = exit->dest->loop_father;
6553 struct loop *orig_loop = entry->dest->loop_father;
6554 basic_block switch_bb, entry_bb, nentry_bb;
6555 vec<basic_block> doms;
6556 profile_count total_count = profile_count::uninitialized (),
6557 exit_count = profile_count::uninitialized ();
6558 edge exits[2], nexits[2], e;
6559 gimple_stmt_iterator gsi;
6560 gimple *cond_stmt;
6561 edge sorig, snew;
6562 basic_block exit_bb;
6563 gphi_iterator psi;
6564 gphi *phi;
6565 tree def;
6566 struct loop *target, *aloop, *cloop;
6567
6568 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6569 exits[0] = exit;
6570 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6571
6572 if (!can_copy_bbs_p (region, n_region))
6573 return false;
6574
6575 initialize_original_copy_tables ();
6576 set_loop_copy (orig_loop, loop);
6577
6578 target = loop;
6579 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6580 {
6581 if (bb_part_of_region_p (aloop->header, region, n_region))
6582 {
6583 cloop = duplicate_loop (aloop, target);
6584 duplicate_subloops (aloop, cloop);
6585 }
6586 }
6587
6588 if (!region_copy)
6589 {
6590 region_copy = XNEWVEC (basic_block, n_region);
6591 free_region_copy = true;
6592 }
6593
6594 gcc_assert (!need_ssa_update_p (cfun));
6595
6596 /* Record blocks outside the region that are dominated by something
6597 inside. */
6598 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6599
6600 total_count = exit->src->count;
6601 exit_count = exit->count ();
6602 /* Fix up corner cases, to avoid division by zero or creation of negative
6603 frequencies. */
6604 if (exit_count > total_count)
6605 exit_count = total_count;
6606
6607 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6608 split_edge_bb_loc (exit), true);
6609 if (total_count.initialized_p () && exit_count.initialized_p ())
6610 {
6611 scale_bbs_frequencies_profile_count (region, n_region,
6612 total_count - exit_count,
6613 total_count);
6614 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6615 total_count);
6616 }
6617
6618 /* Create the switch block, and put the exit condition to it. */
6619 entry_bb = entry->dest;
6620 nentry_bb = get_bb_copy (entry_bb);
6621 if (!last_stmt (entry->src)
6622 || !stmt_ends_bb_p (last_stmt (entry->src)))
6623 switch_bb = entry->src;
6624 else
6625 switch_bb = split_edge (entry);
6626 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6627
6628 gsi = gsi_last_bb (switch_bb);
6629 cond_stmt = last_stmt (exit->src);
6630 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6631 cond_stmt = gimple_copy (cond_stmt);
6632
6633 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6634
6635 sorig = single_succ_edge (switch_bb);
6636 sorig->flags = exits[1]->flags;
6637 sorig->probability = exits[1]->probability;
6638 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6639 snew->probability = exits[0]->probability;
6640
6642 /* Register the new edge from SWITCH_BB in loop exit lists. */
6643 rescan_loop_exit (snew, true, false);
6644
6645 /* Add the PHI node arguments. */
6646 add_phi_args_after_copy (region_copy, n_region, snew);
6647
6648 /* Get rid of now superfluous conditions and associated edges (and phi node
6649 arguments). */
6650 exit_bb = exit->dest;
6651
6652 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6653 PENDING_STMT (e) = NULL;
6654
6655 /* The latch of ORIG_LOOP was copied, and so was the backedge
6656 to the original header. We redirect this backedge to EXIT_BB. */
6657 for (i = 0; i < n_region; i++)
6658 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6659 {
6660 gcc_assert (single_succ_edge (region_copy[i]));
6661 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6662 PENDING_STMT (e) = NULL;
6663 for (psi = gsi_start_phis (exit_bb);
6664 !gsi_end_p (psi);
6665 gsi_next (&psi))
6666 {
6667 phi = psi.phi ();
6668 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6669 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6670 }
6671 }
6672 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6673 PENDING_STMT (e) = NULL;
6674
6675 /* Anything that is outside of the region, but was dominated by something
6676 inside, needs its dominance info updated. */
6677 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6678 doms.release ();
6679 /* Update the SSA web. */
6680 update_ssa (TODO_update_ssa);
6681
6682 if (free_region_copy)
6683 free (region_copy);
6684
6685 free_original_copy_tables ();
6686 return true;
6687 }
6688
6689 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6690 adding blocks when the dominator traversal reaches EXIT. This
6691 function silently assumes that ENTRY strictly dominates EXIT. */
6692
6693 void
6694 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6695 vec<basic_block> *bbs_p)
6696 {
6697 basic_block son;
6698
6699 for (son = first_dom_son (CDI_DOMINATORS, entry);
6700 son;
6701 son = next_dom_son (CDI_DOMINATORS, son))
6702 {
6703 bbs_p->safe_push (son);
6704 if (son != exit)
6705 gather_blocks_in_sese_region (son, exit, bbs_p);
6706 }
6707 }
6708
6709 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6710 The duplicates are recorded in VARS_MAP. */
6711
6712 static void
6713 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6714 tree to_context)
6715 {
6716 tree t = *tp, new_t;
6717 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6718
6719 if (DECL_CONTEXT (t) == to_context)
6720 return;
6721
6722 bool existed;
6723 tree &loc = vars_map->get_or_insert (t, &existed);
6724
6725 if (!existed)
6726 {
6727 if (SSA_VAR_P (t))
6728 {
6729 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6730 add_local_decl (f, new_t);
6731 }
6732 else
6733 {
6734 gcc_assert (TREE_CODE (t) == CONST_DECL);
6735 new_t = copy_node (t);
6736 }
6737 DECL_CONTEXT (new_t) = to_context;
6738
6739 loc = new_t;
6740 }
6741 else
6742 new_t = loc;
6743
6744 *tp = new_t;
6745 }
6746
6747
6748 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6749 VARS_MAP maps old ssa names and var_decls to the new ones. */
6750
6751 static tree
6752 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6753 tree to_context)
6754 {
6755 tree new_name;
6756
6757 gcc_assert (!virtual_operand_p (name));
6758
6759 tree *loc = vars_map->get (name);
6760
6761 if (!loc)
6762 {
6763 tree decl = SSA_NAME_VAR (name);
6764 if (decl)
6765 {
6766 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6767 replace_by_duplicate_decl (&decl, vars_map, to_context);
6768 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6769 decl, SSA_NAME_DEF_STMT (name));
6770 }
6771 else
6772 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6773 name, SSA_NAME_DEF_STMT (name));
6774
6775 /* Now that we've used the def stmt to define new_name, make sure it
6776 doesn't define name anymore. */
6777 SSA_NAME_DEF_STMT (name) = NULL;
6778
6779 vars_map->put (name, new_name);
6780 }
6781 else
6782 new_name = *loc;
6783
6784 return new_name;
6785 }
6786
6787 struct move_stmt_d
6788 {
6789 tree orig_block;
6790 tree new_block;
6791 tree from_context;
6792 tree to_context;
6793 hash_map<tree, tree> *vars_map;
6794 htab_t new_label_map;
6795 hash_map<void *, void *> *eh_map;
6796 bool remap_decls_p;
6797 };
6798
6799 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6800 contained in *TP if it was previously ORIG_BLOCK, and change the
6801 DECL_CONTEXT of every local variable referenced in *TP. */
6802
6803 static tree
6804 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6805 {
6806 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6807 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6808 tree t = *tp;
6809
6810 if (EXPR_P (t))
6811 {
6812 tree block = TREE_BLOCK (t);
6813 if (block == NULL_TREE)
6814 ;
6815 else if (block == p->orig_block
6816 || p->orig_block == NULL_TREE)
6817 {
6818 /* tree_node_can_be_shared says we can share invariant
6819 addresses but unshare_expr copies them anyway. Make sure
6820 to unshare before adjusting the block in place - we do not
6821 always see a copy here. */
6822 if (TREE_CODE (t) == ADDR_EXPR
6823 && is_gimple_min_invariant (t))
6824 *tp = t = unshare_expr (t);
6825 TREE_SET_BLOCK (t, p->new_block);
6826 }
6827 else if (flag_checking)
6828 {
6829 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6830 block = BLOCK_SUPERCONTEXT (block);
6831 gcc_assert (block == p->orig_block);
6832 }
6833 }
6834 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6835 {
6836 if (TREE_CODE (t) == SSA_NAME)
6837 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6838 else if (TREE_CODE (t) == PARM_DECL
6839 && gimple_in_ssa_p (cfun))
6840 *tp = *(p->vars_map->get (t));
6841 else if (TREE_CODE (t) == LABEL_DECL)
6842 {
6843 if (p->new_label_map)
6844 {
6845 struct tree_map in, *out;
6846 in.base.from = t;
6847 out = (struct tree_map *)
6848 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6849 if (out)
6850 *tp = t = out->to;
6851 }
6852
6853 /* For FORCED_LABELs we can end up with references from other
6854 functions if some SESE regions are outlined. It is undefined
6855 behavior to jump between them, but they could be used just for
6856 printing addresses etc. In that case, DECL_CONTEXT on the label
6857 should be the function containing the glabel stmt with that
6858 LABEL_DECL, rather than whatever function in which a reference
6859 to the label was last seen. */
6860 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6861 DECL_CONTEXT (t) = p->to_context;
6862 }
6863 else if (p->remap_decls_p)
6864 {
6865 /* Replace T with its duplicate. T should no longer appear in the
6866 parent function, so this looks wasteful; however, it may appear
6867 in referenced_vars, and more importantly, as virtual operands of
6868 statements, and in alias lists of other variables. It would be
6869 quite difficult to expunge it from all those places. ??? It might
6870 suffice to do this for addressable variables. */
6871 if ((VAR_P (t) && !is_global_var (t))
6872 || TREE_CODE (t) == CONST_DECL)
6873 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6874 }
6875 *walk_subtrees = 0;
6876 }
6877 else if (TYPE_P (t))
6878 *walk_subtrees = 0;
6879
6880 return NULL_TREE;
6881 }
6882
6883 /* Helper for move_stmt_r. Given an EH region number for the source
6884 function, map that to the duplicate EH region number in the dest. */
6885
6886 static int
6887 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6888 {
6889 eh_region old_r, new_r;
6890
6891 old_r = get_eh_region_from_number (old_nr);
6892 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6893
6894 return new_r->index;
6895 }
6896
6897 /* Similar, but operate on INTEGER_CSTs. */
6898
6899 static tree
6900 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6901 {
6902 int old_nr, new_nr;
6903
6904 old_nr = tree_to_shwi (old_t_nr);
6905 new_nr = move_stmt_eh_region_nr (old_nr, p);
6906
6907 return build_int_cst (integer_type_node, new_nr);
6908 }
6909
6910 /* Like move_stmt_op, but for gimple statements.
6911
6912 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6913 contained in the current statement in *GSI_P and change the
6914 DECL_CONTEXT of every local variable referenced in the current
6915 statement. */
6916
6917 static tree
6918 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6919 struct walk_stmt_info *wi)
6920 {
6921 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6922 gimple *stmt = gsi_stmt (*gsi_p);
6923 tree block = gimple_block (stmt);
6924
6925 if (block == p->orig_block
6926 || (p->orig_block == NULL_TREE
6927 && block != NULL_TREE))
6928 gimple_set_block (stmt, p->new_block);
6929
6930 switch (gimple_code (stmt))
6931 {
6932 case GIMPLE_CALL:
6933 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6934 {
6935 tree r, fndecl = gimple_call_fndecl (stmt);
6936 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6937 switch (DECL_FUNCTION_CODE (fndecl))
6938 {
6939 case BUILT_IN_EH_COPY_VALUES:
6940 r = gimple_call_arg (stmt, 1);
6941 r = move_stmt_eh_region_tree_nr (r, p);
6942 gimple_call_set_arg (stmt, 1, r);
6943 /* FALLTHRU */
6944
6945 case BUILT_IN_EH_POINTER:
6946 case BUILT_IN_EH_FILTER:
6947 r = gimple_call_arg (stmt, 0);
6948 r = move_stmt_eh_region_tree_nr (r, p);
6949 gimple_call_set_arg (stmt, 0, r);
6950 break;
6951
6952 default:
6953 break;
6954 }
6955 }
6956 break;
6957
6958 case GIMPLE_RESX:
6959 {
6960 gresx *resx_stmt = as_a <gresx *> (stmt);
6961 int r = gimple_resx_region (resx_stmt);
6962 r = move_stmt_eh_region_nr (r, p);
6963 gimple_resx_set_region (resx_stmt, r);
6964 }
6965 break;
6966
6967 case GIMPLE_EH_DISPATCH:
6968 {
6969 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6970 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6971 r = move_stmt_eh_region_nr (r, p);
6972 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6973 }
6974 break;
6975
6976 case GIMPLE_OMP_RETURN:
6977 case GIMPLE_OMP_CONTINUE:
6978 break;
6979
6980 case GIMPLE_LABEL:
6981 {
6982 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6983 so that such labels can be referenced from other regions.
6984 Make sure to update it when seeing a GIMPLE_LABEL though,
6985 since that statement is the owner of the label. */
6986 walk_gimple_op (stmt, move_stmt_op, wi);
6987 *handled_ops_p = true;
6988 tree label = gimple_label_label (as_a <glabel *> (stmt));
6989 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6990 DECL_CONTEXT (label) = p->to_context;
6991 }
6992 break;
6993
6994 default:
6995 if (is_gimple_omp (stmt))
6996 {
6997 /* Do not remap variables inside OMP directives. Variables
6998 referenced in clauses and directive header belong to the
6999 parent function and should not be moved into the child
7000 function. */
7001 bool save_remap_decls_p = p->remap_decls_p;
7002 p->remap_decls_p = false;
7003 *handled_ops_p = true;
7004
7005 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7006 move_stmt_op, wi);
7007
7008 p->remap_decls_p = save_remap_decls_p;
7009 }
7010 break;
7011 }
7012
7013 return NULL_TREE;
7014 }
7015
7016 /* Move basic block BB from function CFUN to function DEST_FN. The
7017 block is moved out of the original linked list and placed after
7018 block AFTER in the new list. Also, the block is removed from the
7019 original array of blocks and placed in DEST_FN's array of blocks.
7020 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7021 updated to reflect the moved edges.
7022
7023 The local variables are remapped to new instances; VARS_MAP is used
7024 to record the mapping. */
7025
7026 static void
7027 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7028 basic_block after, bool update_edge_count_p,
7029 struct move_stmt_d *d)
7030 {
7031 struct control_flow_graph *cfg;
7032 edge_iterator ei;
7033 edge e;
7034 gimple_stmt_iterator si;
7035 unsigned old_len, new_len;
7036
7037 /* Remove BB from dominance structures. */
7038 delete_from_dominance_info (CDI_DOMINATORS, bb);
7039
7040 /* Move BB from its current loop to the copy in the new function. */
7041 if (current_loops)
7042 {
7043 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7044 if (new_loop)
7045 bb->loop_father = new_loop;
7046 }
7047
7048 /* Link BB to the new linked list. */
7049 move_block_after (bb, after);
7050
7051 /* Update the edge count in the corresponding flowgraphs. */
7052 if (update_edge_count_p)
7053 FOR_EACH_EDGE (e, ei, bb->succs)
7054 {
7055 cfun->cfg->x_n_edges--;
7056 dest_cfun->cfg->x_n_edges++;
7057 }
7058
7059 /* Remove BB from the original basic block array. */
7060 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7061 cfun->cfg->x_n_basic_blocks--;
7062
7063 /* Grow DEST_CFUN's basic block array if needed. */
7064 cfg = dest_cfun->cfg;
7065 cfg->x_n_basic_blocks++;
7066 if (bb->index >= cfg->x_last_basic_block)
7067 cfg->x_last_basic_block = bb->index + 1;
7068
7069 old_len = vec_safe_length (cfg->x_basic_block_info);
7070 if ((unsigned) cfg->x_last_basic_block >= old_len)
7071 {
7072 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7073 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7074 }
7075
7076 (*cfg->x_basic_block_info)[bb->index] = bb;
7077
7078 /* Remap the variables in phi nodes. */
7079 for (gphi_iterator psi = gsi_start_phis (bb);
7080 !gsi_end_p (psi); )
7081 {
7082 gphi *phi = psi.phi ();
7083 use_operand_p use;
7084 tree op = PHI_RESULT (phi);
7085 ssa_op_iter oi;
7086 unsigned i;
7087
7088 if (virtual_operand_p (op))
7089 {
7090 /* Remove the phi nodes for virtual operands (alias analysis will be
7091 run for the new function, anyway). */
7092 remove_phi_node (&psi, true);
7093 continue;
7094 }
7095
7096 SET_PHI_RESULT (phi,
7097 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7098 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7099 {
7100 op = USE_FROM_PTR (use);
7101 if (TREE_CODE (op) == SSA_NAME)
7102 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7103 }
7104
7105 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7106 {
7107 location_t locus = gimple_phi_arg_location (phi, i);
7108 tree block = LOCATION_BLOCK (locus);
7109
7110 if (locus == UNKNOWN_LOCATION)
7111 continue;
7112 if (d->orig_block == NULL_TREE || block == d->orig_block)
7113 {
7114 locus = set_block (locus, d->new_block);
7115 gimple_phi_arg_set_location (phi, i, locus);
7116 }
7117 }
7118
7119 gsi_next (&psi);
7120 }
7121
7122 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7123 {
7124 gimple *stmt = gsi_stmt (si);
7125 struct walk_stmt_info wi;
7126
7127 memset (&wi, 0, sizeof (wi));
7128 wi.info = d;
7129 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7130
7131 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7132 {
7133 tree label = gimple_label_label (label_stmt);
7134 int uid = LABEL_DECL_UID (label);
7135
7136 gcc_assert (uid > -1);
7137
7138 old_len = vec_safe_length (cfg->x_label_to_block_map);
7139 if (old_len <= (unsigned) uid)
7140 {
7141 new_len = 3 * uid / 2 + 1;
7142 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7143 }
7144
7145 (*cfg->x_label_to_block_map)[uid] = bb;
7146 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7147
7148 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7149
7150 if (uid >= dest_cfun->cfg->last_label_uid)
7151 dest_cfun->cfg->last_label_uid = uid + 1;
7152 }
7153
7154 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7155 remove_stmt_from_eh_lp_fn (cfun, stmt);
7156
7157 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7158 gimple_remove_stmt_histograms (cfun, stmt);
7159
7160 /* We cannot leave any operands allocated from the operand caches of
7161 the current function. */
7162 free_stmt_operands (cfun, stmt);
7163 push_cfun (dest_cfun);
7164 update_stmt (stmt);
7165 pop_cfun ();
7166 }
7167
7168 FOR_EACH_EDGE (e, ei, bb->succs)
7169 if (e->goto_locus != UNKNOWN_LOCATION)
7170 {
7171 tree block = LOCATION_BLOCK (e->goto_locus);
7172 if (d->orig_block == NULL_TREE
7173 || block == d->orig_block)
7174 e->goto_locus = set_block (e->goto_locus, d->new_block);
7175 }
7176 }
7177
7178 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7179 the outermost EH region. Use REGION as the incoming base EH region.
7180 If there is no single outermost region, return NULL and set *ALL to
7181 true. */
7182
7183 static eh_region
7184 find_outermost_region_in_block (struct function *src_cfun,
7185 basic_block bb, eh_region region,
7186 bool *all)
7187 {
7188 gimple_stmt_iterator si;
7189
7190 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7191 {
7192 gimple *stmt = gsi_stmt (si);
7193 eh_region stmt_region;
7194 int lp_nr;
7195
7196 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7197 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7198 if (stmt_region)
7199 {
7200 if (region == NULL)
7201 region = stmt_region;
7202 else if (stmt_region != region)
7203 {
7204 region = eh_region_outermost (src_cfun, stmt_region, region);
7205 if (region == NULL)
7206 {
7207 *all = true;
7208 return NULL;
7209 }
7210 }
7211 }
7212 }
7213
7214 return region;
7215 }
7216
7217 static tree
7218 new_label_mapper (tree decl, void *data)
7219 {
7220 htab_t hash = (htab_t) data;
7221 struct tree_map *m;
7222 void **slot;
7223
7224 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7225
7226 m = XNEW (struct tree_map);
7227 m->hash = DECL_UID (decl);
7228 m->base.from = decl;
7229 m->to = create_artificial_label (UNKNOWN_LOCATION);
7230 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7231 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7232 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7233
7234 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7235 gcc_assert (*slot == NULL);
7236
7237 *slot = m;
7238
7239 return m->to;
7240 }
7241
7242 /* Tree walker to replace the decls used inside value expressions by
7243 duplicates. */
7244
7245 static tree
7246 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7247 {
7248 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7249
7250 switch (TREE_CODE (*tp))
7251 {
7252 case VAR_DECL:
7253 case PARM_DECL:
7254 case RESULT_DECL:
7255 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7256 break;
7257 default:
7258 break;
7259 }
7260
7261 if (IS_TYPE_OR_DECL_P (*tp))
7262 *walk_subtrees = false;
7263
7264 return NULL;
7265 }
7266
7267 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
7268 subblocks. */
7269
7270 static void
7271 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7272 tree to_context)
7273 {
7274 tree *tp, t;
7275
7276 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7277 {
7278 t = *tp;
7279 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7280 continue;
7281 replace_by_duplicate_decl (&t, vars_map, to_context);
7282 if (t != *tp)
7283 {
7284 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7285 {
7286 tree x = DECL_VALUE_EXPR (*tp);
7287 struct replace_decls_d rd = { vars_map, to_context };
7288 x = unshare_expr (x);
7289 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7290 SET_DECL_VALUE_EXPR (t, x);
7291 DECL_HAS_VALUE_EXPR_P (t) = 1;
7292 }
7293 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7294 *tp = t;
7295 }
7296 }
7297
7298 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7299 replace_block_vars_by_duplicates (block, vars_map, to_context);
7300 }
7301
7302 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7303 from FN1 to FN2. */
7304
7305 static void
7306 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7307 struct loop *loop)
7308 {
7309 /* Discard it from the old loop array. */
7310 (*get_loops (fn1))[loop->num] = NULL;
7311
7312 /* Place it in the new loop array, assigning it a new number. */
7313 loop->num = number_of_loops (fn2);
7314 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7315
7316 /* Recurse to children. */
7317 for (loop = loop->inner; loop; loop = loop->next)
7318 fixup_loop_arrays_after_move (fn1, fn2, loop);
7319 }
7320
7321 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7322 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7323
7324 DEBUG_FUNCTION void
7325 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7326 {
7327 basic_block bb;
7328 edge_iterator ei;
7329 edge e;
7330 bitmap bbs = BITMAP_ALLOC (NULL);
7331 int i;
7332
7333 gcc_assert (entry != NULL);
7334 gcc_assert (entry != exit);
7335 gcc_assert (bbs_p != NULL);
7336
7337 gcc_assert (bbs_p->length () > 0);
7338
7339 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7340 bitmap_set_bit (bbs, bb->index);
7341
7342 gcc_assert (bitmap_bit_p (bbs, entry->index));
7343 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7344
7345 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7346 {
7347 if (bb == entry)
7348 {
7349 gcc_assert (single_pred_p (entry));
7350 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7351 }
7352 else
7353 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7354 {
7355 e = ei_edge (ei);
7356 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7357 }
7358
7359 if (bb == exit)
7360 {
7361 gcc_assert (single_succ_p (exit));
7362 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7363 }
7364 else
7365 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7366 {
7367 e = ei_edge (ei);
7368 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7369 }
7370 }
7371
7372 BITMAP_FREE (bbs);
7373 }
7374
7375 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7376
7377 bool
7378 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7379 {
7380 bitmap release_names = (bitmap)data;
7381
7382 if (TREE_CODE (from) != SSA_NAME)
7383 return true;
7384
7385 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7386 return true;
7387 }
7388
7389 /* Return the IFN_LOOP_DIST_ALIAS call if present in BB, else NULL. */
7390
7391 static gimple *
7392 find_loop_dist_alias (basic_block bb)
7393 {
7394 gimple *g = last_stmt (bb);
7395 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7396 return NULL;
7397
7398 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7399 gsi_prev (&gsi);
7400 if (gsi_end_p (gsi))
7401 return NULL;
7402
7403 g = gsi_stmt (gsi);
7404 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7405 return g;
7406 return NULL;
7407 }
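
/* Loop distribution emits the versioning guard roughly as
   (illustrative SSA names and operands):

     _1 = .LOOP_DIST_ALIAS (orig_loop_num, default_pred);
     if (_1 != 0)
       <versioned copy>

   so the internal call is expected to immediately precede the final
   GIMPLE_COND of BB. */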
7408
7409 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7410 to VALUE and update any immediate uses of its LHS. */
7411
7412 void
7413 fold_loop_internal_call (gimple *g, tree value)
7414 {
7415 tree lhs = gimple_call_lhs (g);
7416 use_operand_p use_p;
7417 imm_use_iterator iter;
7418 gimple *use_stmt;
7419 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7420
7421 update_call_from_tree (&gsi, value);
7422 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7423 {
7424 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7425 SET_USE (use_p, value);
7426 update_stmt (use_stmt);
7427 }
7428 }
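
/* E.g., once it is known that a distributed loop version will not be
   kept, the callers below do

     fold_loop_internal_call (g, gimple_call_arg (g, 1));

   folding the call to its second argument and rewriting every user of
   the LHS accordingly. */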
7429
7430 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7431 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7432 single basic block in the original CFG and the new basic block is
7433 returned. DEST_CFUN must not have a CFG yet.
7434
7435 Note that the region need not be a pure SESE region. Blocks inside
7436 the region may contain calls to abort/exit. The only restriction
7437 is that ENTRY_BB should be the only entry point and it must
7438 dominate EXIT_BB.
7439
7440 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7441 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7442 to the new function.
7443
7444 All local variables referenced in the region are assumed to be in
7445 the corresponding BLOCK_VARS and unexpanded variable lists
7446 associated with DEST_CFUN.
7447
7448 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7449 reimplement move_sese_region_to_fn by duplicating the region rather than
7450 moving it. */
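
/* A rough picture of the intended use (e.g. outlining the body of an
   OMP parallel region into a child function):

     parent: ... -> ENTRY_BB -> region -> EXIT_BB -> ...

   becomes

     parent: ... -> <new empty bb> -> ...
     child:  ENTRY_BLOCK -> ENTRY_BB -> region -> EXIT_BB -> EXIT_BLOCK

   with labels, SSA names, EH regions and the loop tree remapped into
   DEST_CFUN along the way. */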
7451
7452 basic_block
7453 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7454 basic_block exit_bb, tree orig_block)
7455 {
7456 vec<basic_block> bbs, dom_bbs;
7457 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7458 basic_block after, bb, *entry_pred, *exit_succ, abb;
7459 struct function *saved_cfun = cfun;
7460 int *entry_flag, *exit_flag;
7461 profile_probability *entry_prob, *exit_prob;
7462 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7463 edge e;
7464 edge_iterator ei;
7465 htab_t new_label_map;
7466 hash_map<void *, void *> *eh_map;
7467 struct loop *loop = entry_bb->loop_father;
7468 struct loop *loop0 = get_loop (saved_cfun, 0);
7469 struct move_stmt_d d;
7470
7471 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7472 region. */
7473 gcc_assert (entry_bb != exit_bb
7474 && (!exit_bb
7475 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7476
7477 /* Collect all the blocks in the region. Manually add ENTRY_BB
7478 because gather_blocks_in_sese_region does not add it itself. */
7479 bbs.create (0);
7480 bbs.safe_push (entry_bb);
7481 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7482
7483 if (flag_checking)
7484 verify_sese (entry_bb, exit_bb, &bbs);
7485
7486 /* The blocks that used to be dominated by something in BBS will now be
7487 dominated by the new block. */
7488 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7489 bbs.address (),
7490 bbs.length ());
7491
7492 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7493 the predecessor edges to ENTRY_BB and the successor edges to
7494 EXIT_BB so that we can re-attach them to the new basic block that
7495 will replace the region. */
7496 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7497 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7498 entry_flag = XNEWVEC (int, num_entry_edges);
7499 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7500 i = 0;
7501 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7502 {
7503 entry_prob[i] = e->probability;
7504 entry_flag[i] = e->flags;
7505 entry_pred[i++] = e->src;
7506 remove_edge (e);
7507 }
7508
7509 if (exit_bb)
7510 {
7511 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7512 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7513 exit_flag = XNEWVEC (int, num_exit_edges);
7514 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7515 i = 0;
7516 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7517 {
7518 exit_prob[i] = e->probability;
7519 exit_flag[i] = e->flags;
7520 exit_succ[i++] = e->dest;
7521 remove_edge (e);
7522 }
7523 }
7524 else
7525 {
7526 num_exit_edges = 0;
7527 exit_succ = NULL;
7528 exit_flag = NULL;
7529 exit_prob = NULL;
7530 }
7531
7532 /* Switch context to the child function to initialize DEST_FN's CFG. */
7533 gcc_assert (dest_cfun->cfg == NULL);
7534 push_cfun (dest_cfun);
7535
7536 init_empty_tree_cfg ();
7537
7538 /* Initialize EH information for the new function. */
7539 eh_map = NULL;
7540 new_label_map = NULL;
7541 if (saved_cfun->eh)
7542 {
7543 eh_region region = NULL;
7544 bool all = false;
7545
7546 FOR_EACH_VEC_ELT (bbs, i, bb)
7547 {
7548 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7549 if (all)
7550 break;
7551 }
7552
7553 init_eh_for_function ();
7554 if (region != NULL || all)
7555 {
7556 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7557 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7558 new_label_mapper, new_label_map);
7559 }
7560 }
7561
7562 /* Initialize an empty loop tree. */
7563 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7564 init_loops_structure (dest_cfun, loops, 1);
7565 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7566 set_loops_for_fn (dest_cfun, loops);
7567
7568 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7569
7570 /* Move the outlined loop tree part. */
7571 num_nodes = bbs.length ();
7572 FOR_EACH_VEC_ELT (bbs, i, bb)
7573 {
7574 if (bb->loop_father->header == bb)
7575 {
7576 struct loop *this_loop = bb->loop_father;
7577 struct loop *outer = loop_outer (this_loop);
7578 if (outer == loop
7579 /* If the SESE region contains some bbs ending with
7580 a noreturn call, those are considered to belong
7581 to the outermost loop in saved_cfun, rather than
7582 the entry_bb's loop_father. */
7583 || outer == loop0)
7584 {
7585 if (outer != loop)
7586 num_nodes -= this_loop->num_nodes;
7587 flow_loop_tree_node_remove (bb->loop_father);
7588 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7589 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7590 }
7591 }
7592 else if (bb->loop_father == loop0 && loop0 != loop)
7593 num_nodes--;
7594
7595 /* Remove loop exits from the outlined region. */
7596 if (loops_for_fn (saved_cfun)->exits)
7597 FOR_EACH_EDGE (e, ei, bb->succs)
7598 {
7599 struct loops *l = loops_for_fn (saved_cfun);
7600 loop_exit **slot
7601 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7602 NO_INSERT);
7603 if (slot)
7604 l->exits->clear_slot (slot);
7605 }
7606 }
7607
7608 /* Adjust the number of blocks in the tree root of the outlined part. */
7609 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7610
7611 /* Setup a mapping to be used by move_block_to_fn. */
7612 loop->aux = current_loops->tree_root;
7613 loop0->aux = current_loops->tree_root;
7614
7615 /* Fix up orig_loop_num. If the loop referenced by it has been moved
7616 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7617 struct loop *dloop;
7618 signed char *moved_orig_loop_num = NULL;
7619 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7620 if (dloop->orig_loop_num)
7621 {
7622 if (moved_orig_loop_num == NULL)
7623 moved_orig_loop_num
7624 = XCNEWVEC (signed char, vec_safe_length (larray));
7625 if ((*larray)[dloop->orig_loop_num] != NULL
7626 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7627 {
7628 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7629 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7630 moved_orig_loop_num[dloop->orig_loop_num]++;
7631 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7632 }
7633 else
7634 {
7635 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7636 dloop->orig_loop_num = 0;
7637 }
7638 }
7639 pop_cfun ();
7640
7641 if (moved_orig_loop_num)
7642 {
7643 FOR_EACH_VEC_ELT (bbs, i, bb)
7644 {
7645 gimple *g = find_loop_dist_alias (bb);
7646 if (g == NULL)
7647 continue;
7648
7649 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7650 gcc_assert (orig_loop_num
7651 && (unsigned) orig_loop_num < vec_safe_length (larray));
7652 if (moved_orig_loop_num[orig_loop_num] == 2)
7653 {
7654 /* If we have moved both loops with this orig_loop_num into
7655 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7656 too, update the first argument. */
7657 gcc_assert ((*larray)[orig_loop_num] != NULL
7658 && (get_loop (saved_cfun, orig_loop_num)
7659 == NULL));
7660 tree t = build_int_cst (integer_type_node,
7661 (*larray)[orig_loop_num]->num);
7662 gimple_call_set_arg (g, 0, t);
7663 update_stmt (g);
7664 /* Make sure the following loop will not update it. */
7665 moved_orig_loop_num[orig_loop_num] = 0;
7666 }
7667 else
7668 /* Otherwise at least one of the loops stayed in saved_cfun.
7669 Remove the LOOP_DIST_ALIAS call. */
7670 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7671 }
7672 FOR_EACH_BB_FN (bb, saved_cfun)
7673 {
7674 gimple *g = find_loop_dist_alias (bb);
7675 if (g == NULL)
7676 continue;
7677 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7678 gcc_assert (orig_loop_num
7679 && (unsigned) orig_loop_num < vec_safe_length (larray));
7680 if (moved_orig_loop_num[orig_loop_num])
7681 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7682 of the corresponding loops was moved, remove it. */
7683 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7684 }
7685 XDELETEVEC (moved_orig_loop_num);
7686 }
7687 ggc_free (larray);
7688
7689 /* Move blocks from BBS into DEST_CFUN. */
7690 gcc_assert (bbs.length () >= 2);
7691 after = dest_cfun->cfg->x_entry_block_ptr;
7692 hash_map<tree, tree> vars_map;
7693
7694 memset (&d, 0, sizeof (d));
7695 d.orig_block = orig_block;
7696 d.new_block = DECL_INITIAL (dest_cfun->decl);
7697 d.from_context = cfun->decl;
7698 d.to_context = dest_cfun->decl;
7699 d.vars_map = &vars_map;
7700 d.new_label_map = new_label_map;
7701 d.eh_map = eh_map;
7702 d.remap_decls_p = true;
7703
7704 if (gimple_in_ssa_p (cfun))
7705 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7706 {
7707 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7708 set_ssa_default_def (dest_cfun, arg, narg);
7709 vars_map.put (arg, narg);
7710 }
7711
7712 FOR_EACH_VEC_ELT (bbs, i, bb)
7713 {
7714 /* No need to update edge counts on the last block. It has
7715 already been updated earlier when we detached the region from
7716 the original CFG. */
7717 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7718 after = bb;
7719 }
7720
7721 loop->aux = NULL;
7722 loop0->aux = NULL;
7723 /* Loop sizes are no longer correct, fix them up. */
7724 loop->num_nodes -= num_nodes;
7725 for (struct loop *outer = loop_outer (loop);
7726 outer; outer = loop_outer (outer))
7727 outer->num_nodes -= num_nodes;
7728 loop0->num_nodes -= bbs.length () - num_nodes;
7729
7730 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7731 {
7732 struct loop *aloop;
7733 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7734 if (aloop != NULL)
7735 {
7736 if (aloop->simduid)
7737 {
7738 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7739 d.to_context);
7740 dest_cfun->has_simduid_loops = true;
7741 }
7742 if (aloop->force_vectorize)
7743 dest_cfun->has_force_vectorize_loops = true;
7744 }
7745 }
7746
7747 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7748 if (orig_block)
7749 {
7750 tree block;
7751 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7752 == NULL_TREE);
7753 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7754 = BLOCK_SUBBLOCKS (orig_block);
7755 for (block = BLOCK_SUBBLOCKS (orig_block);
7756 block; block = BLOCK_CHAIN (block))
7757 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7758 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7759 }
7760
7761 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7762 &vars_map, dest_cfun->decl);
7763
7764 if (new_label_map)
7765 htab_delete (new_label_map);
7766 if (eh_map)
7767 delete eh_map;
7768
7769 if (gimple_in_ssa_p (cfun))
7770 {
7771 /* We need to release ssa-names in a defined order, so first find them,
7772 and then iterate in ascending version order. */
7773 bitmap release_names = BITMAP_ALLOC (NULL);
7774 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7775 bitmap_iterator bi;
7776 unsigned i;
7777 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7778 release_ssa_name (ssa_name (i));
7779 BITMAP_FREE (release_names);
7780 }
7781
7782 /* Rewire the entry and exit blocks. The successor to the entry
7783 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7784 the child function. Similarly, the predecessor of DEST_FN's
7785 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7786 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7787 various CFG manipulation functions get to the right CFG.
7788
7789 FIXME, this is silly. The CFG ought to become a parameter to
7790 these helpers. */
7791 push_cfun (dest_cfun);
7792 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7793 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7794 if (exit_bb)
7795 {
7796 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7797 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7798 }
7799 else
7800 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7801 pop_cfun ();
7802
7803 /* Back in the original function, the SESE region has disappeared,
7804 create a new basic block in its place. */
7805 bb = create_empty_bb (entry_pred[0]);
7806 if (current_loops)
7807 add_bb_to_loop (bb, loop);
7808 for (i = 0; i < num_entry_edges; i++)
7809 {
7810 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7811 e->probability = entry_prob[i];
7812 }
7813
7814 for (i = 0; i < num_exit_edges; i++)
7815 {
7816 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7817 e->probability = exit_prob[i];
7818 }
7819
7820 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7821 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7822 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7823 dom_bbs.release ();
7824
7825 if (exit_bb)
7826 {
7827 free (exit_prob);
7828 free (exit_flag);
7829 free (exit_succ);
7830 }
7831 free (entry_prob);
7832 free (entry_flag);
7833 free (entry_pred);
7834 bbs.release ();
7835
7836 return bb;
7837 }
7838
7839 /* Dump default def DEF to file FILE using FLAGS and indentation
7840 SPC. */
7841
7842 static void
7843 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7844 {
7845 for (int i = 0; i < spc; ++i)
7846 fprintf (file, " ");
7847 dump_ssaname_info_to_file (file, def, spc);
7848
7849 print_generic_expr (file, TREE_TYPE (def), flags);
7850 fprintf (file, " ");
7851 print_generic_expr (file, def, flags);
7852 fprintf (file, " = ");
7853 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7854 fprintf (file, ";\n");
7855 }
7856
7857 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7858
7859 static void
7860 print_no_sanitize_attr_value (FILE *file, tree value)
7861 {
7862 unsigned int flags = tree_to_uhwi (value);
7863 bool first = true;
7864 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7865 {
7866 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7867 {
7868 if (!first)
7869 fprintf (file, " | ");
7870 fprintf (file, "%s", sanitizer_opts[i].name);
7871 first = false;
7872 }
7873 }
7874 }
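
/* Editorial note: the loop above prints every sanitizer_opts entry whose
   flag bits are fully covered by FLAGS, joined with " | ".  Umbrella
   options match too, so a mask built from e.g.
   __attribute__ ((no_sanitize ("undefined"))) prints the "undefined"
   entry together with each of its sub-options.  */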
7875
7876 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7877 dumpfile.h). */
7878
7879 void
7880 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7881 {
7882 tree arg, var, old_current_fndecl = current_function_decl;
7883 struct function *dsf;
7884 bool ignore_topmost_bind = false, any_var = false;
7885 basic_block bb;
7886 tree chain;
7887 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7888 && decl_is_tm_clone (fndecl));
7889 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7890
7891 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7892 {
7893 fprintf (file, "__attribute__((");
7894
7895 bool first = true;
7896 tree chain;
7897 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7898 first = false, chain = TREE_CHAIN (chain))
7899 {
7900 if (!first)
7901 fprintf (file, ", ");
7902
7903 tree name = get_attribute_name (chain);
7904 print_generic_expr (file, name, dump_flags);
7905 if (TREE_VALUE (chain) != NULL_TREE)
7906 {
7907 fprintf (file, " (");
7908
7909 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7910 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7911 else
7912 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7913 fprintf (file, ")");
7914 }
7915 }
7916
7917 fprintf (file, "))\n");
7918 }
7919
7920 current_function_decl = fndecl;
7921 if (flags & TDF_GIMPLE)
7922 {
7923 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7924 dump_flags | TDF_SLIM);
7925 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7926 }
7927 else
7928 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7929
7930 arg = DECL_ARGUMENTS (fndecl);
7931 while (arg)
7932 {
7933 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7934 fprintf (file, " ");
7935 print_generic_expr (file, arg, dump_flags);
7936 if (DECL_CHAIN (arg))
7937 fprintf (file, ", ");
7938 arg = DECL_CHAIN (arg);
7939 }
7940 fprintf (file, ")\n");
7941
7942 dsf = DECL_STRUCT_FUNCTION (fndecl);
7943 if (dsf && (flags & TDF_EH))
7944 dump_eh_tree (file, dsf);
7945
7946 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7947 {
7948 dump_node (fndecl, TDF_SLIM | flags, file);
7949 current_function_decl = old_current_fndecl;
7950 return;
7951 }
7952
7953 /* When GIMPLE is lowered, the variables are no longer available in
7954 BIND_EXPRs, so display them separately. */
7955 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7956 {
7957 unsigned ix;
7958 ignore_topmost_bind = true;
7959
7960 fprintf (file, "{\n");
7961 if (gimple_in_ssa_p (fun)
7962 && (flags & TDF_ALIAS))
7963 {
7964 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7965 arg = DECL_CHAIN (arg))
7966 {
7967 tree def = ssa_default_def (fun, arg);
7968 if (def)
7969 dump_default_def (file, def, 2, flags);
7970 }
7971
7972 tree res = DECL_RESULT (fun->decl);
7973 if (res != NULL_TREE
7974 && DECL_BY_REFERENCE (res))
7975 {
7976 tree def = ssa_default_def (fun, res);
7977 if (def)
7978 dump_default_def (file, def, 2, flags);
7979 }
7980
7981 tree static_chain = fun->static_chain_decl;
7982 if (static_chain != NULL_TREE)
7983 {
7984 tree def = ssa_default_def (fun, static_chain);
7985 if (def)
7986 dump_default_def (file, def, 2, flags);
7987 }
7988 }
7989
7990 if (!vec_safe_is_empty (fun->local_decls))
7991 FOR_EACH_LOCAL_DECL (fun, ix, var)
7992 {
7993 print_generic_decl (file, var, flags);
7994 fprintf (file, "\n");
7995
7996 any_var = true;
7997 }
7998
7999 tree name;
8000
8001 if (gimple_in_ssa_p (cfun))
8002 FOR_EACH_SSA_NAME (ix, name, cfun)
8003 {
8004 if (!SSA_NAME_VAR (name))
8005 {
8006 fprintf (file, " ");
8007 print_generic_expr (file, TREE_TYPE (name), flags);
8008 fprintf (file, " ");
8009 print_generic_expr (file, name, flags);
8010 fprintf (file, ";\n");
8011
8012 any_var = true;
8013 }
8014 }
8015 }
8016
8017 if (fun && fun->decl == fndecl
8018 && fun->cfg
8019 && basic_block_info_for_fn (fun))
8020 {
8021 /* If the CFG has been built, emit a CFG-based dump. */
8022 if (!ignore_topmost_bind)
8023 fprintf (file, "{\n");
8024
8025 if (any_var && n_basic_blocks_for_fn (fun))
8026 fprintf (file, "\n");
8027
8028 FOR_EACH_BB_FN (bb, fun)
8029 dump_bb (file, bb, 2, flags);
8030
8031 fprintf (file, "}\n");
8032 }
8033 else if (fun->curr_properties & PROP_gimple_any)
8034 {
8035 /* The function is now in GIMPLE form but the CFG has not been
8036 built yet. Emit the single sequence of GIMPLE statements
8037 that make up its body. */
8038 gimple_seq body = gimple_body (fndecl);
8039
8040 if (gimple_seq_first_stmt (body)
8041 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8042 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8043 print_gimple_seq (file, body, 0, flags);
8044 else
8045 {
8046 if (!ignore_topmost_bind)
8047 fprintf (file, "{\n");
8048
8049 if (any_var)
8050 fprintf (file, "\n");
8051
8052 print_gimple_seq (file, body, 2, flags);
8053 fprintf (file, "}\n");
8054 }
8055 }
8056 else
8057 {
8058 int indent;
8059
8060 /* Make a tree based dump. */
8061 chain = DECL_SAVED_TREE (fndecl);
8062 if (chain && TREE_CODE (chain) == BIND_EXPR)
8063 {
8064 if (ignore_topmost_bind)
8065 {
8066 chain = BIND_EXPR_BODY (chain);
8067 indent = 2;
8068 }
8069 else
8070 indent = 0;
8071 }
8072 else
8073 {
8074 if (!ignore_topmost_bind)
8075 {
8076 fprintf (file, "{\n");
8077 /* No topmost bind, pretend it's ignored for later. */
8078 ignore_topmost_bind = true;
8079 }
8080 indent = 2;
8081 }
8082
8083 if (any_var)
8084 fprintf (file, "\n");
8085
8086 print_generic_stmt_indented (file, chain, flags, indent);
8087 if (ignore_topmost_bind)
8088 fprintf (file, "}\n");
8089 }
8090
8091 if (flags & TDF_ENUMERATE_LOCALS)
8092 dump_enumerated_decls (file, flags);
8093 fprintf (file, "\n\n");
8094
8095 current_function_decl = old_current_fndecl;
8096 }
8097
8098 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8099
8100 DEBUG_FUNCTION void
8101 debug_function (tree fn, dump_flags_t flags)
8102 {
8103 dump_function_to_file (fn, stderr, flags);
8104 }
8105
8106
8107 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8108
8109 static void
8110 print_pred_bbs (FILE *file, basic_block bb)
8111 {
8112 edge e;
8113 edge_iterator ei;
8114
8115 FOR_EACH_EDGE (e, ei, bb->preds)
8116 fprintf (file, "bb_%d ", e->src->index);
8117 }
8118
8119
8120 /* Print on FILE the indexes for the successors of basic_block BB. */
8121
8122 static void
8123 print_succ_bbs (FILE *file, basic_block bb)
8124 {
8125 edge e;
8126 edge_iterator ei;
8127
8128 FOR_EACH_EDGE (e, ei, bb->succs)
8129 fprintf (file, "bb_%d ", e->dest->index);
8130 }
8131
8132 /* Print to FILE the basic block BB, indented INDENT spaces, according to the VERBOSITY level. */
8133
8134 void
8135 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8136 {
8137 char *s_indent = (char *) alloca ((size_t) indent + 1);
8138 memset ((void *) s_indent, ' ', (size_t) indent);
8139 s_indent[indent] = '\0';
8140
8141 /* Print basic_block's header. */
8142 if (verbosity >= 2)
8143 {
8144 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8145 print_pred_bbs (file, bb);
8146 fprintf (file, "}, succs = {");
8147 print_succ_bbs (file, bb);
8148 fprintf (file, "})\n");
8149 }
8150
8151 /* Print basic_block's body. */
8152 if (verbosity >= 3)
8153 {
8154 fprintf (file, "%s {\n", s_indent);
8155 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8156 fprintf (file, "%s }\n", s_indent);
8157 }
8158 }
8159
8160 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8161
8162 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8163 VERBOSITY level this outputs the contents of the loop, or just its
8164 structure. */
8165
8166 static void
8167 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8168 {
8169 char *s_indent;
8170 basic_block bb;
8171
8172 if (loop == NULL)
8173 return;
8174
8175 s_indent = (char *) alloca ((size_t) indent + 1);
8176 memset ((void *) s_indent, ' ', (size_t) indent);
8177 s_indent[indent] = '\0';
8178
8179 /* Print loop's header. */
8180 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8181 if (loop->header)
8182 fprintf (file, "header = %d", loop->header->index);
8183 else
8184 {
8185 fprintf (file, "deleted)\n");
8186 return;
8187 }
8188 if (loop->latch)
8189 fprintf (file, ", latch = %d", loop->latch->index);
8190 else
8191 fprintf (file, ", multiple latches");
8192 fprintf (file, ", niter = ");
8193 print_generic_expr (file, loop->nb_iterations);
8194
8195 if (loop->any_upper_bound)
8196 {
8197 fprintf (file, ", upper_bound = ");
8198 print_decu (loop->nb_iterations_upper_bound, file);
8199 }
8200 if (loop->any_likely_upper_bound)
8201 {
8202 fprintf (file, ", likely_upper_bound = ");
8203 print_decu (loop->nb_iterations_likely_upper_bound, file);
8204 }
8205
8206 if (loop->any_estimate)
8207 {
8208 fprintf (file, ", estimate = ");
8209 print_decu (loop->nb_iterations_estimate, file);
8210 }
8211 if (loop->unroll)
8212 fprintf (file, ", unroll = %d", loop->unroll);
8213 fprintf (file, ")\n");
8214
8215 /* Print loop's body. */
8216 if (verbosity >= 1)
8217 {
8218 fprintf (file, "%s{\n", s_indent);
8219 FOR_EACH_BB_FN (bb, cfun)
8220 if (bb->loop_father == loop)
8221 print_loops_bb (file, bb, indent, verbosity);
8222
8223 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8224 fprintf (file, "%s}\n", s_indent);
8225 }
8226 }
8227
8228 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8229 spaces. Depending on the VERBOSITY level, this outputs the contents
8230 of the loop or just its structure. */
8231
8232 static void
8233 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8234 int verbosity)
8235 {
8236 if (loop == NULL)
8237 return;
8238
8239 print_loop (file, loop, indent, verbosity);
8240 print_loop_and_siblings (file, loop->next, indent, verbosity);
8241 }
8242
8243 /* Follow a CFG edge from the entry point of the function, and on entry
8244 of a loop, pretty print the loop structure on FILE. */
8245
8246 void
8247 print_loops (FILE *file, int verbosity)
8248 {
8249 basic_block bb;
8250
8251 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8252 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8253 if (bb && bb->loop_father)
8254 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8255 }
8256
8257 /* Dump a loop. */
8258
8259 DEBUG_FUNCTION void
8260 debug (struct loop &ref)
8261 {
8262 print_loop (stderr, &ref, 0, /*verbosity*/0);
8263 }
8264
8265 DEBUG_FUNCTION void
8266 debug (struct loop *ptr)
8267 {
8268 if (ptr)
8269 debug (*ptr);
8270 else
8271 fprintf (stderr, "<nil>\n");
8272 }
8273
8274 /* Dump a loop verbosely. */
8275
8276 DEBUG_FUNCTION void
8277 debug_verbose (struct loop &ref)
8278 {
8279 print_loop (stderr, &ref, 0, /*verbosity*/3);
8280 }
8281
8282 DEBUG_FUNCTION void
8283 debug_verbose (struct loop *ptr)
8284 {
8285 if (ptr)
8286 debug_verbose (*ptr);
8287 else
8288 fprintf (stderr, "<nil>\n");
8289 }
8290
8291
8292 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8293
8294 DEBUG_FUNCTION void
8295 debug_loops (int verbosity)
8296 {
8297 print_loops (stderr, verbosity);
8298 }
8299
8300 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8301
8302 DEBUG_FUNCTION void
8303 debug_loop (struct loop *loop, int verbosity)
8304 {
8305 print_loop (stderr, loop, 0, verbosity);
8306 }
8307
8308 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8309 level. */
8310
8311 DEBUG_FUNCTION void
8312 debug_loop_num (unsigned num, int verbosity)
8313 {
8314 debug_loop (get_loop (cfun, num), verbosity);
8315 }
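
/* Example (editorial note, not in the original source): these entry points
   are intended to be called from the debugger while cc1 is stopped inside
   a function pass, e.g.

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)

   which pretty-print the whole loop tree, or just loop number 1, at the
   given verbosity.  */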
8316
8317 /* Return true if BB ends with a call, possibly followed by some
8318 instructions that must stay with the call. Return false
8319 otherwise. */
8320
8321 static bool
8322 gimple_block_ends_with_call_p (basic_block bb)
8323 {
8324 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8325 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8326 }
8327
8328
8329 /* Return true if BB ends with a conditional branch. Return false
8330 otherwise. */
8331
8332 static bool
8333 gimple_block_ends_with_condjump_p (const_basic_block bb)
8334 {
8335 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8336 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8337 }
8338
8339
8340 /* Return true if statement T may terminate execution of BB in ways not
8341 explicitly represented in the CFG. */
8342
8343 bool
8344 stmt_can_terminate_bb_p (gimple *t)
8345 {
8346 tree fndecl = NULL_TREE;
8347 int call_flags = 0;
8348
8349 /* An EH exception not handled internally terminates execution of the whole
8350 function. */
8351 if (stmt_can_throw_external (cfun, t))
8352 return true;
8353
8354 /* NORETURN and LONGJMP calls already have an edge to exit.
8355 CONST and PURE calls do not need one.
8356 We don't currently check for CONST and PURE here, although
8357 it would be a good idea, because those attributes are
8358 figured out from the RTL in mark_constant_function, and
8359 the counter incrementation code from -fprofile-arcs
8360 leads to different results from -fbranch-probabilities. */
8361 if (is_gimple_call (t))
8362 {
8363 fndecl = gimple_call_fndecl (t);
8364 call_flags = gimple_call_flags (t);
8365 }
8366
8367 if (is_gimple_call (t)
8368 && fndecl
8369 && fndecl_built_in_p (fndecl)
8370 && (call_flags & ECF_NOTHROW)
8371 && !(call_flags & ECF_RETURNS_TWICE)
8372 /* fork() doesn't really return twice, but the effect of
8373 wrapping it in __gcov_fork() which calls __gcov_flush()
8374 and clears the counters before forking has the same
8375 effect as returning twice. Force a fake edge. */
8376 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8377 return false;
8378
8379 if (is_gimple_call (t))
8380 {
8381 edge_iterator ei;
8382 edge e;
8383 basic_block bb;
8384
8385 if (call_flags & (ECF_PURE | ECF_CONST)
8386 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8387 return false;
8388
8389 /* A function call may do a longjmp, terminate the program or other things.
8390 Special-case noreturn calls that have non-abnormal edges out, as in that
8391 case the fact is sufficiently represented by the lack of edges out of T. */
8392 if (!(call_flags & ECF_NORETURN))
8393 return true;
8394
8395 bb = gimple_bb (t);
8396 FOR_EACH_EDGE (e, ei, bb->succs)
8397 if ((e->flags & EDGE_FAKE) == 0)
8398 return true;
8399 }
8400
8401 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8402 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8403 return true;
8404
8405 return false;
8406 }
8407
8408
8409 /* Add fake edges to the function exit for any non-constant and
8410 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8411 volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
8412 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8413 that were split.
8414
8415 The goal is to expose cases in which entering a basic block does
8416 not imply that all subsequent instructions must be executed. */
8417
8418 static int
8419 gimple_flow_call_edges_add (sbitmap blocks)
8420 {
8421 int i;
8422 int blocks_split = 0;
8423 int last_bb = last_basic_block_for_fn (cfun);
8424 bool check_last_block = false;
8425
8426 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8427 return 0;
8428
8429 if (! blocks)
8430 check_last_block = true;
8431 else
8432 check_last_block = bitmap_bit_p (blocks,
8433 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8434
8435 /* In the last basic block, before epilogue generation, there will be
8436 a fallthru edge to EXIT. Special care is required if the last insn
8437 of the last basic block is a call because make_edge folds duplicate
8438 edges, which would result in the fallthru edge also being marked
8439 fake, which would result in the fallthru edge being removed by
8440 remove_fake_edges, which would result in an invalid CFG.
8441
8442 Moreover, we can't elide the outgoing fake edge, since the block
8443 profiler needs to take this into account in order to solve the minimal
8444 spanning tree in the case that the call doesn't return.
8445
8446 Handle this by adding a dummy instruction in a new last basic block. */
8447 if (check_last_block)
8448 {
8449 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8450 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8451 gimple *t = NULL;
8452
8453 if (!gsi_end_p (gsi))
8454 t = gsi_stmt (gsi);
8455
8456 if (t && stmt_can_terminate_bb_p (t))
8457 {
8458 edge e;
8459
8460 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8461 if (e)
8462 {
8463 gsi_insert_on_edge (e, gimple_build_nop ());
8464 gsi_commit_edge_inserts ();
8465 }
8466 }
8467 }
8468
8469 /* Now add fake edges to the function exit for any non-constant
8470 calls, since there is no way to determine whether they will
8471 return or not... */
8472 for (i = 0; i < last_bb; i++)
8473 {
8474 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8475 gimple_stmt_iterator gsi;
8476 gimple *stmt, *last_stmt;
8477
8478 if (!bb)
8479 continue;
8480
8481 if (blocks && !bitmap_bit_p (blocks, i))
8482 continue;
8483
8484 gsi = gsi_last_nondebug_bb (bb);
8485 if (!gsi_end_p (gsi))
8486 {
8487 last_stmt = gsi_stmt (gsi);
8488 do
8489 {
8490 stmt = gsi_stmt (gsi);
8491 if (stmt_can_terminate_bb_p (stmt))
8492 {
8493 edge e;
8494
8495 /* The handling above of the final block before the
8496 epilogue should be enough to verify that there is
8497 no edge to the exit block in CFG already.
8498 Calling make_edge in such case would cause us to
8499 mark that edge as fake and remove it later. */
8500 if (flag_checking && stmt == last_stmt)
8501 {
8502 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8503 gcc_assert (e == NULL);
8504 }
8505
8506 /* Note that the following may create a new basic block
8507 and renumber the existing basic blocks. */
8508 if (stmt != last_stmt)
8509 {
8510 e = split_block (bb, stmt);
8511 if (e)
8512 blocks_split++;
8513 }
8514 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8515 e->probability = profile_probability::guessed_never ();
8516 }
8517 gsi_prev (&gsi);
8518 }
8519 while (!gsi_end_p (gsi));
8520 }
8521 }
8522
8523 if (blocks_split)
8524 checking_verify_flow_info ();
8525
8526 return blocks_split;
8527 }
8528
8529 /* Removes edge E and all the blocks dominated by it, and updates dominance
8530 information. The IL in E->src needs to be updated separately.
8531 If dominance info is not available, only the edge E is removed. */
8532
8533 void
8534 remove_edge_and_dominated_blocks (edge e)
8535 {
8536 vec<basic_block> bbs_to_remove = vNULL;
8537 vec<basic_block> bbs_to_fix_dom = vNULL;
8538 edge f;
8539 edge_iterator ei;
8540 bool none_removed = false;
8541 unsigned i;
8542 basic_block bb, dbb;
8543 bitmap_iterator bi;
8544
8545 /* If we are removing a path inside a non-root loop, that may change
8546 loop ownership of blocks or remove loops. Mark loops for fixup. */
8547 if (current_loops
8548 && loop_outer (e->src->loop_father) != NULL
8549 && e->src->loop_father == e->dest->loop_father)
8550 loops_state_set (LOOPS_NEED_FIXUP);
8551
8552 if (!dom_info_available_p (CDI_DOMINATORS))
8553 {
8554 remove_edge (e);
8555 return;
8556 }
8557
8558 /* No updating is needed for edges to exit. */
8559 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8560 {
8561 if (cfgcleanup_altered_bbs)
8562 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8563 remove_edge (e);
8564 return;
8565 }
8566
8567 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8568 that is not dominated by E->dest, then this set is empty. Otherwise,
8569 all the basic blocks dominated by E->dest are removed.
8570
8571 Also, to DF_IDOM we store the immediate dominators of the blocks in
8572 the dominance frontier of E (i.e., of the successors of the
8573 removed blocks, if there are any, and of E->dest otherwise). */
8574 FOR_EACH_EDGE (f, ei, e->dest->preds)
8575 {
8576 if (f == e)
8577 continue;
8578
8579 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8580 {
8581 none_removed = true;
8582 break;
8583 }
8584 }
8585
8586 auto_bitmap df, df_idom;
8587 if (none_removed)
8588 bitmap_set_bit (df_idom,
8589 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8590 else
8591 {
8592 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8593 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8594 {
8595 FOR_EACH_EDGE (f, ei, bb->succs)
8596 {
8597 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8598 bitmap_set_bit (df, f->dest->index);
8599 }
8600 }
8601 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8602 bitmap_clear_bit (df, bb->index);
8603
8604 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8605 {
8606 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8607 bitmap_set_bit (df_idom,
8608 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8609 }
8610 }
8611
8612 if (cfgcleanup_altered_bbs)
8613 {
8614 /* Record the set of the altered basic blocks. */
8615 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8616 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8617 }
8618
8619 /* Remove E and the cancelled blocks. */
8620 if (none_removed)
8621 remove_edge (e);
8622 else
8623 {
8624 /* Walk backwards so as to get a chance to substitute all
8625 released DEFs into debug stmts. See
8626 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8627 details. */
8628 for (i = bbs_to_remove.length (); i-- > 0; )
8629 delete_basic_block (bbs_to_remove[i]);
8630 }
8631
8632 /* Update the dominance information. The immediate dominator may change only
8633 for blocks whose immediate dominator belongs to DF_IDOM:
8634
8635 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8636 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8637 Z dominates X after the removal. Before removal, there exists a path P
8638 from Y to X that avoids Z. Let F be the last edge on P that is
8639 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8640 dominates W, and because of P, Z does not dominate W), and W belongs to
8641 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8642 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8643 {
8644 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8645 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8646 dbb;
8647 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8648 bbs_to_fix_dom.safe_push (dbb);
8649 }
8650
8651 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8652
8653 bbs_to_remove.release ();
8654 bbs_to_fix_dom.release ();
8655 }
8656
8657 /* Purge dead EH edges from basic block BB. */
8658
8659 bool
8660 gimple_purge_dead_eh_edges (basic_block bb)
8661 {
8662 bool changed = false;
8663 edge e;
8664 edge_iterator ei;
8665 gimple *stmt = last_stmt (bb);
8666
8667 if (stmt && stmt_can_throw_internal (cfun, stmt))
8668 return false;
8669
8670 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8671 {
8672 if (e->flags & EDGE_EH)
8673 {
8674 remove_edge_and_dominated_blocks (e);
8675 changed = true;
8676 }
8677 else
8678 ei_next (&ei);
8679 }
8680
8681 return changed;
8682 }
8683
8684 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8685
8686 bool
8687 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8688 {
8689 bool changed = false;
8690 unsigned i;
8691 bitmap_iterator bi;
8692
8693 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8694 {
8695 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8696
8697 /* Earlier gimple_purge_dead_eh_edges could have removed
8698 this basic block already. */
8699 gcc_assert (bb || changed);
8700 if (bb != NULL)
8701 changed |= gimple_purge_dead_eh_edges (bb);
8702 }
8703
8704 return changed;
8705 }
8706
8707 /* Purge dead abnormal call edges from basic block BB. */
8708
8709 bool
8710 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8711 {
8712 bool changed = false;
8713 edge e;
8714 edge_iterator ei;
8715 gimple *stmt = last_stmt (bb);
8716
8717 if (!cfun->has_nonlocal_label
8718 && !cfun->calls_setjmp)
8719 return false;
8720
8721 if (stmt && stmt_can_make_abnormal_goto (stmt))
8722 return false;
8723
8724 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8725 {
8726 if (e->flags & EDGE_ABNORMAL)
8727 {
8728 if (e->flags & EDGE_FALLTHRU)
8729 e->flags &= ~EDGE_ABNORMAL;
8730 else
8731 remove_edge_and_dominated_blocks (e);
8732 changed = true;
8733 }
8734 else
8735 ei_next (&ei);
8736 }
8737
8738 return changed;
8739 }
8740
8741 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8742
8743 bool
8744 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8745 {
8746 bool changed = false;
8747 unsigned i;
8748 bitmap_iterator bi;
8749
8750 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8751 {
8752 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8753
8754 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8755 this basic block already. */
8756 gcc_assert (bb || changed);
8757 if (bb != NULL)
8758 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8759 }
8760
8761 return changed;
8762 }
8763
8764 /* This function is called whenever a new edge is created or
8765 redirected. */
8766
8767 static void
8768 gimple_execute_on_growing_pred (edge e)
8769 {
8770 basic_block bb = e->dest;
8771
8772 if (!gimple_seq_empty_p (phi_nodes (bb)))
8773 reserve_phi_args_for_new_edge (bb);
8774 }
8775
8776 /* This function is called immediately before edge E is removed from
8777 the edge vector E->dest->preds. */
8778
8779 static void
8780 gimple_execute_on_shrinking_pred (edge e)
8781 {
8782 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8783 remove_phi_args (e);
8784 }
8785
8786 /*---------------------------------------------------------------------------
8787 Helper functions for Loop versioning
8788 ---------------------------------------------------------------------------*/
8789
8790 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8791 of 'first'. Both of them are dominated by 'new_head' basic block. When
8792 'new_head' was created by splitting 'second's incoming edge, it received
8793 phi arguments on that edge from split_edge (). Later, an additional edge
8794 'e' was created to connect 'new_head' and 'first'. Now this routine adds,
8795 on edge 'e', the phi args that the edge from 'new_head' to 'second'
8796 received as part of the edge splitting. */
8797
8798 static void
8799 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8800 basic_block new_head, edge e)
8801 {
8802 gphi *phi1, *phi2;
8803 gphi_iterator psi1, psi2;
8804 tree def;
8805 edge e2 = find_edge (new_head, second);
8806
8807 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8808 edge, we should always have an edge from NEW_HEAD to SECOND. */
8809 gcc_assert (e2 != NULL);
8810
8811 /* Browse all 'second' basic block phi nodes and add phi args to
8812 edge 'e' for 'first' head. PHI args are always in correct order. */
8813
8814 for (psi2 = gsi_start_phis (second),
8815 psi1 = gsi_start_phis (first);
8816 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8817 gsi_next (&psi2), gsi_next (&psi1))
8818 {
8819 phi1 = psi1.phi ();
8820 phi2 = psi2.phi ();
8821 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8822 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8823 }
8824 }
8825
8826
8827 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8828 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8829 the destination of the ELSE part. */
8830
8831 static void
8832 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8833 basic_block second_head ATTRIBUTE_UNUSED,
8834 basic_block cond_bb, void *cond_e)
8835 {
8836 gimple_stmt_iterator gsi;
8837 gimple *new_cond_expr;
8838 tree cond_expr = (tree) cond_e;
8839 edge e0;
8840
8841 /* Build new conditional expr */
8842 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8843 NULL_TREE, NULL_TREE);
8844
8845 /* Add new cond in cond_bb. */
8846 gsi = gsi_last_bb (cond_bb);
8847 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8848
8849 /* Adjust edges appropriately to connect new head with first head
8850 as well as second head. */
8851 e0 = single_succ_edge (cond_bb);
8852 e0->flags &= ~EDGE_FALLTHRU;
8853 e0->flags |= EDGE_FALSE_VALUE;
8854 }
8855
8856
8857 /* Do book-keeping of basic block BB for the profile consistency checker.
8858 Store the counts in RECORD. */
8859 static void
8860 gimple_account_profile_record (basic_block bb,
8861 struct profile_record *record)
8862 {
8863 gimple_stmt_iterator i;
8864 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8865 {
8866 record->size
8867 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8868 if (bb->count.initialized_p ())
8869 record->time
8870 += estimate_num_insns (gsi_stmt (i),
8871 &eni_time_weights) * bb->count.to_gcov_type ();
8872 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8873 record->time
8874 += estimate_num_insns (gsi_stmt (i),
8875 &eni_time_weights) * bb->count.to_frequency (cfun);
8876 }
8877 }
8878
8879 struct cfg_hooks gimple_cfg_hooks = {
8880 "gimple",
8881 gimple_verify_flow_info,
8882 gimple_dump_bb, /* dump_bb */
8883 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8884 create_bb, /* create_basic_block */
8885 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8886 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8887 gimple_can_remove_branch_p, /* can_remove_branch_p */
8888 remove_bb, /* delete_basic_block */
8889 gimple_split_block, /* split_block */
8890 gimple_move_block_after, /* move_block_after */
8891 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8892 gimple_merge_blocks, /* merge_blocks */
8893 gimple_predict_edge, /* predict_edge */
8894 gimple_predicted_by_p, /* predicted_by_p */
8895 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8896 gimple_duplicate_bb, /* duplicate_block */
8897 gimple_split_edge, /* split_edge */
8898 gimple_make_forwarder_block, /* make_forwarder_block */
8899 NULL, /* tidy_fallthru_edge */
8900 NULL, /* force_nonfallthru */
8901 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8902 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8903 gimple_flow_call_edges_add, /* flow_call_edges_add */
8904 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8905 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8906 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8907 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8908 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8909 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8910 flush_pending_stmts, /* flush_pending_stmts */
8911 gimple_empty_block_p, /* block_empty_p */
8912 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8913 gimple_account_profile_record,
8914 };
8915
8916
8917 /* Split all critical edges. */
8918
8919 unsigned int
8920 split_critical_edges (void)
8921 {
8922 basic_block bb;
8923 edge e;
8924 edge_iterator ei;
8925
8926 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8927 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8928 mappings around the calls to split_edge. */
8929 start_recording_case_labels ();
8930 FOR_ALL_BB_FN (bb, cfun)
8931 {
8932 FOR_EACH_EDGE (e, ei, bb->succs)
8933 {
8934 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8935 split_edge (e);
8936 /* PRE inserts statements on edges and expects that
8937 since split_critical_edges was done beforehand, committing edge
8938 insertions will not split more edges. In addition to critical
8939 edges we must split edges whose source block has multiple successors
8940 and ends with a control flow statement, such as RESX.
8941 Go ahead and split them too. This matches the logic in
8942 gimple_find_edge_insert_loc. */
8943 else if ((!single_pred_p (e->dest)
8944 || !gimple_seq_empty_p (phi_nodes (e->dest))
8945 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8946 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8947 && !(e->flags & EDGE_ABNORMAL))
8948 {
8949 gimple_stmt_iterator gsi;
8950
8951 gsi = gsi_last_bb (e->src);
8952 if (!gsi_end_p (gsi)
8953 && stmt_ends_bb_p (gsi_stmt (gsi))
8954 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8955 && !gimple_call_builtin_p (gsi_stmt (gsi),
8956 BUILT_IN_RETURN)))
8957 split_edge (e);
8958 }
8959 }
8960 }
8961 end_recording_case_labels ();
8962 return 0;
8963 }
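
/* Editorial sketch (not part of the original file): EDGE_CRITICAL_P from
   basic-block.h is the predicate tested above.  A hand-rolled equivalent,
   for illustration only:  */

static bool ATTRIBUTE_UNUSED
is_critical_edge_sketch (edge e)
{
  /* An edge is critical when its source has multiple successors and its
     destination has multiple predecessors; statements cannot be inserted
     on such an edge without splitting it first.  */
  return EDGE_COUNT (e->src->succs) >= 2 && EDGE_COUNT (e->dest->preds) >= 2;
}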
8964
8965 namespace {
8966
8967 const pass_data pass_data_split_crit_edges =
8968 {
8969 GIMPLE_PASS, /* type */
8970 "crited", /* name */
8971 OPTGROUP_NONE, /* optinfo_flags */
8972 TV_TREE_SPLIT_EDGES, /* tv_id */
8973 PROP_cfg, /* properties_required */
8974 PROP_no_crit_edges, /* properties_provided */
8975 0, /* properties_destroyed */
8976 0, /* todo_flags_start */
8977 0, /* todo_flags_finish */
8978 };
8979
8980 class pass_split_crit_edges : public gimple_opt_pass
8981 {
8982 public:
8983 pass_split_crit_edges (gcc::context *ctxt)
8984 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8985 {}
8986
8987 /* opt_pass methods: */
8988 virtual unsigned int execute (function *) { return split_critical_edges (); }
8989
8990 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8991 }; // class pass_split_crit_edges
8992
8993 } // anon namespace
8994
8995 gimple_opt_pass *
8996 make_pass_split_crit_edges (gcc::context *ctxt)
8997 {
8998 return new pass_split_crit_edges (ctxt);
8999 }
9000
9001
9002 /* Insert COND, which must be a GIMPLE_COND, after STMT in basic block
9003 BB, splitting the block as appropriate and creating a new
9004 conditionally executed basic block.
9005 Update the profile so the new bb is visited with probability PROB.
9006 Return the created basic block. */
9007 basic_block
9008 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9009 profile_probability prob)
9010 {
9011 edge fall = split_block (bb, stmt);
9012 gimple_stmt_iterator iter = gsi_last_bb (bb);
9013 basic_block new_bb;
9014
9015 /* Insert cond statement. */
9016 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9017 if (gsi_end_p (iter))
9018 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9019 else
9020 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9021
9022 /* Create conditionally executed block. */
9023 new_bb = create_empty_bb (bb);
9024 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9025 e->probability = prob;
9026 new_bb->count = e->count ();
9027 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9028
9029 /* Fix edge for split bb. */
9030 fall->flags = EDGE_FALSE_VALUE;
9031 fall->probability -= e->probability;
9032
9033 /* Update dominance info. */
9034 if (dom_info_available_p (CDI_DOMINATORS))
9035 {
9036 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9037 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9038 }
9039
9040 /* Update loop info. */
9041 if (current_loops)
9042 add_bb_to_loop (new_bb, bb->loop_father);
9043
9044 return new_bb;
9045 }
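
/* Editorial usage sketch (hypothetical helper, not part of the original
   file): conditionally execute NEW_STMT after AFTER in BB when VAL is
   nonzero, using insert_cond_bb above.  The even probability is an
   arbitrary placeholder.  */

static void ATTRIBUTE_UNUSED
guard_stmt_sketch (basic_block bb, gimple *after, gimple *new_stmt, tree val)
{
  /* Build 'if (val != 0)' ...  */
  gcond *cond = gimple_build_cond (NE_EXPR, val,
                                   build_zero_cst (TREE_TYPE (val)),
                                   NULL_TREE, NULL_TREE);
  basic_block then_bb = insert_cond_bb (bb, after, cond,
                                        profile_probability::even ());
  /* ... and append NEW_STMT to the conditionally executed block.  */
  gimple_stmt_iterator gsi = gsi_start_bb (then_bb);
  gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
}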
9046
9047 /* Build a ternary operation and gimplify it. Emit code before GSI.
9048 Return the gimple_val holding the result. */
9049
9050 tree
9051 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9052 tree type, tree a, tree b, tree c)
9053 {
9054 tree ret;
9055 location_t loc = gimple_location (gsi_stmt (*gsi));
9056
9057 ret = fold_build3_loc (loc, code, type, a, b, c);
9058 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9059 GSI_SAME_STMT);
9060 }
9061
9062 /* Build a binary operation and gimplify it. Emit code before GSI.
9063 Return the gimple_val holding the result. */
9064
9065 tree
9066 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9067 tree type, tree a, tree b)
9068 {
9069 tree ret;
9070
9071 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9072 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9073 GSI_SAME_STMT);
9074 }
9075
9076 /* Build a unary operation and gimplify it. Emit code before GSI.
9077 Return the gimple_val holding the result. */
9078
9079 tree
9080 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9081 tree a)
9082 {
9083 tree ret;
9084
9085 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9086 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9087 GSI_SAME_STMT);
9088 }
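
/* Editorial usage sketch (hypothetical helper): emit 't = (a + b) * c'
   before *GSI with the helpers above; each call folds the expression and
   gimplifies the result down to a single gimple value.  */

static tree ATTRIBUTE_UNUSED
gimplify_sum_times_sketch (gimple_stmt_iterator *gsi, tree type,
                           tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
}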
9089
9090
9091 \f
9092 /* Given a basic block B which ends with a conditional and has
9093 precisely two successors, determine which of the edges is taken if
9094 the conditional is true and which is taken if the conditional is
9095 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9096
9097 void
9098 extract_true_false_edges_from_block (basic_block b,
9099 edge *true_edge,
9100 edge *false_edge)
9101 {
9102 edge e = EDGE_SUCC (b, 0);
9103
9104 if (e->flags & EDGE_TRUE_VALUE)
9105 {
9106 *true_edge = e;
9107 *false_edge = EDGE_SUCC (b, 1);
9108 }
9109 else
9110 {
9111 *false_edge = e;
9112 *true_edge = EDGE_SUCC (b, 1);
9113 }
9114 }
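
/* Editorial usage sketch (hypothetical helper): given a block ending in a
   GIMPLE_COND, return the block reached when the condition is true.  */

static basic_block ATTRIBUTE_UNUSED
then_block_sketch (basic_block cond_bb)
{
  edge true_edge, false_edge;
  gcc_assert (EDGE_COUNT (cond_bb->succs) == 2);
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return true_edge->dest;
}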
9115
9116
9117 /* From a controlling predicate in the immediate dominator DOM of
9118 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9119 predicate evaluates to true and false and store them to
9120 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9121 they are non-NULL. Return true if the edges can be determined,
9122 else false. */
9123
9124 bool
9125 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9126 edge *true_controlled_edge,
9127 edge *false_controlled_edge)
9128 {
9129 basic_block bb = phiblock;
9130 edge true_edge, false_edge, tem;
9131 edge e0 = NULL, e1 = NULL;
9132
9133 /* We have to verify that one edge into the PHI node is dominated
9134 by the true edge of the predicate block and the other edge
9135 dominated by the false edge. This ensures that the PHI argument
9136 we are going to take is completely determined by the path we
9137 take from the predicate block.
9138 We can only use BB dominance checks below if the destination of
9139 the true/false edges are dominated by their edge, thus only
9140 have a single predecessor. */
9141 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9142 tem = EDGE_PRED (bb, 0);
9143 if (tem == true_edge
9144 || (single_pred_p (true_edge->dest)
9145 && (tem->src == true_edge->dest
9146 || dominated_by_p (CDI_DOMINATORS,
9147 tem->src, true_edge->dest))))
9148 e0 = tem;
9149 else if (tem == false_edge
9150 || (single_pred_p (false_edge->dest)
9151 && (tem->src == false_edge->dest
9152 || dominated_by_p (CDI_DOMINATORS,
9153 tem->src, false_edge->dest))))
9154 e1 = tem;
9155 else
9156 return false;
9157 tem = EDGE_PRED (bb, 1);
9158 if (tem == true_edge
9159 || (single_pred_p (true_edge->dest)
9160 && (tem->src == true_edge->dest
9161 || dominated_by_p (CDI_DOMINATORS,
9162 tem->src, true_edge->dest))))
9163 e0 = tem;
9164 else if (tem == false_edge
9165 || (single_pred_p (false_edge->dest)
9166 && (tem->src == false_edge->dest
9167 || dominated_by_p (CDI_DOMINATORS,
9168 tem->src, false_edge->dest))))
9169 e1 = tem;
9170 else
9171 return false;
9172 if (!e0 || !e1)
9173 return false;
9174
9175 if (true_controlled_edge)
9176 *true_controlled_edge = e0;
9177 if (false_controlled_edge)
9178 *false_controlled_edge = e1;
9179
9180 return true;
9181 }
9182
9183 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9184 range [LOW, HIGH]. Place associated stmts before the last stmt of BB. */
9185
9186 void
9187 generate_range_test (basic_block bb, tree index, tree low, tree high,
9188 tree *lhs, tree *rhs)
9189 {
9190 tree type = TREE_TYPE (index);
9191 tree utype = unsigned_type_for (type);
9192
9193 low = fold_convert (utype, low);
9194 high = fold_convert (utype, high);
9195
9196 gimple_seq seq = NULL;
9197 index = gimple_convert (&seq, utype, index);
9198 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9199 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9200
9201 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9202 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9203 }
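
/* Editorial note: the generated test relies on unsigned wraparound.  With
   everything converted to UTYPE, INDEX - LOW underflows to a huge value
   whenever INDEX < LOW, so a single unsigned comparison of the returned
   *LHS against *RHS is equivalent to LOW <= INDEX && INDEX <= HIGH.  A
   host-side illustration (not part of the original file):  */

static bool ATTRIBUTE_UNUSED
in_range_sketch (unsigned HOST_WIDE_INT index,
                 unsigned HOST_WIDE_INT low, unsigned HOST_WIDE_INT high)
{
  /* Assumes LOW <= HIGH; one unsigned subtract-and-compare replaces the
     two signed comparisons.  */
  return index - low <= high - low;
}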
9204
9205 /* Return the basic block that belongs to label numbered INDEX
9206 of a switch statement. */
9207
9208 basic_block
9209 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9210 {
9211 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9212 }
9213
9214 /* Return the default basic block of a switch statement. */
9215
9216 basic_block
9217 gimple_switch_default_bb (function *ifun, gswitch *gs)
9218 {
9219 return gimple_switch_label_bb (ifun, gs, 0);
9220 }
9221
9222 /* Return the edge that belongs to label numbered INDEX
9223 of a switch statement. */
9224
9225 edge
9226 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9227 {
9228 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9229 }
9230
9231 /* Return the default edge of a switch statement. */
9232
9233 edge
9234 gimple_switch_default_edge (function *ifun, gswitch *gs)
9235 {
9236 return gimple_switch_edge (ifun, gs, 0);
9237 }
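
/* Editorial usage sketch (hypothetical helper): walk every case edge of
   switch statement GS, default edge included (it is label index 0).  */

static void ATTRIBUTE_UNUSED
walk_switch_edges_sketch (function *ifun, gswitch *gs)
{
  for (unsigned i = 0; i < gimple_switch_num_labels (gs); ++i)
    {
      edge e = gimple_switch_edge (ifun, gs, i);
      /* ... inspect E here ...  */
      (void) e;
    }
}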
9238
9239
9240 /* Emit return warnings. */
9241
9242 namespace {
9243
9244 const pass_data pass_data_warn_function_return =
9245 {
9246 GIMPLE_PASS, /* type */
9247 "*warn_function_return", /* name */
9248 OPTGROUP_NONE, /* optinfo_flags */
9249 TV_NONE, /* tv_id */
9250 PROP_cfg, /* properties_required */
9251 0, /* properties_provided */
9252 0, /* properties_destroyed */
9253 0, /* todo_flags_start */
9254 0, /* todo_flags_finish */
9255 };
9256
9257 class pass_warn_function_return : public gimple_opt_pass
9258 {
9259 public:
9260 pass_warn_function_return (gcc::context *ctxt)
9261 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9262 {}
9263
9264 /* opt_pass methods: */
9265 virtual unsigned int execute (function *);
9266
9267 }; // class pass_warn_function_return
9268
9269 unsigned int
9270 pass_warn_function_return::execute (function *fun)
9271 {
9272 location_t location;
9273 gimple *last;
9274 edge e;
9275 edge_iterator ei;
9276
9277 if (!targetm.warn_func_return (fun->decl))
9278 return 0;
9279
9280 /* If we have a path to EXIT, then we do return. */
9281 if (TREE_THIS_VOLATILE (fun->decl)
9282 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9283 {
9284 location = UNKNOWN_LOCATION;
9285 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9286 (e = ei_safe_edge (ei)); )
9287 {
9288 last = last_stmt (e->src);
9289 if ((gimple_code (last) == GIMPLE_RETURN
9290 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9291 && location == UNKNOWN_LOCATION
9292 && ((location = LOCATION_LOCUS (gimple_location (last)))
9293 != UNKNOWN_LOCATION)
9294 && !optimize)
9295 break;
9296 /* When optimizing, replace return stmts in noreturn functions
9297 with a __builtin_unreachable () call. */
9298 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9299 {
9300 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9301 gimple *new_stmt = gimple_build_call (fndecl, 0);
9302 gimple_set_location (new_stmt, gimple_location (last));
9303 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9304 gsi_replace (&gsi, new_stmt, true);
9305 remove_edge (e);
9306 }
9307 else
9308 ei_next (&ei);
9309 }
9310 if (location == UNKNOWN_LOCATION)
9311 location = cfun->function_end_locus;
9312 warning_at (location, 0, "%<noreturn%> function does return");
9313 }
9314
9315 /* If we see "return;" in some basic block, then we do reach the end
9316 without returning a value. */
9317 else if (warn_return_type > 0
9318 && !TREE_NO_WARNING (fun->decl)
9319 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9320 {
9321 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9322 {
9323 gimple *last = last_stmt (e->src);
9324 greturn *return_stmt = dyn_cast <greturn *> (last);
9325 if (return_stmt
9326 && gimple_return_retval (return_stmt) == NULL
9327 && !gimple_no_warning_p (last))
9328 {
9329 location = gimple_location (last);
9330 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9331 location = fun->function_end_locus;
9332 if (warning_at (location, OPT_Wreturn_type,
9333 "control reaches end of non-void function"))
9334 TREE_NO_WARNING (fun->decl) = 1;
9335 break;
9336 }
9337 }
9338 /* The C++ FE turns fallthrough from the end of a non-void function
9339 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9340 Recognize those too. */
9341 basic_block bb;
9342 if (!TREE_NO_WARNING (fun->decl))
9343 FOR_EACH_BB_FN (bb, fun)
9344 if (EDGE_COUNT (bb->succs) == 0)
9345 {
9346 gimple *last = last_stmt (bb);
9347 const enum built_in_function ubsan_missing_ret
9348 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9349 if (last
9350 && ((LOCATION_LOCUS (gimple_location (last))
9351 == BUILTINS_LOCATION
9352 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9353 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9354 {
9355 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9356 gsi_prev_nondebug (&gsi);
9357 gimple *prev = gsi_stmt (gsi);
9358 if (prev == NULL)
9359 location = UNKNOWN_LOCATION;
9360 else
9361 location = gimple_location (prev);
9362 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9363 location = fun->function_end_locus;
9364 if (warning_at (location, OPT_Wreturn_type,
9365 "control reaches end of non-void function"))
9366 TREE_NO_WARNING (fun->decl) = 1;
9367 break;
9368 }
9369 }
9370 }
9371 return 0;
9372 }
9373
9374 } // anon namespace
9375
9376 gimple_opt_pass *
9377 make_pass_warn_function_return (gcc::context *ctxt)
9378 {
9379 return new pass_warn_function_return (ctxt);
9380 }
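
/* Editorial note: user-level code the pass above diagnoses, for
   illustration only:

     int pick (int x)
     {
       if (x > 0)
         return x;
     }   // warning: control reaches end of non-void function

   The warning location prefers the last statement before the fall-off
   point (or before the __builtin_unreachable () the C++ FE synthesizes),
   falling back to the function's end locus.  */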
9381
9382 /* Walk a gimplified function and warn about calls whose return value is
9383 ignored when attribute ((warn_unused_result)) is set. This is done before
9384 inlining, so we don't have to worry about that. */
9385
9386 static void
9387 do_warn_unused_result (gimple_seq seq)
9388 {
9389 tree fdecl, ftype;
9390 gimple_stmt_iterator i;
9391
9392 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9393 {
9394 gimple *g = gsi_stmt (i);
9395
9396 switch (gimple_code (g))
9397 {
9398 case GIMPLE_BIND:
9399 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9400 break;
9401 case GIMPLE_TRY:
9402 do_warn_unused_result (gimple_try_eval (g));
9403 do_warn_unused_result (gimple_try_cleanup (g));
9404 break;
9405 case GIMPLE_CATCH:
9406 do_warn_unused_result (gimple_catch_handler (
9407 as_a <gcatch *> (g)));
9408 break;
9409 case GIMPLE_EH_FILTER:
9410 do_warn_unused_result (gimple_eh_filter_failure (g));
9411 break;
9412
9413 case GIMPLE_CALL:
9414 if (gimple_call_lhs (g))
9415 break;
9416 if (gimple_call_internal_p (g))
9417 break;
9418
9419 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9420 LHS. All calls whose value is ignored should be
9421 represented like this. Look for the attribute. */
9422 fdecl = gimple_call_fndecl (g);
9423 ftype = gimple_call_fntype (g);
9424
9425 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9426 {
9427 location_t loc = gimple_location (g);
9428
9429 if (fdecl)
9430 warning_at (loc, OPT_Wunused_result,
9431 "ignoring return value of %qD, "
9432 "declared with attribute warn_unused_result",
9433 fdecl);
9434 else
9435 warning_at (loc, OPT_Wunused_result,
9436 "ignoring return value of function "
9437 "declared with attribute warn_unused_result");
9438 }
9439 break;
9440
9441 default:
9442 /* Not a container, not a call, or a call whose value is used. */
9443 break;
9444 }
9445 }
9446 }
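
/* Editorial note: user-level code do_warn_unused_result diagnoses, for
   illustration only:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void f (void)
     {
       must_check ();         // warning: ignoring return value
       (void) must_check ();  // still warns: the gimple call has no LHS
       int r = must_check (); // no warning: the value is used
     }
   */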
9447
9448 namespace {
9449
9450 const pass_data pass_data_warn_unused_result =
9451 {
9452 GIMPLE_PASS, /* type */
9453 "*warn_unused_result", /* name */
9454 OPTGROUP_NONE, /* optinfo_flags */
9455 TV_NONE, /* tv_id */
9456 PROP_gimple_any, /* properties_required */
9457 0, /* properties_provided */
9458 0, /* properties_destroyed */
9459 0, /* todo_flags_start */
9460 0, /* todo_flags_finish */
9461 };
9462
9463 class pass_warn_unused_result : public gimple_opt_pass
9464 {
9465 public:
9466 pass_warn_unused_result (gcc::context *ctxt)
9467 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9468 {}
9469
9470 /* opt_pass methods: */
9471 virtual bool gate (function *) { return flag_warn_unused_result; }
9472 virtual unsigned int execute (function *)
9473 {
9474 do_warn_unused_result (gimple_body (current_function_decl));
9475 return 0;
9476 }
9477
9478 }; // class pass_warn_unused_result
9479
9480 } // anon namespace
9481
9482 gimple_opt_pass *
9483 make_pass_warn_unused_result (gcc::context *ctxt)
9484 {
9485 return new pass_warn_unused_result (ctxt);
9486 }
9487
9488 /* IPA passes, compilation of earlier functions or inlining
9489 might have changed some properties, such as marking functions nothrow,
9490 pure, const or noreturn.
9491 Remove redundant edges and basic blocks, and create new ones if necessary.
9492
9493 This pass can't be executed as a standalone pass from the pass manager,
9494 because in between inlining and this fixup verify_flow_info would fail. */
9495
9496 unsigned int
9497 execute_fixup_cfg (void)
9498 {
9499 basic_block bb;
9500 gimple_stmt_iterator gsi;
9501 int todo = 0;
9502 cgraph_node *node = cgraph_node::get (current_function_decl);
9503 profile_count num = node->count;
9504 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9505 bool scale = num.initialized_p () && !(num == den);
9506
9507 if (scale)
9508 {
9509 profile_count::adjust_for_ipa_scaling (&num, &den);
9510 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9511 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9512 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9513 }
9514
9515 FOR_EACH_BB_FN (bb, cfun)
9516 {
9517 if (scale)
9518 bb->count = bb->count.apply_scale (num, den);
9519 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9520 {
9521 gimple *stmt = gsi_stmt (gsi);
9522 tree decl = is_gimple_call (stmt)
9523 ? gimple_call_fndecl (stmt)
9524 : NULL;
9525 if (decl)
9526 {
9527 int flags = gimple_call_flags (stmt);
9528 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9529 {
9530 if (gimple_purge_dead_abnormal_call_edges (bb))
9531 todo |= TODO_cleanup_cfg;
9532
9533 if (gimple_in_ssa_p (cfun))
9534 {
9535 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9536 update_stmt (stmt);
9537 }
9538 }
9539
9540 if (flags & ECF_NORETURN
9541 && fixup_noreturn_call (stmt))
9542 todo |= TODO_cleanup_cfg;
9543 }
9544
9545 /* Remove stores to variables we marked write-only.
9546 Keep the access when the store has side effects, i.e. when the
9547 source is volatile. */
9548 if (gimple_store_p (stmt)
9549 && !gimple_has_side_effects (stmt))
9550 {
9551 tree lhs = get_base_address (gimple_get_lhs (stmt));
9552
9553 if (VAR_P (lhs)
9554 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9555 && varpool_node::get (lhs)->writeonly)
9556 {
9557 unlink_stmt_vdef (stmt);
9558 gsi_remove (&gsi, true);
9559 release_defs (stmt);
9560 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9561 continue;
9562 }
9563 }
9564 /* For calls we can simply remove LHS when it is known
9565 to be write-only. */
9566 if (is_gimple_call (stmt)
9567 && gimple_get_lhs (stmt))
9568 {
9569 tree lhs = get_base_address (gimple_get_lhs (stmt));
9570
9571 if (VAR_P (lhs)
9572 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9573 && varpool_node::get (lhs)->writeonly)
9574 {
9575 gimple_call_set_lhs (stmt, NULL);
9576 update_stmt (stmt);
9577 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9578 }
9579 }
9580
9581 if (maybe_clean_eh_stmt (stmt)
9582 && gimple_purge_dead_eh_edges (bb))
9583 todo |= TODO_cleanup_cfg;
9584 gsi_next (&gsi);
9585 }
9586
9587 /* If we have a basic block with no successors that does not
9588 end with a control statement or a noreturn call, end it with
9589 a call to __builtin_unreachable. This situation can occur
9590 when inlining a noreturn call that does in fact return. */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count);
                }
            }
        }
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
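
/* The pass object is instantiated by the pass manager; in the GCC
   source tree this happens through passes.def, which (at the time of
   writing) lists "NEXT_PASS (pass_fixup_cfg)" at several points in
   the pipeline.  */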

/* Garbage collection support for edge_def. */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
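
/* Note that E->insns is a union: pending statements on a GIMPLE edge
   live in E->insns.g (a gimple_seq), while on an RTL edge they live in
   E->insns.r, which is why the walkers here and below dispatch on
   current_ir_type ().  */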

/* PCH support for edge_def. */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
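
/* This three-argument overload applies OP, a pointer-adjusting
   callback (used, for instance, to relocate pointers when a PCH image
   is written or mapped at a different address), to each pointer field,
   mirroring the marking walks above.  */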

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun. */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
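  /* The freshly initialized CFG consists of just the two fixed blocks,
     ENTRY and EXIT, with no edges yet; the assertions below check
     exactly that.  */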
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
   - build_gimple_cfg
   - make_blocks: calls create_basic_block (seq, bb);
   - make_edges. */

/* Verify a simple CFG of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks. */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs. */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges. */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it. */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks. */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges. */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges. */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information. */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance. */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where each of the nodes A, B, C and D
   below has an edge to every other node in the subgraph, and to
   itself).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks. */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges. */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges. */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else. */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves). */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information. */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY. */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block. */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance. */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT. */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT. */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

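/* Verify a CFG in which a block jumps to itself (one of the cases in
   the TODO list at the end of this file):
     ENTRY -> A -> EXIT, plus a self-edge A -> A.
   This is a minimal sketch following the pattern of the tests above;
   the dominance expectations are assumptions derived from the
   definition of immediate (post)dominators rather than verified
   against a reference run.  */

static void
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create a single empty block.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  /* Create the edges, including the self-edge.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the blocks and edges: the self-edge counts once as a pred
     and once as a succ of A.  */
  ASSERT_EQ (3, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (3, n_edges_for_fn (fun));
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  /* The self-edge should not affect immediate dominators: A is still
     immediately dominated by ENTRY and post-dominated by EXIT.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS, bb_a));
  free_dominance_info (CDI_DOMINATORS);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
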
/* Run all of the selftests within this file. */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
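  test_self_loop ();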
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself (a first sketch: test_self_loop above)
   - etc  */

#endif /* CHECKING_P */