1 /* Control flow functions for trees.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
66 /* This file contains functions for building the Control Flow Graph (CFG)
67 for a function tree. */
69 /* Local declarations. */
71 /* Initial capacity for the basic block array. */
72 static const int initial_cfg_capacity
= 20;
74 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
75 which use a particular edge. The CASE_LABEL_EXPRs are chained together
76 via their CASE_CHAIN field, which we clear after we're done with the
77 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
79 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
80 update the case vector in response to edge redirections.
82 Right now this table is set up and torn down at key points in the
83 compilation process. It would be nice if we could make the table
84 more persistent. The key is getting notification of changes to
85 the CFG (particularly edge removal, creation and redirection). */
87 static hash_map
<edge
, tree
> *edge_to_cases
;
89 /* If we record edge_to_cases, this bitmap will hold indexes
90 of basic blocks that end in a GIMPLE_SWITCH which we touched
91 due to edge manipulations. */
93 static bitmap touched_switch_bbs
;
98 long num_merged_labels
;
101 static struct cfg_stats_d cfg_stats
;
103 /* Data to pass to replace_block_vars_by_duplicates_1. */
104 struct replace_decls_d
106 hash_map
<tree
, tree
> *vars_map
;
110 /* Hash table to store last discriminator assigned for each locus. */
111 struct locus_discrim_map
117 /* Hashtable helpers. */
119 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
121 static inline hashval_t
hash (const locus_discrim_map
*);
122 static inline bool equal (const locus_discrim_map
*,
123 const locus_discrim_map
*);
126 /* Trivial hash function for a location_t. ITEM is a pointer to
127 a hash table entry that maps a location_t to a discriminator. */
130 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
132 return item
->location_line
;
135 /* Equality function for the locus-to-discriminator map. A and B
136 point to the two hash table entries to compare. */
139 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
140 const locus_discrim_map
*b
)
142 return a
->location_line
== b
->location_line
;
145 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
147 /* Basic blocks and flowgraphs. */
148 static void make_blocks (gimple_seq
);
151 static void make_edges (void);
152 static void assign_discriminators (void);
153 static void make_cond_expr_edges (basic_block
);
154 static void make_gimple_switch_edges (gswitch
*, basic_block
);
155 static bool make_goto_expr_edges (basic_block
);
156 static void make_gimple_asm_edges (basic_block
);
157 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
158 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
160 /* Various helpers. */
161 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
162 static int gimple_verify_flow_info (void);
163 static void gimple_make_forwarder_block (edge
);
164 static gimple
*first_non_label_stmt (basic_block
);
165 static bool verify_gimple_transaction (gtransaction
*);
166 static bool call_can_make_abnormal_goto (gimple
*);
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block
, basic_block
);
170 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
171 static void remove_bb (basic_block
);
172 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
173 static edge
find_taken_edge_cond_expr (const gcond
*, tree
);
174 static void lower_phi_internal_fn ();
177 init_empty_tree_cfg_for_function (struct function
*fn
)
179 /* Initialize the basic block array. */
181 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
182 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
183 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
184 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
185 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
186 initial_cfg_capacity
);
188 /* Build a mapping of labels to their associated blocks. */
189 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
190 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
191 initial_cfg_capacity
);
193 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
194 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
196 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
197 = EXIT_BLOCK_PTR_FOR_FN (fn
);
198 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
199 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
203 init_empty_tree_cfg (void)
205 init_empty_tree_cfg_for_function (cfun
);
208 /*---------------------------------------------------------------------------
210 ---------------------------------------------------------------------------*/
212 /* Entry point to the CFG builder for trees. SEQ is the sequence of
213 statements to be added to the flowgraph. */
216 build_gimple_cfg (gimple_seq seq
)
218 /* Register specific gimple functions. */
219 gimple_register_cfg_hooks ();
221 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
223 init_empty_tree_cfg ();
227 /* Make sure there is always at least one block, even if it's empty. */
228 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
229 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
231 /* Adjust the size of the array. */
232 if (basic_block_info_for_fn (cfun
)->length ()
233 < (size_t) n_basic_blocks_for_fn (cfun
))
234 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
235 n_basic_blocks_for_fn (cfun
));
237 /* To speed up statement iterator walks, we first purge dead labels. */
238 cleanup_dead_labels ();
240 /* Group case nodes to reduce the number of edges.
241 We do this after cleaning up dead labels because otherwise we miss
242 a lot of obvious case merging opportunities. */
243 group_case_labels ();
245 /* Create the edges of the flowgraph. */
246 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
248 assign_discriminators ();
249 lower_phi_internal_fn ();
250 cleanup_dead_labels ();
251 delete discriminator_per_locus
;
252 discriminator_per_locus
= NULL
;
255 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
256 them and propagate the information to LOOP. We assume that the annotations
257 come immediately before the condition in BB, if any. */
260 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
262 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
263 gimple
*stmt
= gsi_stmt (gsi
);
265 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
268 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
270 stmt
= gsi_stmt (gsi
);
271 if (gimple_code (stmt
) != GIMPLE_CALL
)
273 if (!gimple_call_internal_p (stmt
)
274 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
277 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
279 case annot_expr_ivdep_kind
:
280 loop
->safelen
= INT_MAX
;
282 case annot_expr_unroll_kind
:
284 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt
, 2));
285 cfun
->has_unroll
= true;
287 case annot_expr_no_vector_kind
:
288 loop
->dont_vectorize
= true;
290 case annot_expr_vector_kind
:
291 loop
->force_vectorize
= true;
292 cfun
->has_force_vectorize_loops
= true;
294 case annot_expr_parallel_kind
:
295 loop
->can_be_parallel
= true;
296 loop
->safelen
= INT_MAX
;
302 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
303 gimple_call_arg (stmt
, 0));
304 gsi_replace (&gsi
, stmt
, true);
308 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
309 them and propagate the information to the loop. We assume that the
310 annotations come immediately before the condition of the loop. */
313 replace_loop_annotate (void)
317 gimple_stmt_iterator gsi
;
320 FOR_EACH_LOOP (loop
, 0)
322 /* First look into the header. */
323 replace_loop_annotate_in_block (loop
->header
, loop
);
325 /* Then look into the latch, if any. */
327 replace_loop_annotate_in_block (loop
->latch
, loop
);
330 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
331 FOR_EACH_BB_FN (bb
, cfun
)
333 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
335 stmt
= gsi_stmt (gsi
);
336 if (gimple_code (stmt
) != GIMPLE_CALL
)
338 if (!gimple_call_internal_p (stmt
)
339 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
342 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
344 case annot_expr_ivdep_kind
:
345 case annot_expr_unroll_kind
:
346 case annot_expr_no_vector_kind
:
347 case annot_expr_vector_kind
:
348 case annot_expr_parallel_kind
:
354 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
355 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
356 gimple_call_arg (stmt
, 0));
357 gsi_replace (&gsi
, stmt
, true);
362 /* Lower internal PHI function from GIMPLE FE. */
365 lower_phi_internal_fn ()
367 basic_block bb
, pred
= NULL
;
368 gimple_stmt_iterator gsi
;
373 /* After edge creation, handle __PHI function from GIMPLE FE. */
374 FOR_EACH_BB_FN (bb
, cfun
)
376 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
);)
378 stmt
= gsi_stmt (gsi
);
379 if (! gimple_call_internal_p (stmt
, IFN_PHI
))
382 lhs
= gimple_call_lhs (stmt
);
383 phi_node
= create_phi_node (lhs
, bb
);
385 /* Add arguments to the PHI node. */
386 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
388 tree arg
= gimple_call_arg (stmt
, i
);
389 if (TREE_CODE (arg
) == LABEL_DECL
)
390 pred
= label_to_block (cfun
, arg
);
393 edge e
= find_edge (pred
, bb
);
394 add_phi_arg (phi_node
, arg
, e
, UNKNOWN_LOCATION
);
398 gsi_remove (&gsi
, true);
404 execute_build_cfg (void)
406 gimple_seq body
= gimple_body (current_function_decl
);
408 build_gimple_cfg (body
);
409 gimple_set_body (current_function_decl
, NULL
);
410 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
412 fprintf (dump_file
, "Scope blocks:\n");
413 dump_scope_blocks (dump_file
, dump_flags
);
416 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
417 replace_loop_annotate ();
423 const pass_data pass_data_build_cfg
=
425 GIMPLE_PASS
, /* type */
427 OPTGROUP_NONE
, /* optinfo_flags */
428 TV_TREE_CFG
, /* tv_id */
429 PROP_gimple_leh
, /* properties_required */
430 ( PROP_cfg
| PROP_loops
), /* properties_provided */
431 0, /* properties_destroyed */
432 0, /* todo_flags_start */
433 0, /* todo_flags_finish */
436 class pass_build_cfg
: public gimple_opt_pass
439 pass_build_cfg (gcc::context
*ctxt
)
440 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
443 /* opt_pass methods: */
444 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
446 }; // class pass_build_cfg
451 make_pass_build_cfg (gcc::context
*ctxt
)
453 return new pass_build_cfg (ctxt
);
457 /* Return true if T is a computed goto. */
460 computed_goto_p (gimple
*t
)
462 return (gimple_code (t
) == GIMPLE_GOTO
463 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
466 /* Returns true if the sequence of statements STMTS only contains
467 a call to __builtin_unreachable (). */
470 gimple_seq_unreachable_p (gimple_seq stmts
)
473 /* Return false if -fsanitize=unreachable, we don't want to
474 optimize away those calls, but rather turn them into
475 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
477 || sanitize_flags_p (SANITIZE_UNREACHABLE
))
480 gimple_stmt_iterator gsi
= gsi_last (stmts
);
482 if (!gimple_call_builtin_p (gsi_stmt (gsi
), BUILT_IN_UNREACHABLE
))
485 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
487 gimple
*stmt
= gsi_stmt (gsi
);
488 if (gimple_code (stmt
) != GIMPLE_LABEL
489 && !is_gimple_debug (stmt
)
490 && !gimple_clobber_p (stmt
))
496 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
497 the other edge points to a bb with just __builtin_unreachable ().
498 I.e. return true for C->M edge in:
506 __builtin_unreachable ();
510 assert_unreachable_fallthru_edge_p (edge e
)
512 basic_block pred_bb
= e
->src
;
513 gimple
*last
= last_stmt (pred_bb
);
514 if (last
&& gimple_code (last
) == GIMPLE_COND
)
516 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
517 if (other_bb
== e
->dest
)
518 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
519 if (EDGE_COUNT (other_bb
->succs
) == 0)
520 return gimple_seq_unreachable_p (bb_seq (other_bb
));
526 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
527 could alter control flow except via eh. We initialize the flag at
528 CFG build time and only ever clear it later. */
531 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
533 int flags
= gimple_call_flags (stmt
);
535 /* A call alters control flow if it can make an abnormal goto. */
536 if (call_can_make_abnormal_goto (stmt
)
537 /* A call also alters control flow if it does not return. */
538 || flags
& ECF_NORETURN
539 /* TM ending statements have backedges out of the transaction.
540 Return true so we split the basic block containing them.
541 Note that the TM_BUILTIN test is merely an optimization. */
542 || ((flags
& ECF_TM_BUILTIN
)
543 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
544 /* BUILT_IN_RETURN call is same as return statement. */
545 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
546 /* IFN_UNIQUE should be the last insn, to make checking for it
547 as cheap as possible. */
548 || (gimple_call_internal_p (stmt
)
549 && gimple_call_internal_unique_p (stmt
)))
550 gimple_call_set_ctrl_altering (stmt
, true);
552 gimple_call_set_ctrl_altering (stmt
, false);
556 /* Insert SEQ after BB and build a flowgraph. */
559 make_blocks_1 (gimple_seq seq
, basic_block bb
)
561 gimple_stmt_iterator i
= gsi_start (seq
);
563 gimple
*prev_stmt
= NULL
;
564 bool start_new_block
= true;
565 bool first_stmt_of_seq
= true;
567 while (!gsi_end_p (i
))
569 /* PREV_STMT should only be set to a debug stmt if the debug
570 stmt is before nondebug stmts. Once stmt reaches a nondebug
571 nonlabel, prev_stmt will be set to it, so that
572 stmt_starts_bb_p will know to start a new block if a label is
573 found. However, if stmt was a label after debug stmts only,
574 keep the label in prev_stmt even if we find further debug
575 stmts, for there may be other labels after them, and they
576 should land in the same block. */
577 if (!prev_stmt
|| !stmt
|| !is_gimple_debug (stmt
))
581 if (stmt
&& is_gimple_call (stmt
))
582 gimple_call_initialize_ctrl_altering (stmt
);
584 /* If the statement starts a new basic block or if we have determined
585 in a previous pass that we need to create a new block for STMT, do
587 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
589 if (!first_stmt_of_seq
)
590 gsi_split_seq_before (&i
, &seq
);
591 bb
= create_basic_block (seq
, bb
);
592 start_new_block
= false;
596 /* Now add STMT to BB and create the subgraphs for special statement
598 gimple_set_bb (stmt
, bb
);
600 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
602 if (stmt_ends_bb_p (stmt
))
604 /* If the stmt can make abnormal goto use a new temporary
605 for the assignment to the LHS. This makes sure the old value
606 of the LHS is available on the abnormal edge. Otherwise
607 we will end up with overlapping life-ranges for abnormal
609 if (gimple_has_lhs (stmt
)
610 && stmt_can_make_abnormal_goto (stmt
)
611 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
613 tree lhs
= gimple_get_lhs (stmt
);
614 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
615 gimple
*s
= gimple_build_assign (lhs
, tmp
);
616 gimple_set_location (s
, gimple_location (stmt
));
617 gimple_set_block (s
, gimple_block (stmt
));
618 gimple_set_lhs (stmt
, tmp
);
619 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
620 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
621 DECL_GIMPLE_REG_P (tmp
) = 1;
622 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
624 start_new_block
= true;
628 first_stmt_of_seq
= false;
633 /* Build a flowgraph for the sequence of stmts SEQ. */
636 make_blocks (gimple_seq seq
)
638 /* Look for debug markers right before labels, and move the debug
639 stmts after the labels. Accepting labels among debug markers
640 adds no value, just complexity; if we wanted to annotate labels
641 with view numbers (so sequencing among markers would matter) or
642 somesuch, we're probably better off still moving the labels, but
643 adding other debug annotations in their original positions or
644 emitting nonbind or bind markers associated with the labels in
645 the original position of the labels.
647 Moving labels would probably be simpler, but we can't do that:
648 moving labels assigns label ids to them, and doing so because of
649 debug markers makes for -fcompare-debug and possibly even codegen
650 differences. So, we have to move the debug stmts instead. To
651 that end, we scan SEQ backwards, marking the position of the
652 latest (earliest we find) label, and moving debug stmts that are
653 not separated from it by nondebug nonlabel stmts after the
655 if (MAY_HAVE_DEBUG_MARKER_STMTS
)
657 gimple_stmt_iterator label
= gsi_none ();
659 for (gimple_stmt_iterator i
= gsi_last (seq
); !gsi_end_p (i
); gsi_prev (&i
))
661 gimple
*stmt
= gsi_stmt (i
);
663 /* If this is the first label we encounter (latest in SEQ)
664 before nondebug stmts, record its position. */
665 if (is_a
<glabel
*> (stmt
))
667 if (gsi_end_p (label
))
672 /* Without a recorded label position to move debug stmts to,
673 there's nothing to do. */
674 if (gsi_end_p (label
))
677 /* Move the debug stmt at I after LABEL. */
678 if (is_gimple_debug (stmt
))
680 gcc_assert (gimple_debug_nonbind_marker_p (stmt
));
681 /* As STMT is removed, I advances to the stmt after
682 STMT, so the gsi_prev in the for "increment"
683 expression gets us to the stmt we're to visit after
684 STMT. LABEL, however, would advance to the moved
685 stmt if we passed it to gsi_move_after, so pass it a
686 copy instead, so as to keep LABEL pointing to the
688 gimple_stmt_iterator copy
= label
;
689 gsi_move_after (&i
, ©
);
693 /* There aren't any (more?) debug stmts before label, so
694 there isn't anything else to move after it. */
699 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
702 /* Create and return a new empty basic block after bb AFTER. */
705 create_bb (void *h
, void *e
, basic_block after
)
711 /* Create and initialize a new basic block. Since alloc_block uses
712 GC allocation that clears memory to allocate a basic block, we do
713 not have to clear the newly allocated basic block here. */
716 bb
->index
= last_basic_block_for_fn (cfun
);
718 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
720 /* Add the new block to the linked list of blocks. */
721 link_block (bb
, after
);
723 /* Grow the basic block array if needed. */
724 if ((size_t) last_basic_block_for_fn (cfun
)
725 == basic_block_info_for_fn (cfun
)->length ())
728 (last_basic_block_for_fn (cfun
)
729 + (last_basic_block_for_fn (cfun
) + 3) / 4);
730 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
733 /* Add the newly created block to the array. */
734 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
736 n_basic_blocks_for_fn (cfun
)++;
737 last_basic_block_for_fn (cfun
)++;
743 /*---------------------------------------------------------------------------
745 ---------------------------------------------------------------------------*/
747 /* If basic block BB has an abnormal edge to a basic block
748 containing IFN_ABNORMAL_DISPATCHER internal call, return
749 that the dispatcher's basic block, otherwise return NULL. */
752 get_abnormal_succ_dispatcher (basic_block bb
)
757 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
758 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
760 gimple_stmt_iterator gsi
761 = gsi_start_nondebug_after_labels_bb (e
->dest
);
762 gimple
*g
= gsi_stmt (gsi
);
763 if (g
&& gimple_call_internal_p (g
, IFN_ABNORMAL_DISPATCHER
))
769 /* Helper function for make_edges. Create a basic block with
770 with ABNORMAL_DISPATCHER internal call in it if needed, and
771 create abnormal edges from BBS to it and from it to FOR_BB
772 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
775 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
776 basic_block for_bb
, int *bb_to_omp_idx
,
777 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
779 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
780 unsigned int idx
= 0;
786 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
787 if (bb_to_omp_idx
[for_bb
->index
] != 0)
791 /* If the dispatcher has been created already, then there are basic
792 blocks with abnormal edges to it, so just make a new edge to
794 if (*dispatcher
== NULL
)
796 /* Check if there are any basic blocks that need to have
797 abnormal edges to this dispatcher. If there are none, return
799 if (bb_to_omp_idx
== NULL
)
801 if (bbs
->is_empty ())
806 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
807 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
813 /* Create the dispatcher bb. */
814 *dispatcher
= create_basic_block (NULL
, for_bb
);
817 /* Factor computed gotos into a common computed goto site. Also
818 record the location of that site so that we can un-factor the
819 gotos after we have converted back to normal form. */
820 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
822 /* Create the destination of the factored goto. Each original
823 computed goto will put its desired destination into this
824 variable and jump to the label we create immediately below. */
825 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
827 /* Build a label for the new block which will contain the
828 factored computed goto. */
829 tree factored_label_decl
830 = create_artificial_label (UNKNOWN_LOCATION
);
831 gimple
*factored_computed_goto_label
832 = gimple_build_label (factored_label_decl
);
833 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
835 /* Build our new computed goto. */
836 gimple
*factored_computed_goto
= gimple_build_goto (var
);
837 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
839 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
842 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
845 gsi
= gsi_last_bb (bb
);
846 gimple
*last
= gsi_stmt (gsi
);
848 gcc_assert (computed_goto_p (last
));
850 /* Copy the original computed goto's destination into VAR. */
852 = gimple_build_assign (var
, gimple_goto_dest (last
));
853 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
855 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
856 e
->goto_locus
= gimple_location (last
);
857 gsi_remove (&gsi
, true);
862 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
863 gimple
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
865 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
866 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
868 /* Create predecessor edges of the dispatcher. */
869 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
872 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
874 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
879 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
882 /* Creates outgoing edges for BB. Returns 1 when it ends with an
883 computed goto, returns 2 when it ends with a statement that
884 might return to this function via an nonlocal goto, otherwise
885 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
888 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
890 gimple
*last
= last_stmt (bb
);
891 bool fallthru
= false;
897 switch (gimple_code (last
))
900 if (make_goto_expr_edges (bb
))
906 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
907 e
->goto_locus
= gimple_location (last
);
912 make_cond_expr_edges (bb
);
916 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
920 make_eh_edges (last
);
923 case GIMPLE_EH_DISPATCH
:
924 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
928 /* If this function receives a nonlocal goto, then we need to
929 make edges from this call site to all the nonlocal goto
931 if (stmt_can_make_abnormal_goto (last
))
934 /* If this statement has reachable exception handlers, then
935 create abnormal edges to them. */
936 make_eh_edges (last
);
938 /* BUILTIN_RETURN is really a return statement. */
939 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
941 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
944 /* Some calls are known not to return. */
946 fallthru
= !gimple_call_noreturn_p (last
);
950 /* A GIMPLE_ASSIGN may throw internally and thus be considered
952 if (is_ctrl_altering_stmt (last
))
953 make_eh_edges (last
);
958 make_gimple_asm_edges (bb
);
963 fallthru
= omp_make_gimple_edges (bb
, pcur_region
, pomp_index
);
966 case GIMPLE_TRANSACTION
:
968 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
969 tree label1
= gimple_transaction_label_norm (txn
);
970 tree label2
= gimple_transaction_label_uninst (txn
);
973 make_edge (bb
, label_to_block (cfun
, label1
), EDGE_FALLTHRU
);
975 make_edge (bb
, label_to_block (cfun
, label2
),
976 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
978 tree label3
= gimple_transaction_label_over (txn
);
979 if (gimple_transaction_subcode (txn
)
980 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
981 make_edge (bb
, label_to_block (cfun
, label3
), EDGE_TM_ABORT
);
988 gcc_assert (!stmt_ends_bb_p (last
));
994 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
999 /* Join all the blocks in the flowgraph. */
1005 struct omp_region
*cur_region
= NULL
;
1006 auto_vec
<basic_block
> ab_edge_goto
;
1007 auto_vec
<basic_block
> ab_edge_call
;
1008 int *bb_to_omp_idx
= NULL
;
1009 int cur_omp_region_idx
= 0;
1011 /* Create an edge from entry to the first block with executable
1012 statements in it. */
1013 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
1014 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
1017 /* Traverse the basic block array placing edges. */
1018 FOR_EACH_BB_FN (bb
, cfun
)
1023 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
1025 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1027 ab_edge_goto
.safe_push (bb
);
1029 ab_edge_call
.safe_push (bb
);
1031 if (cur_region
&& bb_to_omp_idx
== NULL
)
1032 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
1035 /* Computed gotos are hell to deal with, especially if there are
1036 lots of them with a large number of destinations. So we factor
1037 them to a common computed goto location before we build the
1038 edge list. After we convert back to normal form, we will un-factor
1039 the computed gotos since factoring introduces an unwanted jump.
1040 For non-local gotos and abnormal edges from calls to calls that return
1041 twice or forced labels, factor the abnormal edges too, by having all
1042 abnormal edges from the calls go to a common artificial basic block
1043 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1044 basic block to all forced labels and calls returning twice.
1045 We do this per-OpenMP structured block, because those regions
1046 are guaranteed to be single entry single exit by the standard,
1047 so it is not allowed to enter or exit such regions abnormally this way,
1048 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1049 must not transfer control across SESE region boundaries. */
1050 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
1052 gimple_stmt_iterator gsi
;
1053 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
1054 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
1055 int count
= n_basic_blocks_for_fn (cfun
);
1058 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
1060 FOR_EACH_BB_FN (bb
, cfun
)
1062 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1064 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1070 target
= gimple_label_label (label_stmt
);
1072 /* Make an edge to every label block that has been marked as a
1073 potential target for a computed goto or a non-local goto. */
1074 if (FORCED_LABEL (target
))
1075 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1076 &ab_edge_goto
, true);
1077 if (DECL_NONLOCAL (target
))
1079 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1080 &ab_edge_call
, false);
1085 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1086 gsi_next_nondebug (&gsi
);
1087 if (!gsi_end_p (gsi
))
1089 /* Make an edge to every setjmp-like call. */
1090 gimple
*call_stmt
= gsi_stmt (gsi
);
1091 if (is_gimple_call (call_stmt
)
1092 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1093 || gimple_call_builtin_p (call_stmt
,
1094 BUILT_IN_SETJMP_RECEIVER
)))
1095 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1096 &ab_edge_call
, false);
1101 XDELETE (dispatcher_bbs
);
1104 XDELETE (bb_to_omp_idx
);
1106 omp_free_regions ();
1109 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1110 needed. Returns true if new bbs were created.
1111 Note: This is transitional code, and should not be used for new code. We
1112 should be able to get rid of this by rewriting all target va-arg
1113 gimplification hooks to use an interface gimple_build_cond_value as described
1114 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1117 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1119 gimple
*stmt
= gsi_stmt (*gsi
);
1120 basic_block bb
= gimple_bb (stmt
);
1121 basic_block lastbb
, afterbb
;
1122 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1124 lastbb
= make_blocks_1 (seq
, bb
);
1125 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1127 e
= split_block (bb
, stmt
);
1128 /* Move e->dest to come after the new basic blocks. */
1130 unlink_block (afterbb
);
1131 link_block (afterbb
, lastbb
);
1132 redirect_edge_succ (e
, bb
->next_bb
);
1134 while (bb
!= afterbb
)
1136 struct omp_region
*cur_region
= NULL
;
1137 profile_count cnt
= profile_count::zero ();
1140 int cur_omp_region_idx
= 0;
1141 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1142 gcc_assert (!mer
&& !cur_region
);
1143 add_bb_to_loop (bb
, afterbb
->loop_father
);
1147 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1149 if (e
->count ().initialized_p ())
1154 tree_guess_outgoing_edge_probabilities (bb
);
1155 if (all
|| profile_status_for_fn (cfun
) == PROFILE_READ
)
1163 /* Find the next available discriminator value for LOCUS. The
1164 discriminator distinguishes among several basic blocks that
1165 share a common locus, allowing for more accurate sample-based
1169 next_discriminator_for_locus (int line
)
1171 struct locus_discrim_map item
;
1172 struct locus_discrim_map
**slot
;
1174 item
.location_line
= line
;
1175 item
.discriminator
= 0;
1176 slot
= discriminator_per_locus
->find_slot_with_hash (&item
, line
, INSERT
);
1178 if (*slot
== HTAB_EMPTY_ENTRY
)
1180 *slot
= XNEW (struct locus_discrim_map
);
1182 (*slot
)->location_line
= line
;
1183 (*slot
)->discriminator
= 0;
1185 (*slot
)->discriminator
++;
1186 return (*slot
)->discriminator
;
1189 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1192 same_line_p (location_t locus1
, expanded_location
*from
, location_t locus2
)
1194 expanded_location to
;
1196 if (locus1
== locus2
)
1199 to
= expand_location (locus2
);
1201 if (from
->line
!= to
.line
)
1203 if (from
->file
== to
.file
)
1205 return (from
->file
!= NULL
1207 && filename_cmp (from
->file
, to
.file
) == 0);
1210 /* Assign discriminators to each basic block. */
1213 assign_discriminators (void)
1217 FOR_EACH_BB_FN (bb
, cfun
)
1221 gimple
*last
= last_stmt (bb
);
1222 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1224 if (locus
== UNKNOWN_LOCATION
)
1227 expanded_location locus_e
= expand_location (locus
);
1229 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1231 gimple
*first
= first_non_label_stmt (e
->dest
);
1232 gimple
*last
= last_stmt (e
->dest
);
1233 if ((first
&& same_line_p (locus
, &locus_e
,
1234 gimple_location (first
)))
1235 || (last
&& same_line_p (locus
, &locus_e
,
1236 gimple_location (last
))))
1238 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1240 = next_discriminator_for_locus (locus_e
.line
);
1242 e
->dest
->discriminator
1243 = next_discriminator_for_locus (locus_e
.line
);
1249 /* Create the edges for a GIMPLE_COND starting at block BB. */
1252 make_cond_expr_edges (basic_block bb
)
1254 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1255 gimple
*then_stmt
, *else_stmt
;
1256 basic_block then_bb
, else_bb
;
1257 tree then_label
, else_label
;
1261 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1263 /* Entry basic blocks for each component. */
1264 then_label
= gimple_cond_true_label (entry
);
1265 else_label
= gimple_cond_false_label (entry
);
1266 then_bb
= label_to_block (cfun
, then_label
);
1267 else_bb
= label_to_block (cfun
, else_label
);
1268 then_stmt
= first_stmt (then_bb
);
1269 else_stmt
= first_stmt (else_bb
);
1271 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1272 e
->goto_locus
= gimple_location (then_stmt
);
1273 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1275 e
->goto_locus
= gimple_location (else_stmt
);
1277 /* We do not need the labels anymore. */
1278 gimple_cond_set_true_label (entry
, NULL_TREE
);
1279 gimple_cond_set_false_label (entry
, NULL_TREE
);
1283 /* Called for each element in the hash table (P) as we delete the
1284 edge to cases hash table.
1286 Clear all the CASE_CHAINs to prevent problems with copying of
1287 SWITCH_EXPRs and structure sharing rules, then free the hash table
1291 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
/* Walk the CASE_CHAIN list rooted at VALUE, severing each link as we
   go.  NOTE(review): the declarations of T and NEXT are not visible
   in this excerpt.  */
1295 for (t
= value
; t
; t
= next
)
1297 next
= CASE_CHAIN (t
);
1298 CASE_CHAIN (t
) = NULL
;
1304 /* Start recording information mapping edges to case labels. */
1307 start_recording_case_labels (void)
/* Must not already be recording; allocate the edge->cases map and the
   bitmap of switch-containing blocks touched while recording.  */
1309 gcc_assert (edge_to_cases
== NULL
);
1310 edge_to_cases
= new hash_map
<edge
, tree
>;
1311 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1314 /* Return nonzero if we are recording information for case labels. */
1317 recording_case_labels_p (void)
/* Recording is active exactly while the map allocated by
   start_recording_case_labels is live.  */
1319 return (edge_to_cases
!= NULL
);
1322 /* Stop recording information mapping edges to case labels and
1323 remove any information we have recorded. */
1325 end_recording_case_labels (void)
/* Clear every recorded CASE_CHAIN via the traversal callback, then
   free the map itself.  */
1329 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1330 delete edge_to_cases
;
1331 edge_to_cases
= NULL
;
/* Re-group case labels of every switch whose block was touched while
   recording.  NOTE(review): declarations of I and BI are not visible
   in this excerpt.  */
1332 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1334 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1337 gimple
*stmt
= last_stmt (bb
);
1338 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1339 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1342 BITMAP_FREE (touched_switch_bbs
);
1345 /* If we are inside a {start,end}_recording_cases block, then return
1346 a chain of CASE_LABEL_EXPRs from T which reference E.
1348 Otherwise return NULL. */
1351 get_cases_for_edge (edge e
, gswitch
*t
)
1356 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1357 chains available. Return NULL so the caller can detect this case. */
1358 if (!recording_case_labels_p ())
1361 slot
= edge_to_cases
->get (e
);
1365 /* If we did not find E in the hash table, then this must be the first
1366 time we have been queried for information about E & T. Add all the
1367 elements from T to the hash table then perform the query again. */
1369 n
= gimple_switch_num_labels (t
);
1370 for (i
= 0; i
< n
; i
++)
1372 tree elt
= gimple_switch_label (t
, i
);
1373 tree lab
= CASE_LABEL (elt
);
1374 basic_block label_bb
= label_to_block (cfun
, lab
);
1375 edge this_edge
= find_edge (e
->src
, label_bb
);
1377 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
/* Prepend ELT to the chain stored for THIS_EDGE; get_or_insert
   creates an empty slot the first time an edge is seen.  */
1379 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1380 CASE_CHAIN (elt
) = s
;
/* After populating the map, the entry for E is guaranteed to exist.  */
1384 return *edge_to_cases
->get (e
);
1387 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1390 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
/* Add one (unflagged) edge from BB to the block of every case label,
   including the default.  NOTE(review): declarations of I and N are
   not visible in this excerpt.  */
1394 n
= gimple_switch_num_labels (entry
);
1396 for (i
= 0; i
< n
; ++i
)
1398 basic_block label_bb
= gimple_switch_label_bb (cfun
, entry
, i
);
1399 make_edge (bb
, label_bb
, 0);
1404 /* Return the basic block holding label DEST. */
1407 label_to_block (struct function
*ifun
, tree dest
)
1409 int uid
= LABEL_DECL_UID (dest
);
1411 /* We would die hard when faced by an undefined label. Emit a label to
1412 the very first basic block. This will hopefully make even the dataflow
1413 and undefined variable warnings quite right. */
1414 if (seen_error () && uid
< 0)
1416 gimple_stmt_iterator gsi
=
1417 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
))
/* Materialize DEST at the start of the first real block so UID
   becomes valid below.  */
1420 stmt
= gimple_build_label (dest
);
1421 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1422 uid
= LABEL_DECL_UID (dest
);
/* Out-of-range UID means the label has no block mapping; the lookup
   below indexes the label-to-block map directly by UID.  */
1424 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1426 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1429 /* Create edges for a goto statement at block BB. Returns true
1430 if abnormal edges should be created. */
1433 make_goto_expr_edges (basic_block bb
)
1435 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1436 gimple
*goto_t
= gsi_stmt (last
);
1438 /* A simple GOTO creates normal edges. */
1439 if (simple_goto_p (goto_t
))
1441 tree dest
= gimple_goto_dest (goto_t
);
1442 basic_block label_bb
= label_to_block (cfun
, dest
);
1443 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1444 e
->goto_locus
= gimple_location (goto_t
);
/* The goto is now fully represented by the edge, so the statement
   itself can be deleted.  */
1445 gsi_remove (&last
, true);
1449 /* A computed GOTO creates abnormal edges. */
1453 /* Create edges for an asm statement with labels at block BB. */
1456 make_gimple_asm_edges (basic_block bb
)
/* BB must end in a GIMPLE_ASM; the cast asserts that.  */
1458 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1459 int i
, n
= gimple_asm_nlabels (stmt
);
/* One (unflagged) edge per label operand of the asm goto.  */
1461 for (i
= 0; i
< n
; ++i
)
1463 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1464 basic_block label_bb
= label_to_block (cfun
, label
);
1465 make_edge (bb
, label_bb
, 0);
1469 /*---------------------------------------------------------------------------
1471 ---------------------------------------------------------------------------*/
1473 /* Cleanup useless labels in basic blocks. This is something we wish
1474 to do early because it allows us to group case labels before creating
1475 the edges for the CFG, and it speeds up block statement iterators in
1476 all passes later on.
1477 We rerun this pass after CFG is created, to get rid of the labels that
1478 are no longer referenced. After then we do not run it any more, since
1479 (almost) no new labels should be created. */
1481 /* A map from basic block index to the leading label of that block. */
1482 static struct label_record
1487 /* True if the label is referenced from somewhere. */
1491 /* Given LABEL return the first label in the same basic block. */
1494 main_block_label (tree label
)
1496 basic_block bb
= label_to_block (cfun
, label
);
1497 tree main_label
= label_for_bb
[bb
->index
].label
;
1499 /* label_to_block possibly inserted undefined label into the chain. */
/* Record LABEL as the block's main label in that case.  */
1502 label_for_bb
[bb
->index
].label
= label
;
/* Mark the block's main label as referenced so cleanup_dead_labels
   keeps it.  */
1506 label_for_bb
[bb
->index
].used
= true;
1510 /* Clean up redundant labels within the exception tree. */
1513 cleanup_dead_labels_eh (void)
/* Nothing to do when the function has no EH data.  */
1520 if (cfun
->eh
== NULL
)
/* Canonicalize each landing pad's post_landing_pad label, moving the
   EH landing-pad number over to the replacement label.  */
1523 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1524 if (lp
&& lp
->post_landing_pad
)
1526 lab
= main_block_label (lp
->post_landing_pad
);
1527 if (lab
!= lp
->post_landing_pad
)
1529 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1530 EH_LANDING_PAD_NR (lab
) = lp
->index
;
/* Canonicalize labels stored in EH regions themselves.  */
1534 FOR_ALL_EH_REGION (r
)
1538 case ERT_MUST_NOT_THROW
:
/* For try regions, fix each catch handler's label.  NOTE(review):
   surrounding switch scaffolding is not visible in this excerpt.  */
1544 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1548 c
->label
= main_block_label (lab
);
1553 case ERT_ALLOWED_EXCEPTIONS
:
1554 lab
= r
->u
.allowed
.label
;
1556 r
->u
.allowed
.label
= main_block_label (lab
);
1562 /* Cleanup redundant labels. This is a three-step process:
1563 1) Find the leading label for each block.
1564 2) Redirect all references to labels to the leading labels.
1565 3) Cleanup all useless labels. */
1568 cleanup_dead_labels (void)
/* One label_record per basic block, zero-initialized.  */
1571 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1573 /* Find a suitable label for each block. We use the first user-defined
1574 label if there is one, or otherwise just the first label we see. */
1575 FOR_EACH_BB_FN (bb
, cfun
)
1577 gimple_stmt_iterator i
;
1579 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1582 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1587 label
= gimple_label_label (label_stmt
);
1589 /* If we have not yet seen a label for the current block,
1590 remember this one and see if there are more labels. */
1591 if (!label_for_bb
[bb
->index
].label
)
1593 label_for_bb
[bb
->index
].label
= label
;
1597 /* If we did see a label for the current block already, but it
1598 is an artificially created label, replace it if the current
1599 label is a user defined label. */
1600 if (!DECL_ARTIFICIAL (label
)
1601 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1603 label_for_bb
[bb
->index
].label
= label
;
1609 /* Now redirect all jumps/branches to the selected label.
1610 First do so for each block ending in a control statement. */
1611 FOR_EACH_BB_FN (bb
, cfun
)
1613 gimple
*stmt
= last_stmt (bb
);
1614 tree label
, new_label
;
/* Dispatch on the control statement kind; each case rewrites the
   labels it references via main_block_label.  */
1619 switch (gimple_code (stmt
))
1623 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1624 label
= gimple_cond_true_label (cond_stmt
);
1627 new_label
= main_block_label (label
);
1628 if (new_label
!= label
)
1629 gimple_cond_set_true_label (cond_stmt
, new_label
);
1632 label
= gimple_cond_false_label (cond_stmt
);
1635 new_label
= main_block_label (label
);
1636 if (new_label
!= label
)
1637 gimple_cond_set_false_label (cond_stmt
, new_label
);
1644 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1645 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1647 /* Replace all destination labels. */
1648 for (i
= 0; i
< n
; ++i
)
1650 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1651 label
= CASE_LABEL (case_label
);
1652 new_label
= main_block_label (label
);
1653 if (new_label
!= label
)
1654 CASE_LABEL (case_label
) = new_label
;
/* asm goto: rewrite every label operand.  */
1661 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1662 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1664 for (i
= 0; i
< n
; ++i
)
1666 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1667 tree label
= main_block_label (TREE_VALUE (cons
));
1668 TREE_VALUE (cons
) = label
;
1673 /* We have to handle gotos until they're removed, and we don't
1674 remove them until after we've created the CFG edges. */
1676 if (!computed_goto_p (stmt
))
1678 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1679 label
= gimple_goto_dest (goto_stmt
);
1680 new_label
= main_block_label (label
);
1681 if (new_label
!= label
)
1682 gimple_goto_set_dest (goto_stmt
, new_label
);
1686 case GIMPLE_TRANSACTION
:
/* Transactions carry up to three labels (normal, uninstrumented,
   over); canonicalize each one present.  */
1688 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1690 label
= gimple_transaction_label_norm (txn
);
1693 new_label
= main_block_label (label
);
1694 if (new_label
!= label
)
1695 gimple_transaction_set_label_norm (txn
, new_label
);
1698 label
= gimple_transaction_label_uninst (txn
);
1701 new_label
= main_block_label (label
);
1702 if (new_label
!= label
)
1703 gimple_transaction_set_label_uninst (txn
, new_label
);
1706 label
= gimple_transaction_label_over (txn
);
1709 new_label
= main_block_label (label
);
1710 if (new_label
!= label
)
1711 gimple_transaction_set_label_over (txn
, new_label
);
1721 /* Do the same for the exception region tree labels. */
1722 cleanup_dead_labels_eh ();
1724 /* Finally, purge dead labels. All user-defined labels and labels that
1725 can be the target of non-local gotos and labels which have their
1726 address taken are preserved. */
1727 FOR_EACH_BB_FN (bb
, cfun
)
1729 gimple_stmt_iterator i
;
1730 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1732 if (!label_for_this_bb
)
1735 /* If the main label of the block is unused, we may still remove it. */
1736 if (!label_for_bb
[bb
->index
].used
)
1737 label_for_this_bb
= NULL
;
1739 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1742 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1747 label
= gimple_label_label (label_stmt
);
/* Keep the chosen main label, user labels, non-local targets and
   address-taken labels; everything else is removed.  */
1749 if (label
== label_for_this_bb
1750 || !DECL_ARTIFICIAL (label
)
1751 || DECL_NONLOCAL (label
)
1752 || FORCED_LABEL (label
))
1755 gsi_remove (&i
, true);
1759 free (label_for_bb
);
1762 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1763 the ones jumping to the same label.
1764 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1767 group_case_labels_stmt (gswitch
*stmt
)
1769 int old_size
= gimple_switch_num_labels (stmt
);
1770 int i
, next_index
, new_size
;
1771 basic_block default_bb
= NULL
;
1773 default_bb
= gimple_switch_default_bb (cfun
, stmt
);
1775 /* Look for possible opportunities to merge cases. */
1777 while (i
< old_size
)
1779 tree base_case
, base_high
;
1780 basic_block base_bb
;
1782 base_case
= gimple_switch_label (stmt
, i
);
1784 gcc_assert (base_case
);
1785 base_bb
= label_to_block (cfun
, CASE_LABEL (base_case
));
1787 /* Discard cases that have the same destination as the default case or
1788 whose destiniation blocks have already been removed as unreachable. */
1789 if (base_bb
== NULL
|| base_bb
== default_bb
)
/* Upper bound of the current run; for a single-value case the high
   bound is its low bound.  */
1795 base_high
= CASE_HIGH (base_case
)
1796 ? CASE_HIGH (base_case
)
1797 : CASE_LOW (base_case
);
1800 /* Try to merge case labels. Break out when we reach the end
1801 of the label vector or when we cannot merge the next case
1802 label with the current one. */
1803 while (next_index
< old_size
)
1805 tree merge_case
= gimple_switch_label (stmt
, next_index
);
1806 basic_block merge_bb
= label_to_block (cfun
, CASE_LABEL (merge_case
));
/* BHP1 = base_high + 1, i.e. the only low bound that would make the
   next case contiguous with the current run.  */
1807 wide_int bhp1
= wi::to_wide (base_high
) + 1;
1809 /* Merge the cases if they jump to the same place,
1810 and their ranges are consecutive. */
1811 if (merge_bb
== base_bb
1812 && wi::to_wide (CASE_LOW (merge_case
)) == bhp1
)
1814 base_high
= CASE_HIGH (merge_case
) ?
1815 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1816 CASE_HIGH (base_case
) = base_high
;
1823 /* Discard cases that have an unreachable destination block. */
1824 if (EDGE_COUNT (base_bb
->succs
) == 0
1825 && gimple_seq_unreachable_p (bb_seq (base_bb
))
1826 /* Don't optimize this if __builtin_unreachable () is the
1827 implicitly added one by the C++ FE too early, before
1828 -Wreturn-type can be diagnosed. We'll optimize it later
1829 during switchconv pass or any other cfg cleanup. */
1830 && (gimple_in_ssa_p (cfun
)
1831 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb
)))
1832 != BUILTINS_LOCATION
)))
1834 edge base_edge
= find_edge (gimple_bb (stmt
), base_bb
);
1835 if (base_edge
!= NULL
)
1836 remove_edge_and_dominated_blocks (base_edge
);
/* Compact the surviving label into its final slot.  */
1842 gimple_switch_set_label (stmt
, new_size
,
1843 gimple_switch_label (stmt
, i
));
1848 gcc_assert (new_size
<= old_size
);
1850 if (new_size
< old_size
)
1851 gimple_switch_set_num_labels (stmt
, new_size
);
/* Return whether any labels were merged or discarded.  */
1853 return new_size
< old_size
;
1856 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1857 and scan the sorted vector of cases. Combine the ones jumping to the
1861 group_case_labels (void)
1864 bool changed
= false;
/* Apply group_case_labels_stmt to every switch in the function,
   accumulating whether anything changed.  */
1866 FOR_EACH_BB_FN (bb
, cfun
)
1868 gimple
*stmt
= last_stmt (bb
);
1869 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1870 changed
|= group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1876 /* Checks whether we can merge block B into block A. */
1879 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
/* A must have exactly one successor: B, via a non-complex edge, and
   B must have exactly one predecessor.  */
1883 if (!single_succ_p (a
))
1886 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1889 if (single_succ (a
) != b
)
1892 if (!single_pred_p (b
))
/* Never merge with the artificial ENTRY/EXIT blocks.  */
1895 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1896 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1899 /* If A ends by a statement causing exceptions or something similar, we
1900 cannot merge the blocks. */
1901 stmt
= last_stmt (a
);
1902 if (stmt
&& stmt_ends_bb_p (stmt
))
1905 /* Do not allow a block with only a non-local label to be merged. */
1907 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1908 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1911 /* Examine the labels at the beginning of B. */
1912 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1916 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1919 lab
= gimple_label_label (label_stmt
);
1921 /* Do not remove user forced labels or for -O0 any user labels. */
1922 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1926 /* Protect simple loop latches. We only want to avoid merging
1927 the latch with the loop header or with a block in another
1928 loop in this case. */
1930 && b
->loop_father
->latch
== b
1931 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1932 && (b
->loop_father
->header
== a
1933 || b
->loop_father
!= a
->loop_father
))
1936 /* It must be possible to eliminate all phi nodes in B. If ssa form
1937 is not up-to-date and a name-mapping is registered, we cannot eliminate
1938 any phis. Symbols marked for renaming are never a problem though. */
1939 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1942 gphi
*phi
= gsi
.phi ();
1943 /* Technically only new names matter. */
1944 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1948 /* When not optimizing, don't merge if we'd lose goto_locus. */
1950 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1952 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1953 gimple_stmt_iterator prev
, next
;
1954 prev
= gsi_last_nondebug_bb (a
);
1955 next
= gsi_after_labels (b
);
1956 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1957 gsi_next_nondebug (&next
);
/* The locus is preserved only if a neighboring statement already
   carries it; otherwise merging would drop it.  */
1958 if ((gsi_end_p (prev
)
1959 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1960 && (gsi_end_p (next
)
1961 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1968 /* Replaces all uses of NAME by VAL. */
1971 replace_uses_by (tree name
, tree val
)
1973 imm_use_iterator imm_iter
;
/* Visit every statement that uses NAME through the immediate-use
   lists.  */
1978 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1980 /* Mark the block if we change the last stmt in it. */
1981 if (cfgcleanup_altered_bbs
1982 && stmt_ends_bb_p (stmt
))
1983 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1985 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1987 replace_exp (use
, val
);
1989 if (gimple_code (stmt
) == GIMPLE_PHI
)
1991 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1992 PHI_ARG_INDEX_FROM_USE (use
));
1993 if (e
->flags
& EDGE_ABNORMAL
1994 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1996 /* This can only occur for virtual operands, since
1997 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1998 would prevent replacement. */
1999 gcc_checking_assert (virtual_operand_p (name
));
2000 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
/* For non-PHI statements, refold and keep invariant/EH info
   up to date after the substitution.  */
2005 if (gimple_code (stmt
) != GIMPLE_PHI
)
2007 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2008 gimple
*orig_stmt
= stmt
;
2011 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2012 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2013 only change sth from non-invariant to invariant, and only
2014 when propagating constants. */
2015 if (is_gimple_min_invariant (val
))
2016 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
2018 tree op
= gimple_op (stmt
, i
);
2019 /* Operands may be empty here. For example, the labels
2020 of a GIMPLE_COND are nulled out following the creation
2021 of the corresponding CFG edges. */
2022 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
2023 recompute_tree_invariant_for_addr_expr (op
);
2026 if (fold_stmt (&gsi
))
2027 stmt
= gsi_stmt (gsi
);
2029 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
2030 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
/* All uses must be gone now.  */
2036 gcc_checking_assert (has_zero_uses (name
));
2038 /* Also update the trees stored in loop structures. */
2043 FOR_EACH_LOOP (loop
, 0)
2045 substitute_in_loop_info (loop
, name
, val
);
2050 /* Merge block B into block A. */
2053 gimple_merge_blocks (basic_block a
, basic_block b
)
2055 gimple_stmt_iterator last
, gsi
;
2059 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
2061 /* Remove all single-valued PHI nodes from block B of the form
2062 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2063 gsi
= gsi_last_bb (a
);
2064 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
2066 gimple
*phi
= gsi_stmt (psi
);
2067 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
2069 bool may_replace_uses
= (virtual_operand_p (def
)
2070 || may_propagate_copy (def
, use
));
2072 /* In case we maintain loop closed ssa form, do not propagate arguments
2073 of loop exit phi nodes. */
2075 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
2076 && !virtual_operand_p (def
)
2077 && TREE_CODE (use
) == SSA_NAME
2078 && a
->loop_father
!= b
->loop_father
)
2079 may_replace_uses
= false;
/* When propagation is not possible, emit an explicit copy instead.  */
2081 if (!may_replace_uses
)
2083 gcc_assert (!virtual_operand_p (def
));
2085 /* Note that just emitting the copies is fine -- there is no problem
2086 with ordering of phi nodes. This is because A is the single
2087 predecessor of B, therefore results of the phi nodes cannot
2088 appear as arguments of the phi nodes. */
2089 copy
= gimple_build_assign (def
, use
);
2090 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
2091 remove_phi_node (&psi
, false);
2095 /* If we deal with a PHI for virtual operands, we can simply
2096 propagate these without fussing with folding or updating
2098 if (virtual_operand_p (def
))
2100 imm_use_iterator iter
;
2101 use_operand_p use_p
;
2104 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
2105 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2106 SET_USE (use_p
, use
);
2108 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2109 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
/* Real operands go through the general replacement path.  */
2112 replace_uses_by (def
, use
);
2114 remove_phi_node (&psi
, true);
2118 /* Ensure that B follows A. */
2119 move_block_after (b
, a
);
2121 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
2122 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
2124 /* Remove labels from B and set gimple_bb to A for other statements. */
2125 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
2127 gimple
*stmt
= gsi_stmt (gsi
);
2128 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2130 tree label
= gimple_label_label (label_stmt
);
2133 gsi_remove (&gsi
, false);
2135 /* Now that we can thread computed gotos, we might have
2136 a situation where we have a forced label in block B
2137 However, the label at the start of block B might still be
2138 used in other ways (think about the runtime checking for
2139 Fortran assigned gotos). So we cannot just delete the
2140 label. Instead we move the label to the start of block A. */
2141 if (FORCED_LABEL (label
))
2143 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2144 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2146 /* Other user labels keep around in a form of a debug stmt. */
2147 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_BIND_STMTS
)
2149 gimple
*dbg
= gimple_build_debug_bind (label
,
2152 gimple_debug_bind_reset_value (dbg
);
2153 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
/* If the removed label was an EH landing pad, detach it from the
   landing-pad record.  */
2156 lp_nr
= EH_LANDING_PAD_NR (label
);
2159 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2160 lp
->post_landing_pad
= NULL
;
/* Non-label statements simply become part of A.  */
2165 gimple_set_bb (stmt
, a
);
2170 /* When merging two BBs, if their counts are different, the larger count
2171 is selected as the new bb count. This is to handle inconsistent
2173 if (a
->loop_father
== b
->loop_father
)
2175 a
->count
= a
->count
.merge (b
->count
);
2178 /* Merge the sequences. */
2179 last
= gsi_last_bb (a
);
2180 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2181 set_bb_seq (b
, NULL
);
2183 if (cfgcleanup_altered_bbs
)
2184 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2188 /* Return the one of two successors of BB that is not reachable by a
2189 complex edge, if there is one. Else, return BB. We use
2190 this in optimizations that use post-dominators for their heuristics,
2191 to catch the cases in C++ where function calls are involved. */
2194 single_noncomplex_succ (basic_block bb
)
/* Only the exactly-two-successors case is interesting.  */
2197 if (EDGE_COUNT (bb
->succs
) != 2)
2200 e0
= EDGE_SUCC (bb
, 0);
2201 e1
= EDGE_SUCC (bb
, 1);
2202 if (e0
->flags
& EDGE_COMPLEX
)
2204 if (e1
->flags
& EDGE_COMPLEX
)
2210 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2213 notice_special_calls (gcall
*call
)
2215 int flags
= gimple_call_flags (call
);
/* Record alloca-like and setjmp-like calls on the function.  */
2217 if (flags
& ECF_MAY_BE_ALLOCA
)
2218 cfun
->calls_alloca
= true;
2219 if (flags
& ECF_RETURNS_TWICE
)
2220 cfun
->calls_setjmp
= true;
2224 /* Clear flags set by notice_special_calls. Used by dead code removal
2225 to update the flags. */
2228 clear_special_calls (void)
2230 cfun
->calls_alloca
= false;
2231 cfun
->calls_setjmp
= false;
2234 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2237 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2239 /* Since this block is no longer reachable, we can just delete all
2240 of its PHI nodes. */
2241 remove_phi_nodes (bb
);
2243 /* Remove edges to BB's successors. */
2244 while (EDGE_COUNT (bb
->succs
) > 0)
2245 remove_edge (EDGE_SUCC (bb
, 0));
2249 /* Remove statements of basic block BB. */
2252 remove_bb (basic_block bb
)
2254 gimple_stmt_iterator i
;
2258 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2259 if (dump_flags
& TDF_DETAILS
)
2261 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2262 fprintf (dump_file
, "\n");
2268 struct loop
*loop
= bb
->loop_father
;
2270 /* If a loop gets removed, clean up the information associated
2272 if (loop
->latch
== bb
2273 || loop
->header
== bb
)
2274 free_numbers_of_iterations_estimates (loop
);
2277 /* Remove all the instructions in the block. */
2278 if (bb_seq (bb
) != NULL
)
2280 /* Walk backwards so as to get a chance to substitute all
2281 released DEFs into debug stmts. See
2282 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2284 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2286 gimple
*stmt
= gsi_stmt (i
);
2287 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2289 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2290 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2293 gimple_stmt_iterator new_gsi
;
2295 /* A non-reachable non-local label may still be referenced.
2296 But it no longer needs to carry the extra semantics of
2298 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2300 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2301 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
/* Relocate the still-referenced label to the previous block.  */
2304 new_bb
= bb
->prev_bb
;
2305 /* Don't move any labels into ENTRY block. */
2306 if (new_bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
2308 new_bb
= single_succ (new_bb
);
2309 gcc_assert (new_bb
!= bb
);
2311 new_gsi
= gsi_start_bb (new_bb
);
2312 gsi_remove (&i
, false);
2313 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2317 /* Release SSA definitions. */
2318 release_defs (stmt
);
2319 gsi_remove (&i
, true);
/* Iterator was invalidated by the removal; restart from the end.  */
2323 i
= gsi_last_bb (bb
);
/* Finally drop PHIs, outgoing edges and the statement sequences.  */
2329 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2330 bb
->il
.gimple
.seq
= NULL
;
2331 bb
->il
.gimple
.phi_nodes
= NULL
;
2335 /* Given a basic block BB and a value VAL for use in the final statement
2336 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2337 the edge that will be taken out of the block.
2338 If VAL is NULL_TREE, then the current value of the final statement's
2339 predicate or index is used.
2340 If the value does not match a unique edge, NULL is returned. */
2343 find_taken_edge (basic_block bb
, tree val
)
2347 stmt
= last_stmt (bb
);
2349 /* Handle ENTRY and EXIT. */
/* Dispatch on the kind of the block-ending statement.  */
2353 if (gimple_code (stmt
) == GIMPLE_COND
)
2354 return find_taken_edge_cond_expr (as_a
<gcond
*> (stmt
), val
);
2356 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2357 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), val
);
2359 if (computed_goto_p (stmt
))
2361 /* Only optimize if the argument is a label, if the argument is
2362 not a label then we cannot construct a proper CFG.
2364 It may be the case that we only need to allow the LABEL_REF to
2365 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2366 appear inside a LABEL_EXPR just to be safe. */
2368 && (TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2369 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2370 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2373 /* Otherwise we only know the taken successor edge if it's unique. */
2374 return single_succ_p (bb
) ? single_succ_edge (bb
) : NULL
;
2377 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2378 statement, determine which of the outgoing edges will be taken out of the
2379 block. Return NULL if either edge may be taken. */
2382 find_taken_edge_computed_goto (basic_block bb
, tree val
)
/* VAL here is the LABEL_DECL extracted by the caller; map it to its
   block and look for a matching edge.  */
2387 dest
= label_to_block (cfun
, val
);
2389 e
= find_edge (bb
, dest
);
2391 /* It's possible for find_edge to return NULL here on invalid code
2392 that abuses the labels-as-values extension (e.g. code that attempts to
2393 jump *between* functions via stored labels-as-values; PR 84136).
2394 If so, then we simply return that NULL for the edge.
2395 We don't currently have a way of detecting such invalid code, so we
2396 can't assert that it was the case when a NULL edge occurs here. */
2401 /* Given COND_STMT and a constant value VAL for use as the predicate,
2402 determine which of the two edges will be taken out of
2403 the statement's block. Return NULL if either edge may be taken.
2404 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2408 find_taken_edge_cond_expr (const gcond
*cond_stmt
, tree val
)
2410 edge true_edge
, false_edge
;
2412 if (val
== NULL_TREE
)
2414 /* Use the current value of the predicate. */
2415 if (gimple_cond_true_p (cond_stmt
))
2416 val
= integer_one_node
;
2417 else if (gimple_cond_false_p (cond_stmt
))
2418 val
= integer_zero_node
;
/* Only a constant predicate determines a unique edge.  */
2422 else if (TREE_CODE (val
) != INTEGER_CST
)
2425 extract_true_false_edges_from_block (gimple_bb (cond_stmt
),
2426 &true_edge
, &false_edge
);
2428 return (integer_zerop (val
) ? false_edge
: true_edge
);
2431 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2432 which edge will be taken out of the statement's block. Return NULL if any
2434 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2438 find_taken_edge_switch_expr (const gswitch
*switch_stmt
, tree val
)
2440 basic_block dest_bb
;
/* A switch with only the default label always takes that label.  */
2444 if (gimple_switch_num_labels (switch_stmt
) == 1)
2445 taken_case
= gimple_switch_default_label (switch_stmt
);
2448 if (val
== NULL_TREE
)
2449 val
= gimple_switch_index (switch_stmt
);
2450 if (TREE_CODE (val
) != INTEGER_CST
)
2453 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2455 dest_bb
= label_to_block (cfun
, CASE_LABEL (taken_case
));
2457 e
= find_edge (gimple_bb (switch_stmt
), dest_bb
);
2463 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2464 We can make optimal use here of the fact that the case labels are
2465 sorted: We can do a binary search for a case matching VAL. */
2468 find_case_label_for_value (const gswitch
*switch_stmt
, tree val
)
2470 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2471 tree default_case
= gimple_switch_default_label (switch_stmt
);
/* Binary search over the sorted case labels; invariant: the answer,
   if any, lies in (low, high).  */
2473 for (low
= 0, high
= n
; high
- low
> 1; )
2475 size_t i
= (high
+ low
) / 2;
2476 tree t
= gimple_switch_label (switch_stmt
, i
);
2479 /* Cache the result of comparing CASE_LOW and val. */
2480 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2487 if (CASE_HIGH (t
) == NULL
)
2489 /* A singe-valued case label. */
2495 /* A case range. We can only handle integer ranges. */
2496 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
/* No case matched VAL; the default label is taken.  */
2501 return default_case
;
2505 /* Dump a basic block on stderr. */
2508 gimple_debug_bb (basic_block bb
)
2510 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2514 /* Dump basic block with index N on stderr. */
2517 gimple_debug_bb_n (int n
)
/* Dump the block, then return it for convenience in the debugger.  */
2519 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2520 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2524 /* Dump the CFG on stderr.
2526 FLAGS are the same used by the tree dumping functions
2527 (see TDF_* in dumpfile.h). */
2530 gimple_debug_cfg (dump_flags_t flags
)
2532 gimple_dump_cfg (stderr
, flags
);
2536 /* Dump the program showing basic block boundaries on the given FILE.
2538 FLAGS are the same used by the tree dumping functions (see TDF_* in
2542 gimple_dump_cfg (FILE *file
, dump_flags_t flags
)
/* Detailed mode prints a header and a brief CFG summary first.  */
2544 if (flags
& TDF_DETAILS
)
2546 dump_function_header (file
, current_function_decl
, flags
);
2547 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2548 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2549 last_basic_block_for_fn (cfun
));
2551 brief_dump_cfg (file
, flags
);
2552 fprintf (file
, "\n");
2555 if (flags
& TDF_STATS
)
2556 dump_cfg_stats (file
);
/* The function body itself, with block boundaries marked.  */
2558 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2562 /* Dump CFG statistics on FILE. */
2565 dump_cfg_stats (FILE *file
)
/* MAX_NUM_MERGED_LABELS persists across calls to track the high-water
   mark for this process.  */
2567 static long max_num_merged_labels
= 0;
2568 unsigned long size
, total
= 0;
2571 const char * const fmt_str
= "%-30s%-13s%12s\n";
2572 const char * const fmt_str_1
= "%-30s%13d" PRsa (11) "\n";
2573 const char * const fmt_str_2
= "%-30s%13ld" PRsa (11) "\n";
2574 const char * const fmt_str_3
= "%-43s" PRsa (11) "\n";
2575 const char *funcname
= current_function_name ();
2577 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2579 fprintf (file
, "---------------------------------------------------------\n");
2580 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2581 fprintf (file
, fmt_str
, "", " instances ", "used ");
2582 fprintf (file
, "---------------------------------------------------------\n");
/* Basic-block memory: block count times the size of the structure.  */
2584 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2586 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2587 SIZE_AMOUNT (size
));
/* Edge memory: count successors of every block.  NOTE(review): the
   declaration of NUM_EDGES is not visible in this excerpt.  */
2590 FOR_EACH_BB_FN (bb
, cfun
)
2591 num_edges
+= EDGE_COUNT (bb
->succs
);
2592 size
= num_edges
* sizeof (struct edge_def
);
2594 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SIZE_AMOUNT (size
));
2596 fprintf (file
, "---------------------------------------------------------\n");
2597 fprintf (file
, fmt_str_3
, "Total memory used by CFG data",
2598 SIZE_AMOUNT (total
));
2599 fprintf (file
, "---------------------------------------------------------\n");
2600 fprintf (file
, "\n");
2602 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2603 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2605 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2606 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2608 fprintf (file
, "\n");
2612 /* Dump CFG statistics on stderr. Keep extern so that it's always
2613 linked in the final executable. */
2616 debug_cfg_stats (void)
2618 dump_cfg_stats (stderr
);
2621 /*---------------------------------------------------------------------------
2622 Miscellaneous helpers
2623 ---------------------------------------------------------------------------*/
2625 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2626 flow. Transfers of control flow associated with EH are excluded. */
2629 call_can_make_abnormal_goto (gimple
*t
)
2631 /* If the function has no non-local labels, then a call cannot make an
2632 abnormal transfer of control. */
2633 if (!cfun
->has_nonlocal_label
2634 && !cfun
->calls_setjmp
)
2637 /* Likewise if the call has no side effects. */
2638 if (!gimple_has_side_effects (t
))
2641 /* Likewise if the called function is leaf. */
2642 if (gimple_call_flags (t
) & ECF_LEAF
)
2649 /* Return true if T can make an abnormal transfer of control flow.
2650 Transfers of control flow associated with EH are excluded. */
2653 stmt_can_make_abnormal_goto (gimple
*t
)
2655 if (computed_goto_p (t
))
2657 if (is_gimple_call (t
))
2658 return call_can_make_abnormal_goto (t
);
2663 /* Return true if T represents a stmt that always transfers control. */
2666 is_ctrl_stmt (gimple
*t
)
2668 switch (gimple_code (t
))
2682 /* Return true if T is a statement that may alter the flow of control
2683 (e.g., a call to a non-returning function). */
2686 is_ctrl_altering_stmt (gimple
*t
)
2690 switch (gimple_code (t
))
2693 /* Per stmt call flag indicates whether the call could alter
2695 if (gimple_call_ctrl_altering_p (t
))
2699 case GIMPLE_EH_DISPATCH
:
2700 /* EH_DISPATCH branches to the individual catch handlers at
2701 this level of a try or allowed-exceptions region. It can
2702 fallthru to the next statement as well. */
2706 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2711 /* OpenMP directives alter control flow. */
2714 case GIMPLE_TRANSACTION
:
2715 /* A transaction start alters control flow. */
2722 /* If a statement can throw, it alters control flow. */
2723 return stmt_can_throw_internal (cfun
, t
);
2727 /* Return true if T is a simple local goto. */
2730 simple_goto_p (gimple
*t
)
2732 return (gimple_code (t
) == GIMPLE_GOTO
2733 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2737 /* Return true if STMT should start a new basic block. PREV_STMT is
2738 the statement preceding STMT. It is used when STMT is a label or a
2739 case label. Labels should only start a new basic block if their
2740 previous statement wasn't a label. Otherwise, sequence of labels
2741 would generate unnecessary basic blocks that only contain a single
2745 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2750 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2751 any nondebug stmts in the block. We don't want to start another
2752 block in this case: the debug stmt will already have started the
2753 one STMT would start if we weren't outputting debug stmts. */
2754 if (prev_stmt
&& is_gimple_debug (prev_stmt
))
2757 /* Labels start a new basic block only if the preceding statement
2758 wasn't a label of the same type. This prevents the creation of
2759 consecutive blocks that have nothing but a single label. */
2760 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2762 /* Nonlocal and computed GOTO targets always start a new block. */
2763 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2764 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2767 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2769 if (DECL_NONLOCAL (gimple_label_label (
2770 as_a
<glabel
*> (prev_stmt
))))
2773 cfg_stats
.num_merged_labels
++;
2779 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2781 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2782 /* setjmp acts similar to a nonlocal GOTO target and thus should
2783 start a new block. */
2785 if (gimple_call_internal_p (stmt
, IFN_PHI
)
2787 && gimple_code (prev_stmt
) != GIMPLE_LABEL
2788 && (gimple_code (prev_stmt
) != GIMPLE_CALL
2789 || ! gimple_call_internal_p (prev_stmt
, IFN_PHI
)))
2790 /* PHI nodes start a new block unless preceeded by a label
2799 /* Return true if T should end a basic block. */
2802 stmt_ends_bb_p (gimple
*t
)
2804 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
/* Remove block annotations and other data structures.  Currently only
   the label-to-block map of function FN is released.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
2815 /* Return the virtual phi in BB. */
2818 get_virtual_phi (basic_block bb
)
2820 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2824 gphi
*phi
= gsi
.phi ();
2826 if (virtual_operand_p (PHI_RESULT (phi
)))
2833 /* Return the first statement in basic block BB. */
2836 first_stmt (basic_block bb
)
2838 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2839 gimple
*stmt
= NULL
;
2841 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2849 /* Return the first non-label statement in basic block BB. */
2852 first_non_label_stmt (basic_block bb
)
2854 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2855 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2857 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2860 /* Return the last statement in basic block BB. */
2863 last_stmt (basic_block bb
)
2865 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2866 gimple
*stmt
= NULL
;
2868 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2876 /* Return the last statement of an otherwise empty block. Return NULL
2877 if the block is totally empty, or if it contains more than one
2881 last_and_only_stmt (basic_block bb
)
2883 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2884 gimple
*last
, *prev
;
2889 last
= gsi_stmt (i
);
2890 gsi_prev_nondebug (&i
);
2894 /* Empty statements should no longer appear in the instruction stream.
2895 Everything that might have appeared before should be deleted by
2896 remove_useless_stmts, and the optimizers should just gsi_remove
2897 instead of smashing with build_empty_stmt.
2899 Thus the only thing that should appear here in a block containing
2900 one executable statement is a label. */
2901 prev
= gsi_stmt (i
);
2902 if (gimple_code (prev
) == GIMPLE_LABEL
)
2908 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2911 reinstall_phi_args (edge new_edge
, edge old_edge
)
2917 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2921 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2922 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2923 i
++, gsi_next (&phis
))
2925 gphi
*phi
= phis
.phi ();
2926 tree result
= redirect_edge_var_map_result (vm
);
2927 tree arg
= redirect_edge_var_map_def (vm
);
2929 gcc_assert (result
== gimple_phi_result (phi
));
2931 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2934 redirect_edge_var_map_clear (old_edge
);
2937 /* Returns the basic block after which the new basic block created
2938 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2939 near its "logical" location. This is of most help to humans looking
2940 at debugging dumps. */
2943 split_edge_bb_loc (edge edge_in
)
2945 basic_block dest
= edge_in
->dest
;
2946 basic_block dest_prev
= dest
->prev_bb
;
2950 edge e
= find_edge (dest_prev
, dest
);
2951 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2952 return edge_in
->src
;
2957 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2958 Abort on abnormal edges. */
2961 gimple_split_edge (edge edge_in
)
2963 basic_block new_bb
, after_bb
, dest
;
2966 /* Abnormal edges cannot be split. */
2967 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2969 dest
= edge_in
->dest
;
2971 after_bb
= split_edge_bb_loc (edge_in
);
2973 new_bb
= create_empty_bb (after_bb
);
2974 new_bb
->count
= edge_in
->count ();
2976 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2977 gcc_assert (e
== edge_in
);
2979 new_edge
= make_single_succ_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2980 reinstall_phi_args (new_edge
, e
);
2986 /* Verify properties of the address expression T whose base should be
2987 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2990 verify_address (tree t
, bool verify_addressable
)
2993 bool old_side_effects
;
2995 bool new_side_effects
;
2997 old_constant
= TREE_CONSTANT (t
);
2998 old_side_effects
= TREE_SIDE_EFFECTS (t
);
3000 recompute_tree_invariant_for_addr_expr (t
);
3001 new_side_effects
= TREE_SIDE_EFFECTS (t
);
3002 new_constant
= TREE_CONSTANT (t
);
3004 if (old_constant
!= new_constant
)
3006 error ("constant not recomputed when ADDR_EXPR changed");
3009 if (old_side_effects
!= new_side_effects
)
3011 error ("side effects not recomputed when ADDR_EXPR changed");
3015 tree base
= TREE_OPERAND (t
, 0);
3016 while (handled_component_p (base
))
3017 base
= TREE_OPERAND (base
, 0);
3020 || TREE_CODE (base
) == PARM_DECL
3021 || TREE_CODE (base
) == RESULT_DECL
))
3024 if (DECL_GIMPLE_REG_P (base
))
3026 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3030 if (verify_addressable
&& !TREE_ADDRESSABLE (base
))
3032 error ("address taken, but ADDRESSABLE bit not set");
3040 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3041 Returns true if there is an error, otherwise false. */
3044 verify_types_in_gimple_min_lval (tree expr
)
3048 if (is_gimple_id (expr
))
3051 if (TREE_CODE (expr
) != TARGET_MEM_REF
3052 && TREE_CODE (expr
) != MEM_REF
)
3054 error ("invalid expression for min lvalue");
3058 /* TARGET_MEM_REFs are strange beasts. */
3059 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3062 op
= TREE_OPERAND (expr
, 0);
3063 if (!is_gimple_val (op
))
3065 error ("invalid operand in indirect reference");
3066 debug_generic_stmt (op
);
3069 /* Memory references now generally can involve a value conversion. */
3074 /* Verify if EXPR is a valid GIMPLE reference expression. If
3075 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3076 if there is an error, otherwise false. */
3079 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3081 if (TREE_CODE (expr
) == REALPART_EXPR
3082 || TREE_CODE (expr
) == IMAGPART_EXPR
3083 || TREE_CODE (expr
) == BIT_FIELD_REF
)
3085 tree op
= TREE_OPERAND (expr
, 0);
3086 if (!is_gimple_reg_type (TREE_TYPE (expr
)))
3088 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3092 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
3094 tree t1
= TREE_OPERAND (expr
, 1);
3095 tree t2
= TREE_OPERAND (expr
, 2);
3096 poly_uint64 size
, bitpos
;
3097 if (!poly_int_tree_p (t1
, &size
)
3098 || !poly_int_tree_p (t2
, &bitpos
)
3099 || !types_compatible_p (bitsizetype
, TREE_TYPE (t1
))
3100 || !types_compatible_p (bitsizetype
, TREE_TYPE (t2
)))
3102 error ("invalid position or size operand to BIT_FIELD_REF");
3105 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3106 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr
)), size
))
3108 error ("integral result type precision does not match "
3109 "field size of BIT_FIELD_REF");
3112 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3113 && TYPE_MODE (TREE_TYPE (expr
)) != BLKmode
3114 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr
))),
3117 error ("mode size of non-integral result does not "
3118 "match field size of BIT_FIELD_REF");
3121 if (INTEGRAL_TYPE_P (TREE_TYPE (op
))
3122 && !type_has_mode_precision_p (TREE_TYPE (op
)))
3124 error ("BIT_FIELD_REF of non-mode-precision operand");
3127 if (!AGGREGATE_TYPE_P (TREE_TYPE (op
))
3128 && maybe_gt (size
+ bitpos
,
3129 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op
)))))
3131 error ("position plus size exceeds size of referenced object in "
3137 if ((TREE_CODE (expr
) == REALPART_EXPR
3138 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3139 && !useless_type_conversion_p (TREE_TYPE (expr
),
3140 TREE_TYPE (TREE_TYPE (op
))))
3142 error ("type mismatch in real/imagpart reference");
3143 debug_generic_stmt (TREE_TYPE (expr
));
3144 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3150 while (handled_component_p (expr
))
3152 if (TREE_CODE (expr
) == REALPART_EXPR
3153 || TREE_CODE (expr
) == IMAGPART_EXPR
3154 || TREE_CODE (expr
) == BIT_FIELD_REF
)
3156 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3160 tree op
= TREE_OPERAND (expr
, 0);
3162 if (TREE_CODE (expr
) == ARRAY_REF
3163 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3165 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3166 || (TREE_OPERAND (expr
, 2)
3167 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3168 || (TREE_OPERAND (expr
, 3)
3169 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3171 error ("invalid operands to array reference");
3172 debug_generic_stmt (expr
);
3177 /* Verify if the reference array element types are compatible. */
3178 if (TREE_CODE (expr
) == ARRAY_REF
3179 && !useless_type_conversion_p (TREE_TYPE (expr
),
3180 TREE_TYPE (TREE_TYPE (op
))))
3182 error ("type mismatch in array reference");
3183 debug_generic_stmt (TREE_TYPE (expr
));
3184 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3187 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3188 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3189 TREE_TYPE (TREE_TYPE (op
))))
3191 error ("type mismatch in array range reference");
3192 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3193 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3197 if (TREE_CODE (expr
) == COMPONENT_REF
)
3199 if (TREE_OPERAND (expr
, 2)
3200 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3202 error ("invalid COMPONENT_REF offset operator");
3205 if (!useless_type_conversion_p (TREE_TYPE (expr
),
3206 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3208 error ("type mismatch in component reference");
3209 debug_generic_stmt (TREE_TYPE (expr
));
3210 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3215 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3217 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3218 that their operand is not an SSA name or an invariant when
3219 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3220 bug). Otherwise there is nothing to verify, gross mismatches at
3221 most invoke undefined behavior. */
3223 && (TREE_CODE (op
) == SSA_NAME
3224 || is_gimple_min_invariant (op
)))
3226 error ("conversion of an SSA_NAME on the left hand side");
3227 debug_generic_stmt (expr
);
3230 else if (TREE_CODE (op
) == SSA_NAME
3231 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3233 error ("conversion of register to a different size");
3234 debug_generic_stmt (expr
);
3237 else if (!handled_component_p (op
))
3244 if (TREE_CODE (expr
) == MEM_REF
)
3246 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0))
3247 || (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
3248 && verify_address (TREE_OPERAND (expr
, 0), false)))
3250 error ("invalid address operand in MEM_REF");
3251 debug_generic_stmt (expr
);
3254 if (!poly_int_tree_p (TREE_OPERAND (expr
, 1))
3255 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3257 error ("invalid offset operand in MEM_REF");
3258 debug_generic_stmt (expr
);
3262 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3264 if (!TMR_BASE (expr
)
3265 || !is_gimple_mem_ref_addr (TMR_BASE (expr
))
3266 || (TREE_CODE (TMR_BASE (expr
)) == ADDR_EXPR
3267 && verify_address (TMR_BASE (expr
), false)))
3269 error ("invalid address operand in TARGET_MEM_REF");
3272 if (!TMR_OFFSET (expr
)
3273 || !poly_int_tree_p (TMR_OFFSET (expr
))
3274 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3276 error ("invalid offset operand in TARGET_MEM_REF");
3277 debug_generic_stmt (expr
);
3281 else if (TREE_CODE (expr
) == INDIRECT_REF
)
3283 error ("INDIRECT_REF in gimple IL");
3284 debug_generic_stmt (expr
);
3288 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3289 && verify_types_in_gimple_min_lval (expr
));
3292 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3293 list of pointer-to types that is trivially convertible to DEST. */
3296 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3300 if (!TYPE_POINTER_TO (src_obj
))
3303 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3304 if (useless_type_conversion_p (dest
, src
))
3310 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3311 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3314 valid_fixed_convert_types_p (tree type1
, tree type2
)
3316 return (FIXED_POINT_TYPE_P (type1
)
3317 && (INTEGRAL_TYPE_P (type2
)
3318 || SCALAR_FLOAT_TYPE_P (type2
)
3319 || FIXED_POINT_TYPE_P (type2
)));
3322 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3323 is a problem, otherwise false. */
3326 verify_gimple_call (gcall
*stmt
)
3328 tree fn
= gimple_call_fn (stmt
);
3329 tree fntype
, fndecl
;
3332 if (gimple_call_internal_p (stmt
))
3336 error ("gimple call has two targets");
3337 debug_generic_stmt (fn
);
3340 /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
3341 else if (gimple_call_internal_fn (stmt
) == IFN_PHI
)
3350 error ("gimple call has no target");
3355 if (fn
&& !is_gimple_call_addr (fn
))
3357 error ("invalid function in gimple call");
3358 debug_generic_stmt (fn
);
3363 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3364 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3365 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3367 error ("non-function in gimple call");
3371 fndecl
= gimple_call_fndecl (stmt
);
3373 && TREE_CODE (fndecl
) == FUNCTION_DECL
3374 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3375 && !DECL_PURE_P (fndecl
)
3376 && !TREE_READONLY (fndecl
))
3378 error ("invalid pure const state for function");
3382 tree lhs
= gimple_call_lhs (stmt
);
3384 && (!is_gimple_lvalue (lhs
)
3385 || verify_types_in_gimple_reference (lhs
, true)))
3387 error ("invalid LHS in gimple call");
3391 if (gimple_call_ctrl_altering_p (stmt
)
3392 && gimple_call_noreturn_p (stmt
)
3393 && should_remove_lhs_p (lhs
))
3395 error ("LHS in noreturn call");
3399 fntype
= gimple_call_fntype (stmt
);
3402 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3403 /* ??? At least C++ misses conversions at assignments from
3404 void * call results.
3405 For now simply allow arbitrary pointer type conversions. */
3406 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3407 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3409 error ("invalid conversion in gimple call");
3410 debug_generic_stmt (TREE_TYPE (lhs
));
3411 debug_generic_stmt (TREE_TYPE (fntype
));
3415 if (gimple_call_chain (stmt
)
3416 && !is_gimple_val (gimple_call_chain (stmt
)))
3418 error ("invalid static chain in gimple call");
3419 debug_generic_stmt (gimple_call_chain (stmt
));
3423 /* If there is a static chain argument, the call should either be
3424 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3425 if (gimple_call_chain (stmt
)
3427 && !DECL_STATIC_CHAIN (fndecl
))
3429 error ("static chain with function that doesn%'t use one");
3433 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3435 switch (DECL_FUNCTION_CODE (fndecl
))
3437 case BUILT_IN_UNREACHABLE
:
3439 if (gimple_call_num_args (stmt
) > 0)
3441 /* Built-in unreachable with parameters might not be caught by
3442 undefined behavior sanitizer. Front-ends do check users do not
3443 call them that way but we also produce calls to
3444 __builtin_unreachable internally, for example when IPA figures
3445 out a call cannot happen in a legal program. In such cases,
3446 we must make sure arguments are stripped off. */
3447 error ("__builtin_unreachable or __builtin_trap call with "
3457 /* ??? The C frontend passes unpromoted arguments in case it
3458 didn't see a function declaration before the call. So for now
3459 leave the call arguments mostly unverified. Once we gimplify
3460 unit-at-a-time we have a chance to fix this. */
3462 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3464 tree arg
= gimple_call_arg (stmt
, i
);
3465 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3466 && !is_gimple_val (arg
))
3467 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3468 && !is_gimple_lvalue (arg
)))
3470 error ("invalid argument to gimple call");
3471 debug_generic_expr (arg
);
3479 /* Verifies the gimple comparison with the result type TYPE and
3480 the operands OP0 and OP1, comparison code is CODE. */
3483 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3485 tree op0_type
= TREE_TYPE (op0
);
3486 tree op1_type
= TREE_TYPE (op1
);
3488 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3490 error ("invalid operands in gimple comparison");
3494 /* For comparisons we do not have the operations type as the
3495 effective type the comparison is carried out in. Instead
3496 we require that either the first operand is trivially
3497 convertible into the second, or the other way around.
3498 Because we special-case pointers to void we allow
3499 comparisons of pointers with the same mode as well. */
3500 if (!useless_type_conversion_p (op0_type
, op1_type
)
3501 && !useless_type_conversion_p (op1_type
, op0_type
)
3502 && (!POINTER_TYPE_P (op0_type
)
3503 || !POINTER_TYPE_P (op1_type
)
3504 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3506 error ("mismatching comparison operand types");
3507 debug_generic_expr (op0_type
);
3508 debug_generic_expr (op1_type
);
3512 /* The resulting type of a comparison may be an effective boolean type. */
3513 if (INTEGRAL_TYPE_P (type
)
3514 && (TREE_CODE (type
) == BOOLEAN_TYPE
3515 || TYPE_PRECISION (type
) == 1))
3517 if ((TREE_CODE (op0_type
) == VECTOR_TYPE
3518 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3519 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3520 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3521 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3523 error ("unsupported operation or type for vector comparison"
3524 " returning a boolean");
3525 debug_generic_expr (op0_type
);
3526 debug_generic_expr (op1_type
);
3530 /* Or a boolean vector type with the same element count
3531 as the comparison operand types. */
3532 else if (TREE_CODE (type
) == VECTOR_TYPE
3533 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3535 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3536 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3538 error ("non-vector operands in vector comparison");
3539 debug_generic_expr (op0_type
);
3540 debug_generic_expr (op1_type
);
3544 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type
),
3545 TYPE_VECTOR_SUBPARTS (op0_type
)))
3547 error ("invalid vector comparison resulting type");
3548 debug_generic_expr (type
);
3554 error ("bogus comparison result type");
3555 debug_generic_expr (type
);
3562 /* Verify a gimple assignment statement STMT with an unary rhs.
3563 Returns true if anything is wrong. */
3566 verify_gimple_assign_unary (gassign
*stmt
)
3568 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3569 tree lhs
= gimple_assign_lhs (stmt
);
3570 tree lhs_type
= TREE_TYPE (lhs
);
3571 tree rhs1
= gimple_assign_rhs1 (stmt
);
3572 tree rhs1_type
= TREE_TYPE (rhs1
);
3574 if (!is_gimple_reg (lhs
))
3576 error ("non-register as LHS of unary operation");
3580 if (!is_gimple_val (rhs1
))
3582 error ("invalid operand in unary operation");
3586 /* First handle conversions. */
3591 /* Allow conversions from pointer type to integral type only if
3592 there is no sign or zero extension involved.
3593 For targets were the precision of ptrofftype doesn't match that
3594 of pointers we need to allow arbitrary conversions to ptrofftype. */
3595 if ((POINTER_TYPE_P (lhs_type
)
3596 && INTEGRAL_TYPE_P (rhs1_type
))
3597 || (POINTER_TYPE_P (rhs1_type
)
3598 && INTEGRAL_TYPE_P (lhs_type
)
3599 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3600 || ptrofftype_p (lhs_type
))))
3603 /* Allow conversion from integral to offset type and vice versa. */
3604 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3605 && INTEGRAL_TYPE_P (rhs1_type
))
3606 || (INTEGRAL_TYPE_P (lhs_type
)
3607 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3610 /* Otherwise assert we are converting between types of the
3612 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3614 error ("invalid types in nop conversion");
3615 debug_generic_expr (lhs_type
);
3616 debug_generic_expr (rhs1_type
);
3623 case ADDR_SPACE_CONVERT_EXPR
:
3625 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3626 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3627 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3629 error ("invalid types in address space conversion");
3630 debug_generic_expr (lhs_type
);
3631 debug_generic_expr (rhs1_type
);
3638 case FIXED_CONVERT_EXPR
:
3640 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3641 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3643 error ("invalid types in fixed-point conversion");
3644 debug_generic_expr (lhs_type
);
3645 debug_generic_expr (rhs1_type
);
3654 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3655 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3656 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3658 error ("invalid types in conversion to floating point");
3659 debug_generic_expr (lhs_type
);
3660 debug_generic_expr (rhs1_type
);
3667 case FIX_TRUNC_EXPR
:
3669 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3670 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3671 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3673 error ("invalid types in conversion to integer");
3674 debug_generic_expr (lhs_type
);
3675 debug_generic_expr (rhs1_type
);
3682 case VEC_UNPACK_HI_EXPR
:
3683 case VEC_UNPACK_LO_EXPR
:
3684 case VEC_UNPACK_FLOAT_HI_EXPR
:
3685 case VEC_UNPACK_FLOAT_LO_EXPR
:
3686 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
3687 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
3688 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3689 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3690 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3691 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
)))
3692 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3693 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3694 || ((rhs_code
== VEC_UNPACK_HI_EXPR
3695 || rhs_code
== VEC_UNPACK_LO_EXPR
)
3696 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3697 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3698 || ((rhs_code
== VEC_UNPACK_FLOAT_HI_EXPR
3699 || rhs_code
== VEC_UNPACK_FLOAT_LO_EXPR
)
3700 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3701 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))))
3702 || ((rhs_code
== VEC_UNPACK_FIX_TRUNC_HI_EXPR
3703 || rhs_code
== VEC_UNPACK_FIX_TRUNC_LO_EXPR
)
3704 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3705 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
))))
3706 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type
)),
3707 2 * GET_MODE_SIZE (element_mode (rhs1_type
)))
3708 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type
)
3709 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type
)))
3710 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type
),
3711 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
3713 error ("type mismatch in vector unpack expression");
3714 debug_generic_expr (lhs_type
);
3715 debug_generic_expr (rhs1_type
);
3729 if (!ANY_INTEGRAL_TYPE_P (lhs_type
)
3730 || !TYPE_UNSIGNED (lhs_type
)
3731 || !ANY_INTEGRAL_TYPE_P (rhs1_type
)
3732 || TYPE_UNSIGNED (rhs1_type
)
3733 || element_precision (lhs_type
) != element_precision (rhs1_type
))
3735 error ("invalid types for ABSU_EXPR");
3736 debug_generic_expr (lhs_type
);
3737 debug_generic_expr (rhs1_type
);
3742 case VEC_DUPLICATE_EXPR
:
3743 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
3744 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), rhs1_type
))
3746 error ("vec_duplicate should be from a scalar to a like vector");
3747 debug_generic_expr (lhs_type
);
3748 debug_generic_expr (rhs1_type
);
3757 /* For the remaining codes assert there is no conversion involved. */
3758 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3760 error ("non-trivial conversion in unary operation");
3761 debug_generic_expr (lhs_type
);
3762 debug_generic_expr (rhs1_type
);
3769 /* Verify a gimple assignment statement STMT with a binary rhs.
3770 Returns true if anything is wrong. */
3773 verify_gimple_assign_binary (gassign
*stmt
)
3775 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3776 tree lhs
= gimple_assign_lhs (stmt
);
3777 tree lhs_type
= TREE_TYPE (lhs
);
3778 tree rhs1
= gimple_assign_rhs1 (stmt
);
3779 tree rhs1_type
= TREE_TYPE (rhs1
);
3780 tree rhs2
= gimple_assign_rhs2 (stmt
);
3781 tree rhs2_type
= TREE_TYPE (rhs2
);
3783 if (!is_gimple_reg (lhs
))
3785 error ("non-register as LHS of binary operation");
3789 if (!is_gimple_val (rhs1
)
3790 || !is_gimple_val (rhs2
))
3792 error ("invalid operands in binary operation");
3796 /* First handle operations that involve different types. */
3801 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3802 || !(INTEGRAL_TYPE_P (rhs1_type
)
3803 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3804 || !(INTEGRAL_TYPE_P (rhs2_type
)
3805 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3807 error ("type mismatch in complex expression");
3808 debug_generic_expr (lhs_type
);
3809 debug_generic_expr (rhs1_type
);
3810 debug_generic_expr (rhs2_type
);
3822 /* Shifts and rotates are ok on integral types, fixed point
3823 types and integer vector types. */
3824 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3825 && !FIXED_POINT_TYPE_P (rhs1_type
)
3826 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3827 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3828 || (!INTEGRAL_TYPE_P (rhs2_type
)
3829 /* Vector shifts of vectors are also ok. */
3830 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3831 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3832 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3833 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3834 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3836 error ("type mismatch in shift expression");
3837 debug_generic_expr (lhs_type
);
3838 debug_generic_expr (rhs1_type
);
3839 debug_generic_expr (rhs2_type
);
3846 case WIDEN_LSHIFT_EXPR
:
3848 if (!INTEGRAL_TYPE_P (lhs_type
)
3849 || !INTEGRAL_TYPE_P (rhs1_type
)
3850 || TREE_CODE (rhs2
) != INTEGER_CST
3851 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3853 error ("type mismatch in widening vector shift expression");
3854 debug_generic_expr (lhs_type
);
3855 debug_generic_expr (rhs1_type
);
3856 debug_generic_expr (rhs2_type
);
3863 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3864 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3866 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3867 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3868 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3869 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3870 || TREE_CODE (rhs2
) != INTEGER_CST
3871 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3872 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3874 error ("type mismatch in widening vector shift expression");
3875 debug_generic_expr (lhs_type
);
3876 debug_generic_expr (rhs1_type
);
3877 debug_generic_expr (rhs2_type
);
3887 tree lhs_etype
= lhs_type
;
3888 tree rhs1_etype
= rhs1_type
;
3889 tree rhs2_etype
= rhs2_type
;
3890 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3892 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3893 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3895 error ("invalid non-vector operands to vector valued plus");
3898 lhs_etype
= TREE_TYPE (lhs_type
);
3899 rhs1_etype
= TREE_TYPE (rhs1_type
);
3900 rhs2_etype
= TREE_TYPE (rhs2_type
);
3902 if (POINTER_TYPE_P (lhs_etype
)
3903 || POINTER_TYPE_P (rhs1_etype
)
3904 || POINTER_TYPE_P (rhs2_etype
))
3906 error ("invalid (pointer) operands to plus/minus");
3910 /* Continue with generic binary expression handling. */
3914 case POINTER_PLUS_EXPR
:
3916 if (!POINTER_TYPE_P (rhs1_type
)
3917 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3918 || !ptrofftype_p (rhs2_type
))
3920 error ("type mismatch in pointer plus expression");
3921 debug_generic_stmt (lhs_type
);
3922 debug_generic_stmt (rhs1_type
);
3923 debug_generic_stmt (rhs2_type
);
3930 case POINTER_DIFF_EXPR
:
3932 if (!POINTER_TYPE_P (rhs1_type
)
3933 || !POINTER_TYPE_P (rhs2_type
)
3934 /* Because we special-case pointers to void we allow difference
3935 of arbitrary pointers with the same mode. */
3936 || TYPE_MODE (rhs1_type
) != TYPE_MODE (rhs2_type
)
3937 || TREE_CODE (lhs_type
) != INTEGER_TYPE
3938 || TYPE_UNSIGNED (lhs_type
)
3939 || TYPE_PRECISION (lhs_type
) != TYPE_PRECISION (rhs1_type
))
3941 error ("type mismatch in pointer diff expression");
3942 debug_generic_stmt (lhs_type
);
3943 debug_generic_stmt (rhs1_type
);
3944 debug_generic_stmt (rhs2_type
);
3951 case TRUTH_ANDIF_EXPR
:
3952 case TRUTH_ORIF_EXPR
:
3953 case TRUTH_AND_EXPR
:
3955 case TRUTH_XOR_EXPR
:
3965 case UNORDERED_EXPR
:
3973 /* Comparisons are also binary, but the result type is not
3974 connected to the operand types. */
3975 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
3977 case WIDEN_MULT_EXPR
:
3978 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3980 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3981 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3983 case WIDEN_SUM_EXPR
:
3985 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
3986 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
3987 && ((!INTEGRAL_TYPE_P (rhs1_type
)
3988 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3989 || (!INTEGRAL_TYPE_P (lhs_type
)
3990 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
3991 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3992 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type
)),
3993 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
3995 error ("type mismatch in widening sum reduction");
3996 debug_generic_expr (lhs_type
);
3997 debug_generic_expr (rhs1_type
);
3998 debug_generic_expr (rhs2_type
);
4004 case VEC_WIDEN_MULT_HI_EXPR
:
4005 case VEC_WIDEN_MULT_LO_EXPR
:
4006 case VEC_WIDEN_MULT_EVEN_EXPR
:
4007 case VEC_WIDEN_MULT_ODD_EXPR
:
4009 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4010 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4011 || !types_compatible_p (rhs1_type
, rhs2_type
)
4012 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type
)),
4013 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4015 error ("type mismatch in vector widening multiplication");
4016 debug_generic_expr (lhs_type
);
4017 debug_generic_expr (rhs1_type
);
4018 debug_generic_expr (rhs2_type
);
4024 case VEC_PACK_TRUNC_EXPR
:
4025 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4026 vector boolean types. */
4027 if (VECTOR_BOOLEAN_TYPE_P (lhs_type
)
4028 && VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4029 && types_compatible_p (rhs1_type
, rhs2_type
)
4030 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type
),
4031 2 * TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4035 case VEC_PACK_SAT_EXPR
:
4036 case VEC_PACK_FIX_TRUNC_EXPR
:
4038 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4039 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4040 || !((rhs_code
== VEC_PACK_FIX_TRUNC_EXPR
4041 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))
4042 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
)))
4043 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4044 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))))
4045 || !types_compatible_p (rhs1_type
, rhs2_type
)
4046 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type
)),
4047 2 * GET_MODE_SIZE (element_mode (lhs_type
)))
4048 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type
),
4049 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4051 error ("type mismatch in vector pack expression");
4052 debug_generic_expr (lhs_type
);
4053 debug_generic_expr (rhs1_type
);
4054 debug_generic_expr (rhs2_type
);
4061 case VEC_PACK_FLOAT_EXPR
:
4062 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4063 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4064 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4065 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
))
4066 || !types_compatible_p (rhs1_type
, rhs2_type
)
4067 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type
)),
4068 2 * GET_MODE_SIZE (element_mode (lhs_type
)))
4069 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type
),
4070 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4072 error ("type mismatch in vector pack expression");
4073 debug_generic_expr (lhs_type
);
4074 debug_generic_expr (rhs1_type
);
4075 debug_generic_expr (rhs2_type
);
4082 case MULT_HIGHPART_EXPR
:
4083 case TRUNC_DIV_EXPR
:
4085 case FLOOR_DIV_EXPR
:
4086 case ROUND_DIV_EXPR
:
4087 case TRUNC_MOD_EXPR
:
4089 case FLOOR_MOD_EXPR
:
4090 case ROUND_MOD_EXPR
:
4092 case EXACT_DIV_EXPR
:
4098 /* Continue with generic binary expression handling. */
4101 case VEC_SERIES_EXPR
:
4102 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
))
4104 error ("type mismatch in series expression");
4105 debug_generic_expr (rhs1_type
);
4106 debug_generic_expr (rhs2_type
);
4109 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
4110 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), rhs1_type
))
4112 error ("vector type expected in series expression");
4113 debug_generic_expr (lhs_type
);
4122 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4123 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4125 error ("type mismatch in binary expression");
4126 debug_generic_stmt (lhs_type
);
4127 debug_generic_stmt (rhs1_type
);
4128 debug_generic_stmt (rhs2_type
);
4135 /* Verify a gimple assignment statement STMT with a ternary rhs.
4136 Returns true if anything is wrong. */
4139 verify_gimple_assign_ternary (gassign
*stmt
)
4141 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4142 tree lhs
= gimple_assign_lhs (stmt
);
4143 tree lhs_type
= TREE_TYPE (lhs
);
4144 tree rhs1
= gimple_assign_rhs1 (stmt
);
4145 tree rhs1_type
= TREE_TYPE (rhs1
);
4146 tree rhs2
= gimple_assign_rhs2 (stmt
);
4147 tree rhs2_type
= TREE_TYPE (rhs2
);
4148 tree rhs3
= gimple_assign_rhs3 (stmt
);
4149 tree rhs3_type
= TREE_TYPE (rhs3
);
4151 if (!is_gimple_reg (lhs
))
4153 error ("non-register as LHS of ternary operation");
4157 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
4158 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
4159 || !is_gimple_val (rhs2
)
4160 || !is_gimple_val (rhs3
))
4162 error ("invalid operands in ternary operation");
4166 /* First handle operations that involve different types. */
4169 case WIDEN_MULT_PLUS_EXPR
:
4170 case WIDEN_MULT_MINUS_EXPR
:
4171 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4172 && !FIXED_POINT_TYPE_P (rhs1_type
))
4173 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4174 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4175 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4176 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4178 error ("type mismatch in widening multiply-accumulate expression");
4179 debug_generic_expr (lhs_type
);
4180 debug_generic_expr (rhs1_type
);
4181 debug_generic_expr (rhs2_type
);
4182 debug_generic_expr (rhs3_type
);
4188 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4189 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4190 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4192 error ("the first argument of a VEC_COND_EXPR must be of a "
4193 "boolean vector type of the same number of elements "
4195 debug_generic_expr (lhs_type
);
4196 debug_generic_expr (rhs1_type
);
4201 if (!is_gimple_val (rhs1
)
4202 && verify_gimple_comparison (TREE_TYPE (rhs1
),
4203 TREE_OPERAND (rhs1
, 0),
4204 TREE_OPERAND (rhs1
, 1),
4207 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4208 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4210 error ("type mismatch in conditional expression");
4211 debug_generic_expr (lhs_type
);
4212 debug_generic_expr (rhs2_type
);
4213 debug_generic_expr (rhs3_type
);
4219 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4220 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4222 error ("type mismatch in vector permute expression");
4223 debug_generic_expr (lhs_type
);
4224 debug_generic_expr (rhs1_type
);
4225 debug_generic_expr (rhs2_type
);
4226 debug_generic_expr (rhs3_type
);
4230 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4231 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4232 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4234 error ("vector types expected in vector permute expression");
4235 debug_generic_expr (lhs_type
);
4236 debug_generic_expr (rhs1_type
);
4237 debug_generic_expr (rhs2_type
);
4238 debug_generic_expr (rhs3_type
);
4242 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4243 TYPE_VECTOR_SUBPARTS (rhs2_type
))
4244 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type
),
4245 TYPE_VECTOR_SUBPARTS (rhs3_type
))
4246 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type
),
4247 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4249 error ("vectors with different element number found "
4250 "in vector permute expression");
4251 debug_generic_expr (lhs_type
);
4252 debug_generic_expr (rhs1_type
);
4253 debug_generic_expr (rhs2_type
);
4254 debug_generic_expr (rhs3_type
);
4258 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4259 || (TREE_CODE (rhs3
) != VECTOR_CST
4260 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4261 (TREE_TYPE (rhs3_type
)))
4262 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4263 (TREE_TYPE (rhs1_type
))))))
4265 error ("invalid mask type in vector permute expression");
4266 debug_generic_expr (lhs_type
);
4267 debug_generic_expr (rhs1_type
);
4268 debug_generic_expr (rhs2_type
);
4269 debug_generic_expr (rhs3_type
);
4276 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4277 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4278 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4279 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4281 error ("type mismatch in sad expression");
4282 debug_generic_expr (lhs_type
);
4283 debug_generic_expr (rhs1_type
);
4284 debug_generic_expr (rhs2_type
);
4285 debug_generic_expr (rhs3_type
);
4289 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4290 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4291 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4293 error ("vector types expected in sad expression");
4294 debug_generic_expr (lhs_type
);
4295 debug_generic_expr (rhs1_type
);
4296 debug_generic_expr (rhs2_type
);
4297 debug_generic_expr (rhs3_type
);
4303 case BIT_INSERT_EXPR
:
4304 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4306 error ("type mismatch in BIT_INSERT_EXPR");
4307 debug_generic_expr (lhs_type
);
4308 debug_generic_expr (rhs1_type
);
4311 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4312 && INTEGRAL_TYPE_P (rhs2_type
))
4313 || (VECTOR_TYPE_P (rhs1_type
)
4314 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))))
4316 error ("not allowed type combination in BIT_INSERT_EXPR");
4317 debug_generic_expr (rhs1_type
);
4318 debug_generic_expr (rhs2_type
);
4321 if (! tree_fits_uhwi_p (rhs3
)
4322 || ! types_compatible_p (bitsizetype
, TREE_TYPE (rhs3
))
4323 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4325 error ("invalid position or size in BIT_INSERT_EXPR");
4328 if (INTEGRAL_TYPE_P (rhs1_type
)
4329 && !type_has_mode_precision_p (rhs1_type
))
4331 error ("BIT_INSERT_EXPR into non-mode-precision operand");
4334 if (INTEGRAL_TYPE_P (rhs1_type
))
4336 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4337 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4338 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4339 > TYPE_PRECISION (rhs1_type
)))
4341 error ("insertion out of range in BIT_INSERT_EXPR");
4345 else if (VECTOR_TYPE_P (rhs1_type
))
4347 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4348 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4349 if (bitpos
% bitsize
!= 0)
4351 error ("vector insertion not at element boundary");
4359 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4360 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4361 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4362 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4363 || (!INTEGRAL_TYPE_P (lhs_type
)
4364 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4365 || !types_compatible_p (rhs1_type
, rhs2_type
)
4366 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4367 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type
)),
4368 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4370 error ("type mismatch in dot product reduction");
4371 debug_generic_expr (lhs_type
);
4372 debug_generic_expr (rhs1_type
);
4373 debug_generic_expr (rhs2_type
);
4379 case REALIGN_LOAD_EXPR
:
4389 /* Verify a gimple assignment statement STMT with a single rhs.
4390 Returns true if anything is wrong. */
4393 verify_gimple_assign_single (gassign
*stmt
)
4395 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4396 tree lhs
= gimple_assign_lhs (stmt
);
4397 tree lhs_type
= TREE_TYPE (lhs
);
4398 tree rhs1
= gimple_assign_rhs1 (stmt
);
4399 tree rhs1_type
= TREE_TYPE (rhs1
);
4402 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4404 error ("non-trivial conversion at assignment");
4405 debug_generic_expr (lhs_type
);
4406 debug_generic_expr (rhs1_type
);
4410 if (gimple_clobber_p (stmt
)
4411 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4413 error ("non-decl/MEM_REF LHS in clobber statement");
4414 debug_generic_expr (lhs
);
4418 if (handled_component_p (lhs
)
4419 || TREE_CODE (lhs
) == MEM_REF
4420 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4421 res
|= verify_types_in_gimple_reference (lhs
, true);
4423 /* Special codes we cannot handle via their class. */
4428 tree op
= TREE_OPERAND (rhs1
, 0);
4429 if (!is_gimple_addressable (op
))
4431 error ("invalid operand in unary expression");
4435 /* Technically there is no longer a need for matching types, but
4436 gimple hygiene asks for this check. In LTO we can end up
4437 combining incompatible units and thus end up with addresses
4438 of globals that change their type to a common one. */
4440 && !types_compatible_p (TREE_TYPE (op
),
4441 TREE_TYPE (TREE_TYPE (rhs1
)))
4442 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4445 error ("type mismatch in address expression");
4446 debug_generic_stmt (TREE_TYPE (rhs1
));
4447 debug_generic_stmt (TREE_TYPE (op
));
4451 return (verify_address (rhs1
, true)
4452 || verify_types_in_gimple_reference (op
, true));
4457 error ("INDIRECT_REF in gimple IL");
4463 case ARRAY_RANGE_REF
:
4464 case VIEW_CONVERT_EXPR
:
4467 case TARGET_MEM_REF
:
4469 if (!is_gimple_reg (lhs
)
4470 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4472 error ("invalid rhs for gimple memory store");
4473 debug_generic_stmt (lhs
);
4474 debug_generic_stmt (rhs1
);
4477 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4489 /* tcc_declaration */
4494 if (!is_gimple_reg (lhs
)
4495 && !is_gimple_reg (rhs1
)
4496 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4498 error ("invalid rhs for gimple memory store");
4499 debug_generic_stmt (lhs
);
4500 debug_generic_stmt (rhs1
);
4506 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4509 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4511 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4513 /* For vector CONSTRUCTORs we require that either it is empty
4514 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4515 (then the element count must be correct to cover the whole
4516 outer vector and index must be NULL on all elements, or it is
4517 a CONSTRUCTOR of scalar elements, where we as an exception allow
4518 smaller number of elements (assuming zero filling) and
4519 consecutive indexes as compared to NULL indexes (such
4520 CONSTRUCTORs can appear in the IL from FEs). */
4521 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4523 if (elt_t
== NULL_TREE
)
4525 elt_t
= TREE_TYPE (elt_v
);
4526 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4528 tree elt_t
= TREE_TYPE (elt_v
);
4529 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4532 error ("incorrect type of vector CONSTRUCTOR"
4534 debug_generic_stmt (rhs1
);
4537 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1
)
4538 * TYPE_VECTOR_SUBPARTS (elt_t
),
4539 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4541 error ("incorrect number of vector CONSTRUCTOR"
4543 debug_generic_stmt (rhs1
);
4547 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4550 error ("incorrect type of vector CONSTRUCTOR elements");
4551 debug_generic_stmt (rhs1
);
4554 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1
),
4555 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4557 error ("incorrect number of vector CONSTRUCTOR elements");
4558 debug_generic_stmt (rhs1
);
4562 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4564 error ("incorrect type of vector CONSTRUCTOR elements");
4565 debug_generic_stmt (rhs1
);
4568 if (elt_i
!= NULL_TREE
4569 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4570 || TREE_CODE (elt_i
) != INTEGER_CST
4571 || compare_tree_int (elt_i
, i
) != 0))
4573 error ("vector CONSTRUCTOR with non-NULL element index");
4574 debug_generic_stmt (rhs1
);
4577 if (!is_gimple_val (elt_v
))
4579 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4580 debug_generic_stmt (rhs1
);
4585 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4587 error ("non-vector CONSTRUCTOR with elements");
4588 debug_generic_stmt (rhs1
);
4595 rhs1
= fold (ASSERT_EXPR_COND (rhs1
));
4596 if (rhs1
== boolean_false_node
)
4598 error ("ASSERT_EXPR with an always-false condition");
4599 debug_generic_stmt (rhs1
);
4605 case WITH_SIZE_EXPR
:
4615 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4616 is a problem, otherwise false. */
4619 verify_gimple_assign (gassign
*stmt
)
4621 switch (gimple_assign_rhs_class (stmt
))
4623 case GIMPLE_SINGLE_RHS
:
4624 return verify_gimple_assign_single (stmt
);
4626 case GIMPLE_UNARY_RHS
:
4627 return verify_gimple_assign_unary (stmt
);
4629 case GIMPLE_BINARY_RHS
:
4630 return verify_gimple_assign_binary (stmt
);
4632 case GIMPLE_TERNARY_RHS
:
4633 return verify_gimple_assign_ternary (stmt
);
4640 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4641 is a problem, otherwise false. */
4644 verify_gimple_return (greturn
*stmt
)
4646 tree op
= gimple_return_retval (stmt
);
4647 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4649 /* We cannot test for present return values as we do not fix up missing
4650 return values from the original source. */
4654 if (!is_gimple_val (op
)
4655 && TREE_CODE (op
) != RESULT_DECL
)
4657 error ("invalid operand in return statement");
4658 debug_generic_stmt (op
);
4662 if ((TREE_CODE (op
) == RESULT_DECL
4663 && DECL_BY_REFERENCE (op
))
4664 || (TREE_CODE (op
) == SSA_NAME
4665 && SSA_NAME_VAR (op
)
4666 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4667 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4668 op
= TREE_TYPE (op
);
4670 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4672 error ("invalid conversion in return statement");
4673 debug_generic_stmt (restype
);
4674 debug_generic_stmt (TREE_TYPE (op
));
4682 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4683 is a problem, otherwise false. */
4686 verify_gimple_goto (ggoto
*stmt
)
4688 tree dest
= gimple_goto_dest (stmt
);
4690 /* ??? We have two canonical forms of direct goto destinations, a
4691 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4692 if (TREE_CODE (dest
) != LABEL_DECL
4693 && (!is_gimple_val (dest
)
4694 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4696 error ("goto destination is neither a label nor a pointer");
4703 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4704 is a problem, otherwise false. */
4707 verify_gimple_switch (gswitch
*stmt
)
4710 tree elt
, prev_upper_bound
= NULL_TREE
;
4711 tree index_type
, elt_type
= NULL_TREE
;
4713 if (!is_gimple_val (gimple_switch_index (stmt
)))
4715 error ("invalid operand to switch statement");
4716 debug_generic_stmt (gimple_switch_index (stmt
));
4720 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4721 if (! INTEGRAL_TYPE_P (index_type
))
4723 error ("non-integral type switch statement");
4724 debug_generic_expr (index_type
);
4728 elt
= gimple_switch_label (stmt
, 0);
4729 if (CASE_LOW (elt
) != NULL_TREE
4730 || CASE_HIGH (elt
) != NULL_TREE
4731 || CASE_CHAIN (elt
) != NULL_TREE
)
4733 error ("invalid default case label in switch statement");
4734 debug_generic_expr (elt
);
4738 n
= gimple_switch_num_labels (stmt
);
4739 for (i
= 1; i
< n
; i
++)
4741 elt
= gimple_switch_label (stmt
, i
);
4743 if (CASE_CHAIN (elt
))
4745 error ("invalid CASE_CHAIN");
4746 debug_generic_expr (elt
);
4749 if (! CASE_LOW (elt
))
4751 error ("invalid case label in switch statement");
4752 debug_generic_expr (elt
);
4756 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4758 error ("invalid case range in switch statement");
4759 debug_generic_expr (elt
);
4765 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4766 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4768 error ("type mismatch for case label in switch statement");
4769 debug_generic_expr (elt
);
4775 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4776 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4778 error ("type precision mismatch in switch statement");
4783 if (prev_upper_bound
)
4785 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4787 error ("case labels not sorted in switch statement");
4792 prev_upper_bound
= CASE_HIGH (elt
);
4793 if (! prev_upper_bound
)
4794 prev_upper_bound
= CASE_LOW (elt
);
4800 /* Verify a gimple debug statement STMT.
4801 Returns true if anything is wrong. */
4804 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
4806 /* There isn't much that could be wrong in a gimple debug stmt. A
4807 gimple debug bind stmt, for example, maps a tree, that's usually
4808 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4809 component or member of an aggregate type, to another tree, that
4810 can be an arbitrary expression. These stmts expand into debug
4811 insns, and are converted to debug notes by var-tracking.c. */
4815 /* Verify a gimple label statement STMT.
4816 Returns true if anything is wrong. */
4819 verify_gimple_label (glabel
*stmt
)
4821 tree decl
= gimple_label_label (stmt
);
4825 if (TREE_CODE (decl
) != LABEL_DECL
)
4827 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4828 && DECL_CONTEXT (decl
) != current_function_decl
)
4830 error ("label's context is not the current function decl");
4834 uid
= LABEL_DECL_UID (decl
);
4837 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4839 error ("incorrect entry in label_to_block_map");
4843 uid
= EH_LANDING_PAD_NR (decl
);
4846 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4847 if (decl
!= lp
->post_landing_pad
)
4849 error ("incorrect setting of landing pad number");
4857 /* Verify a gimple cond statement STMT.
4858 Returns true if anything is wrong. */
4861 verify_gimple_cond (gcond
*stmt
)
4863 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4865 error ("invalid comparison code in gimple cond");
4868 if (!(!gimple_cond_true_label (stmt
)
4869 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4870 || !(!gimple_cond_false_label (stmt
)
4871 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4873 error ("invalid labels in gimple cond");
4877 return verify_gimple_comparison (boolean_type_node
,
4878 gimple_cond_lhs (stmt
),
4879 gimple_cond_rhs (stmt
),
4880 gimple_cond_code (stmt
));
4883 /* Verify the GIMPLE statement STMT. Returns true if there is an
4884 error, otherwise false. */
4887 verify_gimple_stmt (gimple
*stmt
)
4889 switch (gimple_code (stmt
))
4892 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4895 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4898 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4901 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4904 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4907 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4910 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4915 case GIMPLE_TRANSACTION
:
4916 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4918 /* Tuples that do not have tree operands. */
4920 case GIMPLE_PREDICT
:
4922 case GIMPLE_EH_DISPATCH
:
4923 case GIMPLE_EH_MUST_NOT_THROW
:
4927 /* OpenMP directives are validated by the FE and never operated
4928 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4929 non-gimple expressions when the main index variable has had
4930 its address taken. This does not affect the loop itself
4931 because the header of an GIMPLE_OMP_FOR is merely used to determine
4932 how to setup the parallel iteration. */
4936 return verify_gimple_debug (stmt
);
4943 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4944 and false otherwise. */
4947 verify_gimple_phi (gphi
*phi
)
4951 tree phi_result
= gimple_phi_result (phi
);
4956 error ("invalid PHI result");
4960 virtual_p
= virtual_operand_p (phi_result
);
4961 if (TREE_CODE (phi_result
) != SSA_NAME
4963 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4965 error ("invalid PHI result");
4969 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4971 tree t
= gimple_phi_arg_def (phi
, i
);
4975 error ("missing PHI def");
4979 /* Addressable variables do have SSA_NAMEs but they
4980 are not considered gimple values. */
4981 else if ((TREE_CODE (t
) == SSA_NAME
4982 && virtual_p
!= virtual_operand_p (t
))
4984 && (TREE_CODE (t
) != SSA_NAME
4985 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4987 && !is_gimple_val (t
)))
4989 error ("invalid PHI argument");
4990 debug_generic_expr (t
);
4993 #ifdef ENABLE_TYPES_CHECKING
4994 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4996 error ("incompatible types in PHI argument %u", i
);
4997 debug_generic_stmt (TREE_TYPE (phi_result
));
4998 debug_generic_stmt (TREE_TYPE (t
));
5007 /* Verify the GIMPLE statements inside the sequence STMTS. */
5010 verify_gimple_in_seq_2 (gimple_seq stmts
)
5012 gimple_stmt_iterator ittr
;
5015 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
5017 gimple
*stmt
= gsi_stmt (ittr
);
5019 switch (gimple_code (stmt
))
5022 err
|= verify_gimple_in_seq_2 (
5023 gimple_bind_body (as_a
<gbind
*> (stmt
)));
5027 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
5028 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
5031 case GIMPLE_EH_FILTER
:
5032 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
5035 case GIMPLE_EH_ELSE
:
5037 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
5038 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
5039 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
5044 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
5045 as_a
<gcatch
*> (stmt
)));
5048 case GIMPLE_TRANSACTION
:
5049 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
5054 bool err2
= verify_gimple_stmt (stmt
);
5056 debug_gimple_stmt (stmt
);
5065 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5066 is a problem, otherwise false. */
5069 verify_gimple_transaction (gtransaction
*stmt
)
5073 lab
= gimple_transaction_label_norm (stmt
);
5074 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5076 lab
= gimple_transaction_label_uninst (stmt
);
5077 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5079 lab
= gimple_transaction_label_over (stmt
);
5080 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5083 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
5087 /* Verify the GIMPLE statements inside the statement list STMTS. */
5090 verify_gimple_in_seq (gimple_seq stmts
)
5092 timevar_push (TV_TREE_STMT_VERIFY
);
5093 if (verify_gimple_in_seq_2 (stmts
))
5094 internal_error ("verify_gimple failed");
5095 timevar_pop (TV_TREE_STMT_VERIFY
);
5098 /* Return true when the T can be shared. */
5101 tree_node_can_be_shared (tree t
)
5103 if (IS_TYPE_OR_DECL_P (t
)
5104 || TREE_CODE (t
) == SSA_NAME
5105 || TREE_CODE (t
) == IDENTIFIER_NODE
5106 || TREE_CODE (t
) == CASE_LABEL_EXPR
5107 || is_gimple_min_invariant (t
))
5110 if (t
== error_mark_node
)
5116 /* Called via walk_tree. Verify tree sharing. */
5119 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5121 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
5123 if (tree_node_can_be_shared (*tp
))
5125 *walk_subtrees
= false;
5129 if (visited
->add (*tp
))
5135 /* Called via walk_gimple_stmt. Verify tree sharing. */
5138 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
5140 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5141 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
5144 static bool eh_error_found
;
5146 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
5147 hash_set
<gimple
*> *visited
)
5149 if (!visited
->contains (stmt
))
5151 error ("dead STMT in EH table");
5152 debug_gimple_stmt (stmt
);
5153 eh_error_found
= true;
5158 /* Verify if the location LOCs block is in BLOCKS. */
5161 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
5163 tree block
= LOCATION_BLOCK (loc
);
5164 if (block
!= NULL_TREE
5165 && !blocks
->contains (block
))
5167 error ("location references block not in block tree");
5170 if (block
!= NULL_TREE
)
5171 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
5175 /* Called via walk_tree. Verify that expressions have no blocks. */
5178 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
5182 *walk_subtrees
= false;
5186 location_t loc
= EXPR_LOCATION (*tp
);
5187 if (LOCATION_BLOCK (loc
) != NULL
)
5193 /* Called via walk_tree. Verify locations of expressions. */
5196 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5198 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
5201 /* ??? This doesn't really belong here but there's no good place to
5202 stick this remainder of old verify_expr. */
5203 /* ??? This barfs on debug stmts which contain binds to vars with
5204 different function context. */
5207 || TREE_CODE (t
) == PARM_DECL
5208 || TREE_CODE (t
) == RESULT_DECL
)
5210 tree context
= decl_function_context (t
);
5211 if (context
!= cfun
->decl
5212 && !SCOPE_FILE_SCOPE_P (context
)
5214 && !DECL_EXTERNAL (t
))
5216 error ("local declaration from a different function");
5222 if (VAR_P (t
) && DECL_HAS_DEBUG_EXPR_P (t
))
5224 tree x
= DECL_DEBUG_EXPR (t
);
5225 tree addr
= walk_tree (&x
, verify_expr_no_block
, NULL
, NULL
);
5230 || TREE_CODE (t
) == PARM_DECL
5231 || TREE_CODE (t
) == RESULT_DECL
)
5232 && DECL_HAS_VALUE_EXPR_P (t
))
5234 tree x
= DECL_VALUE_EXPR (t
);
5235 tree addr
= walk_tree (&x
, verify_expr_no_block
, NULL
, NULL
);
5242 *walk_subtrees
= false;
5246 location_t loc
= EXPR_LOCATION (t
);
5247 if (verify_location (blocks
, loc
))
5253 /* Called via walk_gimple_op. Verify locations of expressions. */
5256 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5258 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5259 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5262 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5265 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5268 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5271 collect_subblocks (blocks
, t
);
5275 /* Verify the GIMPLE statements in the CFG of FN. */
5278 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
5283 timevar_push (TV_TREE_STMT_VERIFY
);
5284 hash_set
<void *> visited
;
5285 hash_set
<gimple
*> visited_throwing_stmts
;
5287 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5288 hash_set
<tree
> blocks
;
5289 if (DECL_INITIAL (fn
->decl
))
5291 blocks
.add (DECL_INITIAL (fn
->decl
));
5292 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
5295 FOR_EACH_BB_FN (bb
, fn
)
5297 gimple_stmt_iterator gsi
;
5301 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5305 gphi
*phi
= gpi
.phi ();
5309 if (gimple_bb (phi
) != bb
)
5311 error ("gimple_bb (phi) is set to a wrong basic block");
5315 err2
|= verify_gimple_phi (phi
);
5317 /* Only PHI arguments have locations. */
5318 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5320 error ("PHI node with location");
5324 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5326 tree arg
= gimple_phi_arg_def (phi
, i
);
5327 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5331 error ("incorrect sharing of tree nodes");
5332 debug_generic_expr (addr
);
5335 location_t loc
= gimple_phi_arg_location (phi
, i
);
5336 if (virtual_operand_p (gimple_phi_result (phi
))
5337 && loc
!= UNKNOWN_LOCATION
)
5339 error ("virtual PHI with argument locations");
5342 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5345 debug_generic_expr (addr
);
5348 err2
|= verify_location (&blocks
, loc
);
5352 debug_gimple_stmt (phi
);
5356 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5358 gimple
*stmt
= gsi_stmt (gsi
);
5360 struct walk_stmt_info wi
;
5364 if (gimple_bb (stmt
) != bb
)
5366 error ("gimple_bb (stmt) is set to a wrong basic block");
5370 err2
|= verify_gimple_stmt (stmt
);
5371 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5373 memset (&wi
, 0, sizeof (wi
));
5374 wi
.info
= (void *) &visited
;
5375 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5378 error ("incorrect sharing of tree nodes");
5379 debug_generic_expr (addr
);
5383 memset (&wi
, 0, sizeof (wi
));
5384 wi
.info
= (void *) &blocks
;
5385 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5388 debug_generic_expr (addr
);
5392 /* If the statement is marked as part of an EH region, then it is
5393 expected that the statement could throw. Verify that when we
5394 have optimizations that simplify statements such that we prove
5395 that they cannot throw, that we update other data structures
5397 lp_nr
= lookup_stmt_eh_lp (stmt
);
5399 visited_throwing_stmts
.add (stmt
);
5402 if (!stmt_could_throw_p (cfun
, stmt
))
5406 error ("statement marked for throw, but doesn%'t");
5410 else if (!gsi_one_before_end_p (gsi
))
5412 error ("statement marked for throw in middle of block");
5418 debug_gimple_stmt (stmt
);
5422 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5423 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5424 err
|= verify_location (&blocks
, e
->goto_locus
);
5427 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5428 eh_error_found
= false;
5430 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5431 (&visited_throwing_stmts
);
5433 if (err
|| eh_error_found
)
5434 internal_error ("verify_gimple failed");
5436 verify_histograms ();
5437 timevar_pop (TV_TREE_STMT_VERIFY
);
5441 /* Verifies that the flow information is OK. */
5444 gimple_verify_flow_info (void)
5448 gimple_stmt_iterator gsi
;
5453 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5454 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5456 error ("ENTRY_BLOCK has IL associated with it");
5460 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5461 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5463 error ("EXIT_BLOCK has IL associated with it");
5467 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5468 if (e
->flags
& EDGE_FALLTHRU
)
5470 error ("fallthru to exit from bb %d", e
->src
->index
);
5474 FOR_EACH_BB_FN (bb
, cfun
)
5476 bool found_ctrl_stmt
= false;
5480 /* Skip labels on the start of basic block. */
5481 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5484 gimple
*prev_stmt
= stmt
;
5486 stmt
= gsi_stmt (gsi
);
5488 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5491 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5492 if (prev_stmt
&& DECL_NONLOCAL (label
))
5494 error ("nonlocal label ");
5495 print_generic_expr (stderr
, label
);
5496 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5501 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5503 error ("EH landing pad label ");
5504 print_generic_expr (stderr
, label
);
5505 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5510 if (label_to_block (cfun
, label
) != bb
)
5513 print_generic_expr (stderr
, label
);
5514 fprintf (stderr
, " to block does not match in bb %d",
5519 if (decl_function_context (label
) != current_function_decl
)
5522 print_generic_expr (stderr
, label
);
5523 fprintf (stderr
, " has incorrect context in bb %d",
5529 /* Verify that body of basic block BB is free of control flow. */
5530 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5532 gimple
*stmt
= gsi_stmt (gsi
);
5534 if (found_ctrl_stmt
)
5536 error ("control flow in the middle of basic block %d",
5541 if (stmt_ends_bb_p (stmt
))
5542 found_ctrl_stmt
= true;
5544 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5547 print_generic_expr (stderr
, gimple_label_label (label_stmt
));
5548 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5553 gsi
= gsi_last_nondebug_bb (bb
);
5554 if (gsi_end_p (gsi
))
5557 stmt
= gsi_stmt (gsi
);
5559 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5562 err
|= verify_eh_edges (stmt
);
5564 if (is_ctrl_stmt (stmt
))
5566 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5567 if (e
->flags
& EDGE_FALLTHRU
)
5569 error ("fallthru edge after a control statement in bb %d",
5575 if (gimple_code (stmt
) != GIMPLE_COND
)
5577 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5578 after anything else but if statement. */
5579 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5580 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5582 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5588 switch (gimple_code (stmt
))
5595 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5599 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5600 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5601 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5602 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5603 || EDGE_COUNT (bb
->succs
) >= 3)
5605 error ("wrong outgoing edge flags at end of bb %d",
5613 if (simple_goto_p (stmt
))
5615 error ("explicit goto at end of bb %d", bb
->index
);
5620 /* FIXME. We should double check that the labels in the
5621 destination blocks have their address taken. */
5622 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5623 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5624 | EDGE_FALSE_VALUE
))
5625 || !(e
->flags
& EDGE_ABNORMAL
))
5627 error ("wrong outgoing edge flags at end of bb %d",
5635 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5639 if (!single_succ_p (bb
)
5640 || (single_succ_edge (bb
)->flags
5641 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5642 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5644 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5647 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5649 error ("return edge does not point to exit in bb %d",
5657 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5662 n
= gimple_switch_num_labels (switch_stmt
);
5664 /* Mark all the destination basic blocks. */
5665 for (i
= 0; i
< n
; ++i
)
5667 basic_block label_bb
= gimple_switch_label_bb (cfun
, switch_stmt
, i
);
5668 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5669 label_bb
->aux
= (void *)1;
5672 /* Verify that the case labels are sorted. */
5673 prev
= gimple_switch_label (switch_stmt
, 0);
5674 for (i
= 1; i
< n
; ++i
)
5676 tree c
= gimple_switch_label (switch_stmt
, i
);
5679 error ("found default case not at the start of "
5685 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5687 error ("case labels not sorted: ");
5688 print_generic_expr (stderr
, prev
);
5689 fprintf (stderr
," is greater than ");
5690 print_generic_expr (stderr
, c
);
5691 fprintf (stderr
," but comes before it.\n");
5696 /* VRP will remove the default case if it can prove it will
5697 never be executed. So do not verify there always exists
5698 a default case here. */
5700 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5704 error ("extra outgoing edge %d->%d",
5705 bb
->index
, e
->dest
->index
);
5709 e
->dest
->aux
= (void *)2;
5710 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5711 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5713 error ("wrong outgoing edge flags at end of bb %d",
5719 /* Check that we have all of them. */
5720 for (i
= 0; i
< n
; ++i
)
5722 basic_block label_bb
= gimple_switch_label_bb (cfun
,
5725 if (label_bb
->aux
!= (void *)2)
5727 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5732 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5733 e
->dest
->aux
= (void *)0;
5737 case GIMPLE_EH_DISPATCH
:
5738 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5746 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5747 verify_dominators (CDI_DOMINATORS
);
5753 /* Updates phi nodes after creating a forwarder block joined
5754 by edge FALLTHRU. */
5757 gimple_make_forwarder_block (edge fallthru
)
5761 basic_block dummy
, bb
;
5765 dummy
= fallthru
->src
;
5766 bb
= fallthru
->dest
;
5768 if (single_pred_p (bb
))
5771 /* If we redirected a branch we must create new PHI nodes at the
5773 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5775 gphi
*phi
, *new_phi
;
5778 var
= gimple_phi_result (phi
);
5779 new_phi
= create_phi_node (var
, bb
);
5780 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5781 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5785 /* Add the arguments we have stored on edges. */
5786 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5791 flush_pending_stmts (e
);
5796 /* Return a non-special label in the head of basic block BLOCK.
5797 Create one if it doesn't exist. */
5800 gimple_block_label (basic_block bb
)
5802 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5807 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5809 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5812 label
= gimple_label_label (stmt
);
5813 if (!DECL_NONLOCAL (label
))
5816 gsi_move_before (&i
, &s
);
5821 label
= create_artificial_label (UNKNOWN_LOCATION
);
5822 stmt
= gimple_build_label (label
);
5823 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5828 /* Attempt to perform edge redirection by replacing a possibly complex
5829 jump instruction by a goto or by removing the jump completely.
5830 This can apply only if all edges now point to the same block. The
5831 parameters and return values are equivalent to
5832 redirect_edge_and_branch. */
5835 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5837 basic_block src
= e
->src
;
5838 gimple_stmt_iterator i
;
5841 /* We can replace or remove a complex jump only when we have exactly
5843 if (EDGE_COUNT (src
->succs
) != 2
5844 /* Verify that all targets will be TARGET. Specifically, the
5845 edge that is not E must also go to TARGET. */
5846 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5849 i
= gsi_last_bb (src
);
5853 stmt
= gsi_stmt (i
);
5855 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5857 gsi_remove (&i
, true);
5858 e
= ssa_redirect_edge (e
, target
);
5859 e
->flags
= EDGE_FALLTHRU
;
5867 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5868 edge representing the redirected branch. */
5871 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5873 basic_block bb
= e
->src
;
5874 gimple_stmt_iterator gsi
;
5878 if (e
->flags
& EDGE_ABNORMAL
)
5881 if (e
->dest
== dest
)
5884 if (e
->flags
& EDGE_EH
)
5885 return redirect_eh_edge (e
, dest
);
5887 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5889 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5894 gsi
= gsi_last_nondebug_bb (bb
);
5895 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5897 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5900 /* For COND_EXPR, we only need to redirect the edge. */
5904 /* No non-abnormal edges should lead from a non-simple goto, and
5905 simple ones should be represented implicitly. */
5910 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5911 tree label
= gimple_block_label (dest
);
5912 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5914 /* If we have a list of cases associated with E, then use it
5915 as it's a lot faster than walking the entire case vector. */
5918 edge e2
= find_edge (e
->src
, dest
);
5925 CASE_LABEL (cases
) = label
;
5926 cases
= CASE_CHAIN (cases
);
5929 /* If there was already an edge in the CFG, then we need
5930 to move all the cases associated with E to E2. */
5933 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5935 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5936 CASE_CHAIN (cases2
) = first
;
5938 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5942 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5944 for (i
= 0; i
< n
; i
++)
5946 tree elt
= gimple_switch_label (switch_stmt
, i
);
5947 if (label_to_block (cfun
, CASE_LABEL (elt
)) == e
->dest
)
5948 CASE_LABEL (elt
) = label
;
5956 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5957 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5960 for (i
= 0; i
< n
; ++i
)
5962 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5963 if (label_to_block (cfun
, TREE_VALUE (cons
)) == e
->dest
)
5966 label
= gimple_block_label (dest
);
5967 TREE_VALUE (cons
) = label
;
5971 /* If we didn't find any label matching the former edge in the
5972 asm labels, we must be redirecting the fallthrough
5974 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5979 gsi_remove (&gsi
, true);
5980 e
->flags
|= EDGE_FALLTHRU
;
5983 case GIMPLE_OMP_RETURN
:
5984 case GIMPLE_OMP_CONTINUE
:
5985 case GIMPLE_OMP_SECTIONS_SWITCH
:
5986 case GIMPLE_OMP_FOR
:
5987 /* The edges from OMP constructs can be simply redirected. */
5990 case GIMPLE_EH_DISPATCH
:
5991 if (!(e
->flags
& EDGE_FALLTHRU
))
5992 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
5995 case GIMPLE_TRANSACTION
:
5996 if (e
->flags
& EDGE_TM_ABORT
)
5997 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
5998 gimple_block_label (dest
));
5999 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
6000 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
6001 gimple_block_label (dest
));
6003 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
6004 gimple_block_label (dest
));
6008 /* Otherwise it must be a fallthru edge, and we don't need to
6009 do anything besides redirecting it. */
6010 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
6014 /* Update/insert PHI nodes as necessary. */
6016 /* Now update the edges in the CFG. */
6017 e
= ssa_redirect_edge (e
, dest
);
6022 /* Returns true if it is possible to remove edge E by redirecting
6023 it to the destination of the other edge from E->src. */
6026 gimple_can_remove_branch_p (const_edge e
)
6028 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
6034 /* Simple wrapper, as we can always redirect fallthru edges. */
6037 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
6039 e
= gimple_redirect_edge_and_branch (e
, dest
);
6046 /* Splits basic block BB after statement STMT (but at least after the
6047 labels). If STMT is NULL, BB is split just after the labels. */
6050 gimple_split_block (basic_block bb
, void *stmt
)
6052 gimple_stmt_iterator gsi
;
6053 gimple_stmt_iterator gsi_tgt
;
6059 new_bb
= create_empty_bb (bb
);
6061 /* Redirect the outgoing edges. */
6062 new_bb
->succs
= bb
->succs
;
6064 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
6067 /* Get a stmt iterator pointing to the first stmt to move. */
6068 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
6069 gsi
= gsi_after_labels (bb
);
6072 gsi
= gsi_for_stmt ((gimple
*) stmt
);
6076 /* Move everything from GSI to the new basic block. */
6077 if (gsi_end_p (gsi
))
6080 /* Split the statement list - avoid re-creating new containers as this
6081 brings ugly quadratic memory consumption in the inliner.
6082 (We are still quadratic since we need to update stmt BB pointers,
6084 gsi_split_seq_before (&gsi
, &list
);
6085 set_bb_seq (new_bb
, list
);
6086 for (gsi_tgt
= gsi_start (list
);
6087 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
6088 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
6094 /* Moves basic block BB after block AFTER. */
6097 gimple_move_block_after (basic_block bb
, basic_block after
)
6099 if (bb
->prev_bb
== after
)
6103 link_block (bb
, after
);
6109 /* Return TRUE if block BB has no executable statements, otherwise return
6113 gimple_empty_block_p (basic_block bb
)
6115 /* BB must have no executable statements. */
6116 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
6119 while (!gsi_end_p (gsi
))
6121 gimple
*stmt
= gsi_stmt (gsi
);
6122 if (is_gimple_debug (stmt
))
6124 else if (gimple_code (stmt
) == GIMPLE_NOP
6125 || gimple_code (stmt
) == GIMPLE_PREDICT
)
6135 /* Split a basic block if it ends with a conditional branch and if the
6136 other part of the block is not empty. */
6139 gimple_split_block_before_cond_jump (basic_block bb
)
6141 gimple
*last
, *split_point
;
6142 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
6143 if (gsi_end_p (gsi
))
6145 last
= gsi_stmt (gsi
);
6146 if (gimple_code (last
) != GIMPLE_COND
6147 && gimple_code (last
) != GIMPLE_SWITCH
)
6150 split_point
= gsi_stmt (gsi
);
6151 return split_block (bb
, split_point
)->dest
;
6155 /* Return true if basic_block can be duplicated. */
6158 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
6163 /* Create a duplicate of the basic block BB. NOTE: This does not
6164 preserve SSA form. */
6167 gimple_duplicate_bb (basic_block bb
, copy_bb_data
*id
)
6170 gimple_stmt_iterator gsi_tgt
;
6172 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
6174 /* Copy the PHI nodes. We ignore PHI node arguments here because
6175 the incoming edges have not been setup yet. */
6176 for (gphi_iterator gpi
= gsi_start_phis (bb
);
6182 copy
= create_phi_node (NULL_TREE
, new_bb
);
6183 create_new_def_for (gimple_phi_result (phi
), copy
,
6184 gimple_phi_result_ptr (copy
));
6185 gimple_set_uid (copy
, gimple_uid (phi
));
6188 gsi_tgt
= gsi_start_bb (new_bb
);
6189 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
6193 def_operand_p def_p
;
6194 ssa_op_iter op_iter
;
6196 gimple
*stmt
, *copy
;
6198 stmt
= gsi_stmt (gsi
);
6199 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6202 /* Don't duplicate label debug stmts. */
6203 if (gimple_debug_bind_p (stmt
)
6204 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
6208 /* Create a new copy of STMT and duplicate STMT's virtual
6210 copy
= gimple_copy (stmt
);
6211 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
6213 maybe_duplicate_eh_stmt (copy
, stmt
);
6214 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
6216 /* When copying around a stmt writing into a local non-user
6217 aggregate, make sure it won't share stack slot with other
6219 lhs
= gimple_get_lhs (stmt
);
6220 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
6222 tree base
= get_base_address (lhs
);
6224 && (VAR_P (base
) || TREE_CODE (base
) == RESULT_DECL
)
6225 && DECL_IGNORED_P (base
)
6226 && !TREE_STATIC (base
)
6227 && !DECL_EXTERNAL (base
)
6228 && (!VAR_P (base
) || !DECL_HAS_VALUE_EXPR_P (base
)))
6229 DECL_NONSHAREABLE (base
) = 1;
6232 /* If requested remap dependence info of cliques brought in
6235 for (unsigned i
= 0; i
< gimple_num_ops (copy
); ++i
)
6237 tree op
= gimple_op (copy
, i
);
6240 if (TREE_CODE (op
) == ADDR_EXPR
6241 || TREE_CODE (op
) == WITH_SIZE_EXPR
)
6242 op
= TREE_OPERAND (op
, 0);
6243 while (handled_component_p (op
))
6244 op
= TREE_OPERAND (op
, 0);
6245 if ((TREE_CODE (op
) == MEM_REF
6246 || TREE_CODE (op
) == TARGET_MEM_REF
)
6247 && MR_DEPENDENCE_CLIQUE (op
) > 1
6248 && MR_DEPENDENCE_CLIQUE (op
) != bb
->loop_father
->owned_clique
)
6250 if (!id
->dependence_map
)
6251 id
->dependence_map
= new hash_map
<dependence_hash
,
6254 unsigned short &newc
= id
->dependence_map
->get_or_insert
6255 (MR_DEPENDENCE_CLIQUE (op
), &existed
);
6258 gcc_assert (MR_DEPENDENCE_CLIQUE (op
) <= cfun
->last_clique
);
6259 newc
= ++cfun
->last_clique
;
6261 MR_DEPENDENCE_CLIQUE (op
) = newc
;
6265 /* Create new names for all the definitions created by COPY and
6266 add replacement mappings for each new name. */
6267 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6268 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
6274 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6277 add_phi_args_after_copy_edge (edge e_copy
)
6279 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6282 gphi
*phi
, *phi_copy
;
6284 gphi_iterator psi
, psi_copy
;
6286 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
6289 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6291 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6292 dest
= get_bb_original (e_copy
->dest
);
6294 dest
= e_copy
->dest
;
6296 e
= find_edge (bb
, dest
);
6299 /* During loop unrolling the target of the latch edge is copied.
6300 In this case we are not looking for edge to dest, but to
6301 duplicated block whose original was dest. */
6302 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6304 if ((e
->dest
->flags
& BB_DUPLICATED
)
6305 && get_bb_original (e
->dest
) == dest
)
6309 gcc_assert (e
!= NULL
);
6312 for (psi
= gsi_start_phis (e
->dest
),
6313 psi_copy
= gsi_start_phis (e_copy
->dest
);
6315 gsi_next (&psi
), gsi_next (&psi_copy
))
6318 phi_copy
= psi_copy
.phi ();
6319 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6320 add_phi_arg (phi_copy
, def
, e_copy
,
6321 gimple_phi_arg_location_from_edge (phi
, e
));
6326 /* Basic block BB_COPY was created by code duplication. Add phi node
6327 arguments for edges going out of BB_COPY. The blocks that were
6328 duplicated have BB_DUPLICATED set. */
6331 add_phi_args_after_copy_bb (basic_block bb_copy
)
6336 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6338 add_phi_args_after_copy_edge (e_copy
);
6342 /* Blocks in REGION_COPY array of length N_REGION were created by
6343 duplication of basic blocks. Add phi node arguments for edges
6344 going from these blocks. If E_COPY is not NULL, also add
6345 phi node arguments for its destination.*/
6348 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6353 for (i
= 0; i
< n_region
; i
++)
6354 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6356 for (i
= 0; i
< n_region
; i
++)
6357 add_phi_args_after_copy_bb (region_copy
[i
]);
6359 add_phi_args_after_copy_edge (e_copy
);
6361 for (i
= 0; i
< n_region
; i
++)
6362 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6365 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6366 important exit edge EXIT. By important we mean that no SSA name defined
6367 inside region is live over the other exit edges of the region. All entry
6368 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6369 to the duplicate of the region. Dominance and loop information is
6370 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6371 UPDATE_DOMINANCE is false then we assume that the caller will update the
6372 dominance information after calling this function. The new basic
6373 blocks are stored to REGION_COPY in the same order as they had in REGION,
6374 provided that REGION_COPY is not NULL.
6375 The function returns false if it is unable to copy the region,
6379 gimple_duplicate_sese_region (edge entry
, edge exit
,
6380 basic_block
*region
, unsigned n_region
,
6381 basic_block
*region_copy
,
6382 bool update_dominance
)
6385 bool free_region_copy
= false, copying_header
= false;
6386 struct loop
*loop
= entry
->dest
->loop_father
;
6388 vec
<basic_block
> doms
= vNULL
;
6390 profile_count total_count
= profile_count::uninitialized ();
6391 profile_count entry_count
= profile_count::uninitialized ();
6393 if (!can_copy_bbs_p (region
, n_region
))
6396 /* Some sanity checking. Note that we do not check for all possible
6397 missuses of the functions. I.e. if you ask to copy something weird,
6398 it will work, but the state of structures probably will not be
6400 for (i
= 0; i
< n_region
; i
++)
6402 /* We do not handle subloops, i.e. all the blocks must belong to the
6404 if (region
[i
]->loop_father
!= loop
)
6407 if (region
[i
] != entry
->dest
6408 && region
[i
] == loop
->header
)
6412 /* In case the function is used for loop header copying (which is the primary
6413 use), ensure that EXIT and its copy will be new latch and entry edges. */
6414 if (loop
->header
== entry
->dest
)
6416 copying_header
= true;
6418 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6421 for (i
= 0; i
< n_region
; i
++)
6422 if (region
[i
] != exit
->src
6423 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6427 initialize_original_copy_tables ();
6430 set_loop_copy (loop
, loop_outer (loop
));
6432 set_loop_copy (loop
, loop
);
6436 region_copy
= XNEWVEC (basic_block
, n_region
);
6437 free_region_copy
= true;
6440 /* Record blocks outside the region that are dominated by something
6442 if (update_dominance
)
6445 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6448 if (entry
->dest
->count
.initialized_p ())
6450 total_count
= entry
->dest
->count
;
6451 entry_count
= entry
->count ();
6452 /* Fix up corner cases, to avoid division by zero or creation of negative
6454 if (entry_count
> total_count
)
6455 entry_count
= total_count
;
6458 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6459 split_edge_bb_loc (entry
), update_dominance
);
6460 if (total_count
.initialized_p () && entry_count
.initialized_p ())
6462 scale_bbs_frequencies_profile_count (region
, n_region
,
6463 total_count
- entry_count
,
6465 scale_bbs_frequencies_profile_count (region_copy
, n_region
, entry_count
,
6471 loop
->header
= exit
->dest
;
6472 loop
->latch
= exit
->src
;
6475 /* Redirect the entry and add the phi node arguments. */
6476 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6477 gcc_assert (redirected
!= NULL
);
6478 flush_pending_stmts (entry
);
6480 /* Concerning updating of dominators: We must recount dominators
6481 for entry block and its copy. Anything that is outside of the
6482 region, but was dominated by something inside needs recounting as
6484 if (update_dominance
)
6486 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6487 doms
.safe_push (get_bb_original (entry
->dest
));
6488 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6492 /* Add the other PHI node arguments. */
6493 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6495 if (free_region_copy
)
6498 free_original_copy_tables ();
6502 /* Checks if BB is part of the region defined by N_REGION BBS. */
6504 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6508 for (n
= 0; n
< n_region
; n
++)
6516 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6517 are stored to REGION_COPY in the same order in that they appear
6518 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6519 the region, EXIT an exit from it. The condition guarding EXIT
6520 is moved to ENTRY. Returns true if duplication succeeds, false
6546 gimple_duplicate_sese_tail (edge entry
, edge exit
,
6547 basic_block
*region
, unsigned n_region
,
6548 basic_block
*region_copy
)
6551 bool free_region_copy
= false;
6552 struct loop
*loop
= exit
->dest
->loop_father
;
6553 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6554 basic_block switch_bb
, entry_bb
, nentry_bb
;
6555 vec
<basic_block
> doms
;
6556 profile_count total_count
= profile_count::uninitialized (),
6557 exit_count
= profile_count::uninitialized ();
6558 edge exits
[2], nexits
[2], e
;
6559 gimple_stmt_iterator gsi
;
6562 basic_block exit_bb
;
6566 struct loop
*target
, *aloop
, *cloop
;
6568 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6570 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6572 if (!can_copy_bbs_p (region
, n_region
))
6575 initialize_original_copy_tables ();
6576 set_loop_copy (orig_loop
, loop
);
6579 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6581 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6583 cloop
= duplicate_loop (aloop
, target
);
6584 duplicate_subloops (aloop
, cloop
);
6590 region_copy
= XNEWVEC (basic_block
, n_region
);
6591 free_region_copy
= true;
6594 gcc_assert (!need_ssa_update_p (cfun
));
6596 /* Record blocks outside the region that are dominated by something
6598 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6600 total_count
= exit
->src
->count
;
6601 exit_count
= exit
->count ();
6602 /* Fix up corner cases, to avoid division by zero or creation of negative
6604 if (exit_count
> total_count
)
6605 exit_count
= total_count
;
6607 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6608 split_edge_bb_loc (exit
), true);
6609 if (total_count
.initialized_p () && exit_count
.initialized_p ())
6611 scale_bbs_frequencies_profile_count (region
, n_region
,
6612 total_count
- exit_count
,
6614 scale_bbs_frequencies_profile_count (region_copy
, n_region
, exit_count
,
6618 /* Create the switch block, and put the exit condition to it. */
6619 entry_bb
= entry
->dest
;
6620 nentry_bb
= get_bb_copy (entry_bb
);
6621 if (!last_stmt (entry
->src
)
6622 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6623 switch_bb
= entry
->src
;
6625 switch_bb
= split_edge (entry
);
6626 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6628 gsi
= gsi_last_bb (switch_bb
);
6629 cond_stmt
= last_stmt (exit
->src
);
6630 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6631 cond_stmt
= gimple_copy (cond_stmt
);
6633 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6635 sorig
= single_succ_edge (switch_bb
);
6636 sorig
->flags
= exits
[1]->flags
;
6637 sorig
->probability
= exits
[1]->probability
;
6638 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6639 snew
->probability
= exits
[0]->probability
;
6642 /* Register the new edge from SWITCH_BB in loop exit lists. */
6643 rescan_loop_exit (snew
, true, false);
6645 /* Add the PHI node arguments. */
6646 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6648 /* Get rid of now superfluous conditions and associated edges (and phi node
6650 exit_bb
= exit
->dest
;
6652 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6653 PENDING_STMT (e
) = NULL
;
6655 /* The latch of ORIG_LOOP was copied, and so was the backedge
6656 to the original header. We redirect this backedge to EXIT_BB. */
6657 for (i
= 0; i
< n_region
; i
++)
6658 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6660 gcc_assert (single_succ_edge (region_copy
[i
]));
6661 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6662 PENDING_STMT (e
) = NULL
;
6663 for (psi
= gsi_start_phis (exit_bb
);
6668 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6669 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6672 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6673 PENDING_STMT (e
) = NULL
;
6675 /* Anything that is outside of the region, but was dominated by something
6676 inside needs to update dominance info. */
6677 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6679 /* Update the SSA web. */
6680 update_ssa (TODO_update_ssa
);
6682 if (free_region_copy
)
6685 free_original_copy_tables ();
6689 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6690 adding blocks when the dominator traversal reaches EXIT. This
6691 function silently assumes that ENTRY strictly dominates EXIT. */
6694 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6695 vec
<basic_block
> *bbs_p
)
6699 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6701 son
= next_dom_son (CDI_DOMINATORS
, son
))
6703 bbs_p
->safe_push (son
);
6705 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6709 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6710 The duplicates are recorded in VARS_MAP. */
6713 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6716 tree t
= *tp
, new_t
;
6717 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6719 if (DECL_CONTEXT (t
) == to_context
)
6723 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6729 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6730 add_local_decl (f
, new_t
);
6734 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6735 new_t
= copy_node (t
);
6737 DECL_CONTEXT (new_t
) = to_context
;
6748 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6749 VARS_MAP maps old ssa names and var_decls to the new ones. */
6752 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6757 gcc_assert (!virtual_operand_p (name
));
6759 tree
*loc
= vars_map
->get (name
);
6763 tree decl
= SSA_NAME_VAR (name
);
6766 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
6767 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6768 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6769 decl
, SSA_NAME_DEF_STMT (name
));
6772 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6773 name
, SSA_NAME_DEF_STMT (name
));
6775 /* Now that we've used the def stmt to define new_name, make sure it
6776 doesn't define name anymore. */
6777 SSA_NAME_DEF_STMT (name
) = NULL
;
6779 vars_map
->put (name
, new_name
);
6793 hash_map
<tree
, tree
> *vars_map
;
6794 htab_t new_label_map
;
6795 hash_map
<void *, void *> *eh_map
;
6799 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6800 contained in *TP if it has been ORIG_BLOCK previously and change the
6801 DECL_CONTEXT of every local variable referenced in *TP. */
6804 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6806 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6807 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6812 tree block
= TREE_BLOCK (t
);
6813 if (block
== NULL_TREE
)
6815 else if (block
== p
->orig_block
6816 || p
->orig_block
== NULL_TREE
)
6818 /* tree_node_can_be_shared says we can share invariant
6819 addresses but unshare_expr copies them anyways. Make sure
6820 to unshare before adjusting the block in place - we do not
6821 always see a copy here. */
6822 if (TREE_CODE (t
) == ADDR_EXPR
6823 && is_gimple_min_invariant (t
))
6824 *tp
= t
= unshare_expr (t
);
6825 TREE_SET_BLOCK (t
, p
->new_block
);
6827 else if (flag_checking
)
6829 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6830 block
= BLOCK_SUPERCONTEXT (block
);
6831 gcc_assert (block
== p
->orig_block
);
6834 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6836 if (TREE_CODE (t
) == SSA_NAME
)
6837 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6838 else if (TREE_CODE (t
) == PARM_DECL
6839 && gimple_in_ssa_p (cfun
))
6840 *tp
= *(p
->vars_map
->get (t
));
6841 else if (TREE_CODE (t
) == LABEL_DECL
)
6843 if (p
->new_label_map
)
6845 struct tree_map in
, *out
;
6847 out
= (struct tree_map
*)
6848 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6853 /* For FORCED_LABELs we can end up with references from other
6854 functions if some SESE regions are outlined. It is UB to
6855 jump in between them, but they could be used just for printing
6856 addresses etc. In that case, DECL_CONTEXT on the label should
6857 be the function containing the glabel stmt with that LABEL_DECL,
6858 rather than whatever function a reference to the label was seen
6860 if (!FORCED_LABEL (t
) && !DECL_NONLOCAL (t
))
6861 DECL_CONTEXT (t
) = p
->to_context
;
6863 else if (p
->remap_decls_p
)
6865 /* Replace T with its duplicate. T should no longer appear in the
6866 parent function, so this looks wasteful; however, it may appear
6867 in referenced_vars, and more importantly, as virtual operands of
6868 statements, and in alias lists of other variables. It would be
6869 quite difficult to expunge it from all those places. ??? It might
6870 suffice to do this for addressable variables. */
6871 if ((VAR_P (t
) && !is_global_var (t
))
6872 || TREE_CODE (t
) == CONST_DECL
)
6873 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6877 else if (TYPE_P (t
))
6883 /* Helper for move_stmt_r. Given an EH region number for the source
6884 function, map that to the duplicate EH regio number in the dest. */
6887 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6889 eh_region old_r
, new_r
;
6891 old_r
= get_eh_region_from_number (old_nr
);
6892 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6894 return new_r
->index
;
6897 /* Similar, but operate on INTEGER_CSTs. */
6900 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6904 old_nr
= tree_to_shwi (old_t_nr
);
6905 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6907 return build_int_cst (integer_type_node
, new_nr
);
6910 /* Like move_stmt_op, but for gimple statements.
6912 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6913 contained in the current statement in *GSI_P and change the
6914 DECL_CONTEXT of every local variable referenced in the current
6918 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6919 struct walk_stmt_info
*wi
)
6921 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6922 gimple
*stmt
= gsi_stmt (*gsi_p
);
6923 tree block
= gimple_block (stmt
);
6925 if (block
== p
->orig_block
6926 || (p
->orig_block
== NULL_TREE
6927 && block
!= NULL_TREE
))
6928 gimple_set_block (stmt
, p
->new_block
);
6930 switch (gimple_code (stmt
))
6933 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6935 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6936 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
6937 switch (DECL_FUNCTION_CODE (fndecl
))
6939 case BUILT_IN_EH_COPY_VALUES
:
6940 r
= gimple_call_arg (stmt
, 1);
6941 r
= move_stmt_eh_region_tree_nr (r
, p
);
6942 gimple_call_set_arg (stmt
, 1, r
);
6945 case BUILT_IN_EH_POINTER
:
6946 case BUILT_IN_EH_FILTER
:
6947 r
= gimple_call_arg (stmt
, 0);
6948 r
= move_stmt_eh_region_tree_nr (r
, p
);
6949 gimple_call_set_arg (stmt
, 0, r
);
6960 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6961 int r
= gimple_resx_region (resx_stmt
);
6962 r
= move_stmt_eh_region_nr (r
, p
);
6963 gimple_resx_set_region (resx_stmt
, r
);
6967 case GIMPLE_EH_DISPATCH
:
6969 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6970 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6971 r
= move_stmt_eh_region_nr (r
, p
);
6972 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6976 case GIMPLE_OMP_RETURN
:
6977 case GIMPLE_OMP_CONTINUE
:
6982 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6983 so that such labels can be referenced from other regions.
6984 Make sure to update it when seeing a GIMPLE_LABEL though,
6985 that is the owner of the label. */
6986 walk_gimple_op (stmt
, move_stmt_op
, wi
);
6987 *handled_ops_p
= true;
6988 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
6989 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
6990 DECL_CONTEXT (label
) = p
->to_context
;
6995 if (is_gimple_omp (stmt
))
6997 /* Do not remap variables inside OMP directives. Variables
6998 referenced in clauses and directive header belong to the
6999 parent function and should not be moved into the child
7001 bool save_remap_decls_p
= p
->remap_decls_p
;
7002 p
->remap_decls_p
= false;
7003 *handled_ops_p
= true;
7005 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
7008 p
->remap_decls_p
= save_remap_decls_p
;
7016 /* Move basic block BB from function CFUN to function DEST_FN. The
7017 block is moved out of the original linked list and placed after
7018 block AFTER in the new list. Also, the block is removed from the
7019 original array of blocks and placed in DEST_FN's array of blocks.
7020 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7021 updated to reflect the moved edges.
7023 The local variables are remapped to new instances, VARS_MAP is used
7024 to record the mapping. */
7027 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
7028 basic_block after
, bool update_edge_count_p
,
7029 struct move_stmt_d
*d
)
7031 struct control_flow_graph
*cfg
;
7034 gimple_stmt_iterator si
;
7035 unsigned old_len
, new_len
;
7037 /* Remove BB from dominance structures. */
7038 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
7040 /* Move BB from its current loop to the copy in the new function. */
7043 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
7045 bb
->loop_father
= new_loop
;
7048 /* Link BB to the new linked list. */
7049 move_block_after (bb
, after
);
7051 /* Update the edge count in the corresponding flowgraphs. */
7052 if (update_edge_count_p
)
7053 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7055 cfun
->cfg
->x_n_edges
--;
7056 dest_cfun
->cfg
->x_n_edges
++;
7059 /* Remove BB from the original basic block array. */
7060 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
7061 cfun
->cfg
->x_n_basic_blocks
--;
7063 /* Grow DEST_CFUN's basic block array if needed. */
7064 cfg
= dest_cfun
->cfg
;
7065 cfg
->x_n_basic_blocks
++;
7066 if (bb
->index
>= cfg
->x_last_basic_block
)
7067 cfg
->x_last_basic_block
= bb
->index
+ 1;
7069 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
7070 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
7072 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
7073 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
7076 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
7078 /* Remap the variables in phi nodes. */
7079 for (gphi_iterator psi
= gsi_start_phis (bb
);
7082 gphi
*phi
= psi
.phi ();
7084 tree op
= PHI_RESULT (phi
);
7088 if (virtual_operand_p (op
))
7090 /* Remove the phi nodes for virtual operands (alias analysis will be
7091 run for the new function, anyway). */
7092 remove_phi_node (&psi
, true);
7096 SET_PHI_RESULT (phi
,
7097 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7098 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
7100 op
= USE_FROM_PTR (use
);
7101 if (TREE_CODE (op
) == SSA_NAME
)
7102 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7105 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
7107 location_t locus
= gimple_phi_arg_location (phi
, i
);
7108 tree block
= LOCATION_BLOCK (locus
);
7110 if (locus
== UNKNOWN_LOCATION
)
7112 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
7114 locus
= set_block (locus
, d
->new_block
);
7115 gimple_phi_arg_set_location (phi
, i
, locus
);
7122 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7124 gimple
*stmt
= gsi_stmt (si
);
7125 struct walk_stmt_info wi
;
7127 memset (&wi
, 0, sizeof (wi
));
7129 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
7131 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
7133 tree label
= gimple_label_label (label_stmt
);
7134 int uid
= LABEL_DECL_UID (label
);
7136 gcc_assert (uid
> -1);
7138 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
7139 if (old_len
<= (unsigned) uid
)
7141 new_len
= 3 * uid
/ 2 + 1;
7142 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
7145 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
7146 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
7148 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
7150 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
7151 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
7154 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
7155 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
7157 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
7158 gimple_remove_stmt_histograms (cfun
, stmt
);
7160 /* We cannot leave any operands allocated from the operand caches of
7161 the current function. */
7162 free_stmt_operands (cfun
, stmt
);
7163 push_cfun (dest_cfun
);
7168 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7169 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
7171 tree block
= LOCATION_BLOCK (e
->goto_locus
);
7172 if (d
->orig_block
== NULL_TREE
7173 || block
== d
->orig_block
)
7174 e
->goto_locus
= set_block (e
->goto_locus
, d
->new_block
);
7178 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7179 the outermost EH region. Use REGION as the incoming base EH region.
7180 If there is no single outermost region, return NULL and set *ALL to
7184 find_outermost_region_in_block (struct function
*src_cfun
,
7185 basic_block bb
, eh_region region
,
7188 gimple_stmt_iterator si
;
7190 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7192 gimple
*stmt
= gsi_stmt (si
);
7193 eh_region stmt_region
;
7196 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
7197 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
7201 region
= stmt_region
;
7202 else if (stmt_region
!= region
)
7204 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
7218 new_label_mapper (tree decl
, void *data
)
7220 htab_t hash
= (htab_t
) data
;
7224 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
7226 m
= XNEW (struct tree_map
);
7227 m
->hash
= DECL_UID (decl
);
7228 m
->base
.from
= decl
;
7229 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
7230 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
7231 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
7232 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
7234 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
7235 gcc_assert (*slot
== NULL
);
7242 /* Tree walker to replace the decls used inside value expressions by
7246 replace_block_vars_by_duplicates_1 (tree
*tp
, int *walk_subtrees
, void *data
)
7248 struct replace_decls_d
*rd
= (struct replace_decls_d
*)data
;
7250 switch (TREE_CODE (*tp
))
7255 replace_by_duplicate_decl (tp
, rd
->vars_map
, rd
->to_context
);
7261 if (IS_TYPE_OR_DECL_P (*tp
))
7262 *walk_subtrees
= false;
7267 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7271 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
7276 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
7279 if (!VAR_P (t
) && TREE_CODE (t
) != CONST_DECL
)
7281 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
7284 if (VAR_P (*tp
) && DECL_HAS_VALUE_EXPR_P (*tp
))
7286 tree x
= DECL_VALUE_EXPR (*tp
);
7287 struct replace_decls_d rd
= { vars_map
, to_context
};
7289 walk_tree (&x
, replace_block_vars_by_duplicates_1
, &rd
, NULL
);
7290 SET_DECL_VALUE_EXPR (t
, x
);
7291 DECL_HAS_VALUE_EXPR_P (t
) = 1;
7293 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
7298 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
7299 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
7302 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7306 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
7309 /* Discard it from the old loop array. */
7310 (*get_loops (fn1
))[loop
->num
] = NULL
;
7312 /* Place it in the new loop array, assigning it a new number. */
7313 loop
->num
= number_of_loops (fn2
);
7314 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
7316 /* Recurse to children. */
7317 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
7318 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
7321 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7322 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7325 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
7330 bitmap bbs
= BITMAP_ALLOC (NULL
);
7333 gcc_assert (entry
!= NULL
);
7334 gcc_assert (entry
!= exit
);
7335 gcc_assert (bbs_p
!= NULL
);
7337 gcc_assert (bbs_p
->length () > 0);
7339 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7340 bitmap_set_bit (bbs
, bb
->index
);
7342 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
7343 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
7345 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7349 gcc_assert (single_pred_p (entry
));
7350 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
7353 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
7356 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
7361 gcc_assert (single_succ_p (exit
));
7362 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
7365 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
7368 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7375 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7378 gather_ssa_name_hash_map_from (tree
const &from
, tree
const &, void *data
)
7380 bitmap release_names
= (bitmap
)data
;
7382 if (TREE_CODE (from
) != SSA_NAME
)
7385 bitmap_set_bit (release_names
, SSA_NAME_VERSION (from
));
7389 /* Return LOOP_DIST_ALIAS call if present in BB. */
7392 find_loop_dist_alias (basic_block bb
)
7394 gimple
*g
= last_stmt (bb
);
7395 if (g
== NULL
|| gimple_code (g
) != GIMPLE_COND
)
7398 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7400 if (gsi_end_p (gsi
))
7404 if (gimple_call_internal_p (g
, IFN_LOOP_DIST_ALIAS
))
7409 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7410 to VALUE and update any immediate uses of it's LHS. */
7413 fold_loop_internal_call (gimple
*g
, tree value
)
7415 tree lhs
= gimple_call_lhs (g
);
7416 use_operand_p use_p
;
7417 imm_use_iterator iter
;
7419 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7421 update_call_from_tree (&gsi
, value
);
7422 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
7424 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
7425 SET_USE (use_p
, value
);
7426 update_stmt (use_stmt
);
7430 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7431 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7432 single basic block in the original CFG and the new basic block is
7433 returned. DEST_CFUN must not have a CFG yet.
7435 Note that the region need not be a pure SESE region. Blocks inside
7436 the region may contain calls to abort/exit. The only restriction
7437 is that ENTRY_BB should be the only entry point and it must
7440 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7441 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7442 to the new function.
7444 All local variables referenced in the region are assumed to be in
7445 the corresponding BLOCK_VARS and unexpanded variable lists
7446 associated with DEST_CFUN.
7448 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7449 reimplement move_sese_region_to_fn by duplicating the region rather than
7453 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7454 basic_block exit_bb
, tree orig_block
)
7456 vec
<basic_block
> bbs
, dom_bbs
;
7457 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7458 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7459 struct function
*saved_cfun
= cfun
;
7460 int *entry_flag
, *exit_flag
;
7461 profile_probability
*entry_prob
, *exit_prob
;
7462 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7465 htab_t new_label_map
;
7466 hash_map
<void *, void *> *eh_map
;
7467 struct loop
*loop
= entry_bb
->loop_father
;
7468 struct loop
*loop0
= get_loop (saved_cfun
, 0);
7469 struct move_stmt_d d
;
7471 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7473 gcc_assert (entry_bb
!= exit_bb
7475 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7477 /* Collect all the blocks in the region. Manually add ENTRY_BB
7478 because it won't be added by dfs_enumerate_from. */
7480 bbs
.safe_push (entry_bb
);
7481 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7484 verify_sese (entry_bb
, exit_bb
, &bbs
);
7486 /* The blocks that used to be dominated by something in BBS will now be
7487 dominated by the new block. */
7488 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7492 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7493 the predecessor edges to ENTRY_BB and the successor edges to
7494 EXIT_BB so that we can re-attach them to the new basic block that
7495 will replace the region. */
7496 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7497 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7498 entry_flag
= XNEWVEC (int, num_entry_edges
);
7499 entry_prob
= XNEWVEC (profile_probability
, num_entry_edges
);
7501 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7503 entry_prob
[i
] = e
->probability
;
7504 entry_flag
[i
] = e
->flags
;
7505 entry_pred
[i
++] = e
->src
;
7511 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7512 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7513 exit_flag
= XNEWVEC (int, num_exit_edges
);
7514 exit_prob
= XNEWVEC (profile_probability
, num_exit_edges
);
7516 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7518 exit_prob
[i
] = e
->probability
;
7519 exit_flag
[i
] = e
->flags
;
7520 exit_succ
[i
++] = e
->dest
;
7532 /* Switch context to the child function to initialize DEST_FN's CFG. */
7533 gcc_assert (dest_cfun
->cfg
== NULL
);
7534 push_cfun (dest_cfun
);
7536 init_empty_tree_cfg ();
7538 /* Initialize EH information for the new function. */
7540 new_label_map
= NULL
;
7543 eh_region region
= NULL
;
7546 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7548 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
, &all
);
7553 init_eh_for_function ();
7554 if (region
!= NULL
|| all
)
7556 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7557 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7558 new_label_mapper
, new_label_map
);
7562 /* Initialize an empty loop tree. */
7563 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7564 init_loops_structure (dest_cfun
, loops
, 1);
7565 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7566 set_loops_for_fn (dest_cfun
, loops
);
7568 vec
<loop_p
, va_gc
> *larray
= get_loops (saved_cfun
)->copy ();
7570 /* Move the outlined loop tree part. */
7571 num_nodes
= bbs
.length ();
7572 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7574 if (bb
->loop_father
->header
== bb
)
7576 struct loop
*this_loop
= bb
->loop_father
;
7577 struct loop
*outer
= loop_outer (this_loop
);
7579 /* If the SESE region contains some bbs ending with
7580 a noreturn call, those are considered to belong
7581 to the outermost loop in saved_cfun, rather than
7582 the entry_bb's loop_father. */
7586 num_nodes
-= this_loop
->num_nodes
;
7587 flow_loop_tree_node_remove (bb
->loop_father
);
7588 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7589 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7592 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7595 /* Remove loop exits from the outlined region. */
7596 if (loops_for_fn (saved_cfun
)->exits
)
7597 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7599 struct loops
*l
= loops_for_fn (saved_cfun
);
7601 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7604 l
->exits
->clear_slot (slot
);
7608 /* Adjust the number of blocks in the tree root of the outlined part. */
7609 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7611 /* Setup a mapping to be used by move_block_to_fn. */
7612 loop
->aux
= current_loops
->tree_root
;
7613 loop0
->aux
= current_loops
->tree_root
;
7615 /* Fix up orig_loop_num. If the block referenced in it has been moved
7616 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7618 signed char *moved_orig_loop_num
= NULL
;
7619 FOR_EACH_LOOP_FN (dest_cfun
, dloop
, 0)
7620 if (dloop
->orig_loop_num
)
7622 if (moved_orig_loop_num
== NULL
)
7624 = XCNEWVEC (signed char, vec_safe_length (larray
));
7625 if ((*larray
)[dloop
->orig_loop_num
] != NULL
7626 && get_loop (saved_cfun
, dloop
->orig_loop_num
) == NULL
)
7628 if (moved_orig_loop_num
[dloop
->orig_loop_num
] >= 0
7629 && moved_orig_loop_num
[dloop
->orig_loop_num
] < 2)
7630 moved_orig_loop_num
[dloop
->orig_loop_num
]++;
7631 dloop
->orig_loop_num
= (*larray
)[dloop
->orig_loop_num
]->num
;
7635 moved_orig_loop_num
[dloop
->orig_loop_num
] = -1;
7636 dloop
->orig_loop_num
= 0;
7641 if (moved_orig_loop_num
)
7643 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7645 gimple
*g
= find_loop_dist_alias (bb
);
7649 int orig_loop_num
= tree_to_shwi (gimple_call_arg (g
, 0));
7650 gcc_assert (orig_loop_num
7651 && (unsigned) orig_loop_num
< vec_safe_length (larray
));
7652 if (moved_orig_loop_num
[orig_loop_num
] == 2)
7654 /* If we have moved both loops with this orig_loop_num into
7655 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7656 too, update the first argument. */
7657 gcc_assert ((*larray
)[dloop
->orig_loop_num
] != NULL
7658 && (get_loop (saved_cfun
, dloop
->orig_loop_num
)
7660 tree t
= build_int_cst (integer_type_node
,
7661 (*larray
)[dloop
->orig_loop_num
]->num
);
7662 gimple_call_set_arg (g
, 0, t
);
7664 /* Make sure the following loop will not update it. */
7665 moved_orig_loop_num
[orig_loop_num
] = 0;
7668 /* Otherwise at least one of the loops stayed in saved_cfun.
7669 Remove the LOOP_DIST_ALIAS call. */
7670 fold_loop_internal_call (g
, gimple_call_arg (g
, 1));
7672 FOR_EACH_BB_FN (bb
, saved_cfun
)
7674 gimple
*g
= find_loop_dist_alias (bb
);
7677 int orig_loop_num
= tree_to_shwi (gimple_call_arg (g
, 0));
7678 gcc_assert (orig_loop_num
7679 && (unsigned) orig_loop_num
< vec_safe_length (larray
));
7680 if (moved_orig_loop_num
[orig_loop_num
])
7681 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7682 of the corresponding loops was moved, remove it. */
7683 fold_loop_internal_call (g
, gimple_call_arg (g
, 1));
7685 XDELETEVEC (moved_orig_loop_num
);
7689 /* Move blocks from BBS into DEST_CFUN. */
7690 gcc_assert (bbs
.length () >= 2);
7691 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7692 hash_map
<tree
, tree
> vars_map
;
7694 memset (&d
, 0, sizeof (d
));
7695 d
.orig_block
= orig_block
;
7696 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7697 d
.from_context
= cfun
->decl
;
7698 d
.to_context
= dest_cfun
->decl
;
7699 d
.vars_map
= &vars_map
;
7700 d
.new_label_map
= new_label_map
;
7702 d
.remap_decls_p
= true;
7704 if (gimple_in_ssa_p (cfun
))
7705 for (tree arg
= DECL_ARGUMENTS (d
.to_context
); arg
; arg
= DECL_CHAIN (arg
))
7707 tree narg
= make_ssa_name_fn (dest_cfun
, arg
, gimple_build_nop ());
7708 set_ssa_default_def (dest_cfun
, arg
, narg
);
7709 vars_map
.put (arg
, narg
);
7712 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7714 /* No need to update edge counts on the last block. It has
7715 already been updated earlier when we detached the region from
7716 the original CFG. */
7717 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7723 /* Loop sizes are no longer correct, fix them up. */
7724 loop
->num_nodes
-= num_nodes
;
7725 for (struct loop
*outer
= loop_outer (loop
);
7726 outer
; outer
= loop_outer (outer
))
7727 outer
->num_nodes
-= num_nodes
;
7728 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7730 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7733 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7738 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7740 dest_cfun
->has_simduid_loops
= true;
7742 if (aloop
->force_vectorize
)
7743 dest_cfun
->has_force_vectorize_loops
= true;
7747 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7751 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7753 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7754 = BLOCK_SUBBLOCKS (orig_block
);
7755 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7756 block
; block
= BLOCK_CHAIN (block
))
7757 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7758 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7761 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7762 &vars_map
, dest_cfun
->decl
);
7765 htab_delete (new_label_map
);
7769 if (gimple_in_ssa_p (cfun
))
7771 /* We need to release ssa-names in a defined order, so first find them,
7772 and then iterate in ascending version order. */
7773 bitmap release_names
= BITMAP_ALLOC (NULL
);
7774 vars_map
.traverse
<void *, gather_ssa_name_hash_map_from
> (release_names
);
7777 EXECUTE_IF_SET_IN_BITMAP (release_names
, 0, i
, bi
)
7778 release_ssa_name (ssa_name (i
));
7779 BITMAP_FREE (release_names
);
7782 /* Rewire the entry and exit blocks. The successor to the entry
7783 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7784 the child function. Similarly, the predecessor of DEST_FN's
7785 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7786 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7787 various CFG manipulation function get to the right CFG.
7789 FIXME, this is silly. The CFG ought to become a parameter to
7791 push_cfun (dest_cfun
);
7792 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= entry_bb
->count
;
7793 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7796 make_single_succ_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7797 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= exit_bb
->count
;
7800 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= profile_count::zero ();
7803 /* Back in the original function, the SESE region has disappeared,
7804 create a new basic block in its place. */
7805 bb
= create_empty_bb (entry_pred
[0]);
7807 add_bb_to_loop (bb
, loop
);
7808 for (i
= 0; i
< num_entry_edges
; i
++)
7810 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7811 e
->probability
= entry_prob
[i
];
7814 for (i
= 0; i
< num_exit_edges
; i
++)
7816 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7817 e
->probability
= exit_prob
[i
];
7820 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7821 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7822 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7839 /* Dump default def DEF to file FILE using FLAGS and indentation
7843 dump_default_def (FILE *file
, tree def
, int spc
, dump_flags_t flags
)
7845 for (int i
= 0; i
< spc
; ++i
)
7846 fprintf (file
, " ");
7847 dump_ssaname_info_to_file (file
, def
, spc
);
7849 print_generic_expr (file
, TREE_TYPE (def
), flags
);
7850 fprintf (file
, " ");
7851 print_generic_expr (file
, def
, flags
);
7852 fprintf (file
, " = ");
7853 print_generic_expr (file
, SSA_NAME_VAR (def
), flags
);
7854 fprintf (file
, ";\n");
7857 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7860 print_no_sanitize_attr_value (FILE *file
, tree value
)
7862 unsigned int flags
= tree_to_uhwi (value
);
7864 for (int i
= 0; sanitizer_opts
[i
].name
!= NULL
; ++i
)
7866 if ((sanitizer_opts
[i
].flag
& flags
) == sanitizer_opts
[i
].flag
)
7869 fprintf (file
, " | ");
7870 fprintf (file
, "%s", sanitizer_opts
[i
].name
);
7876 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7880 dump_function_to_file (tree fndecl
, FILE *file
, dump_flags_t flags
)
7882 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7883 struct function
*dsf
;
7884 bool ignore_topmost_bind
= false, any_var
= false;
7887 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7888 && decl_is_tm_clone (fndecl
));
7889 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7891 if (DECL_ATTRIBUTES (fndecl
) != NULL_TREE
)
7893 fprintf (file
, "__attribute__((");
7897 for (chain
= DECL_ATTRIBUTES (fndecl
); chain
;
7898 first
= false, chain
= TREE_CHAIN (chain
))
7901 fprintf (file
, ", ");
7903 tree name
= get_attribute_name (chain
);
7904 print_generic_expr (file
, name
, dump_flags
);
7905 if (TREE_VALUE (chain
) != NULL_TREE
)
7907 fprintf (file
, " (");
7909 if (strstr (IDENTIFIER_POINTER (name
), "no_sanitize"))
7910 print_no_sanitize_attr_value (file
, TREE_VALUE (chain
));
7912 print_generic_expr (file
, TREE_VALUE (chain
), dump_flags
);
7913 fprintf (file
, ")");
7917 fprintf (file
, "))\n");
7920 current_function_decl
= fndecl
;
7921 if (flags
& TDF_GIMPLE
)
7923 print_generic_expr (file
, TREE_TYPE (TREE_TYPE (fndecl
)),
7924 dump_flags
| TDF_SLIM
);
7925 fprintf (file
, " __GIMPLE ()\n%s (", function_name (fun
));
7928 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7930 arg
= DECL_ARGUMENTS (fndecl
);
7933 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7934 fprintf (file
, " ");
7935 print_generic_expr (file
, arg
, dump_flags
);
7936 if (DECL_CHAIN (arg
))
7937 fprintf (file
, ", ");
7938 arg
= DECL_CHAIN (arg
);
7940 fprintf (file
, ")\n");
7942 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7943 if (dsf
&& (flags
& TDF_EH
))
7944 dump_eh_tree (file
, dsf
);
7946 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7948 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7949 current_function_decl
= old_current_fndecl
;
7953 /* When GIMPLE is lowered, the variables are no longer available in
7954 BIND_EXPRs, so display them separately. */
7955 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7958 ignore_topmost_bind
= true;
7960 fprintf (file
, "{\n");
7961 if (gimple_in_ssa_p (fun
)
7962 && (flags
& TDF_ALIAS
))
7964 for (arg
= DECL_ARGUMENTS (fndecl
); arg
!= NULL
;
7965 arg
= DECL_CHAIN (arg
))
7967 tree def
= ssa_default_def (fun
, arg
);
7969 dump_default_def (file
, def
, 2, flags
);
7972 tree res
= DECL_RESULT (fun
->decl
);
7973 if (res
!= NULL_TREE
7974 && DECL_BY_REFERENCE (res
))
7976 tree def
= ssa_default_def (fun
, res
);
7978 dump_default_def (file
, def
, 2, flags
);
7981 tree static_chain
= fun
->static_chain_decl
;
7982 if (static_chain
!= NULL_TREE
)
7984 tree def
= ssa_default_def (fun
, static_chain
);
7986 dump_default_def (file
, def
, 2, flags
);
7990 if (!vec_safe_is_empty (fun
->local_decls
))
7991 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7993 print_generic_decl (file
, var
, flags
);
7994 fprintf (file
, "\n");
8001 if (gimple_in_ssa_p (cfun
))
8002 FOR_EACH_SSA_NAME (ix
, name
, cfun
)
8004 if (!SSA_NAME_VAR (name
))
8006 fprintf (file
, " ");
8007 print_generic_expr (file
, TREE_TYPE (name
), flags
);
8008 fprintf (file
, " ");
8009 print_generic_expr (file
, name
, flags
);
8010 fprintf (file
, ";\n");
8017 if (fun
&& fun
->decl
== fndecl
8019 && basic_block_info_for_fn (fun
))
8021 /* If the CFG has been built, emit a CFG-based dump. */
8022 if (!ignore_topmost_bind
)
8023 fprintf (file
, "{\n");
8025 if (any_var
&& n_basic_blocks_for_fn (fun
))
8026 fprintf (file
, "\n");
8028 FOR_EACH_BB_FN (bb
, fun
)
8029 dump_bb (file
, bb
, 2, flags
);
8031 fprintf (file
, "}\n");
8033 else if (fun
->curr_properties
& PROP_gimple_any
)
8035 /* The function is now in GIMPLE form but the CFG has not been
8036 built yet. Emit the single sequence of GIMPLE statements
8037 that make up its body. */
8038 gimple_seq body
= gimple_body (fndecl
);
8040 if (gimple_seq_first_stmt (body
)
8041 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
8042 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
8043 print_gimple_seq (file
, body
, 0, flags
);
8046 if (!ignore_topmost_bind
)
8047 fprintf (file
, "{\n");
8050 fprintf (file
, "\n");
8052 print_gimple_seq (file
, body
, 2, flags
);
8053 fprintf (file
, "}\n");
8060 /* Make a tree based dump. */
8061 chain
= DECL_SAVED_TREE (fndecl
);
8062 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
8064 if (ignore_topmost_bind
)
8066 chain
= BIND_EXPR_BODY (chain
);
8074 if (!ignore_topmost_bind
)
8076 fprintf (file
, "{\n");
8077 /* No topmost bind, pretend it's ignored for later. */
8078 ignore_topmost_bind
= true;
8084 fprintf (file
, "\n");
8086 print_generic_stmt_indented (file
, chain
, flags
, indent
);
8087 if (ignore_topmost_bind
)
8088 fprintf (file
, "}\n");
8091 if (flags
& TDF_ENUMERATE_LOCALS
)
8092 dump_enumerated_decls (file
, flags
);
8093 fprintf (file
, "\n\n");
8095 current_function_decl
= old_current_fndecl
;
8098 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8101 debug_function (tree fn
, dump_flags_t flags
)
8103 dump_function_to_file (fn
, stderr
, flags
);
8107 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8110 print_pred_bbs (FILE *file
, basic_block bb
)
8115 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
8116 fprintf (file
, "bb_%d ", e
->src
->index
);
8120 /* Print on FILE the indexes for the successors of basic_block BB. */
8123 print_succ_bbs (FILE *file
, basic_block bb
)
8128 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8129 fprintf (file
, "bb_%d ", e
->dest
->index
);
8132 /* Print to FILE the basic block BB following the VERBOSITY level. */
8135 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
8137 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
8138 memset ((void *) s_indent
, ' ', (size_t) indent
);
8139 s_indent
[indent
] = '\0';
8141 /* Print basic_block's header. */
8144 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
8145 print_pred_bbs (file
, bb
);
8146 fprintf (file
, "}, succs = {");
8147 print_succ_bbs (file
, bb
);
8148 fprintf (file
, "})\n");
8151 /* Print basic_block's body. */
8154 fprintf (file
, "%s {\n", s_indent
);
8155 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
8156 fprintf (file
, "%s }\n", s_indent
);
8160 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
8162 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8163 VERBOSITY level this outputs the contents of the loop, or just its
8167 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
8175 s_indent
= (char *) alloca ((size_t) indent
+ 1);
8176 memset ((void *) s_indent
, ' ', (size_t) indent
);
8177 s_indent
[indent
] = '\0';
8179 /* Print loop's header. */
8180 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
8182 fprintf (file
, "header = %d", loop
->header
->index
);
8185 fprintf (file
, "deleted)\n");
8189 fprintf (file
, ", latch = %d", loop
->latch
->index
);
8191 fprintf (file
, ", multiple latches");
8192 fprintf (file
, ", niter = ");
8193 print_generic_expr (file
, loop
->nb_iterations
);
8195 if (loop
->any_upper_bound
)
8197 fprintf (file
, ", upper_bound = ");
8198 print_decu (loop
->nb_iterations_upper_bound
, file
);
8200 if (loop
->any_likely_upper_bound
)
8202 fprintf (file
, ", likely_upper_bound = ");
8203 print_decu (loop
->nb_iterations_likely_upper_bound
, file
);
8206 if (loop
->any_estimate
)
8208 fprintf (file
, ", estimate = ");
8209 print_decu (loop
->nb_iterations_estimate
, file
);
8212 fprintf (file
, ", unroll = %d", loop
->unroll
);
8213 fprintf (file
, ")\n");
8215 /* Print loop's body. */
8218 fprintf (file
, "%s{\n", s_indent
);
8219 FOR_EACH_BB_FN (bb
, cfun
)
8220 if (bb
->loop_father
== loop
)
8221 print_loops_bb (file
, bb
, indent
, verbosity
);
8223 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
8224 fprintf (file
, "%s}\n", s_indent
);
8228 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8229 spaces. Following VERBOSITY level this outputs the contents of the
8230 loop, or just its structure. */
8233 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
8239 print_loop (file
, loop
, indent
, verbosity
);
8240 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
8243 /* Follow a CFG edge from the entry point of the program, and on entry
8244 of a loop, pretty print the loop structure on FILE. */
8247 print_loops (FILE *file
, int verbosity
)
8251 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
8252 fprintf (file
, "\nLoops in function: %s\n", current_function_name ());
8253 if (bb
&& bb
->loop_father
)
8254 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
8260 debug (struct loop
&ref
)
8262 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
8266 debug (struct loop
*ptr
)
8271 fprintf (stderr
, "<nil>\n");
8274 /* Dump a loop verbosely. */
8277 debug_verbose (struct loop
&ref
)
8279 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
8283 debug_verbose (struct loop
*ptr
)
8288 fprintf (stderr
, "<nil>\n");
8292 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8295 debug_loops (int verbosity
)
8297 print_loops (stderr
, verbosity
);
8300 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8303 debug_loop (struct loop
*loop
, int verbosity
)
8305 print_loop (stderr
, loop
, 0, verbosity
);
8308 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8312 debug_loop_num (unsigned num
, int verbosity
)
8314 debug_loop (get_loop (cfun
, num
), verbosity
);
8317 /* Return true if BB ends with a call, possibly followed by some
8318 instructions that must stay with the call. Return false,
8322 gimple_block_ends_with_call_p (basic_block bb
)
8324 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8325 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
8329 /* Return true if BB ends with a conditional branch. Return false,
8333 gimple_block_ends_with_condjump_p (const_basic_block bb
)
8335 gimple
*stmt
= last_stmt (CONST_CAST_BB (bb
));
8336 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
8340 /* Return true if statement T may terminate execution of BB in ways not
8341 explicitly represtented in the CFG. */
8344 stmt_can_terminate_bb_p (gimple
*t
)
8346 tree fndecl
= NULL_TREE
;
8349 /* Eh exception not handled internally terminates execution of the whole
8351 if (stmt_can_throw_external (cfun
, t
))
8354 /* NORETURN and LONGJMP calls already have an edge to exit.
8355 CONST and PURE calls do not need one.
8356 We don't currently check for CONST and PURE here, although
8357 it would be a good idea, because those attributes are
8358 figured out from the RTL in mark_constant_function, and
8359 the counter incrementation code from -fprofile-arcs
8360 leads to different results from -fbranch-probabilities. */
8361 if (is_gimple_call (t
))
8363 fndecl
= gimple_call_fndecl (t
);
8364 call_flags
= gimple_call_flags (t
);
8367 if (is_gimple_call (t
)
8369 && fndecl_built_in_p (fndecl
)
8370 && (call_flags
& ECF_NOTHROW
)
8371 && !(call_flags
& ECF_RETURNS_TWICE
)
8372 /* fork() doesn't really return twice, but the effect of
8373 wrapping it in __gcov_fork() which calls __gcov_flush()
8374 and clears the counters before forking has the same
8375 effect as returning twice. Force a fake edge. */
8376 && !fndecl_built_in_p (fndecl
, BUILT_IN_FORK
))
8379 if (is_gimple_call (t
))
8385 if (call_flags
& (ECF_PURE
| ECF_CONST
)
8386 && !(call_flags
& ECF_LOOPING_CONST_OR_PURE
))
8389 /* Function call may do longjmp, terminate program or do other things.
8390 Special case noreturn that have non-abnormal edges out as in this case
8391 the fact is sufficiently represented by lack of edges out of T. */
8392 if (!(call_flags
& ECF_NORETURN
))
8396 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8397 if ((e
->flags
& EDGE_FAKE
) == 0)
8401 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
8402 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
8409 /* Add fake edges to the function exit for any non constant and non
8410 noreturn calls (or noreturn calls with EH/abnormal edges),
8411 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8412 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8415 The goal is to expose cases in which entering a basic block does
8416 not imply that all subsequent instructions must be executed. */
8419 gimple_flow_call_edges_add (sbitmap blocks
)
8422 int blocks_split
= 0;
8423 int last_bb
= last_basic_block_for_fn (cfun
);
8424 bool check_last_block
= false;
8426 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8430 check_last_block
= true;
8432 check_last_block
= bitmap_bit_p (blocks
,
8433 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8435 /* In the last basic block, before epilogue generation, there will be
8436 a fallthru edge to EXIT. Special care is required if the last insn
8437 of the last basic block is a call because make_edge folds duplicate
8438 edges, which would result in the fallthru edge also being marked
8439 fake, which would result in the fallthru edge being removed by
8440 remove_fake_edges, which would result in an invalid CFG.
8442 Moreover, we can't elide the outgoing fake edge, since the block
8443 profiler needs to take this into account in order to solve the minimal
8444 spanning tree in the case that the call doesn't return.
8446 Handle this by adding a dummy instruction in a new last basic block. */
8447 if (check_last_block
)
8449 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8450 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8453 if (!gsi_end_p (gsi
))
8456 if (t
&& stmt_can_terminate_bb_p (t
))
8460 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8463 gsi_insert_on_edge (e
, gimple_build_nop ());
8464 gsi_commit_edge_inserts ();
8469 /* Now add fake edges to the function exit for any non constant
8470 calls since there is no way that we can determine if they will
8472 for (i
= 0; i
< last_bb
; i
++)
8474 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8475 gimple_stmt_iterator gsi
;
8476 gimple
*stmt
, *last_stmt
;
8481 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8484 gsi
= gsi_last_nondebug_bb (bb
);
8485 if (!gsi_end_p (gsi
))
8487 last_stmt
= gsi_stmt (gsi
);
8490 stmt
= gsi_stmt (gsi
);
8491 if (stmt_can_terminate_bb_p (stmt
))
8495 /* The handling above of the final block before the
8496 epilogue should be enough to verify that there is
8497 no edge to the exit block in CFG already.
8498 Calling make_edge in such case would cause us to
8499 mark that edge as fake and remove it later. */
8500 if (flag_checking
&& stmt
== last_stmt
)
8502 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8503 gcc_assert (e
== NULL
);
8506 /* Note that the following may create a new basic block
8507 and renumber the existing basic blocks. */
8508 if (stmt
!= last_stmt
)
8510 e
= split_block (bb
, stmt
);
8514 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8515 e
->probability
= profile_probability::guessed_never ();
8519 while (!gsi_end_p (gsi
));
8524 checking_verify_flow_info ();
8526 return blocks_split
;
8529 /* Removes edge E and all the blocks dominated by it, and updates dominance
8530 information. The IL in E->src needs to be updated separately.
8531 If dominance info is not available, only the edge E is removed.*/
8534 remove_edge_and_dominated_blocks (edge e
)
8536 vec
<basic_block
> bbs_to_remove
= vNULL
;
8537 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
8540 bool none_removed
= false;
8542 basic_block bb
, dbb
;
8545 /* If we are removing a path inside a non-root loop that may change
8546 loop ownership of blocks or remove loops. Mark loops for fixup. */
8548 && loop_outer (e
->src
->loop_father
) != NULL
8549 && e
->src
->loop_father
== e
->dest
->loop_father
)
8550 loops_state_set (LOOPS_NEED_FIXUP
);
8552 if (!dom_info_available_p (CDI_DOMINATORS
))
8558 /* No updating is needed for edges to exit. */
8559 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8561 if (cfgcleanup_altered_bbs
)
8562 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8567 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8568 that is not dominated by E->dest, then this set is empty. Otherwise,
8569 all the basic blocks dominated by E->dest are removed.
8571 Also, to DF_IDOM we store the immediate dominators of the blocks in
8572 the dominance frontier of E (i.e., of the successors of the
8573 removed blocks, if there are any, and of E->dest otherwise). */
8574 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
8579 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
8581 none_removed
= true;
8586 auto_bitmap df
, df_idom
;
8588 bitmap_set_bit (df_idom
,
8589 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
8592 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
8593 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8595 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
8597 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
8598 bitmap_set_bit (df
, f
->dest
->index
);
8601 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8602 bitmap_clear_bit (df
, bb
->index
);
8604 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
8606 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8607 bitmap_set_bit (df_idom
,
8608 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
8612 if (cfgcleanup_altered_bbs
)
8614 /* Record the set of the altered basic blocks. */
8615 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8616 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
8619 /* Remove E and the cancelled blocks. */
8624 /* Walk backwards so as to get a chance to substitute all
8625 released DEFs into debug stmts. See
8626 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8628 for (i
= bbs_to_remove
.length (); i
-- > 0; )
8629 delete_basic_block (bbs_to_remove
[i
]);
8632 /* Update the dominance information. The immediate dominator may change only
8633 for blocks whose immediate dominator belongs to DF_IDOM:
8635 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8636 removal. Let Z the arbitrary block such that idom(Z) = Y and
8637 Z dominates X after the removal. Before removal, there exists a path P
8638 from Y to X that avoids Z. Let F be the last edge on P that is
8639 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8640 dominates W, and because of P, Z does not dominate W), and W belongs to
8641 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8642 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
8644 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8645 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
8647 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
8648 bbs_to_fix_dom
.safe_push (dbb
);
8651 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8653 bbs_to_remove
.release ();
8654 bbs_to_fix_dom
.release ();
8657 /* Purge dead EH edges from basic block BB. */
8660 gimple_purge_dead_eh_edges (basic_block bb
)
8662 bool changed
= false;
8665 gimple
*stmt
= last_stmt (bb
);
8667 if (stmt
&& stmt_can_throw_internal (cfun
, stmt
))
8670 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8672 if (e
->flags
& EDGE_EH
)
8674 remove_edge_and_dominated_blocks (e
);
8684 /* Purge dead EH edges from basic block listed in BLOCKS. */
8687 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8689 bool changed
= false;
8693 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8695 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8697 /* Earlier gimple_purge_dead_eh_edges could have removed
8698 this basic block already. */
8699 gcc_assert (bb
|| changed
);
8701 changed
|= gimple_purge_dead_eh_edges (bb
);
8707 /* Purge dead abnormal call edges from basic block BB. */
8710 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8712 bool changed
= false;
8715 gimple
*stmt
= last_stmt (bb
);
8717 if (!cfun
->has_nonlocal_label
8718 && !cfun
->calls_setjmp
)
8721 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8724 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8726 if (e
->flags
& EDGE_ABNORMAL
)
8728 if (e
->flags
& EDGE_FALLTHRU
)
8729 e
->flags
&= ~EDGE_ABNORMAL
;
8731 remove_edge_and_dominated_blocks (e
);
8741 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8744 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8746 bool changed
= false;
8750 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8752 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8754 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8755 this basic block already. */
8756 gcc_assert (bb
|| changed
);
8758 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8764 /* This function is called whenever a new edge is created or
8768 gimple_execute_on_growing_pred (edge e
)
8770 basic_block bb
= e
->dest
;
8772 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8773 reserve_phi_args_for_new_edge (bb
);
8776 /* This function is called immediately before edge E is removed from
8777 the edge vector E->dest->preds. */
8780 gimple_execute_on_shrinking_pred (edge e
)
8782 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8783 remove_phi_args (e
);
8786 /*---------------------------------------------------------------------------
8787 Helper functions for Loop versioning
8788 ---------------------------------------------------------------------------*/
8790 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8791 of 'first'. Both of them are dominated by 'new_head' basic block. When
8792 'new_head' was created by 'second's incoming edge it received phi arguments
8793 on the edge by split_edge(). Later, additional edge 'e' was created to
8794 connect 'new_head' and 'first'. Now this routine adds phi args on this
8795 additional edge 'e' that new_head to second edge received as part of edge
8799 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8800 basic_block new_head
, edge e
)
8803 gphi_iterator psi1
, psi2
;
8805 edge e2
= find_edge (new_head
, second
);
8807 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8808 edge, we should always have an edge from NEW_HEAD to SECOND. */
8809 gcc_assert (e2
!= NULL
);
8811 /* Browse all 'second' basic block phi nodes and add phi args to
8812 edge 'e' for 'first' head. PHI args are always in correct order. */
8814 for (psi2
= gsi_start_phis (second
),
8815 psi1
= gsi_start_phis (first
);
8816 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8817 gsi_next (&psi2
), gsi_next (&psi1
))
8821 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8822 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8827 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8828 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8829 the destination of the ELSE part. */
8832 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8833 basic_block second_head ATTRIBUTE_UNUSED
,
8834 basic_block cond_bb
, void *cond_e
)
8836 gimple_stmt_iterator gsi
;
8837 gimple
*new_cond_expr
;
8838 tree cond_expr
= (tree
) cond_e
;
8841 /* Build new conditional expr */
8842 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8843 NULL_TREE
, NULL_TREE
);
8845 /* Add new cond in cond_bb. */
8846 gsi
= gsi_last_bb (cond_bb
);
8847 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8849 /* Adjust edges appropriately to connect new head with first head
8850 as well as second head. */
8851 e0
= single_succ_edge (cond_bb
);
8852 e0
->flags
&= ~EDGE_FALLTHRU
;
8853 e0
->flags
|= EDGE_FALSE_VALUE
;
8857 /* Do book-keeping of basic block BB for the profile consistency checker.
8858 Store the counting in RECORD. */
8860 gimple_account_profile_record (basic_block bb
,
8861 struct profile_record
*record
)
8863 gimple_stmt_iterator i
;
8864 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8867 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8868 if (bb
->count
.initialized_p ())
8870 += estimate_num_insns (gsi_stmt (i
),
8871 &eni_time_weights
) * bb
->count
.to_gcov_type ();
8872 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8874 += estimate_num_insns (gsi_stmt (i
),
8875 &eni_time_weights
) * bb
->count
.to_frequency (cfun
);
8879 struct cfg_hooks gimple_cfg_hooks
= {
8881 gimple_verify_flow_info
,
8882 gimple_dump_bb
, /* dump_bb */
8883 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8884 create_bb
, /* create_basic_block */
8885 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8886 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8887 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8888 remove_bb
, /* delete_basic_block */
8889 gimple_split_block
, /* split_block */
8890 gimple_move_block_after
, /* move_block_after */
8891 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8892 gimple_merge_blocks
, /* merge_blocks */
8893 gimple_predict_edge
, /* predict_edge */
8894 gimple_predicted_by_p
, /* predicted_by_p */
8895 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8896 gimple_duplicate_bb
, /* duplicate_block */
8897 gimple_split_edge
, /* split_edge */
8898 gimple_make_forwarder_block
, /* make_forward_block */
8899 NULL
, /* tidy_fallthru_edge */
8900 NULL
, /* force_nonfallthru */
8901 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8902 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8903 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8904 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8905 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8906 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8907 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8908 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8909 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8910 flush_pending_stmts
, /* flush_pending_stmts */
8911 gimple_empty_block_p
, /* block_empty_p */
8912 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8913 gimple_account_profile_record
,
8917 /* Split all critical edges. */
8920 split_critical_edges (void)
8926 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8927 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8928 mappings around the calls to split_edge. */
8929 start_recording_case_labels ();
8930 FOR_ALL_BB_FN (bb
, cfun
)
8932 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8934 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8936 /* PRE inserts statements to edges and expects that
8937 since split_critical_edges was done beforehand, committing edge
8938 insertions will not split more edges. In addition to critical
8939 edges we must split edges that have multiple successors and
8940 end by control flow statements, such as RESX.
8941 Go ahead and split them too. This matches the logic in
8942 gimple_find_edge_insert_loc. */
8943 else if ((!single_pred_p (e
->dest
)
8944 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8945 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8946 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8947 && !(e
->flags
& EDGE_ABNORMAL
))
8949 gimple_stmt_iterator gsi
;
8951 gsi
= gsi_last_bb (e
->src
);
8952 if (!gsi_end_p (gsi
)
8953 && stmt_ends_bb_p (gsi_stmt (gsi
))
8954 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8955 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8961 end_recording_case_labels ();
8967 const pass_data pass_data_split_crit_edges
=
8969 GIMPLE_PASS
, /* type */
8970 "crited", /* name */
8971 OPTGROUP_NONE
, /* optinfo_flags */
8972 TV_TREE_SPLIT_EDGES
, /* tv_id */
8973 PROP_cfg
, /* properties_required */
8974 PROP_no_crit_edges
, /* properties_provided */
8975 0, /* properties_destroyed */
8976 0, /* todo_flags_start */
8977 0, /* todo_flags_finish */
8980 class pass_split_crit_edges
: public gimple_opt_pass
8983 pass_split_crit_edges (gcc::context
*ctxt
)
8984 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8987 /* opt_pass methods: */
8988 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8990 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8991 }; // class pass_split_crit_edges
8996 make_pass_split_crit_edges (gcc::context
*ctxt
)
8998 return new pass_split_crit_edges (ctxt
);
9002 /* Insert COND expression which is GIMPLE_COND after STMT
9003 in basic block BB with appropriate basic block split
9004 and creation of a new conditionally executed basic block.
9005 Update profile so the new bb is visited with probability PROB.
9006 Return created basic block. */
9008 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
,
9009 profile_probability prob
)
9011 edge fall
= split_block (bb
, stmt
);
9012 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
9015 /* Insert cond statement. */
9016 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
9017 if (gsi_end_p (iter
))
9018 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
9020 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
9022 /* Create conditionally executed block. */
9023 new_bb
= create_empty_bb (bb
);
9024 edge e
= make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
9025 e
->probability
= prob
;
9026 new_bb
->count
= e
->count ();
9027 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
9029 /* Fix edge for split bb. */
9030 fall
->flags
= EDGE_FALSE_VALUE
;
9031 fall
->probability
-= e
->probability
;
9033 /* Update dominance info. */
9034 if (dom_info_available_p (CDI_DOMINATORS
))
9036 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
9037 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
9040 /* Update loop info. */
9042 add_bb_to_loop (new_bb
, bb
->loop_father
);
9047 /* Build a ternary operation and gimplify it. Emit code before GSI.
9048 Return the gimple_val holding the result. */
9051 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
9052 tree type
, tree a
, tree b
, tree c
)
9055 location_t loc
= gimple_location (gsi_stmt (*gsi
));
9057 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
9058 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
9062 /* Build a binary operation and gimplify it. Emit code before GSI.
9063 Return the gimple_val holding the result. */
9066 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
9067 tree type
, tree a
, tree b
)
9071 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
9072 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
9076 /* Build a unary operation and gimplify it. Emit code before GSI.
9077 Return the gimple_val holding the result. */
9080 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
9085 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
9086 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
9092 /* Given a basic block B which ends with a conditional and has
9093 precisely two successors, determine which of the edges is taken if
9094 the conditional is true and which is taken if the conditional is
9095 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9098 extract_true_false_edges_from_block (basic_block b
,
9102 edge e
= EDGE_SUCC (b
, 0);
9104 if (e
->flags
& EDGE_TRUE_VALUE
)
9107 *false_edge
= EDGE_SUCC (b
, 1);
9112 *true_edge
= EDGE_SUCC (b
, 1);
9117 /* From a controlling predicate in the immediate dominator DOM of
9118 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9119 predicate evaluates to true and false and store them to
9120 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9121 they are non-NULL. Returns true if the edges can be determined,
9122 else return false. */
9125 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
9126 edge
*true_controlled_edge
,
9127 edge
*false_controlled_edge
)
9129 basic_block bb
= phiblock
;
9130 edge true_edge
, false_edge
, tem
;
9131 edge e0
= NULL
, e1
= NULL
;
9133 /* We have to verify that one edge into the PHI node is dominated
9134 by the true edge of the predicate block and the other edge
9135 dominated by the false edge. This ensures that the PHI argument
9136 we are going to take is completely determined by the path we
9137 take from the predicate block.
9138 We can only use BB dominance checks below if the destination of
9139 the true/false edges are dominated by their edge, thus only
9140 have a single predecessor. */
9141 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
9142 tem
= EDGE_PRED (bb
, 0);
9143 if (tem
== true_edge
9144 || (single_pred_p (true_edge
->dest
)
9145 && (tem
->src
== true_edge
->dest
9146 || dominated_by_p (CDI_DOMINATORS
,
9147 tem
->src
, true_edge
->dest
))))
9149 else if (tem
== false_edge
9150 || (single_pred_p (false_edge
->dest
)
9151 && (tem
->src
== false_edge
->dest
9152 || dominated_by_p (CDI_DOMINATORS
,
9153 tem
->src
, false_edge
->dest
))))
9157 tem
= EDGE_PRED (bb
, 1);
9158 if (tem
== true_edge
9159 || (single_pred_p (true_edge
->dest
)
9160 && (tem
->src
== true_edge
->dest
9161 || dominated_by_p (CDI_DOMINATORS
,
9162 tem
->src
, true_edge
->dest
))))
9164 else if (tem
== false_edge
9165 || (single_pred_p (false_edge
->dest
)
9166 && (tem
->src
== false_edge
->dest
9167 || dominated_by_p (CDI_DOMINATORS
,
9168 tem
->src
, false_edge
->dest
))))
9175 if (true_controlled_edge
)
9176 *true_controlled_edge
= e0
;
9177 if (false_controlled_edge
)
9178 *false_controlled_edge
= e1
;
9183 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9184 range [low, high]. Place associated stmts before *GSI. */
9187 generate_range_test (basic_block bb
, tree index
, tree low
, tree high
,
9188 tree
*lhs
, tree
*rhs
)
9190 tree type
= TREE_TYPE (index
);
9191 tree utype
= unsigned_type_for (type
);
9193 low
= fold_convert (utype
, low
);
9194 high
= fold_convert (utype
, high
);
9196 gimple_seq seq
= NULL
;
9197 index
= gimple_convert (&seq
, utype
, index
);
9198 *lhs
= gimple_build (&seq
, MINUS_EXPR
, utype
, index
, low
);
9199 *rhs
= const_binop (MINUS_EXPR
, utype
, high
, low
);
9201 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9202 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
9205 /* Return the basic block that belongs to label numbered INDEX
9206 of a switch statement. */
9209 gimple_switch_label_bb (function
*ifun
, gswitch
*gs
, unsigned index
)
9211 return label_to_block (ifun
, CASE_LABEL (gimple_switch_label (gs
, index
)));
9214 /* Return the default basic block of a switch statement. */
9217 gimple_switch_default_bb (function
*ifun
, gswitch
*gs
)
9219 return gimple_switch_label_bb (ifun
, gs
, 0);
9222 /* Return the edge that belongs to label numbered INDEX
9223 of a switch statement. */
9226 gimple_switch_edge (function
*ifun
, gswitch
*gs
, unsigned index
)
9228 return find_edge (gimple_bb (gs
), gimple_switch_label_bb (ifun
, gs
, index
));
9231 /* Return the default edge of a switch statement. */
9234 gimple_switch_default_edge (function
*ifun
, gswitch
*gs
)
9236 return gimple_switch_edge (ifun
, gs
, 0);
9240 /* Emit return warnings. */
9244 const pass_data pass_data_warn_function_return
=
9246 GIMPLE_PASS
, /* type */
9247 "*warn_function_return", /* name */
9248 OPTGROUP_NONE
, /* optinfo_flags */
9249 TV_NONE
, /* tv_id */
9250 PROP_cfg
, /* properties_required */
9251 0, /* properties_provided */
9252 0, /* properties_destroyed */
9253 0, /* todo_flags_start */
9254 0, /* todo_flags_finish */
9257 class pass_warn_function_return
: public gimple_opt_pass
9260 pass_warn_function_return (gcc::context
*ctxt
)
9261 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
9264 /* opt_pass methods: */
9265 virtual unsigned int execute (function
*);
9267 }; // class pass_warn_function_return
9270 pass_warn_function_return::execute (function
*fun
)
9272 location_t location
;
9277 if (!targetm
.warn_func_return (fun
->decl
))
9280 /* If we have a path to EXIT, then we do return. */
9281 if (TREE_THIS_VOLATILE (fun
->decl
)
9282 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
9284 location
= UNKNOWN_LOCATION
;
9285 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
);
9286 (e
= ei_safe_edge (ei
)); )
9288 last
= last_stmt (e
->src
);
9289 if ((gimple_code (last
) == GIMPLE_RETURN
9290 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
9291 && location
== UNKNOWN_LOCATION
9292 && ((location
= LOCATION_LOCUS (gimple_location (last
)))
9293 != UNKNOWN_LOCATION
)
9296 /* When optimizing, replace return stmts in noreturn functions
9297 with __builtin_unreachable () call. */
9298 if (optimize
&& gimple_code (last
) == GIMPLE_RETURN
)
9300 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9301 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
9302 gimple_set_location (new_stmt
, gimple_location (last
));
9303 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9304 gsi_replace (&gsi
, new_stmt
, true);
9310 if (location
== UNKNOWN_LOCATION
)
9311 location
= cfun
->function_end_locus
;
9312 warning_at (location
, 0, "%<noreturn%> function does return");
9315 /* If we see "return;" in some basic block, then we do reach the end
9316 without returning a value. */
9317 else if (warn_return_type
> 0
9318 && !TREE_NO_WARNING (fun
->decl
)
9319 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
9321 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
9323 gimple
*last
= last_stmt (e
->src
);
9324 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
9326 && gimple_return_retval (return_stmt
) == NULL
9327 && !gimple_no_warning_p (last
))
9329 location
= gimple_location (last
);
9330 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9331 location
= fun
->function_end_locus
;
9332 if (warning_at (location
, OPT_Wreturn_type
,
9333 "control reaches end of non-void function"))
9334 TREE_NO_WARNING (fun
->decl
) = 1;
9338 /* The C++ FE turns fallthrough from the end of non-void function
9339 into __builtin_unreachable () call with BUILTINS_LOCATION.
9340 Recognize those too. */
9342 if (!TREE_NO_WARNING (fun
->decl
))
9343 FOR_EACH_BB_FN (bb
, fun
)
9344 if (EDGE_COUNT (bb
->succs
) == 0)
9346 gimple
*last
= last_stmt (bb
);
9347 const enum built_in_function ubsan_missing_ret
9348 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN
;
9350 && ((LOCATION_LOCUS (gimple_location (last
))
9351 == BUILTINS_LOCATION
9352 && gimple_call_builtin_p (last
, BUILT_IN_UNREACHABLE
))
9353 || gimple_call_builtin_p (last
, ubsan_missing_ret
)))
9355 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9356 gsi_prev_nondebug (&gsi
);
9357 gimple
*prev
= gsi_stmt (gsi
);
9359 location
= UNKNOWN_LOCATION
;
9361 location
= gimple_location (prev
);
9362 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9363 location
= fun
->function_end_locus
;
9364 if (warning_at (location
, OPT_Wreturn_type
,
9365 "control reaches end of non-void function"))
9366 TREE_NO_WARNING (fun
->decl
) = 1;
9377 make_pass_warn_function_return (gcc::context
*ctxt
)
9379 return new pass_warn_function_return (ctxt
);
9382 /* Walk a gimplified function and warn for functions whose return value is
9383 ignored and attribute((warn_unused_result)) is set. This is done before
9384 inlining, so we don't have to worry about that. */
9387 do_warn_unused_result (gimple_seq seq
)
9390 gimple_stmt_iterator i
;
9392 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
9394 gimple
*g
= gsi_stmt (i
);
9396 switch (gimple_code (g
))
9399 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
9402 do_warn_unused_result (gimple_try_eval (g
));
9403 do_warn_unused_result (gimple_try_cleanup (g
));
9406 do_warn_unused_result (gimple_catch_handler (
9407 as_a
<gcatch
*> (g
)));
9409 case GIMPLE_EH_FILTER
:
9410 do_warn_unused_result (gimple_eh_filter_failure (g
));
9414 if (gimple_call_lhs (g
))
9416 if (gimple_call_internal_p (g
))
9419 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9420 LHS. All calls whose value is ignored should be
9421 represented like this. Look for the attribute. */
9422 fdecl
= gimple_call_fndecl (g
);
9423 ftype
= gimple_call_fntype (g
);
9425 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
9427 location_t loc
= gimple_location (g
);
9430 warning_at (loc
, OPT_Wunused_result
,
9431 "ignoring return value of %qD, "
9432 "declared with attribute warn_unused_result",
9435 warning_at (loc
, OPT_Wunused_result
,
9436 "ignoring return value of function "
9437 "declared with attribute warn_unused_result");
9442 /* Not a container, not a call, or a call whose value is used. */
9450 const pass_data pass_data_warn_unused_result
=
9452 GIMPLE_PASS
, /* type */
9453 "*warn_unused_result", /* name */
9454 OPTGROUP_NONE
, /* optinfo_flags */
9455 TV_NONE
, /* tv_id */
9456 PROP_gimple_any
, /* properties_required */
9457 0, /* properties_provided */
9458 0, /* properties_destroyed */
9459 0, /* todo_flags_start */
9460 0, /* todo_flags_finish */
9463 class pass_warn_unused_result
: public gimple_opt_pass
9466 pass_warn_unused_result (gcc::context
*ctxt
)
9467 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
9470 /* opt_pass methods: */
9471 virtual bool gate (function
*) { return flag_warn_unused_result
; }
9472 virtual unsigned int execute (function
*)
9474 do_warn_unused_result (gimple_body (current_function_decl
));
9478 }; // class pass_warn_unused_result
9483 make_pass_warn_unused_result (gcc::context
*ctxt
)
9485 return new pass_warn_unused_result (ctxt
);
9488 /* IPA passes, compilation of earlier functions or inlining
9489 might have changed some properties, such as marked functions nothrow,
9490 pure, const or noreturn.
9491 Remove redundant edges and basic blocks, and create new ones if necessary.
9493 This pass can't be executed as stand alone pass from pass manager, because
9494 in between inlining and this fixup the verify_flow_info would fail. */
9497 execute_fixup_cfg (void)
9500 gimple_stmt_iterator gsi
;
9502 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
9503 profile_count num
= node
->count
;
9504 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
9505 bool scale
= num
.initialized_p () && !(num
== den
);
9509 profile_count::adjust_for_ipa_scaling (&num
, &den
);
9510 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= node
->count
;
9511 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
9512 = EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.apply_scale (num
, den
);
9515 FOR_EACH_BB_FN (bb
, cfun
)
9518 bb
->count
= bb
->count
.apply_scale (num
, den
);
9519 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
9521 gimple
*stmt
= gsi_stmt (gsi
);
9522 tree decl
= is_gimple_call (stmt
)
9523 ? gimple_call_fndecl (stmt
)
9527 int flags
= gimple_call_flags (stmt
);
9528 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
9530 if (gimple_purge_dead_abnormal_call_edges (bb
))
9531 todo
|= TODO_cleanup_cfg
;
9533 if (gimple_in_ssa_p (cfun
))
9535 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9540 if (flags
& ECF_NORETURN
9541 && fixup_noreturn_call (stmt
))
9542 todo
|= TODO_cleanup_cfg
;
9545 /* Remove stores to variables we marked write-only.
9546 Keep access when store has side effect, i.e. in case when source
9548 if (gimple_store_p (stmt
)
9549 && !gimple_has_side_effects (stmt
))
9551 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9554 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9555 && varpool_node::get (lhs
)->writeonly
)
9557 unlink_stmt_vdef (stmt
);
9558 gsi_remove (&gsi
, true);
9559 release_defs (stmt
);
9560 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9564 /* For calls we can simply remove LHS when it is known
9565 to be write-only. */
9566 if (is_gimple_call (stmt
)
9567 && gimple_get_lhs (stmt
))
9569 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9572 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9573 && varpool_node::get (lhs
)->writeonly
)
9575 gimple_call_set_lhs (stmt
, NULL
);
9577 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9581 if (maybe_clean_eh_stmt (stmt
)
9582 && gimple_purge_dead_eh_edges (bb
))
9583 todo
|= TODO_cleanup_cfg
;
9587 /* If we have a basic block with no successors that does not
9588 end with a control statement or a noreturn call end it with
9589 a call to __builtin_unreachable. This situation can occur
9590 when inlining a noreturn call that does in fact return. */
9591 if (EDGE_COUNT (bb
->succs
) == 0)
9593 gimple
*stmt
= last_stmt (bb
);
9595 || (!is_ctrl_stmt (stmt
)
9596 && (!is_gimple_call (stmt
)
9597 || !gimple_call_noreturn_p (stmt
))))
9599 if (stmt
&& is_gimple_call (stmt
))
9600 gimple_call_set_ctrl_altering (stmt
, false);
9601 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9602 stmt
= gimple_build_call (fndecl
, 0);
9603 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9604 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
9605 if (!cfun
->after_inlining
)
9607 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
9608 node
->create_edge (cgraph_node::get_create (fndecl
),
9609 call_stmt
, bb
->count
);
9615 compute_function_frequency ();
9618 && (todo
& TODO_cleanup_cfg
))
9619 loops_state_set (LOOPS_NEED_FIXUP
);
9626 const pass_data pass_data_fixup_cfg
=
9628 GIMPLE_PASS
, /* type */
9629 "fixup_cfg", /* name */
9630 OPTGROUP_NONE
, /* optinfo_flags */
9631 TV_NONE
, /* tv_id */
9632 PROP_cfg
, /* properties_required */
9633 0, /* properties_provided */
9634 0, /* properties_destroyed */
9635 0, /* todo_flags_start */
9636 0, /* todo_flags_finish */
9639 class pass_fixup_cfg
: public gimple_opt_pass
9642 pass_fixup_cfg (gcc::context
*ctxt
)
9643 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
9646 /* opt_pass methods: */
9647 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
9648 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
9650 }; // class pass_fixup_cfg
9655 make_pass_fixup_cfg (gcc::context
*ctxt
)
9657 return new pass_fixup_cfg (ctxt
);
9660 /* Garbage collection support for edge_def. */
9662 extern void gt_ggc_mx (tree
&);
9663 extern void gt_ggc_mx (gimple
*&);
9664 extern void gt_ggc_mx (rtx
&);
9665 extern void gt_ggc_mx (basic_block
&);
9668 gt_ggc_mx (rtx_insn
*& x
)
9671 gt_ggc_mx_rtx_def ((void *) x
);
9675 gt_ggc_mx (edge_def
*e
)
9677 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9679 gt_ggc_mx (e
->dest
);
9680 if (current_ir_type () == IR_GIMPLE
)
9681 gt_ggc_mx (e
->insns
.g
);
9683 gt_ggc_mx (e
->insns
.r
);
9687 /* PCH support for edge_def. */
9689 extern void gt_pch_nx (tree
&);
9690 extern void gt_pch_nx (gimple
*&);
9691 extern void gt_pch_nx (rtx
&);
9692 extern void gt_pch_nx (basic_block
&);
9695 gt_pch_nx (rtx_insn
*& x
)
9698 gt_pch_nx_rtx_def ((void *) x
);
9702 gt_pch_nx (edge_def
*e
)
9704 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9706 gt_pch_nx (e
->dest
);
9707 if (current_ir_type () == IR_GIMPLE
)
9708 gt_pch_nx (e
->insns
.g
);
9710 gt_pch_nx (e
->insns
.r
);
9715 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
9717 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9718 op (&(e
->src
), cookie
);
9719 op (&(e
->dest
), cookie
);
9720 if (current_ir_type () == IR_GIMPLE
)
9721 op (&(e
->insns
.g
), cookie
);
9723 op (&(e
->insns
.r
), cookie
);
9724 op (&(block
), cookie
);
9729 namespace selftest
{
9731 /* Helper function for CFG selftests: create a dummy function decl
9732 and push it as cfun. */
9735 push_fndecl (const char *name
)
9737 tree fn_type
= build_function_type_array (integer_type_node
, 0, NULL
);
9738 /* FIXME: this uses input_location: */
9739 tree fndecl
= build_fn_decl (name
, fn_type
);
9740 tree retval
= build_decl (UNKNOWN_LOCATION
, RESULT_DECL
,
9741 NULL_TREE
, integer_type_node
);
9742 DECL_RESULT (fndecl
) = retval
;
9743 push_struct_function (fndecl
);
9744 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9745 ASSERT_TRUE (fun
!= NULL
);
9746 init_empty_tree_cfg_for_function (fun
);
9747 ASSERT_EQ (2, n_basic_blocks_for_fn (fun
));
9748 ASSERT_EQ (0, n_edges_for_fn (fun
));
9752 /* These tests directly create CFGs.
9753 Compare with the static fns within tree-cfg.c:
9755 - make_blocks: calls create_basic_block (seq, bb);
9758 /* Verify a simple cfg of the form:
9759 ENTRY -> A -> B -> C -> EXIT. */
9762 test_linear_chain ()
9764 gimple_register_cfg_hooks ();
9766 tree fndecl
= push_fndecl ("cfg_test_linear_chain");
9767 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9769 /* Create some empty blocks. */
9770 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9771 basic_block bb_b
= create_empty_bb (bb_a
);
9772 basic_block bb_c
= create_empty_bb (bb_b
);
9774 ASSERT_EQ (5, n_basic_blocks_for_fn (fun
));
9775 ASSERT_EQ (0, n_edges_for_fn (fun
));
9777 /* Create some edges: a simple linear chain of BBs. */
9778 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9779 make_edge (bb_a
, bb_b
, 0);
9780 make_edge (bb_b
, bb_c
, 0);
9781 make_edge (bb_c
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9783 /* Verify the edges. */
9784 ASSERT_EQ (4, n_edges_for_fn (fun
));
9785 ASSERT_EQ (NULL
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->preds
);
9786 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
->length ());
9787 ASSERT_EQ (1, bb_a
->preds
->length ());
9788 ASSERT_EQ (1, bb_a
->succs
->length ());
9789 ASSERT_EQ (1, bb_b
->preds
->length ());
9790 ASSERT_EQ (1, bb_b
->succs
->length ());
9791 ASSERT_EQ (1, bb_c
->preds
->length ());
9792 ASSERT_EQ (1, bb_c
->succs
->length ());
9793 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
->length ());
9794 ASSERT_EQ (NULL
, EXIT_BLOCK_PTR_FOR_FN (fun
)->succs
);
9796 /* Verify the dominance information
9797 Each BB in our simple chain should be dominated by the one before
9799 calculate_dominance_info (CDI_DOMINATORS
);
9800 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9801 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9802 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9803 ASSERT_EQ (1, dom_by_b
.length ());
9804 ASSERT_EQ (bb_c
, dom_by_b
[0]);
9805 free_dominance_info (CDI_DOMINATORS
);
9806 dom_by_b
.release ();
9808 /* Similarly for post-dominance: each BB in our chain is post-dominated
9809 by the one after it. */
9810 calculate_dominance_info (CDI_POST_DOMINATORS
);
9811 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9812 ASSERT_EQ (bb_c
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9813 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9814 ASSERT_EQ (1, postdom_by_b
.length ());
9815 ASSERT_EQ (bb_a
, postdom_by_b
[0]);
9816 free_dominance_info (CDI_POST_DOMINATORS
);
9817 postdom_by_b
.release ();
9822 /* Verify a simple CFG of the form:
9838 gimple_register_cfg_hooks ();
9840 tree fndecl
= push_fndecl ("cfg_test_diamond");
9841 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9843 /* Create some empty blocks. */
9844 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9845 basic_block bb_b
= create_empty_bb (bb_a
);
9846 basic_block bb_c
= create_empty_bb (bb_a
);
9847 basic_block bb_d
= create_empty_bb (bb_b
);
9849 ASSERT_EQ (6, n_basic_blocks_for_fn (fun
));
9850 ASSERT_EQ (0, n_edges_for_fn (fun
));
9852 /* Create the edges. */
9853 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9854 make_edge (bb_a
, bb_b
, EDGE_TRUE_VALUE
);
9855 make_edge (bb_a
, bb_c
, EDGE_FALSE_VALUE
);
9856 make_edge (bb_b
, bb_d
, 0);
9857 make_edge (bb_c
, bb_d
, 0);
9858 make_edge (bb_d
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9860 /* Verify the edges. */
9861 ASSERT_EQ (6, n_edges_for_fn (fun
));
9862 ASSERT_EQ (1, bb_a
->preds
->length ());
9863 ASSERT_EQ (2, bb_a
->succs
->length ());
9864 ASSERT_EQ (1, bb_b
->preds
->length ());
9865 ASSERT_EQ (1, bb_b
->succs
->length ());
9866 ASSERT_EQ (1, bb_c
->preds
->length ());
9867 ASSERT_EQ (1, bb_c
->succs
->length ());
9868 ASSERT_EQ (2, bb_d
->preds
->length ());
9869 ASSERT_EQ (1, bb_d
->succs
->length ());
9871 /* Verify the dominance information. */
9872 calculate_dominance_info (CDI_DOMINATORS
);
9873 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9874 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9875 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_d
));
9876 vec
<basic_block
> dom_by_a
= get_dominated_by (CDI_DOMINATORS
, bb_a
);
9877 ASSERT_EQ (3, dom_by_a
.length ()); /* B, C, D, in some order. */
9878 dom_by_a
.release ();
9879 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9880 ASSERT_EQ (0, dom_by_b
.length ());
9881 dom_by_b
.release ();
9882 free_dominance_info (CDI_DOMINATORS
);
9884 /* Similarly for post-dominance. */
9885 calculate_dominance_info (CDI_POST_DOMINATORS
);
9886 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9887 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9888 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_c
));
9889 vec
<basic_block
> postdom_by_d
= get_dominated_by (CDI_POST_DOMINATORS
, bb_d
);
9890 ASSERT_EQ (3, postdom_by_d
.length ()); /* A, B, C in some order. */
9891 postdom_by_d
.release ();
9892 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9893 ASSERT_EQ (0, postdom_by_b
.length ());
9894 postdom_by_b
.release ();
9895 free_dominance_info (CDI_POST_DOMINATORS
);
9900 /* Verify that we can handle a CFG containing a "complete" aka
9901 fully-connected subgraph (where A B C D below all have edges
9902 pointing to each other node, also to themselves).
9920 test_fully_connected ()
9922 gimple_register_cfg_hooks ();
9924 tree fndecl
= push_fndecl ("cfg_fully_connected");
9925 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9929 /* Create some empty blocks. */
9930 auto_vec
<basic_block
> subgraph_nodes
;
9931 for (int i
= 0; i
< n
; i
++)
9932 subgraph_nodes
.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
)));
9934 ASSERT_EQ (n
+ 2, n_basic_blocks_for_fn (fun
));
9935 ASSERT_EQ (0, n_edges_for_fn (fun
));
9937 /* Create the edges. */
9938 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), subgraph_nodes
[0], EDGE_FALLTHRU
);
9939 make_edge (subgraph_nodes
[0], EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9940 for (int i
= 0; i
< n
; i
++)
9941 for (int j
= 0; j
< n
; j
++)
9942 make_edge (subgraph_nodes
[i
], subgraph_nodes
[j
], 0);
9944 /* Verify the edges. */
9945 ASSERT_EQ (2 + (n
* n
), n_edges_for_fn (fun
));
9946 /* The first one is linked to ENTRY/EXIT as well as itself and
9948 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->preds
->length ());
9949 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->succs
->length ());
9950 /* The other ones in the subgraph are linked to everything in
9951 the subgraph (including themselves). */
9952 for (int i
= 1; i
< n
; i
++)
9954 ASSERT_EQ (n
, subgraph_nodes
[i
]->preds
->length ());
9955 ASSERT_EQ (n
, subgraph_nodes
[i
]->succs
->length ());
9958 /* Verify the dominance information. */
9959 calculate_dominance_info (CDI_DOMINATORS
);
9960 /* The initial block in the subgraph should be dominated by ENTRY. */
9961 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun
),
9962 get_immediate_dominator (CDI_DOMINATORS
,
9963 subgraph_nodes
[0]));
9964 /* Every other block in the subgraph should be dominated by the
9966 for (int i
= 1; i
< n
; i
++)
9967 ASSERT_EQ (subgraph_nodes
[0],
9968 get_immediate_dominator (CDI_DOMINATORS
,
9969 subgraph_nodes
[i
]));
9970 free_dominance_info (CDI_DOMINATORS
);
9972 /* Similarly for post-dominance. */
9973 calculate_dominance_info (CDI_POST_DOMINATORS
);
9974 /* The initial block in the subgraph should be postdominated by EXIT. */
9975 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun
),
9976 get_immediate_dominator (CDI_POST_DOMINATORS
,
9977 subgraph_nodes
[0]));
9978 /* Every other block in the subgraph should be postdominated by the
9979 initial block, since that leads to EXIT. */
9980 for (int i
= 1; i
< n
; i
++)
9981 ASSERT_EQ (subgraph_nodes
[0],
9982 get_immediate_dominator (CDI_POST_DOMINATORS
,
9983 subgraph_nodes
[i
]));
9984 free_dominance_info (CDI_POST_DOMINATORS
);
9989 /* Run all of the selftests within this file. */
9994 test_linear_chain ();
9996 test_fully_connected ();
9999 } // namespace selftest
10001 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10004 - switch statement (a block with many out-edges)
10005 - something that jumps to itself
10008 #endif /* CHECKING_P */