1 /* Control flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
64 /* This file contains functions for building the Control Flow Graph (CFG)
65 for a function tree. */
67 /* Local declarations. */
69 /* Initial capacity for the basic block array. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
72 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
73 which use a particular edge. The CASE_LABEL_EXPRs are chained together
74 via their CASE_CHAIN field, which we clear after we're done with the
75 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
77 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
78 update the case vector in response to edge redirections.
80 Right now this table is set up and torn down at key points in the
81 compilation process. It would be nice if we could make the table
82 more persistent. The key is getting notification of changes to
83 the CFG (particularly edge removal, creation and redirection). */
85 static hash_map
<edge
, tree
> *edge_to_cases
;
87 /* If we record edge_to_cases, this bitmap will hold indexes
88 of basic blocks that end in a GIMPLE_SWITCH which we touched
89 due to edge manipulations. */
91 static bitmap touched_switch_bbs
;
/* CFG statistics gathered while building/cleaning the flowgraph.  */
struct cfg_stats_d
{
  /* Number of labels merged when coalescing adjacent basic blocks.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
101 /* Data to pass to replace_block_vars_by_duplicates_1. */
102 struct replace_decls_d
104 hash_map
<tree
, tree
> *vars_map
;
108 /* Hash table to store last discriminator assigned for each locus. */
109 struct locus_discrim_map
115 /* Hashtable helpers. */
117 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
119 static inline hashval_t
hash (const locus_discrim_map
*);
120 static inline bool equal (const locus_discrim_map
*,
121 const locus_discrim_map
*);
124 /* Trivial hash function for a location_t. ITEM is a pointer to
125 a hash table entry that maps a location_t to a discriminator. */
128 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
130 return LOCATION_LINE (item
->locus
);
133 /* Equality function for the locus-to-discriminator map. A and B
134 point to the two hash table entries to compare. */
137 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
138 const locus_discrim_map
*b
)
140 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
143 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
145 /* Basic blocks and flowgraphs. */
146 static void make_blocks (gimple_seq
);
149 static void make_edges (void);
150 static void assign_discriminators (void);
151 static void make_cond_expr_edges (basic_block
);
152 static void make_gimple_switch_edges (gswitch
*, basic_block
);
153 static bool make_goto_expr_edges (basic_block
);
154 static void make_gimple_asm_edges (basic_block
);
155 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
156 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
158 /* Various helpers. */
159 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
160 static int gimple_verify_flow_info (void);
161 static void gimple_make_forwarder_block (edge
);
162 static gimple
*first_non_label_stmt (basic_block
);
163 static bool verify_gimple_transaction (gtransaction
*);
164 static bool call_can_make_abnormal_goto (gimple
*);
166 /* Flowgraph optimization and cleanup. */
167 static void gimple_merge_blocks (basic_block
, basic_block
);
168 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
169 static void remove_bb (basic_block
);
170 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
171 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
172 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
173 static tree
find_case_label_for_value (gswitch
*, tree
);
174 static void lower_phi_internal_fn ();
177 init_empty_tree_cfg_for_function (struct function
*fn
)
179 /* Initialize the basic block array. */
181 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
182 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
183 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
184 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
185 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
186 initial_cfg_capacity
);
188 /* Build a mapping of labels to their associated blocks. */
189 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
190 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
191 initial_cfg_capacity
);
193 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
194 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
196 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
197 = EXIT_BLOCK_PTR_FOR_FN (fn
);
198 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
199 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
203 init_empty_tree_cfg (void)
205 init_empty_tree_cfg_for_function (cfun
);
208 /*---------------------------------------------------------------------------
210 ---------------------------------------------------------------------------*/
212 /* Entry point to the CFG builder for trees. SEQ is the sequence of
213 statements to be added to the flowgraph. */
216 build_gimple_cfg (gimple_seq seq
)
218 /* Register specific gimple functions. */
219 gimple_register_cfg_hooks ();
221 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
223 init_empty_tree_cfg ();
227 /* Make sure there is always at least one block, even if it's empty. */
228 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
229 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
231 /* Adjust the size of the array. */
232 if (basic_block_info_for_fn (cfun
)->length ()
233 < (size_t) n_basic_blocks_for_fn (cfun
))
234 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
235 n_basic_blocks_for_fn (cfun
));
237 /* To speed up statement iterator walks, we first purge dead labels. */
238 cleanup_dead_labels ();
240 /* Group case nodes to reduce the number of edges.
241 We do this after cleaning up dead labels because otherwise we miss
242 a lot of obvious case merging opportunities. */
243 group_case_labels ();
245 /* Create the edges of the flowgraph. */
246 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
248 assign_discriminators ();
249 lower_phi_internal_fn ();
250 cleanup_dead_labels ();
251 delete discriminator_per_locus
;
252 discriminator_per_locus
= NULL
;
255 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
256 them and propagate the information to LOOP. We assume that the annotations
257 come immediately before the condition in BB, if any. */
260 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
262 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
263 gimple
*stmt
= gsi_stmt (gsi
);
265 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
268 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
270 stmt
= gsi_stmt (gsi
);
271 if (gimple_code (stmt
) != GIMPLE_CALL
)
273 if (!gimple_call_internal_p (stmt
)
274 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
277 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
279 case annot_expr_ivdep_kind
:
280 loop
->safelen
= INT_MAX
;
282 case annot_expr_no_vector_kind
:
283 loop
->dont_vectorize
= true;
285 case annot_expr_vector_kind
:
286 loop
->force_vectorize
= true;
287 cfun
->has_force_vectorize_loops
= true;
293 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
294 gimple_call_arg (stmt
, 0));
295 gsi_replace (&gsi
, stmt
, true);
299 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
300 them and propagate the information to the loop. We assume that the
301 annotations come immediately before the condition of the loop. */
304 replace_loop_annotate (void)
308 gimple_stmt_iterator gsi
;
311 FOR_EACH_LOOP (loop
, 0)
313 /* First look into the header. */
314 replace_loop_annotate_in_block (loop
->header
, loop
);
316 /* Then look into the latch, if any. */
318 replace_loop_annotate_in_block (loop
->latch
, loop
);
321 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
322 FOR_EACH_BB_FN (bb
, cfun
)
324 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
326 stmt
= gsi_stmt (gsi
);
327 if (gimple_code (stmt
) != GIMPLE_CALL
)
329 if (!gimple_call_internal_p (stmt
)
330 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
333 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
335 case annot_expr_ivdep_kind
:
336 case annot_expr_no_vector_kind
:
337 case annot_expr_vector_kind
:
343 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
344 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
345 gimple_call_arg (stmt
, 0));
346 gsi_replace (&gsi
, stmt
, true);
351 /* Lower internal PHI function from GIMPLE FE. */
354 lower_phi_internal_fn ()
356 basic_block bb
, pred
= NULL
;
357 gimple_stmt_iterator gsi
;
362 /* After edge creation, handle __PHI function from GIMPLE FE. */
363 FOR_EACH_BB_FN (bb
, cfun
)
365 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
);)
367 stmt
= gsi_stmt (gsi
);
368 if (! gimple_call_internal_p (stmt
, IFN_PHI
))
371 lhs
= gimple_call_lhs (stmt
);
372 phi_node
= create_phi_node (lhs
, bb
);
374 /* Add arguments to the PHI node. */
375 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
377 tree arg
= gimple_call_arg (stmt
, i
);
378 if (TREE_CODE (arg
) == LABEL_DECL
)
379 pred
= label_to_block (arg
);
382 edge e
= find_edge (pred
, bb
);
383 add_phi_arg (phi_node
, arg
, e
, UNKNOWN_LOCATION
);
387 gsi_remove (&gsi
, true);
393 execute_build_cfg (void)
395 gimple_seq body
= gimple_body (current_function_decl
);
397 build_gimple_cfg (body
);
398 gimple_set_body (current_function_decl
, NULL
);
399 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
401 fprintf (dump_file
, "Scope blocks:\n");
402 dump_scope_blocks (dump_file
, dump_flags
);
405 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
406 replace_loop_annotate ();
412 const pass_data pass_data_build_cfg
=
414 GIMPLE_PASS
, /* type */
416 OPTGROUP_NONE
, /* optinfo_flags */
417 TV_TREE_CFG
, /* tv_id */
418 PROP_gimple_leh
, /* properties_required */
419 ( PROP_cfg
| PROP_loops
), /* properties_provided */
420 0, /* properties_destroyed */
421 0, /* todo_flags_start */
422 0, /* todo_flags_finish */
425 class pass_build_cfg
: public gimple_opt_pass
428 pass_build_cfg (gcc::context
*ctxt
)
429 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
432 /* opt_pass methods: */
433 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
435 }; // class pass_build_cfg
440 make_pass_build_cfg (gcc::context
*ctxt
)
442 return new pass_build_cfg (ctxt
);
446 /* Return true if T is a computed goto. */
449 computed_goto_p (gimple
*t
)
451 return (gimple_code (t
) == GIMPLE_GOTO
452 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
455 /* Returns true if the sequence of statements STMTS only contains
456 a call to __builtin_unreachable (). */
459 gimple_seq_unreachable_p (gimple_seq stmts
)
464 gimple_stmt_iterator gsi
= gsi_last (stmts
);
466 if (!gimple_call_builtin_p (gsi_stmt (gsi
), BUILT_IN_UNREACHABLE
))
469 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
471 gimple
*stmt
= gsi_stmt (gsi
);
472 if (gimple_code (stmt
) != GIMPLE_LABEL
473 && !is_gimple_debug (stmt
)
474 && !gimple_clobber_p (stmt
))
480 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
481 the other edge points to a bb with just __builtin_unreachable ().
482 I.e. return true for C->M edge in:
490 __builtin_unreachable ();
494 assert_unreachable_fallthru_edge_p (edge e
)
496 basic_block pred_bb
= e
->src
;
497 gimple
*last
= last_stmt (pred_bb
);
498 if (last
&& gimple_code (last
) == GIMPLE_COND
)
500 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
501 if (other_bb
== e
->dest
)
502 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
503 if (EDGE_COUNT (other_bb
->succs
) == 0)
504 return gimple_seq_unreachable_p (bb_seq (other_bb
));
510 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
511 could alter control flow except via eh. We initialize the flag at
512 CFG build time and only ever clear it later. */
515 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
517 int flags
= gimple_call_flags (stmt
);
519 /* A call alters control flow if it can make an abnormal goto. */
520 if (call_can_make_abnormal_goto (stmt
)
521 /* A call also alters control flow if it does not return. */
522 || flags
& ECF_NORETURN
523 /* TM ending statements have backedges out of the transaction.
524 Return true so we split the basic block containing them.
525 Note that the TM_BUILTIN test is merely an optimization. */
526 || ((flags
& ECF_TM_BUILTIN
)
527 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
528 /* BUILT_IN_RETURN call is same as return statement. */
529 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
530 /* IFN_UNIQUE should be the last insn, to make checking for it
531 as cheap as possible. */
532 || (gimple_call_internal_p (stmt
)
533 && gimple_call_internal_unique_p (stmt
)))
534 gimple_call_set_ctrl_altering (stmt
, true);
536 gimple_call_set_ctrl_altering (stmt
, false);
540 /* Insert SEQ after BB and build a flowgraph. */
543 make_blocks_1 (gimple_seq seq
, basic_block bb
)
545 gimple_stmt_iterator i
= gsi_start (seq
);
547 bool start_new_block
= true;
548 bool first_stmt_of_seq
= true;
550 while (!gsi_end_p (i
))
557 if (stmt
&& is_gimple_call (stmt
))
558 gimple_call_initialize_ctrl_altering (stmt
);
560 /* If the statement starts a new basic block or if we have determined
561 in a previous pass that we need to create a new block for STMT, do
563 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
565 if (!first_stmt_of_seq
)
566 gsi_split_seq_before (&i
, &seq
);
567 bb
= create_basic_block (seq
, bb
);
568 start_new_block
= false;
571 /* Now add STMT to BB and create the subgraphs for special statement
573 gimple_set_bb (stmt
, bb
);
575 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
577 if (stmt_ends_bb_p (stmt
))
579 /* If the stmt can make abnormal goto use a new temporary
580 for the assignment to the LHS. This makes sure the old value
581 of the LHS is available on the abnormal edge. Otherwise
582 we will end up with overlapping life-ranges for abnormal
584 if (gimple_has_lhs (stmt
)
585 && stmt_can_make_abnormal_goto (stmt
)
586 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
588 tree lhs
= gimple_get_lhs (stmt
);
589 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
590 gimple
*s
= gimple_build_assign (lhs
, tmp
);
591 gimple_set_location (s
, gimple_location (stmt
));
592 gimple_set_block (s
, gimple_block (stmt
));
593 gimple_set_lhs (stmt
, tmp
);
594 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
595 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
596 DECL_GIMPLE_REG_P (tmp
) = 1;
597 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
599 start_new_block
= true;
603 first_stmt_of_seq
= false;
608 /* Build a flowgraph for the sequence of stmts SEQ. */
611 make_blocks (gimple_seq seq
)
613 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
616 /* Create and return a new empty basic block after bb AFTER. */
619 create_bb (void *h
, void *e
, basic_block after
)
625 /* Create and initialize a new basic block. Since alloc_block uses
626 GC allocation that clears memory to allocate a basic block, we do
627 not have to clear the newly allocated basic block here. */
630 bb
->index
= last_basic_block_for_fn (cfun
);
632 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
634 /* Add the new block to the linked list of blocks. */
635 link_block (bb
, after
);
637 /* Grow the basic block array if needed. */
638 if ((size_t) last_basic_block_for_fn (cfun
)
639 == basic_block_info_for_fn (cfun
)->length ())
642 (last_basic_block_for_fn (cfun
)
643 + (last_basic_block_for_fn (cfun
) + 3) / 4);
644 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
647 /* Add the newly created block to the array. */
648 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
650 n_basic_blocks_for_fn (cfun
)++;
651 last_basic_block_for_fn (cfun
)++;
657 /*---------------------------------------------------------------------------
659 ---------------------------------------------------------------------------*/
661 /* If basic block BB has an abnormal edge to a basic block
662 containing IFN_ABNORMAL_DISPATCHER internal call, return
663 that the dispatcher's basic block, otherwise return NULL. */
666 get_abnormal_succ_dispatcher (basic_block bb
)
671 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
672 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
674 gimple_stmt_iterator gsi
675 = gsi_start_nondebug_after_labels_bb (e
->dest
);
676 gimple
*g
= gsi_stmt (gsi
);
677 if (g
&& gimple_call_internal_p (g
, IFN_ABNORMAL_DISPATCHER
))
683 /* Helper function for make_edges. Create a basic block with
684 with ABNORMAL_DISPATCHER internal call in it if needed, and
685 create abnormal edges from BBS to it and from it to FOR_BB
686 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
689 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
690 basic_block for_bb
, int *bb_to_omp_idx
,
691 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
693 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
694 unsigned int idx
= 0;
700 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
701 if (bb_to_omp_idx
[for_bb
->index
] != 0)
705 /* If the dispatcher has been created already, then there are basic
706 blocks with abnormal edges to it, so just make a new edge to
708 if (*dispatcher
== NULL
)
710 /* Check if there are any basic blocks that need to have
711 abnormal edges to this dispatcher. If there are none, return
713 if (bb_to_omp_idx
== NULL
)
715 if (bbs
->is_empty ())
720 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
721 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
727 /* Create the dispatcher bb. */
728 *dispatcher
= create_basic_block (NULL
, for_bb
);
731 /* Factor computed gotos into a common computed goto site. Also
732 record the location of that site so that we can un-factor the
733 gotos after we have converted back to normal form. */
734 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
736 /* Create the destination of the factored goto. Each original
737 computed goto will put its desired destination into this
738 variable and jump to the label we create immediately below. */
739 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
741 /* Build a label for the new block which will contain the
742 factored computed goto. */
743 tree factored_label_decl
744 = create_artificial_label (UNKNOWN_LOCATION
);
745 gimple
*factored_computed_goto_label
746 = gimple_build_label (factored_label_decl
);
747 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
749 /* Build our new computed goto. */
750 gimple
*factored_computed_goto
= gimple_build_goto (var
);
751 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
753 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
756 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
759 gsi
= gsi_last_bb (bb
);
760 gimple
*last
= gsi_stmt (gsi
);
762 gcc_assert (computed_goto_p (last
));
764 /* Copy the original computed goto's destination into VAR. */
766 = gimple_build_assign (var
, gimple_goto_dest (last
));
767 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
769 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
770 e
->goto_locus
= gimple_location (last
);
771 gsi_remove (&gsi
, true);
776 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
777 gimple
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
779 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
780 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
782 /* Create predecessor edges of the dispatcher. */
783 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
786 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
788 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
793 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
796 /* Creates outgoing edges for BB. Returns 1 when it ends with an
797 computed goto, returns 2 when it ends with a statement that
798 might return to this function via an nonlocal goto, otherwise
799 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
802 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
804 gimple
*last
= last_stmt (bb
);
805 bool fallthru
= false;
811 switch (gimple_code (last
))
814 if (make_goto_expr_edges (bb
))
820 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
821 e
->goto_locus
= gimple_location (last
);
826 make_cond_expr_edges (bb
);
830 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
834 make_eh_edges (last
);
837 case GIMPLE_EH_DISPATCH
:
838 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
842 /* If this function receives a nonlocal goto, then we need to
843 make edges from this call site to all the nonlocal goto
845 if (stmt_can_make_abnormal_goto (last
))
848 /* If this statement has reachable exception handlers, then
849 create abnormal edges to them. */
850 make_eh_edges (last
);
852 /* BUILTIN_RETURN is really a return statement. */
853 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
855 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
858 /* Some calls are known not to return. */
860 fallthru
= !gimple_call_noreturn_p (last
);
864 /* A GIMPLE_ASSIGN may throw internally and thus be considered
866 if (is_ctrl_altering_stmt (last
))
867 make_eh_edges (last
);
872 make_gimple_asm_edges (bb
);
877 fallthru
= omp_make_gimple_edges (bb
, pcur_region
, pomp_index
);
880 case GIMPLE_TRANSACTION
:
882 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
883 tree label1
= gimple_transaction_label_norm (txn
);
884 tree label2
= gimple_transaction_label_uninst (txn
);
887 make_edge (bb
, label_to_block (label1
), EDGE_FALLTHRU
);
889 make_edge (bb
, label_to_block (label2
),
890 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
892 tree label3
= gimple_transaction_label_over (txn
);
893 if (gimple_transaction_subcode (txn
)
894 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
895 make_edge (bb
, label_to_block (label3
), EDGE_TM_ABORT
);
902 gcc_assert (!stmt_ends_bb_p (last
));
908 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
913 /* Join all the blocks in the flowgraph. */
919 struct omp_region
*cur_region
= NULL
;
920 auto_vec
<basic_block
> ab_edge_goto
;
921 auto_vec
<basic_block
> ab_edge_call
;
922 int *bb_to_omp_idx
= NULL
;
923 int cur_omp_region_idx
= 0;
925 /* Create an edge from entry to the first block with executable
927 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
928 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
931 /* Traverse the basic block array placing edges. */
932 FOR_EACH_BB_FN (bb
, cfun
)
937 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
939 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
941 ab_edge_goto
.safe_push (bb
);
943 ab_edge_call
.safe_push (bb
);
945 if (cur_region
&& bb_to_omp_idx
== NULL
)
946 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
949 /* Computed gotos are hell to deal with, especially if there are
950 lots of them with a large number of destinations. So we factor
951 them to a common computed goto location before we build the
952 edge list. After we convert back to normal form, we will un-factor
953 the computed gotos since factoring introduces an unwanted jump.
954 For non-local gotos and abnormal edges from calls to calls that return
955 twice or forced labels, factor the abnormal edges too, by having all
956 abnormal edges from the calls go to a common artificial basic block
957 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
958 basic block to all forced labels and calls returning twice.
959 We do this per-OpenMP structured block, because those regions
960 are guaranteed to be single entry single exit by the standard,
961 so it is not allowed to enter or exit such regions abnormally this way,
962 thus all computed gotos, non-local gotos and setjmp/longjmp calls
963 must not transfer control across SESE region boundaries. */
964 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
966 gimple_stmt_iterator gsi
;
967 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
968 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
969 int count
= n_basic_blocks_for_fn (cfun
);
972 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
974 FOR_EACH_BB_FN (bb
, cfun
)
976 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
978 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
984 target
= gimple_label_label (label_stmt
);
986 /* Make an edge to every label block that has been marked as a
987 potential target for a computed goto or a non-local goto. */
988 if (FORCED_LABEL (target
))
989 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
990 &ab_edge_goto
, true);
991 if (DECL_NONLOCAL (target
))
993 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
994 &ab_edge_call
, false);
999 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1000 gsi_next_nondebug (&gsi
);
1001 if (!gsi_end_p (gsi
))
1003 /* Make an edge to every setjmp-like call. */
1004 gimple
*call_stmt
= gsi_stmt (gsi
);
1005 if (is_gimple_call (call_stmt
)
1006 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1007 || gimple_call_builtin_p (call_stmt
,
1008 BUILT_IN_SETJMP_RECEIVER
)))
1009 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1010 &ab_edge_call
, false);
1015 XDELETE (dispatcher_bbs
);
1018 XDELETE (bb_to_omp_idx
);
1020 omp_free_regions ();
1023 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1024 needed. Returns true if new bbs were created.
1025 Note: This is transitional code, and should not be used for new code. We
1026 should be able to get rid of this by rewriting all target va-arg
1027 gimplification hooks to use an interface gimple_build_cond_value as described
1028 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1031 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1033 gimple
*stmt
= gsi_stmt (*gsi
);
1034 basic_block bb
= gimple_bb (stmt
);
1035 basic_block lastbb
, afterbb
;
1036 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1038 lastbb
= make_blocks_1 (seq
, bb
);
1039 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1041 e
= split_block (bb
, stmt
);
1042 /* Move e->dest to come after the new basic blocks. */
1044 unlink_block (afterbb
);
1045 link_block (afterbb
, lastbb
);
1046 redirect_edge_succ (e
, bb
->next_bb
);
1048 while (bb
!= afterbb
)
1050 struct omp_region
*cur_region
= NULL
;
1051 int cur_omp_region_idx
= 0;
1052 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1053 gcc_assert (!mer
&& !cur_region
);
1054 add_bb_to_loop (bb
, afterbb
->loop_father
);
1060 /* Find the next available discriminator value for LOCUS. The
1061 discriminator distinguishes among several basic blocks that
1062 share a common locus, allowing for more accurate sample-based
1066 next_discriminator_for_locus (location_t locus
)
1068 struct locus_discrim_map item
;
1069 struct locus_discrim_map
**slot
;
1072 item
.discriminator
= 0;
1073 slot
= discriminator_per_locus
->find_slot_with_hash (
1074 &item
, LOCATION_LINE (locus
), INSERT
);
1076 if (*slot
== HTAB_EMPTY_ENTRY
)
1078 *slot
= XNEW (struct locus_discrim_map
);
1080 (*slot
)->locus
= locus
;
1081 (*slot
)->discriminator
= 0;
1083 (*slot
)->discriminator
++;
1084 return (*slot
)->discriminator
;
1087 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1090 same_line_p (location_t locus1
, location_t locus2
)
1092 expanded_location from
, to
;
1094 if (locus1
== locus2
)
1097 from
= expand_location (locus1
);
1098 to
= expand_location (locus2
);
1100 if (from
.line
!= to
.line
)
1102 if (from
.file
== to
.file
)
1104 return (from
.file
!= NULL
1106 && filename_cmp (from
.file
, to
.file
) == 0);
1109 /* Assign discriminators to each basic block. */
1112 assign_discriminators (void)
1116 FOR_EACH_BB_FN (bb
, cfun
)
1120 gimple
*last
= last_stmt (bb
);
1121 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1123 if (locus
== UNKNOWN_LOCATION
)
1126 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1128 gimple
*first
= first_non_label_stmt (e
->dest
);
1129 gimple
*last
= last_stmt (e
->dest
);
1130 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1131 || (last
&& same_line_p (locus
, gimple_location (last
))))
1133 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1134 bb
->discriminator
= next_discriminator_for_locus (locus
);
1136 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1142 /* Create the edges for a GIMPLE_COND starting at block BB. */
1145 make_cond_expr_edges (basic_block bb
)
1147 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1148 gimple
*then_stmt
, *else_stmt
;
1149 basic_block then_bb
, else_bb
;
1150 tree then_label
, else_label
;
1154 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1156 /* Entry basic blocks for each component. */
1157 then_label
= gimple_cond_true_label (entry
);
1158 else_label
= gimple_cond_false_label (entry
);
1159 then_bb
= label_to_block (then_label
);
1160 else_bb
= label_to_block (else_label
);
1161 then_stmt
= first_stmt (then_bb
);
1162 else_stmt
= first_stmt (else_bb
);
1164 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1165 e
->goto_locus
= gimple_location (then_stmt
);
1166 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1168 e
->goto_locus
= gimple_location (else_stmt
);
1170 /* We do not need the labels anymore. */
1171 gimple_cond_set_true_label (entry
, NULL_TREE
);
1172 gimple_cond_set_false_label (entry
, NULL_TREE
);
1176 /* Called for each element in the hash table (P) as we delete the
1177 edge to cases hash table.
1179 Clear all the CASE_CHAINs to prevent problems with copying of
1180 SWITCH_EXPRs and structure sharing rules, then free the hash table
1184 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1188 for (t
= value
; t
; t
= next
)
1190 next
= CASE_CHAIN (t
);
1191 CASE_CHAIN (t
) = NULL
;
1197 /* Start recording information mapping edges to case labels. */
1200 start_recording_case_labels (void)
1202 gcc_assert (edge_to_cases
== NULL
);
1203 edge_to_cases
= new hash_map
<edge
, tree
>;
1204 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1207 /* Return nonzero if we are recording information for case labels. */
1210 recording_case_labels_p (void)
1212 return (edge_to_cases
!= NULL
);
1215 /* Stop recording information mapping edges to case labels and
1216 remove any information we have recorded. */
1218 end_recording_case_labels (void)
1222 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1223 delete edge_to_cases
;
1224 edge_to_cases
= NULL
;
1225 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1227 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1230 gimple
*stmt
= last_stmt (bb
);
1231 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1232 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1235 BITMAP_FREE (touched_switch_bbs
);
1238 /* If we are inside a {start,end}_recording_cases block, then return
1239 a chain of CASE_LABEL_EXPRs from T which reference E.
1241 Otherwise return NULL. */
1244 get_cases_for_edge (edge e
, gswitch
*t
)
1249 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1250 chains available. Return NULL so the caller can detect this case. */
1251 if (!recording_case_labels_p ())
1254 slot
= edge_to_cases
->get (e
);
1258 /* If we did not find E in the hash table, then this must be the first
1259 time we have been queried for information about E & T. Add all the
1260 elements from T to the hash table then perform the query again. */
1262 n
= gimple_switch_num_labels (t
);
1263 for (i
= 0; i
< n
; i
++)
1265 tree elt
= gimple_switch_label (t
, i
);
1266 tree lab
= CASE_LABEL (elt
);
1267 basic_block label_bb
= label_to_block (lab
);
1268 edge this_edge
= find_edge (e
->src
, label_bb
);
1270 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1272 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1273 CASE_CHAIN (elt
) = s
;
1277 return *edge_to_cases
->get (e
);
1280 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1283 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1287 n
= gimple_switch_num_labels (entry
);
1289 for (i
= 0; i
< n
; ++i
)
1291 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1292 basic_block label_bb
= label_to_block (lab
);
1293 make_edge (bb
, label_bb
, 0);
1298 /* Return the basic block holding label DEST. */
1301 label_to_block_fn (struct function
*ifun
, tree dest
)
1303 int uid
= LABEL_DECL_UID (dest
);
1305 /* We would die hard when faced by an undefined label. Emit a label to
1306 the very first basic block. This will hopefully make even the dataflow
1307 and undefined variable warnings quite right. */
1308 if (seen_error () && uid
< 0)
1310 gimple_stmt_iterator gsi
=
1311 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1314 stmt
= gimple_build_label (dest
);
1315 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1316 uid
= LABEL_DECL_UID (dest
);
1318 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1320 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1323 /* Create edges for a goto statement at block BB. Returns true
1324 if abnormal edges should be created. */
1327 make_goto_expr_edges (basic_block bb
)
1329 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1330 gimple
*goto_t
= gsi_stmt (last
);
1332 /* A simple GOTO creates normal edges. */
1333 if (simple_goto_p (goto_t
))
1335 tree dest
= gimple_goto_dest (goto_t
);
1336 basic_block label_bb
= label_to_block (dest
);
1337 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1338 e
->goto_locus
= gimple_location (goto_t
);
1339 gsi_remove (&last
, true);
1343 /* A computed GOTO creates abnormal edges. */
1347 /* Create edges for an asm statement with labels at block BB. */
1350 make_gimple_asm_edges (basic_block bb
)
1352 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1353 int i
, n
= gimple_asm_nlabels (stmt
);
1355 for (i
= 0; i
< n
; ++i
)
1357 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1358 basic_block label_bb
= label_to_block (label
);
1359 make_edge (bb
, label_bb
, 0);
1363 /*---------------------------------------------------------------------------
1365 ---------------------------------------------------------------------------*/
1367 /* Cleanup useless labels in basic blocks. This is something we wish
1368 to do early because it allows us to group case labels before creating
1369 the edges for the CFG, and it speeds up block statement iterators in
1370 all passes later on.
1371 We rerun this pass after CFG is created, to get rid of the labels that
1372 are no longer referenced. After then we do not run it any more, since
1373 (almost) no new labels should be created. */
1375 /* A map from basic block index to the leading label of that block. */
1376 static struct label_record
1381 /* True if the label is referenced from somewhere. */
1385 /* Given LABEL return the first label in the same basic block. */
1388 main_block_label (tree label
)
1390 basic_block bb
= label_to_block (label
);
1391 tree main_label
= label_for_bb
[bb
->index
].label
;
1393 /* label_to_block possibly inserted undefined label into the chain. */
1396 label_for_bb
[bb
->index
].label
= label
;
1400 label_for_bb
[bb
->index
].used
= true;
1404 /* Clean up redundant labels within the exception tree. */
1407 cleanup_dead_labels_eh (void)
1414 if (cfun
->eh
== NULL
)
1417 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1418 if (lp
&& lp
->post_landing_pad
)
1420 lab
= main_block_label (lp
->post_landing_pad
);
1421 if (lab
!= lp
->post_landing_pad
)
1423 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1424 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1428 FOR_ALL_EH_REGION (r
)
1432 case ERT_MUST_NOT_THROW
:
1438 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1442 c
->label
= main_block_label (lab
);
1447 case ERT_ALLOWED_EXCEPTIONS
:
1448 lab
= r
->u
.allowed
.label
;
1450 r
->u
.allowed
.label
= main_block_label (lab
);
1456 /* Cleanup redundant labels. This is a three-step process:
1457 1) Find the leading label for each block.
1458 2) Redirect all references to labels to the leading labels.
1459 3) Cleanup all useless labels. */
1462 cleanup_dead_labels (void)
1465 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1467 /* Find a suitable label for each block. We use the first user-defined
1468 label if there is one, or otherwise just the first label we see. */
1469 FOR_EACH_BB_FN (bb
, cfun
)
1471 gimple_stmt_iterator i
;
1473 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1476 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1481 label
= gimple_label_label (label_stmt
);
1483 /* If we have not yet seen a label for the current block,
1484 remember this one and see if there are more labels. */
1485 if (!label_for_bb
[bb
->index
].label
)
1487 label_for_bb
[bb
->index
].label
= label
;
1491 /* If we did see a label for the current block already, but it
1492 is an artificially created label, replace it if the current
1493 label is a user defined label. */
1494 if (!DECL_ARTIFICIAL (label
)
1495 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1497 label_for_bb
[bb
->index
].label
= label
;
1503 /* Now redirect all jumps/branches to the selected label.
1504 First do so for each block ending in a control statement. */
1505 FOR_EACH_BB_FN (bb
, cfun
)
1507 gimple
*stmt
= last_stmt (bb
);
1508 tree label
, new_label
;
1513 switch (gimple_code (stmt
))
1517 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1518 label
= gimple_cond_true_label (cond_stmt
);
1521 new_label
= main_block_label (label
);
1522 if (new_label
!= label
)
1523 gimple_cond_set_true_label (cond_stmt
, new_label
);
1526 label
= gimple_cond_false_label (cond_stmt
);
1529 new_label
= main_block_label (label
);
1530 if (new_label
!= label
)
1531 gimple_cond_set_false_label (cond_stmt
, new_label
);
1538 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1539 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1541 /* Replace all destination labels. */
1542 for (i
= 0; i
< n
; ++i
)
1544 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1545 label
= CASE_LABEL (case_label
);
1546 new_label
= main_block_label (label
);
1547 if (new_label
!= label
)
1548 CASE_LABEL (case_label
) = new_label
;
1555 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1556 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1558 for (i
= 0; i
< n
; ++i
)
1560 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1561 tree label
= main_block_label (TREE_VALUE (cons
));
1562 TREE_VALUE (cons
) = label
;
1567 /* We have to handle gotos until they're removed, and we don't
1568 remove them until after we've created the CFG edges. */
1570 if (!computed_goto_p (stmt
))
1572 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1573 label
= gimple_goto_dest (goto_stmt
);
1574 new_label
= main_block_label (label
);
1575 if (new_label
!= label
)
1576 gimple_goto_set_dest (goto_stmt
, new_label
);
1580 case GIMPLE_TRANSACTION
:
1582 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1584 label
= gimple_transaction_label_norm (txn
);
1587 new_label
= main_block_label (label
);
1588 if (new_label
!= label
)
1589 gimple_transaction_set_label_norm (txn
, new_label
);
1592 label
= gimple_transaction_label_uninst (txn
);
1595 new_label
= main_block_label (label
);
1596 if (new_label
!= label
)
1597 gimple_transaction_set_label_uninst (txn
, new_label
);
1600 label
= gimple_transaction_label_over (txn
);
1603 new_label
= main_block_label (label
);
1604 if (new_label
!= label
)
1605 gimple_transaction_set_label_over (txn
, new_label
);
1615 /* Do the same for the exception region tree labels. */
1616 cleanup_dead_labels_eh ();
1618 /* Finally, purge dead labels. All user-defined labels and labels that
1619 can be the target of non-local gotos and labels which have their
1620 address taken are preserved. */
1621 FOR_EACH_BB_FN (bb
, cfun
)
1623 gimple_stmt_iterator i
;
1624 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1626 if (!label_for_this_bb
)
1629 /* If the main label of the block is unused, we may still remove it. */
1630 if (!label_for_bb
[bb
->index
].used
)
1631 label_for_this_bb
= NULL
;
1633 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1636 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1641 label
= gimple_label_label (label_stmt
);
1643 if (label
== label_for_this_bb
1644 || !DECL_ARTIFICIAL (label
)
1645 || DECL_NONLOCAL (label
)
1646 || FORCED_LABEL (label
))
1649 gsi_remove (&i
, true);
1653 free (label_for_bb
);
1656 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1657 the ones jumping to the same label.
1658 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1661 group_case_labels_stmt (gswitch
*stmt
)
1663 int old_size
= gimple_switch_num_labels (stmt
);
1664 int i
, j
, base_index
, new_size
= old_size
;
1665 basic_block default_bb
= NULL
;
1667 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1669 /* Look for possible opportunities to merge cases. */
1671 while (i
< old_size
)
1673 tree base_case
, base_high
;
1674 basic_block base_bb
;
1676 base_case
= gimple_switch_label (stmt
, i
);
1678 gcc_assert (base_case
);
1679 base_bb
= label_to_block (CASE_LABEL (base_case
));
1681 /* Discard cases that have the same destination as the default case. */
1682 if (base_bb
== default_bb
)
1684 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1690 base_high
= CASE_HIGH (base_case
)
1691 ? CASE_HIGH (base_case
)
1692 : CASE_LOW (base_case
);
1695 /* Try to merge case labels. Break out when we reach the end
1696 of the label vector or when we cannot merge the next case
1697 label with the current one. */
1698 while (i
< old_size
)
1700 tree merge_case
= gimple_switch_label (stmt
, i
);
1701 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1702 wide_int bhp1
= wi::add (base_high
, 1);
1704 /* Merge the cases if they jump to the same place,
1705 and their ranges are consecutive. */
1706 if (merge_bb
== base_bb
1707 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1709 base_high
= CASE_HIGH (merge_case
) ?
1710 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1711 CASE_HIGH (base_case
) = base_high
;
1712 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1720 /* Discard cases that have an unreachable destination block. */
1721 if (EDGE_COUNT (base_bb
->succs
) == 0
1722 && gimple_seq_unreachable_p (bb_seq (base_bb
)))
1724 edge base_edge
= find_edge (gimple_bb (stmt
), base_bb
);
1725 if (base_edge
!= NULL
)
1726 remove_edge_and_dominated_blocks (base_edge
);
1727 gimple_switch_set_label (stmt
, base_index
, NULL_TREE
);
1732 /* Compress the case labels in the label vector, and adjust the
1733 length of the vector. */
1734 for (i
= 0, j
= 0; i
< new_size
; i
++)
1736 while (! gimple_switch_label (stmt
, j
))
1738 gimple_switch_set_label (stmt
, i
,
1739 gimple_switch_label (stmt
, j
++));
1742 gcc_assert (new_size
<= old_size
);
1743 gimple_switch_set_num_labels (stmt
, new_size
);
1746 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1747 and scan the sorted vector of cases. Combine the ones jumping to the
1751 group_case_labels (void)
1755 FOR_EACH_BB_FN (bb
, cfun
)
1757 gimple
*stmt
= last_stmt (bb
);
1758 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1759 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1763 /* Checks whether we can merge block B into block A. */
1766 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1770 if (!single_succ_p (a
))
1773 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1776 if (single_succ (a
) != b
)
1779 if (!single_pred_p (b
))
1782 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1783 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1786 /* If A ends by a statement causing exceptions or something similar, we
1787 cannot merge the blocks. */
1788 stmt
= last_stmt (a
);
1789 if (stmt
&& stmt_ends_bb_p (stmt
))
1792 /* Do not allow a block with only a non-local label to be merged. */
1794 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1795 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1798 /* Examine the labels at the beginning of B. */
1799 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1803 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1806 lab
= gimple_label_label (label_stmt
);
1808 /* Do not remove user forced labels or for -O0 any user labels. */
1809 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1813 /* Protect simple loop latches. We only want to avoid merging
1814 the latch with the loop header or with a block in another
1815 loop in this case. */
1817 && b
->loop_father
->latch
== b
1818 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1819 && (b
->loop_father
->header
== a
1820 || b
->loop_father
!= a
->loop_father
))
1823 /* It must be possible to eliminate all phi nodes in B. If ssa form
1824 is not up-to-date and a name-mapping is registered, we cannot eliminate
1825 any phis. Symbols marked for renaming are never a problem though. */
1826 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1829 gphi
*phi
= gsi
.phi ();
1830 /* Technically only new names matter. */
1831 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1835 /* When not optimizing, don't merge if we'd lose goto_locus. */
1837 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1839 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1840 gimple_stmt_iterator prev
, next
;
1841 prev
= gsi_last_nondebug_bb (a
);
1842 next
= gsi_after_labels (b
);
1843 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1844 gsi_next_nondebug (&next
);
1845 if ((gsi_end_p (prev
)
1846 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1847 && (gsi_end_p (next
)
1848 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1855 /* Replaces all uses of NAME by VAL. */
1858 replace_uses_by (tree name
, tree val
)
1860 imm_use_iterator imm_iter
;
1865 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1867 /* Mark the block if we change the last stmt in it. */
1868 if (cfgcleanup_altered_bbs
1869 && stmt_ends_bb_p (stmt
))
1870 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1872 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1874 replace_exp (use
, val
);
1876 if (gimple_code (stmt
) == GIMPLE_PHI
)
1878 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1879 PHI_ARG_INDEX_FROM_USE (use
));
1880 if (e
->flags
& EDGE_ABNORMAL
1881 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1883 /* This can only occur for virtual operands, since
1884 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1885 would prevent replacement. */
1886 gcc_checking_assert (virtual_operand_p (name
));
1887 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1892 if (gimple_code (stmt
) != GIMPLE_PHI
)
1894 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1895 gimple
*orig_stmt
= stmt
;
1898 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1899 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1900 only change sth from non-invariant to invariant, and only
1901 when propagating constants. */
1902 if (is_gimple_min_invariant (val
))
1903 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1905 tree op
= gimple_op (stmt
, i
);
1906 /* Operands may be empty here. For example, the labels
1907 of a GIMPLE_COND are nulled out following the creation
1908 of the corresponding CFG edges. */
1909 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1910 recompute_tree_invariant_for_addr_expr (op
);
1913 if (fold_stmt (&gsi
))
1914 stmt
= gsi_stmt (gsi
);
1916 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1917 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1923 gcc_checking_assert (has_zero_uses (name
));
1925 /* Also update the trees stored in loop structures. */
1930 FOR_EACH_LOOP (loop
, 0)
1932 substitute_in_loop_info (loop
, name
, val
);
1937 /* Merge block B into block A. */
1940 gimple_merge_blocks (basic_block a
, basic_block b
)
1942 gimple_stmt_iterator last
, gsi
;
1946 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1948 /* Remove all single-valued PHI nodes from block B of the form
1949 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1950 gsi
= gsi_last_bb (a
);
1951 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1953 gimple
*phi
= gsi_stmt (psi
);
1954 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1956 bool may_replace_uses
= (virtual_operand_p (def
)
1957 || may_propagate_copy (def
, use
));
1959 /* In case we maintain loop closed ssa form, do not propagate arguments
1960 of loop exit phi nodes. */
1962 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1963 && !virtual_operand_p (def
)
1964 && TREE_CODE (use
) == SSA_NAME
1965 && a
->loop_father
!= b
->loop_father
)
1966 may_replace_uses
= false;
1968 if (!may_replace_uses
)
1970 gcc_assert (!virtual_operand_p (def
));
1972 /* Note that just emitting the copies is fine -- there is no problem
1973 with ordering of phi nodes. This is because A is the single
1974 predecessor of B, therefore results of the phi nodes cannot
1975 appear as arguments of the phi nodes. */
1976 copy
= gimple_build_assign (def
, use
);
1977 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1978 remove_phi_node (&psi
, false);
1982 /* If we deal with a PHI for virtual operands, we can simply
1983 propagate these without fussing with folding or updating
1985 if (virtual_operand_p (def
))
1987 imm_use_iterator iter
;
1988 use_operand_p use_p
;
1991 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1992 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1993 SET_USE (use_p
, use
);
1995 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1996 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1999 replace_uses_by (def
, use
);
2001 remove_phi_node (&psi
, true);
2005 /* Ensure that B follows A. */
2006 move_block_after (b
, a
);
2008 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
2009 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
2011 /* Remove labels from B and set gimple_bb to A for other statements. */
2012 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
2014 gimple
*stmt
= gsi_stmt (gsi
);
2015 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2017 tree label
= gimple_label_label (label_stmt
);
2020 gsi_remove (&gsi
, false);
2022 /* Now that we can thread computed gotos, we might have
2023 a situation where we have a forced label in block B
2024 However, the label at the start of block B might still be
2025 used in other ways (think about the runtime checking for
2026 Fortran assigned gotos). So we can not just delete the
2027 label. Instead we move the label to the start of block A. */
2028 if (FORCED_LABEL (label
))
2030 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2031 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2033 /* Other user labels keep around in a form of a debug stmt. */
2034 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
2036 gimple
*dbg
= gimple_build_debug_bind (label
,
2039 gimple_debug_bind_reset_value (dbg
);
2040 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2043 lp_nr
= EH_LANDING_PAD_NR (label
);
2046 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2047 lp
->post_landing_pad
= NULL
;
2052 gimple_set_bb (stmt
, a
);
2057 /* When merging two BBs, if their counts are different, the larger count
2058 is selected as the new bb count. This is to handle inconsistent
2060 if (a
->loop_father
== b
->loop_father
)
2062 a
->count
= MAX (a
->count
, b
->count
);
2063 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
2066 /* Merge the sequences. */
2067 last
= gsi_last_bb (a
);
2068 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2069 set_bb_seq (b
, NULL
);
2071 if (cfgcleanup_altered_bbs
)
2072 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2076 /* Return the one of two successors of BB that is not reachable by a
2077 complex edge, if there is one. Else, return BB. We use
2078 this in optimizations that use post-dominators for their heuristics,
2079 to catch the cases in C++ where function calls are involved. */
2082 single_noncomplex_succ (basic_block bb
)
2085 if (EDGE_COUNT (bb
->succs
) != 2)
2088 e0
= EDGE_SUCC (bb
, 0);
2089 e1
= EDGE_SUCC (bb
, 1);
2090 if (e0
->flags
& EDGE_COMPLEX
)
2092 if (e1
->flags
& EDGE_COMPLEX
)
2098 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2101 notice_special_calls (gcall
*call
)
2103 int flags
= gimple_call_flags (call
);
2105 if (flags
& ECF_MAY_BE_ALLOCA
)
2106 cfun
->calls_alloca
= true;
2107 if (flags
& ECF_RETURNS_TWICE
)
2108 cfun
->calls_setjmp
= true;
2112 /* Clear flags set by notice_special_calls. Used by dead code removal
2113 to update the flags. */
2116 clear_special_calls (void)
2118 cfun
->calls_alloca
= false;
2119 cfun
->calls_setjmp
= false;
2122 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2125 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2127 /* Since this block is no longer reachable, we can just delete all
2128 of its PHI nodes. */
2129 remove_phi_nodes (bb
);
2131 /* Remove edges to BB's successors. */
2132 while (EDGE_COUNT (bb
->succs
) > 0)
2133 remove_edge (EDGE_SUCC (bb
, 0));
2137 /* Remove statements of basic block BB. */
2140 remove_bb (basic_block bb
)
2142 gimple_stmt_iterator i
;
2146 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2147 if (dump_flags
& TDF_DETAILS
)
2149 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2150 fprintf (dump_file
, "\n");
2156 struct loop
*loop
= bb
->loop_father
;
2158 /* If a loop gets removed, clean up the information associated
2160 if (loop
->latch
== bb
2161 || loop
->header
== bb
)
2162 free_numbers_of_iterations_estimates_loop (loop
);
2165 /* Remove all the instructions in the block. */
2166 if (bb_seq (bb
) != NULL
)
2168 /* Walk backwards so as to get a chance to substitute all
2169 released DEFs into debug stmts. See
2170 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2172 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2174 gimple
*stmt
= gsi_stmt (i
);
2175 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2177 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2178 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2181 gimple_stmt_iterator new_gsi
;
2183 /* A non-reachable non-local label may still be referenced.
2184 But it no longer needs to carry the extra semantics of
2186 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2188 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2189 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2192 new_bb
= bb
->prev_bb
;
2193 new_gsi
= gsi_start_bb (new_bb
);
2194 gsi_remove (&i
, false);
2195 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2199 /* Release SSA definitions. */
2200 release_defs (stmt
);
2201 gsi_remove (&i
, true);
2205 i
= gsi_last_bb (bb
);
2211 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2212 bb
->il
.gimple
.seq
= NULL
;
2213 bb
->il
.gimple
.phi_nodes
= NULL
;
2217 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2218 predicate VAL, return the edge that will be taken out of the block.
2219 If VAL does not match a unique edge, NULL is returned. */
2222 find_taken_edge (basic_block bb
, tree val
)
2226 stmt
= last_stmt (bb
);
2229 gcc_assert (is_ctrl_stmt (stmt
));
2234 if (!is_gimple_min_invariant (val
))
2237 if (gimple_code (stmt
) == GIMPLE_COND
)
2238 return find_taken_edge_cond_expr (bb
, val
);
2240 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2241 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2243 if (computed_goto_p (stmt
))
2245 /* Only optimize if the argument is a label, if the argument is
2246 not a label then we can not construct a proper CFG.
2248 It may be the case that we only need to allow the LABEL_REF to
2249 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2250 appear inside a LABEL_EXPR just to be safe. */
2251 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2252 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2253 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2260 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2261 statement, determine which of the outgoing edges will be taken out of the
2262 block. Return NULL if either edge may be taken. */
2265 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2270 dest
= label_to_block (val
);
2273 e
= find_edge (bb
, dest
);
2274 gcc_assert (e
!= NULL
);
2280 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2281 statement, determine which of the two edges will be taken out of the
2282 block. Return NULL if either edge may be taken. */
2285 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2287 edge true_edge
, false_edge
;
2289 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2291 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2292 return (integer_zerop (val
) ? false_edge
: true_edge
);
2295 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2296 statement, determine which edge will be taken out of the block. Return
2297 NULL if any edge may be taken. */
2300 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2303 basic_block dest_bb
;
2307 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2308 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2310 e
= find_edge (bb
, dest_bb
);
2316 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2317 We can make optimal use here of the fact that the case labels are
2318 sorted: We can do a binary search for a case matching VAL. */
2321 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2323 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2324 tree default_case
= gimple_switch_default_label (switch_stmt
);
2326 for (low
= 0, high
= n
; high
- low
> 1; )
2328 size_t i
= (high
+ low
) / 2;
2329 tree t
= gimple_switch_label (switch_stmt
, i
);
2332 /* Cache the result of comparing CASE_LOW and val. */
2333 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2340 if (CASE_HIGH (t
) == NULL
)
2342 /* A singe-valued case label. */
2348 /* A case range. We can only handle integer ranges. */
2349 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2354 return default_case
;
2358 /* Dump a basic block on stderr. */
2361 gimple_debug_bb (basic_block bb
)
2363 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2367 /* Dump basic block with index N on stderr. */
2370 gimple_debug_bb_n (int n
)
2372 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2373 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2377 /* Dump the CFG on stderr.
2379 FLAGS are the same used by the tree dumping functions
2380 (see TDF_* in dumpfile.h). */
2383 gimple_debug_cfg (dump_flags_t flags
)
2385 gimple_dump_cfg (stderr
, flags
);
2389 /* Dump the program showing basic block boundaries on the given FILE.
2391 FLAGS are the same used by the tree dumping functions (see TDF_* in
2395 gimple_dump_cfg (FILE *file
, dump_flags_t flags
)
2397 if (flags
& TDF_DETAILS
)
2399 dump_function_header (file
, current_function_decl
, flags
);
2400 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2401 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2402 last_basic_block_for_fn (cfun
));
2404 brief_dump_cfg (file
, flags
);
2405 fprintf (file
, "\n");
2408 if (flags
& TDF_STATS
)
2409 dump_cfg_stats (file
);
2411 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2415 /* Dump CFG statistics on FILE. */
2418 dump_cfg_stats (FILE *file
)
2420 static long max_num_merged_labels
= 0;
2421 unsigned long size
, total
= 0;
2424 const char * const fmt_str
= "%-30s%-13s%12s\n";
2425 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2426 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2427 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2428 const char *funcname
= current_function_name ();
2430 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2432 fprintf (file
, "---------------------------------------------------------\n");
2433 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2434 fprintf (file
, fmt_str
, "", " instances ", "used ");
2435 fprintf (file
, "---------------------------------------------------------\n");
2437 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2439 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2440 SCALE (size
), LABEL (size
));
2443 FOR_EACH_BB_FN (bb
, cfun
)
2444 num_edges
+= EDGE_COUNT (bb
->succs
);
2445 size
= num_edges
* sizeof (struct edge_def
);
2447 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2449 fprintf (file
, "---------------------------------------------------------\n");
2450 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2452 fprintf (file
, "---------------------------------------------------------\n");
2453 fprintf (file
, "\n");
2455 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2456 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2458 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2459 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2461 fprintf (file
, "\n");
2465 /* Dump CFG statistics on stderr. Keep extern so that it's always
2466 linked in the final executable. */
2469 debug_cfg_stats (void)
2471 dump_cfg_stats (stderr
);
2474 /*---------------------------------------------------------------------------
2475 Miscellaneous helpers
2476 ---------------------------------------------------------------------------*/
2478 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2479 flow. Transfers of control flow associated with EH are excluded. */
2482 call_can_make_abnormal_goto (gimple
*t
)
2484 /* If the function has no non-local labels, then a call cannot make an
2485 abnormal transfer of control. */
2486 if (!cfun
->has_nonlocal_label
2487 && !cfun
->calls_setjmp
)
2490 /* Likewise if the call has no side effects. */
2491 if (!gimple_has_side_effects (t
))
2494 /* Likewise if the called function is leaf. */
2495 if (gimple_call_flags (t
) & ECF_LEAF
)
2502 /* Return true if T can make an abnormal transfer of control flow.
2503 Transfers of control flow associated with EH are excluded. */
2506 stmt_can_make_abnormal_goto (gimple
*t
)
2508 if (computed_goto_p (t
))
2510 if (is_gimple_call (t
))
2511 return call_can_make_abnormal_goto (t
);
2516 /* Return true if T represents a stmt that always transfers control. */
2519 is_ctrl_stmt (gimple
*t
)
2521 switch (gimple_code (t
))
2535 /* Return true if T is a statement that may alter the flow of control
2536 (e.g., a call to a non-returning function). */
2539 is_ctrl_altering_stmt (gimple
*t
)
2543 switch (gimple_code (t
))
2546 /* Per stmt call flag indicates whether the call could alter
2548 if (gimple_call_ctrl_altering_p (t
))
2552 case GIMPLE_EH_DISPATCH
:
2553 /* EH_DISPATCH branches to the individual catch handlers at
2554 this level of a try or allowed-exceptions region. It can
2555 fallthru to the next statement as well. */
2559 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2564 /* OpenMP directives alter control flow. */
2567 case GIMPLE_TRANSACTION
:
2568 /* A transaction start alters control flow. */
2575 /* If a statement can throw, it alters control flow. */
2576 return stmt_can_throw_internal (t
);
2580 /* Return true if T is a simple local goto. */
2583 simple_goto_p (gimple
*t
)
2585 return (gimple_code (t
) == GIMPLE_GOTO
2586 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2590 /* Return true if STMT should start a new basic block. PREV_STMT is
2591 the statement preceding STMT. It is used when STMT is a label or a
2592 case label. Labels should only start a new basic block if their
2593 previous statement wasn't a label. Otherwise, sequence of labels
2594 would generate unnecessary basic blocks that only contain a single
2598 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2603 /* Labels start a new basic block only if the preceding statement
2604 wasn't a label of the same type. This prevents the creation of
2605 consecutive blocks that have nothing but a single label. */
2606 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2608 /* Nonlocal and computed GOTO targets always start a new block. */
2609 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2610 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2613 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2615 if (DECL_NONLOCAL (gimple_label_label (
2616 as_a
<glabel
*> (prev_stmt
))))
2619 cfg_stats
.num_merged_labels
++;
2625 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2627 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2628 /* setjmp acts similar to a nonlocal GOTO target and thus should
2629 start a new block. */
2631 if (gimple_call_internal_p (stmt
, IFN_PHI
)
2633 && gimple_code (prev_stmt
) != GIMPLE_LABEL
2634 && (gimple_code (prev_stmt
) != GIMPLE_CALL
2635 || ! gimple_call_internal_p (prev_stmt
, IFN_PHI
)))
2636 /* PHI nodes start a new block unless preceeded by a label
2645 /* Return true if T should end a basic block. */
2648 stmt_ends_bb_p (gimple
*t
)
2650 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
2661 /* Return the virtual phi in BB. */
2664 get_virtual_phi (basic_block bb
)
2666 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2670 gphi
*phi
= gsi
.phi ();
2672 if (virtual_operand_p (PHI_RESULT (phi
)))
2679 /* Return the first statement in basic block BB. */
2682 first_stmt (basic_block bb
)
2684 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2685 gimple
*stmt
= NULL
;
2687 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2695 /* Return the first non-label statement in basic block BB. */
2698 first_non_label_stmt (basic_block bb
)
2700 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2701 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2703 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2706 /* Return the last statement in basic block BB. */
2709 last_stmt (basic_block bb
)
2711 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2712 gimple
*stmt
= NULL
;
2714 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2722 /* Return the last statement of an otherwise empty block. Return NULL
2723 if the block is totally empty, or if it contains more than one
2727 last_and_only_stmt (basic_block bb
)
2729 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2730 gimple
*last
, *prev
;
2735 last
= gsi_stmt (i
);
2736 gsi_prev_nondebug (&i
);
2740 /* Empty statements should no longer appear in the instruction stream.
2741 Everything that might have appeared before should be deleted by
2742 remove_useless_stmts, and the optimizers should just gsi_remove
2743 instead of smashing with build_empty_stmt.
2745 Thus the only thing that should appear here in a block containing
2746 one executable statement is a label. */
2747 prev
= gsi_stmt (i
);
2748 if (gimple_code (prev
) == GIMPLE_LABEL
)
2754 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2757 reinstall_phi_args (edge new_edge
, edge old_edge
)
2763 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2767 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2768 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2769 i
++, gsi_next (&phis
))
2771 gphi
*phi
= phis
.phi ();
2772 tree result
= redirect_edge_var_map_result (vm
);
2773 tree arg
= redirect_edge_var_map_def (vm
);
2775 gcc_assert (result
== gimple_phi_result (phi
));
2777 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2780 redirect_edge_var_map_clear (old_edge
);
2783 /* Returns the basic block after which the new basic block created
2784 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2785 near its "logical" location. This is of most help to humans looking
2786 at debugging dumps. */
2789 split_edge_bb_loc (edge edge_in
)
2791 basic_block dest
= edge_in
->dest
;
2792 basic_block dest_prev
= dest
->prev_bb
;
2796 edge e
= find_edge (dest_prev
, dest
);
2797 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2798 return edge_in
->src
;
2803 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2804 Abort on abnormal edges. */
2807 gimple_split_edge (edge edge_in
)
2809 basic_block new_bb
, after_bb
, dest
;
2812 /* Abnormal edges cannot be split. */
2813 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2815 dest
= edge_in
->dest
;
2817 after_bb
= split_edge_bb_loc (edge_in
);
2819 new_bb
= create_empty_bb (after_bb
);
2820 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2821 new_bb
->count
= edge_in
->count
;
2822 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2823 new_edge
->probability
= REG_BR_PROB_BASE
;
2824 new_edge
->count
= edge_in
->count
;
2826 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2827 gcc_assert (e
== edge_in
);
2828 reinstall_phi_args (new_edge
, e
);
2834 /* Verify properties of the address expression T with base object BASE. */
2837 verify_address (tree t
, tree base
)
2840 bool old_side_effects
;
2842 bool new_side_effects
;
2844 old_constant
= TREE_CONSTANT (t
);
2845 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2847 recompute_tree_invariant_for_addr_expr (t
);
2848 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2849 new_constant
= TREE_CONSTANT (t
);
2851 if (old_constant
!= new_constant
)
2853 error ("constant not recomputed when ADDR_EXPR changed");
2856 if (old_side_effects
!= new_side_effects
)
2858 error ("side effects not recomputed when ADDR_EXPR changed");
2863 || TREE_CODE (base
) == PARM_DECL
2864 || TREE_CODE (base
) == RESULT_DECL
))
2867 if (DECL_GIMPLE_REG_P (base
))
2869 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
/* NOTE(review): this whole function is corrupted extraction residue --
   each original statement is split across several lines (the leading
   numbers are the original file's line numbers) and many lines are
   missing entirely: the return type, braces, `return` statements and
   most `case` labels of the big switch.  The text is left byte-identical
   below; restore this function from upstream GCC tree-cfg.c before
   attempting to compile.  */
2876 /* Callback for walk_tree, check that all elements with address taken are
2877 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2878 inside a PHI node. */
2881 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2888 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2889 #define CHECK_OP(N, MSG) \
2890 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2891 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
/* Big dispatch on the tree code of *TP; the case labels between the
   checks below were dropped by the extraction.  */
2893 switch (TREE_CODE (t
))
2896 if (SSA_NAME_IN_FREE_LIST (t
))
2898 error ("SSA name in freelist but still referenced");
2907 tree context
= decl_function_context (t
);
2908 if (context
!= cfun
->decl
2909 && !SCOPE_FILE_SCOPE_P (context
)
2911 && !DECL_EXTERNAL (t
))
2913 error ("Local declaration from a different function");
2920 error ("INDIRECT_REF in gimple IL");
2924 x
= TREE_OPERAND (t
, 0);
2925 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2926 || !is_gimple_mem_ref_addr (x
))
2928 error ("invalid first operand of MEM_REF");
2931 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2932 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2934 error ("invalid offset operand of MEM_REF");
2935 return TREE_OPERAND (t
, 1);
2937 if (TREE_CODE (x
) == ADDR_EXPR
)
2939 tree va
= verify_address (x
, TREE_OPERAND (x
, 0));
2942 x
= TREE_OPERAND (x
, 0);
2944 walk_tree (&x
, verify_expr
, data
, NULL
);
2949 x
= fold (ASSERT_EXPR_COND (t
));
2950 if (x
== boolean_false_node
)
2952 error ("ASSERT_EXPR with an always-false condition");
2958 error ("MODIFY_EXPR not expected while having tuples");
2965 gcc_assert (is_gimple_address (t
));
2967 /* Skip any references (they will be checked when we recurse down the
2968 tree) and ensure that any variable used as a prefix is marked
2970 for (x
= TREE_OPERAND (t
, 0);
2971 handled_component_p (x
);
2972 x
= TREE_OPERAND (x
, 0))
2975 if ((tem
= verify_address (t
, x
)))
2979 || TREE_CODE (x
) == PARM_DECL
2980 || TREE_CODE (x
) == RESULT_DECL
))
2983 if (!TREE_ADDRESSABLE (x
))
2985 error ("address taken, but ADDRESSABLE bit not set");
2993 x
= COND_EXPR_COND (t
);
2994 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2996 error ("non-integral used in condition");
2999 if (!is_gimple_condexpr (x
))
3001 error ("invalid conditional operand");
3006 case NON_LVALUE_EXPR
:
3007 case TRUTH_NOT_EXPR
:
3011 case FIX_TRUNC_EXPR
:
3016 CHECK_OP (0, "invalid operand to unary operator");
3022 if (!is_gimple_reg_type (TREE_TYPE (t
)))
3024 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3028 if (TREE_CODE (t
) == BIT_FIELD_REF
)
3030 tree t0
= TREE_OPERAND (t
, 0);
3031 tree t1
= TREE_OPERAND (t
, 1);
3032 tree t2
= TREE_OPERAND (t
, 2);
3033 if (!tree_fits_uhwi_p (t1
)
3034 || !tree_fits_uhwi_p (t2
))
3036 error ("invalid position or size operand to BIT_FIELD_REF");
3039 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3040 && (TYPE_PRECISION (TREE_TYPE (t
))
3041 != tree_to_uhwi (t1
)))
3043 error ("integral result type precision does not match "
3044 "field size of BIT_FIELD_REF");
3047 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
3048 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
3049 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t
)))
3050 != tree_to_uhwi (t1
)))
3052 error ("mode size of non-integral result does not "
3053 "match field size of BIT_FIELD_REF");
3056 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
3057 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
3058 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
3060 error ("position plus size exceeds size of referenced object in "
3065 t
= TREE_OPERAND (t
, 0);
3070 case ARRAY_RANGE_REF
:
3071 case VIEW_CONVERT_EXPR
:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3076 while (handled_component_p (t
))
3078 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3080 else if (TREE_CODE (t
) == ARRAY_REF
3081 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3083 CHECK_OP (1, "invalid array index");
3084 if (TREE_OPERAND (t
, 2))
3085 CHECK_OP (2, "invalid array lower bound");
3086 if (TREE_OPERAND (t
, 3))
3087 CHECK_OP (3, "invalid array stride");
3089 else if (TREE_CODE (t
) == BIT_FIELD_REF
3090 || TREE_CODE (t
) == REALPART_EXPR
3091 || TREE_CODE (t
) == IMAGPART_EXPR
)
3093 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3098 t
= TREE_OPERAND (t
, 0);
3101 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3103 error ("invalid reference prefix");
3106 walk_tree (&t
, verify_expr
, data
, NULL
);
3111 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3112 POINTER_PLUS_EXPR. */
3113 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3115 error ("invalid operand to plus/minus, type is a pointer");
3118 CHECK_OP (0, "invalid operand to binary operator");
3119 CHECK_OP (1, "invalid operand to binary operator");
3122 case POINTER_PLUS_EXPR
:
3123 /* Check to make sure the first operand is a pointer or reference type. */
3124 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3126 error ("invalid operand to pointer plus, first operand is not a pointer");
3129 /* Check to make sure the second operand is a ptrofftype. */
3130 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
3132 error ("invalid operand to pointer plus, second operand is not an "
3133 "integer type of appropriate width");
3143 case UNORDERED_EXPR
:
3152 case TRUNC_DIV_EXPR
:
3154 case FLOOR_DIV_EXPR
:
3155 case ROUND_DIV_EXPR
:
3156 case TRUNC_MOD_EXPR
:
3158 case FLOOR_MOD_EXPR
:
3159 case ROUND_MOD_EXPR
:
3161 case EXACT_DIV_EXPR
:
3171 CHECK_OP (0, "invalid operand to binary operator");
3172 CHECK_OP (1, "invalid operand to binary operator");
3176 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3180 case CASE_LABEL_EXPR
:
3183 error ("invalid CASE_CHAIN");
3197 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3198 Returns true if there is an error, otherwise false. */
3201 verify_types_in_gimple_min_lval (tree expr
)
3205 if (is_gimple_id (expr
))
3208 if (TREE_CODE (expr
) != TARGET_MEM_REF
3209 && TREE_CODE (expr
) != MEM_REF
)
3211 error ("invalid expression for min lvalue");
3215 /* TARGET_MEM_REFs are strange beasts. */
3216 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3219 op
= TREE_OPERAND (expr
, 0);
3220 if (!is_gimple_val (op
))
3222 error ("invalid operand in indirect reference");
3223 debug_generic_stmt (op
);
3226 /* Memory references now generally can involve a value conversion. */
/* NOTE(review): corrupted extraction residue -- statements are split
   across lines (leading numbers are the original file's line numbers)
   and structural lines (return type, braces, `return true;` after each
   error) are missing.  Left byte-identical; restore from upstream GCC
   tree-cfg.c before compiling.  */
3231 /* Verify if EXPR is a valid GIMPLE reference expression. If
3232 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3233 if there is an error, otherwise false. */
3236 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
/* Walk down the reference chain, checking each handled component.  */
3238 while (handled_component_p (expr
))
3240 tree op
= TREE_OPERAND (expr
, 0);
3242 if (TREE_CODE (expr
) == ARRAY_REF
3243 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3245 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3246 || (TREE_OPERAND (expr
, 2)
3247 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3248 || (TREE_OPERAND (expr
, 3)
3249 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3251 error ("invalid operands to array reference");
3252 debug_generic_stmt (expr
);
3257 /* Verify if the reference array element types are compatible. */
3258 if (TREE_CODE (expr
) == ARRAY_REF
3259 && !useless_type_conversion_p (TREE_TYPE (expr
),
3260 TREE_TYPE (TREE_TYPE (op
))))
3262 error ("type mismatch in array reference");
3263 debug_generic_stmt (TREE_TYPE (expr
));
3264 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3267 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3268 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3269 TREE_TYPE (TREE_TYPE (op
))))
3271 error ("type mismatch in array range reference");
3272 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3273 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3277 if ((TREE_CODE (expr
) == REALPART_EXPR
3278 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3279 && !useless_type_conversion_p (TREE_TYPE (expr
),
3280 TREE_TYPE (TREE_TYPE (op
))))
3282 error ("type mismatch in real/imagpart reference");
3283 debug_generic_stmt (TREE_TYPE (expr
));
3284 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3288 if (TREE_CODE (expr
) == COMPONENT_REF
3289 && !useless_type_conversion_p (TREE_TYPE (expr
),
3290 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3292 error ("type mismatch in component reference");
3293 debug_generic_stmt (TREE_TYPE (expr
));
3294 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3298 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3300 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3301 that their operand is not an SSA name or an invariant when
3302 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3303 bug). Otherwise there is nothing to verify, gross mismatches at
3304 most invoke undefined behavior. */
3306 && (TREE_CODE (op
) == SSA_NAME
3307 || is_gimple_min_invariant (op
)))
3309 error ("conversion of an SSA_NAME on the left hand side");
3310 debug_generic_stmt (expr
);
3313 else if (TREE_CODE (op
) == SSA_NAME
3314 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3316 error ("conversion of register to a different size");
3317 debug_generic_stmt (expr
);
3320 else if (!handled_component_p (op
))
/* After the loop, EXPR is the innermost (base) reference.  */
3327 if (TREE_CODE (expr
) == MEM_REF
)
3329 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3331 error ("invalid address operand in MEM_REF");
3332 debug_generic_stmt (expr
);
3335 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3336 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3338 error ("invalid offset operand in MEM_REF");
3339 debug_generic_stmt (expr
);
3343 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3345 if (!TMR_BASE (expr
)
3346 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3348 error ("invalid address operand in TARGET_MEM_REF");
3351 if (!TMR_OFFSET (expr
)
3352 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3353 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3355 error ("invalid offset operand in TARGET_MEM_REF");
3356 debug_generic_stmt (expr
);
3361 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3362 && verify_types_in_gimple_min_lval (expr
));
3365 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3366 list of pointer-to types that is trivially convertible to DEST. */
3369 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3373 if (!TYPE_POINTER_TO (src_obj
))
3376 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3377 if (useless_type_conversion_p (dest
, src
))
3383 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3384 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3387 valid_fixed_convert_types_p (tree type1
, tree type2
)
3389 return (FIXED_POINT_TYPE_P (type1
)
3390 && (INTEGRAL_TYPE_P (type2
)
3391 || SCALAR_FLOAT_TYPE_P (type2
)
3392 || FIXED_POINT_TYPE_P (type2
)));
/* NOTE(review): corrupted extraction residue -- statements are split
   across lines (leading numbers are the original file's line numbers)
   and many lines are missing (return type, braces, `return true;` after
   errors, several guard conditions).  Left byte-identical; restore from
   upstream GCC tree-cfg.c before compiling.  */
3395 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3396 is a problem, otherwise false. */
3399 verify_gimple_call (gcall
*stmt
)
3401 tree fn
= gimple_call_fn (stmt
);
3402 tree fntype
, fndecl
;
3405 if (gimple_call_internal_p (stmt
))
3409 error ("gimple call has two targets");
3410 debug_generic_stmt (fn
);
3413 /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
3414 else if (gimple_call_internal_fn (stmt
) == IFN_PHI
)
3423 error ("gimple call has no target");
3428 if (fn
&& !is_gimple_call_addr (fn
))
3430 error ("invalid function in gimple call");
3431 debug_generic_stmt (fn
);
3436 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3437 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3438 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3440 error ("non-function in gimple call");
3444 fndecl
= gimple_call_fndecl (stmt
);
3446 && TREE_CODE (fndecl
) == FUNCTION_DECL
3447 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3448 && !DECL_PURE_P (fndecl
)
3449 && !TREE_READONLY (fndecl
))
3451 error ("invalid pure const state for function");
3455 tree lhs
= gimple_call_lhs (stmt
);
3457 && (!is_gimple_lvalue (lhs
)
3458 || verify_types_in_gimple_reference (lhs
, true)))
3460 error ("invalid LHS in gimple call");
3464 if (gimple_call_ctrl_altering_p (stmt
)
3465 && gimple_call_noreturn_p (stmt
)
3466 && should_remove_lhs_p (lhs
))
3468 error ("LHS in noreturn call");
3472 fntype
= gimple_call_fntype (stmt
);
3475 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3476 /* ??? At least C++ misses conversions at assignments from
3477 void * call results.
3478 ??? Java is completely off. Especially with functions
3479 returning java.lang.Object.
3480 For now simply allow arbitrary pointer type conversions. */
3481 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3482 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3484 error ("invalid conversion in gimple call");
3485 debug_generic_stmt (TREE_TYPE (lhs
));
3486 debug_generic_stmt (TREE_TYPE (fntype
));
3490 if (gimple_call_chain (stmt
)
3491 && !is_gimple_val (gimple_call_chain (stmt
)))
3493 error ("invalid static chain in gimple call");
3494 debug_generic_stmt (gimple_call_chain (stmt
));
3498 /* If there is a static chain argument, the call should either be
3499 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3500 if (gimple_call_chain (stmt
)
3502 && !DECL_STATIC_CHAIN (fndecl
))
3504 error ("static chain with function that doesn%'t use one");
3508 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3510 switch (DECL_FUNCTION_CODE (fndecl
))
3512 case BUILT_IN_UNREACHABLE
:
3514 if (gimple_call_num_args (stmt
) > 0)
3516 /* Built-in unreachable with parameters might not be caught by
3517 undefined behavior sanitizer. Front-ends do check users do not
3518 call them that way but we also produce calls to
3519 __builtin_unreachable internally, for example when IPA figures
3520 out a call cannot happen in a legal program. In such cases,
3521 we must make sure arguments are stripped off. */
3522 error ("__builtin_unreachable or __builtin_trap call with "
3532 /* ??? The C frontend passes unpromoted arguments in case it
3533 didn't see a function declaration before the call. So for now
3534 leave the call arguments mostly unverified. Once we gimplify
3535 unit-at-a-time we have a chance to fix this. */
3537 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3539 tree arg
= gimple_call_arg (stmt
, i
);
3540 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3541 && !is_gimple_val (arg
))
3542 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3543 && !is_gimple_lvalue (arg
)))
3545 error ("invalid argument to gimple call");
3546 debug_generic_expr (arg
);
3554 /* Verifies the gimple comparison with the result type TYPE and
3555 the operands OP0 and OP1, comparison code is CODE. */
3558 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3560 tree op0_type
= TREE_TYPE (op0
);
3561 tree op1_type
= TREE_TYPE (op1
);
3563 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3565 error ("invalid operands in gimple comparison");
3569 /* For comparisons we do not have the operations type as the
3570 effective type the comparison is carried out in. Instead
3571 we require that either the first operand is trivially
3572 convertible into the second, or the other way around.
3573 Because we special-case pointers to void we allow
3574 comparisons of pointers with the same mode as well. */
3575 if (!useless_type_conversion_p (op0_type
, op1_type
)
3576 && !useless_type_conversion_p (op1_type
, op0_type
)
3577 && (!POINTER_TYPE_P (op0_type
)
3578 || !POINTER_TYPE_P (op1_type
)
3579 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3581 error ("mismatching comparison operand types");
3582 debug_generic_expr (op0_type
);
3583 debug_generic_expr (op1_type
);
3587 /* The resulting type of a comparison may be an effective boolean type. */
3588 if (INTEGRAL_TYPE_P (type
)
3589 && (TREE_CODE (type
) == BOOLEAN_TYPE
3590 || TYPE_PRECISION (type
) == 1))
3592 if ((TREE_CODE (op0_type
) == VECTOR_TYPE
3593 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3594 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3595 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3596 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3598 error ("unsupported operation or type for vector comparison"
3599 " returning a boolean");
3600 debug_generic_expr (op0_type
);
3601 debug_generic_expr (op1_type
);
3605 /* Or a boolean vector type with the same element count
3606 as the comparison operand types. */
3607 else if (TREE_CODE (type
) == VECTOR_TYPE
3608 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3610 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3611 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3613 error ("non-vector operands in vector comparison");
3614 debug_generic_expr (op0_type
);
3615 debug_generic_expr (op1_type
);
3619 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
))
3621 error ("invalid vector comparison resulting type");
3622 debug_generic_expr (type
);
3628 error ("bogus comparison result type");
3629 debug_generic_expr (type
);
/* NOTE(review): corrupted extraction residue -- statements are split
   across lines (leading numbers are the original file's line numbers)
   and many lines are missing: return type, braces, `return true/false;`
   statements and most of the `case` labels of the rhs_code switch.
   Left byte-identical; restore from upstream GCC tree-cfg.c before
   compiling.  */
3636 /* Verify a gimple assignment statement STMT with an unary rhs.
3637 Returns true if anything is wrong. */
3640 verify_gimple_assign_unary (gassign
*stmt
)
3642 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3643 tree lhs
= gimple_assign_lhs (stmt
);
3644 tree lhs_type
= TREE_TYPE (lhs
);
3645 tree rhs1
= gimple_assign_rhs1 (stmt
);
3646 tree rhs1_type
= TREE_TYPE (rhs1
);
3648 if (!is_gimple_reg (lhs
))
3650 error ("non-register as LHS of unary operation");
3654 if (!is_gimple_val (rhs1
))
3656 error ("invalid operand in unary operation");
3660 /* First handle conversions. */
3665 /* Allow conversions from pointer type to integral type only if
3666 there is no sign or zero extension involved.
3667 For targets were the precision of ptrofftype doesn't match that
3668 of pointers we need to allow arbitrary conversions to ptrofftype. */
3669 if ((POINTER_TYPE_P (lhs_type
)
3670 && INTEGRAL_TYPE_P (rhs1_type
))
3671 || (POINTER_TYPE_P (rhs1_type
)
3672 && INTEGRAL_TYPE_P (lhs_type
)
3673 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3674 || ptrofftype_p (sizetype
))))
3677 /* Allow conversion from integral to offset type and vice versa. */
3678 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3679 && INTEGRAL_TYPE_P (rhs1_type
))
3680 || (INTEGRAL_TYPE_P (lhs_type
)
3681 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3684 /* Otherwise assert we are converting between types of the
3686 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3688 error ("invalid types in nop conversion");
3689 debug_generic_expr (lhs_type
);
3690 debug_generic_expr (rhs1_type
);
3697 case ADDR_SPACE_CONVERT_EXPR
:
3699 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3700 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3701 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3703 error ("invalid types in address space conversion");
3704 debug_generic_expr (lhs_type
);
3705 debug_generic_expr (rhs1_type
);
3712 case FIXED_CONVERT_EXPR
:
3714 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3715 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3717 error ("invalid types in fixed-point conversion");
3718 debug_generic_expr (lhs_type
);
3719 debug_generic_expr (rhs1_type
);
3728 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3729 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3730 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3732 error ("invalid types in conversion to floating point");
3733 debug_generic_expr (lhs_type
);
3734 debug_generic_expr (rhs1_type
);
3741 case FIX_TRUNC_EXPR
:
3743 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3744 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3745 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3747 error ("invalid types in conversion to integer");
3748 debug_generic_expr (lhs_type
);
3749 debug_generic_expr (rhs1_type
);
3755 case REDUC_MAX_EXPR
:
3756 case REDUC_MIN_EXPR
:
3757 case REDUC_PLUS_EXPR
:
3758 if (!VECTOR_TYPE_P (rhs1_type
)
3759 || !useless_type_conversion_p (lhs_type
, TREE_TYPE (rhs1_type
)))
3761 error ("reduction should convert from vector to element type");
3762 debug_generic_expr (lhs_type
);
3763 debug_generic_expr (rhs1_type
);
3768 case VEC_UNPACK_HI_EXPR
:
3769 case VEC_UNPACK_LO_EXPR
:
3770 case VEC_UNPACK_FLOAT_HI_EXPR
:
3771 case VEC_UNPACK_FLOAT_LO_EXPR
:
3786 /* For the remaining codes assert there is no conversion involved. */
3787 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3789 error ("non-trivial conversion in unary operation");
3790 debug_generic_expr (lhs_type
);
3791 debug_generic_expr (rhs1_type
);
3798 /* Verify a gimple assignment statement STMT with a binary rhs.
3799 Returns true if anything is wrong. */
3802 verify_gimple_assign_binary (gassign
*stmt
)
3804 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3805 tree lhs
= gimple_assign_lhs (stmt
);
3806 tree lhs_type
= TREE_TYPE (lhs
);
3807 tree rhs1
= gimple_assign_rhs1 (stmt
);
3808 tree rhs1_type
= TREE_TYPE (rhs1
);
3809 tree rhs2
= gimple_assign_rhs2 (stmt
);
3810 tree rhs2_type
= TREE_TYPE (rhs2
);
3812 if (!is_gimple_reg (lhs
))
3814 error ("non-register as LHS of binary operation");
3818 if (!is_gimple_val (rhs1
)
3819 || !is_gimple_val (rhs2
))
3821 error ("invalid operands in binary operation");
3825 /* First handle operations that involve different types. */
3830 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3831 || !(INTEGRAL_TYPE_P (rhs1_type
)
3832 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3833 || !(INTEGRAL_TYPE_P (rhs2_type
)
3834 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3836 error ("type mismatch in complex expression");
3837 debug_generic_expr (lhs_type
);
3838 debug_generic_expr (rhs1_type
);
3839 debug_generic_expr (rhs2_type
);
3851 /* Shifts and rotates are ok on integral types, fixed point
3852 types and integer vector types. */
3853 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3854 && !FIXED_POINT_TYPE_P (rhs1_type
)
3855 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3856 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3857 || (!INTEGRAL_TYPE_P (rhs2_type
)
3858 /* Vector shifts of vectors are also ok. */
3859 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3860 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3861 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3862 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3863 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3865 error ("type mismatch in shift expression");
3866 debug_generic_expr (lhs_type
);
3867 debug_generic_expr (rhs1_type
);
3868 debug_generic_expr (rhs2_type
);
3875 case WIDEN_LSHIFT_EXPR
:
3877 if (!INTEGRAL_TYPE_P (lhs_type
)
3878 || !INTEGRAL_TYPE_P (rhs1_type
)
3879 || TREE_CODE (rhs2
) != INTEGER_CST
3880 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3882 error ("type mismatch in widening vector shift expression");
3883 debug_generic_expr (lhs_type
);
3884 debug_generic_expr (rhs1_type
);
3885 debug_generic_expr (rhs2_type
);
3892 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3893 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3895 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3896 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3897 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3898 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3899 || TREE_CODE (rhs2
) != INTEGER_CST
3900 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3901 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3903 error ("type mismatch in widening vector shift expression");
3904 debug_generic_expr (lhs_type
);
3905 debug_generic_expr (rhs1_type
);
3906 debug_generic_expr (rhs2_type
);
3916 tree lhs_etype
= lhs_type
;
3917 tree rhs1_etype
= rhs1_type
;
3918 tree rhs2_etype
= rhs2_type
;
3919 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3921 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3922 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3924 error ("invalid non-vector operands to vector valued plus");
3927 lhs_etype
= TREE_TYPE (lhs_type
);
3928 rhs1_etype
= TREE_TYPE (rhs1_type
);
3929 rhs2_etype
= TREE_TYPE (rhs2_type
);
3931 if (POINTER_TYPE_P (lhs_etype
)
3932 || POINTER_TYPE_P (rhs1_etype
)
3933 || POINTER_TYPE_P (rhs2_etype
))
3935 error ("invalid (pointer) operands to plus/minus");
3939 /* Continue with generic binary expression handling. */
3943 case POINTER_PLUS_EXPR
:
3945 if (!POINTER_TYPE_P (rhs1_type
)
3946 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3947 || !ptrofftype_p (rhs2_type
))
3949 error ("type mismatch in pointer plus expression");
3950 debug_generic_stmt (lhs_type
);
3951 debug_generic_stmt (rhs1_type
);
3952 debug_generic_stmt (rhs2_type
);
3959 case TRUTH_ANDIF_EXPR
:
3960 case TRUTH_ORIF_EXPR
:
3961 case TRUTH_AND_EXPR
:
3963 case TRUTH_XOR_EXPR
:
3973 case UNORDERED_EXPR
:
3981 /* Comparisons are also binary, but the result type is not
3982 connected to the operand types. */
3983 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
3985 case WIDEN_MULT_EXPR
:
3986 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3988 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3989 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3991 case WIDEN_SUM_EXPR
:
3992 case VEC_WIDEN_MULT_HI_EXPR
:
3993 case VEC_WIDEN_MULT_LO_EXPR
:
3994 case VEC_WIDEN_MULT_EVEN_EXPR
:
3995 case VEC_WIDEN_MULT_ODD_EXPR
:
3996 case VEC_PACK_TRUNC_EXPR
:
3997 case VEC_PACK_SAT_EXPR
:
3998 case VEC_PACK_FIX_TRUNC_EXPR
:
4003 case MULT_HIGHPART_EXPR
:
4004 case TRUNC_DIV_EXPR
:
4006 case FLOOR_DIV_EXPR
:
4007 case ROUND_DIV_EXPR
:
4008 case TRUNC_MOD_EXPR
:
4010 case FLOOR_MOD_EXPR
:
4011 case ROUND_MOD_EXPR
:
4013 case EXACT_DIV_EXPR
:
4019 /* Continue with generic binary expression handling. */
4026 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4027 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4029 error ("type mismatch in binary expression");
4030 debug_generic_stmt (lhs_type
);
4031 debug_generic_stmt (rhs1_type
);
4032 debug_generic_stmt (rhs2_type
);
4039 /* Verify a gimple assignment statement STMT with a ternary rhs.
4040 Returns true if anything is wrong. */
/* NOTE(review): this chunk is a lossy extraction -- original lines are
   split across physical lines and several lines (braces, returns, some
   case labels) are missing.  Only comments are added here; the code
   text is preserved byte-for-byte.  */
4043 verify_gimple_assign_ternary (gassign
*stmt
)
/* Cache the LHS and the three RHS operands plus their types; all the
   per-opcode checks below are phrased in terms of these.  */
4045 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4046 tree lhs
= gimple_assign_lhs (stmt
);
4047 tree lhs_type
= TREE_TYPE (lhs
);
4048 tree rhs1
= gimple_assign_rhs1 (stmt
);
4049 tree rhs1_type
= TREE_TYPE (rhs1
);
4050 tree rhs2
= gimple_assign_rhs2 (stmt
);
4051 tree rhs2_type
= TREE_TYPE (rhs2
);
4052 tree rhs3
= gimple_assign_rhs3 (stmt
);
4053 tree rhs3_type
= TREE_TYPE (rhs3
);
/* A ternary operation must target a register LHS.  */
4055 if (!is_gimple_reg (lhs
))
4057 error ("non-register as LHS of ternary operation");
/* For (VEC_)COND_EXPR the first operand may be a condition expression;
   for every other code all three operands must be GIMPLE values.  */
4061 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
4062 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
4063 || !is_gimple_val (rhs2
)
4064 || !is_gimple_val (rhs3
))
4066 error ("invalid operands in ternary operation");
4070 /* First handle operations that involve different types. */
/* Widening multiply-accumulate: rhs1/rhs2 are the narrow factors
   (integral or fixed-point, same precision), rhs3 the wide accumulator
   matching the LHS; LHS must be at least twice as wide as rhs1.  */
4073 case WIDEN_MULT_PLUS_EXPR
:
4074 case WIDEN_MULT_MINUS_EXPR
:
4075 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4076 && !FIXED_POINT_TYPE_P (rhs1_type
))
4077 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4078 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4079 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4080 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4082 error ("type mismatch in widening multiply-accumulate expression");
4083 debug_generic_expr (lhs_type
);
4084 debug_generic_expr (rhs1_type
);
4085 debug_generic_expr (rhs2_type
);
4086 debug_generic_expr (rhs3_type
);
/* Fused multiply-add: all three operands must trivially convert to the
   LHS type.  */
4092 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4093 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4094 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4096 error ("type mismatch in fused multiply-add expression");
4097 debug_generic_expr (lhs_type
);
4098 debug_generic_expr (rhs1_type
);
4099 debug_generic_expr (rhs2_type
);
4100 debug_generic_expr (rhs3_type
);
/* VEC_COND_EXPR: the selector must be a boolean vector with the same
   element count as the LHS vector.  */
4106 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4107 || TYPE_VECTOR_SUBPARTS (rhs1_type
)
!= TYPE_VECTOR_SUBPARTS (lhs_type
))
4110 error ("the first argument of a VEC_COND_EXPR must be of a "
4111 "boolean vector type of the same number of elements "
4113 debug_generic_expr (lhs_type
);
4114 debug_generic_expr (rhs1_type
);
/* Both selected values must trivially convert to the LHS type.  */
4119 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4120 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4122 error ("type mismatch in conditional expression");
4123 debug_generic_expr (lhs_type
);
4124 debug_generic_expr (rhs2_type
);
4125 debug_generic_expr (rhs3_type
);
/* Vector permute: the two data inputs must match the LHS type...  */
4131 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4132 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4134 error ("type mismatch in vector permute expression");
4135 debug_generic_expr (lhs_type
);
4136 debug_generic_expr (rhs1_type
);
4137 debug_generic_expr (rhs2_type
);
4138 debug_generic_expr (rhs3_type
);
/* ... all three operands must be vectors ...  */
4142 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4143 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4144 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4146 error ("vector types expected in vector permute expression");
4147 debug_generic_expr (lhs_type
);
4148 debug_generic_expr (rhs1_type
);
4149 debug_generic_expr (rhs2_type
);
4150 debug_generic_expr (rhs3_type
);
/* ... with identical element counts everywhere ...  */
4154 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
4155 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
4156 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
4157 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
4158 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4160 error ("vectors with different element number found "
4161 "in vector permute expression");
4162 debug_generic_expr (lhs_type
);
4163 debug_generic_expr (rhs1_type
);
4164 debug_generic_expr (rhs2_type
);
4165 debug_generic_expr (rhs3_type
);
/* ... and the mask must be an integer vector whose element mode has
   the same bit size as the data vectors' element mode.  */
4169 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4170 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
4171 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4173 error ("invalid mask type in vector permute expression");
4174 debug_generic_expr (lhs_type
);
4175 debug_generic_expr (rhs1_type
);
4176 debug_generic_expr (rhs2_type
);
4177 debug_generic_expr (rhs3_type
);
/* SAD (sum of absolute differences): rhs1/rhs2 match, rhs3 matches the
   LHS, and the accumulator elements are at least twice the width of
   the input elements.  */
4184 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4185 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4186 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4187 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4189 error ("type mismatch in sad expression");
4190 debug_generic_expr (lhs_type
);
4191 debug_generic_expr (rhs1_type
);
4192 debug_generic_expr (rhs2_type
);
4193 debug_generic_expr (rhs3_type
);
4197 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4198 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4199 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4201 error ("vector types expected in sad expression");
4202 debug_generic_expr (lhs_type
);
4203 debug_generic_expr (rhs1_type
);
4204 debug_generic_expr (rhs2_type
);
4205 debug_generic_expr (rhs3_type
);
/* BIT_INSERT_EXPR: insert rhs2 into rhs1 at constant bit position rhs3.
   The result keeps rhs1's type; rhs2 is either an integer inserted into
   an integer, or an element inserted into a vector.  */
4211 case BIT_INSERT_EXPR
:
4212 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4214 error ("type mismatch in BIT_INSERT_EXPR");
4215 debug_generic_expr (lhs_type
);
4216 debug_generic_expr (rhs1_type
);
4219 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4220 && INTEGRAL_TYPE_P (rhs2_type
))
4221 || (VECTOR_TYPE_P (rhs1_type
)
4222 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))))
4224 error ("not allowed type combination in BIT_INSERT_EXPR");
4225 debug_generic_expr (rhs1_type
);
4226 debug_generic_expr (rhs2_type
);
/* Position (rhs3) and the inserted size must fit in an unsigned
   HOST_WIDE_INT.  */
4229 if (! tree_fits_uhwi_p (rhs3
)
4230 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4232 error ("invalid position or size in BIT_INSERT_EXPR");
/* Integer case: the inserted range must lie within rhs1's precision.  */
4235 if (INTEGRAL_TYPE_P (rhs1_type
))
4237 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4238 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4239 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4240 > TYPE_PRECISION (rhs1_type
)))
4242 error ("insertion out of range in BIT_INSERT_EXPR");
/* Vector case: the insertion must land on an element boundary.  */
4246 else if (VECTOR_TYPE_P (rhs1_type
))
4248 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4249 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4250 if (bitpos
% bitsize
!= 0)
4252 error ("vector insertion not at element boundary");
4259 case REALIGN_LOAD_EXPR
:
4269 /* Verify a gimple assignment statement STMT with a single rhs.
4270 Returns true if anything is wrong. */
/* NOTE(review): lossy extraction -- lines are split and some original
   lines are missing; only comments added, code text untouched.  */
4273 verify_gimple_assign_single (gassign
*stmt
)
4275 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4276 tree lhs
= gimple_assign_lhs (stmt
);
4277 tree lhs_type
= TREE_TYPE (lhs
);
4278 tree rhs1
= gimple_assign_rhs1 (stmt
);
4279 tree rhs1_type
= TREE_TYPE (rhs1
);
/* A plain copy requires trivially convertible types on both sides.  */
4282 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4284 error ("non-trivial conversion at assignment");
4285 debug_generic_expr (lhs_type
);
4286 debug_generic_expr (rhs1_type
);
/* Clobbers may only target declarations or MEM_REF lvalues.  */
4290 if (gimple_clobber_p (stmt
)
4291 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4293 error ("non-decl/MEM_REF LHS in clobber statement");
4294 debug_generic_expr (lhs
);
/* A memory-reference LHS gets its reference tree checked as well.  */
4298 if (handled_component_p (lhs
)
4299 || TREE_CODE (lhs
) == MEM_REF
4300 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4301 res
|= verify_types_in_gimple_reference (lhs
, true);
4303 /* Special codes we cannot handle via their class. */
/* ADDR_EXPR: the operand must be addressable and its type must agree
   with the pointed-to type of the ADDR_EXPR (modulo the LTO exception
   described below).  */
4308 tree op
= TREE_OPERAND (rhs1
, 0);
4309 if (!is_gimple_addressable (op
))
4311 error ("invalid operand in unary expression");
4315 /* Technically there is no longer a need for matching types, but
4316 gimple hygiene asks for this check. In LTO we can end up
4317 combining incompatible units and thus end up with addresses
4318 of globals that change their type to a common one. */
4320 && !types_compatible_p (TREE_TYPE (op
),
4321 TREE_TYPE (TREE_TYPE (rhs1
)))
4322 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4325 error ("type mismatch in address expression");
4326 debug_generic_stmt (TREE_TYPE (rhs1
));
4327 debug_generic_stmt (TREE_TYPE (op
));
4331 return verify_types_in_gimple_reference (op
, true);
/* Bare INDIRECT_REF must never survive into GIMPLE.  */
4336 error ("INDIRECT_REF in gimple IL");
/* Memory references on the RHS: a store must have a memory (non
   register) LHS when the stored type is a register type.  */
4342 case ARRAY_RANGE_REF
:
4343 case VIEW_CONVERT_EXPR
:
4346 case TARGET_MEM_REF
:
4348 if (!is_gimple_reg (lhs
)
4349 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4351 error ("invalid rhs for gimple memory store");
4352 debug_generic_stmt (lhs
);
4353 debug_generic_stmt (rhs1
);
4356 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4368 /* tcc_declaration */
/* Register-type aggregate copies must have a register on at least one
   side.  */
4373 if (!is_gimple_reg (lhs
)
4374 && !is_gimple_reg (rhs1
)
4375 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4377 error ("invalid rhs for gimple memory store");
4378 debug_generic_stmt (lhs
);
4379 debug_generic_stmt (rhs1
);
/* CONSTRUCTOR: only vector constructors may carry elements in GIMPLE;
   validate element types, counts and indexes.  */
4385 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4388 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4390 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4392 /* For vector CONSTRUCTORs we require that either it is empty
4393 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4394 (then the element count must be correct to cover the whole
4395 outer vector and index must be NULL on all elements, or it is
4396 a CONSTRUCTOR of scalar elements, where we as an exception allow
4397 smaller number of elements (assuming zero filling) and
4398 consecutive indexes as compared to NULL indexes (such
4399 CONSTRUCTORs can appear in the IL from FEs). */
4400 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4402 if (elt_t
== NULL_TREE
)
4404 elt_t
= TREE_TYPE (elt_v
);
4405 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4407 tree elt_t
= TREE_TYPE (elt_v
);
4408 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4411 error ("incorrect type of vector CONSTRUCTOR"
4413 debug_generic_stmt (rhs1
);
4416 else if (CONSTRUCTOR_NELTS (rhs1
)
4417 * TYPE_VECTOR_SUBPARTS (elt_t
)
4418 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4420 error ("incorrect number of vector CONSTRUCTOR"
4422 debug_generic_stmt (rhs1
);
4426 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4429 error ("incorrect type of vector CONSTRUCTOR elements");
4430 debug_generic_stmt (rhs1
);
4433 else if (CONSTRUCTOR_NELTS (rhs1
)
4434 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4436 error ("incorrect number of vector CONSTRUCTOR elements");
4437 debug_generic_stmt (rhs1
);
4441 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4443 error ("incorrect type of vector CONSTRUCTOR elements");
4444 debug_generic_stmt (rhs1
);
/* Explicit indexes are only allowed for scalar elements, must be
   INTEGER_CSTs, and must be consecutive from zero.  */
4447 if (elt_i
!= NULL_TREE
4448 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4449 || TREE_CODE (elt_i
) != INTEGER_CST
4450 || compare_tree_int (elt_i
, i
) != 0))
4452 error ("vector CONSTRUCTOR with non-NULL element index");
4453 debug_generic_stmt (rhs1
);
4456 if (!is_gimple_val (elt_v
))
4458 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4459 debug_generic_stmt (rhs1
);
4464 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4466 error ("non-vector CONSTRUCTOR with elements");
4467 debug_generic_stmt (rhs1
);
4473 case WITH_SIZE_EXPR
:
4483 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4484 is a problem, otherwise false. */
/* Dispatch on the RHS class to the matching specialized checker.
   NOTE(review): lossy extraction -- the unary case and the switch's
   default are visible below but braces/returns may be missing.  */
4487 verify_gimple_assign (gassign
*stmt
)
4489 switch (gimple_assign_rhs_class (stmt
))
4491 case GIMPLE_SINGLE_RHS
:
4492 return verify_gimple_assign_single (stmt
);
4494 case GIMPLE_UNARY_RHS
:
4495 return verify_gimple_assign_unary (stmt
);
4497 case GIMPLE_BINARY_RHS
:
4498 return verify_gimple_assign_binary (stmt
);
4500 case GIMPLE_TERNARY_RHS
:
4501 return verify_gimple_assign_ternary (stmt
);
4508 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4509 is a problem, otherwise false. */
4512 verify_gimple_return (greturn
*stmt
)
4514 tree op
= gimple_return_retval (stmt
);
/* RESTYPE is the declared return type of the current function.  */
4515 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4517 /* We cannot test for present return values as we do not fix up missing
4518 return values from the original source. */
/* The returned operand must be a GIMPLE value or the RESULT_DECL.  */
4522 if (!is_gimple_val (op
)
4523 && TREE_CODE (op
) != RESULT_DECL
)
4525 error ("invalid operand in return statement");
4526 debug_generic_stmt (op
);
/* For a by-reference result (directly, or through an SSA name whose
   underlying variable is a by-reference RESULT_DECL) compare against
   the pointed-to type instead of the pointer.  */
4530 if ((TREE_CODE (op
) == RESULT_DECL
4531 && DECL_BY_REFERENCE (op
))
4532 || (TREE_CODE (op
) == SSA_NAME
4533 && SSA_NAME_VAR (op
)
4534 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4535 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4536 op
= TREE_TYPE (op
);
4538 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4540 error ("invalid conversion in return statement");
4541 debug_generic_stmt (restype
);
4542 debug_generic_stmt (TREE_TYPE (op
));
4550 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4551 is a problem, otherwise false. */
4554 verify_gimple_goto (ggoto
*stmt
)
4556 tree dest
= gimple_goto_dest (stmt
);
4558 /* ??? We have two canonical forms of direct goto destinations, a
4559 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
/* Accept a LABEL_DECL, or (for computed gotos) any pointer-typed
   GIMPLE value.  */
4560 if (TREE_CODE (dest
) != LABEL_DECL
4561 && (!is_gimple_val (dest
)
4562 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4564 error ("goto destination is neither a label nor a pointer");
4571 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4572 is a problem, otherwise false. */
/* NOTE(review): lossy extraction -- only comments added; some original
   lines (returns, braces) are missing from the visible text.  */
4575 verify_gimple_switch (gswitch
*stmt
)
4578 tree elt
, prev_upper_bound
= NULL_TREE
;
4579 tree index_type
, elt_type
= NULL_TREE
;
/* The switch index must be a GIMPLE value of integral type.  */
4581 if (!is_gimple_val (gimple_switch_index (stmt
)))
4583 error ("invalid operand to switch statement");
4584 debug_generic_stmt (gimple_switch_index (stmt
));
4588 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4589 if (! INTEGRAL_TYPE_P (index_type
))
4591 error ("non-integral type switch statement");
4592 debug_generic_expr (index_type
);
/* Label 0 is the default case: it must carry neither a low nor a high
   bound.  */
4596 elt
= gimple_switch_label (stmt
, 0);
4597 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4599 error ("invalid default case label in switch statement");
4600 debug_generic_expr (elt
);
/* Walk the remaining case labels: each needs a CASE_LOW, ranges must
   be non-empty, types must agree, and labels must be sorted with no
   overlap (tracked via PREV_UPPER_BOUND).  */
4604 n
= gimple_switch_num_labels (stmt
);
4605 for (i
= 1; i
< n
; i
++)
4607 elt
= gimple_switch_label (stmt
, i
);
4609 if (! CASE_LOW (elt
))
4611 error ("invalid case label in switch statement");
4612 debug_generic_expr (elt
);
4616 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4618 error ("invalid case range in switch statement");
4619 debug_generic_expr (elt
);
4625 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4626 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4628 error ("type mismatch for case label in switch statement");
4629 debug_generic_expr (elt
);
/* First real label establishes ELT_TYPE; the index type must be at
   least as precise as the label type.  */
4635 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4636 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4638 error ("type precision mismatch in switch statement");
4643 if (prev_upper_bound
)
4645 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4647 error ("case labels not sorted in switch statement");
4652 prev_upper_bound
= CASE_HIGH (elt
);
4653 if (! prev_upper_bound
)
4654 prev_upper_bound
= CASE_LOW (elt
);
4660 /* Verify a gimple debug statement STMT.
4661 Returns true if anything is wrong. */
4664 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
4666 /* There isn't much that could be wrong in a gimple debug stmt. A
4667 gimple debug bind stmt, for example, maps a tree, that's usually
4668 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4669 component or member of an aggregate type, to another tree, that
4670 can be an arbitrary expression. These stmts expand into debug
4671 insns, and are converted to debug notes by var-tracking.c. */
4675 /* Verify a gimple label statement STMT.
4676 Returns true if anything is wrong. */
4679 verify_gimple_label (glabel
*stmt
)
4681 tree decl
= gimple_label_label (stmt
);
4685 if (TREE_CODE (decl
) != LABEL_DECL
)
/* Ordinary labels (not nonlocal, not forced) must belong to the
   current function.  */
4687 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4688 && DECL_CONTEXT (decl
) != current_function_decl
)
4690 error ("label's context is not the current function decl");
/* The label->block map must point back at the block holding STMT.  */
4694 uid
= LABEL_DECL_UID (decl
);
4697 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4699 error ("incorrect entry in label_to_block_map");
/* If the label is an EH landing pad, the pad's post_landing_pad must
   be this label.  */
4703 uid
= EH_LANDING_PAD_NR (decl
);
4706 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4707 if (decl
!= lp
->post_landing_pad
)
4709 error ("incorrect setting of landing pad number");
4717 /* Verify a gimple cond statement STMT.
4718 Returns true if anything is wrong. */
4721 verify_gimple_cond (gcond
*stmt
)
/* The condition code must be a comparison.  */
4723 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4725 error ("invalid comparison code in gimple cond");
/* The optional true/false labels, when present, must be LABEL_DECLs.  */
4728 if (!(!gimple_cond_true_label (stmt
)
4729 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4730 || !(!gimple_cond_false_label (stmt
)
4731 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4733 error ("invalid labels in gimple cond");
/* Delegate the operand/type checks to the shared comparison verifier,
   with boolean as the required result type.  */
4737 return verify_gimple_comparison (boolean_type_node
,
4738 gimple_cond_lhs (stmt
),
4739 gimple_cond_rhs (stmt
),
4740 gimple_cond_code (stmt
));
4743 /* Verify the GIMPLE statement STMT. Returns true if there is an
4744 error, otherwise false. */
/* Top-level per-statement dispatcher: route each statement code to the
   matching specialized verifier.  NOTE(review): lossy extraction --
   the case labels for most codes are missing from the visible text.  */
4747 verify_gimple_stmt (gimple
*stmt
)
4749 switch (gimple_code (stmt
))
4752 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4755 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4758 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4761 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4764 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4767 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4770 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4775 case GIMPLE_TRANSACTION
:
4776 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4778 /* Tuples that do not have tree operands. */
4780 case GIMPLE_PREDICT
:
4782 case GIMPLE_EH_DISPATCH
:
4783 case GIMPLE_EH_MUST_NOT_THROW
:
4787 /* OpenMP directives are validated by the FE and never operated
4788 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4789 non-gimple expressions when the main index variable has had
4790 its address taken. This does not affect the loop itself
4791 because the header of an GIMPLE_OMP_FOR is merely used to determine
4792 how to setup the parallel iteration. */
4796 return verify_gimple_debug (stmt
);
4803 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4804 and false otherwise. */
4807 verify_gimple_phi (gimple
*phi
)
4811 tree phi_result
= gimple_phi_result (phi
);
4816 error ("invalid PHI result");
/* Remember whether this is a virtual PHI; every argument must agree
   with the result on virtual-ness.  */
4820 virtual_p
= virtual_operand_p (phi_result
);
/* The result must be an SSA name; a virtual result must be based on
   the function's single virtual operand.  */
4821 if (TREE_CODE (phi_result
) != SSA_NAME
4823 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4825 error ("invalid PHI result");
/* Check each incoming argument.  */
4829 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4831 tree t
= gimple_phi_arg_def (phi
, i
);
4835 error ("missing PHI def");
4839 /* Addressable variables do have SSA_NAMEs but they
4840 are not considered gimple values. */
4841 else if ((TREE_CODE (t
) == SSA_NAME
4842 && virtual_p
!= virtual_operand_p (t
))
4844 && (TREE_CODE (t
) != SSA_NAME
4845 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4847 && !is_gimple_val (t
)))
4849 error ("invalid PHI argument");
4850 debug_generic_expr (t
);
4853 #ifdef ENABLE_TYPES_CHECKING
/* Optional stricter mode: each argument's type must trivially convert
   to the result type.  */
4854 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4856 error ("incompatible types in PHI argument %u", i
);
4857 debug_generic_stmt (TREE_TYPE (phi_result
));
4858 debug_generic_stmt (TREE_TYPE (t
));
4867 /* Verify the GIMPLE statements inside the sequence STMTS. */
/* Recursive worker: walk every statement in STMTS, descending into the
   sub-sequences of structured statements (bind, try, EH constructs,
   transactions), and accumulate errors in ERR.  */
4870 verify_gimple_in_seq_2 (gimple_seq stmts
)
4872 gimple_stmt_iterator ittr
;
4875 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4877 gimple
*stmt
= gsi_stmt (ittr
);
4879 switch (gimple_code (stmt
))
/* GIMPLE_BIND: recurse into the bind body.  */
4882 err
|= verify_gimple_in_seq_2 (
4883 gimple_bind_body (as_a
<gbind
*> (stmt
)));
/* GIMPLE_TRY: recurse into both the eval and cleanup sequences.  */
4887 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4888 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4891 case GIMPLE_EH_FILTER
:
4892 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4895 case GIMPLE_EH_ELSE
:
4897 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
4898 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
4899 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
4904 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
4905 as_a
<gcatch
*> (stmt
)));
4908 case GIMPLE_TRANSACTION
:
4909 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
/* Default: verify the statement itself and dump it on failure.  */
4914 bool err2
= verify_gimple_stmt (stmt
);
4916 debug_gimple_stmt (stmt
);
4925 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4926 is a problem, otherwise false. */
4929 verify_gimple_transaction (gtransaction
*stmt
)
/* Each of the three optional transaction labels (norm, uninst, over)
   must be a LABEL_DECL when present.  */
4933 lab
= gimple_transaction_label_norm (stmt
);
4934 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4936 lab
= gimple_transaction_label_uninst (stmt
);
4937 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4939 lab
= gimple_transaction_label_over (stmt
);
4940 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
/* Then verify the statements inside the transaction body.  */
4943 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4947 /* Verify the GIMPLE statements inside the statement list STMTS. */
/* Public entry point: wraps the recursive worker in the statement
   verification timevar and turns any failure into an ICE.  */
4950 verify_gimple_in_seq (gimple_seq stmts
)
4952 timevar_push (TV_TREE_STMT_VERIFY
);
4953 if (verify_gimple_in_seq_2 (stmts
))
4954 internal_error ("verify_gimple failed");
4955 timevar_pop (TV_TREE_STMT_VERIFY
);
4958 /* Return true when the T can be shared. */
/* Sharable nodes: types, decls, invariants, SSA names, the error node
   and identifiers.  CASE_LABEL_EXPR gets special handling below.  */
4961 tree_node_can_be_shared (tree t
)
4963 if (IS_TYPE_OR_DECL_P (t
)
4964 || is_gimple_min_invariant (t
)
4965 || TREE_CODE (t
) == SSA_NAME
4966 || t
== error_mark_node
4967 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4970 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4979 /* Called via walk_tree. Verify tree sharing. */
/* DATA is a hash_set of already-seen nodes; revisiting a non-sharable
   node means it is illegally shared.  */
4982 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4984 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
/* Legitimately sharable nodes are skipped (and not descended into).  */
4986 if (tree_node_can_be_shared (*tp
))
4988 *walk_subtrees
= false;
/* hash_set::add returns true if the node was already present.  */
4992 if (visited
->add (*tp
))
4998 /* Called via walk_gimple_stmt. Verify tree sharing. */
/* Thin adapter: unwrap the walk_stmt_info to reach the visited set and
   delegate to verify_node_sharing_1.  */
5001 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
5003 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5004 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
/* Set when a stale EH-table entry is found; checked by the caller.  */
5007 static bool eh_error_found
;
/* hash_map traversal callback: every statement recorded in the EH
   throw table must have been visited during the CFG walk; otherwise it
   is a dead entry.  */
5009 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
5010 hash_set
<gimple
*> *visited
)
5012 if (!visited
->contains (stmt
))
5014 error ("dead STMT in EH table");
5015 debug_gimple_stmt (stmt
);
5016 eh_error_found
= true;
5021 /* Verify if the location LOCs block is in BLOCKS. */
5024 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
5026 tree block
= LOCATION_BLOCK (loc
);
5027 if (block
!= NULL_TREE
5028 && !blocks
->contains (block
))
5030 error ("location references block not in block tree");
/* Recurse on the block's own source location as well.  */
5033 if (block
!= NULL_TREE
)
5034 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
5038 /* Called via walk_tree. Verify that expressions have no blocks. */
/* Used on DEBUG_EXPR/VALUE_EXPR trees, whose locations must not carry
   block information.  */
5041 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
5045 *walk_subtrees
= false;
5049 location_t loc
= EXPR_LOCATION (*tp
);
5050 if (LOCATION_BLOCK (loc
) != NULL
)
5056 /* Called via walk_tree. Verify locations of expressions. */
/* DATA is the set of BLOCKs belonging to the function's block tree.  */
5059 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5061 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
/* A variable's DEBUG_EXPR must not reference any block.  */
5063 if (VAR_P (*tp
) && DECL_HAS_DEBUG_EXPR_P (*tp
))
5065 tree t
= DECL_DEBUG_EXPR (*tp
);
5066 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
/* Likewise for the VALUE_EXPR of variables, parameters and results.  */
5071 || TREE_CODE (*tp
) == PARM_DECL
5072 || TREE_CODE (*tp
) == RESULT_DECL
)
5073 && DECL_HAS_VALUE_EXPR_P (*tp
))
5075 tree t
= DECL_VALUE_EXPR (*tp
);
5076 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
5083 *walk_subtrees
= false;
/* For ordinary expressions, the location's block must be in BLOCKS.  */
5087 location_t loc
= EXPR_LOCATION (*tp
);
5088 if (verify_location (blocks
, loc
))
5094 /* Called via walk_gimple_op. Verify locations of expressions. */
/* Thin adapter: unwrap the walk_stmt_info to reach the BLOCKS set and
   delegate to verify_expr_location_1.  */
5097 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5099 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5100 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5103 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
/* Depth-first walk over BLOCK_SUBBLOCKS / BLOCK_CHAIN, adding every
   reachable block to BLOCKS.  */
5106 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5109 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5112 collect_subblocks (blocks
, t
)
5116 /* Verify the GIMPLE statements in the CFG of FN. */
/* NOTE(review): lossy extraction -- lines are split and several
   original lines are missing; only comments are added, code text is
   untouched.  This is the main CFG-wide verifier: it checks PHIs and
   statements per block, tree sharing, source-location blocks, and the
   EH throw table, and ICEs on any failure.  */
5119 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
5124 timevar_push (TV_TREE_STMT_VERIFY
);
5125 hash_set
<void *> visited
;
5126 hash_set
<gimple
*> visited_stmts
;
5128 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5129 hash_set
<tree
> blocks
;
5130 if (DECL_INITIAL (fn
->decl
))
5132 blocks
.add (DECL_INITIAL (fn
->decl
));
5133 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
/* Walk every basic block of FN.  */
5136 FOR_EACH_BB_FN (bb
, fn
)
5138 gimple_stmt_iterator gsi
;
/* First the PHI nodes of the block.  */
5140 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5144 gphi
*phi
= gpi
.phi ();
5148 visited_stmts
.add (phi
);
5150 if (gimple_bb (phi
) != bb
)
5152 error ("gimple_bb (phi) is set to a wrong basic block");
5156 err2
|= verify_gimple_phi (phi
);
5158 /* Only PHI arguments have locations. */
5159 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5161 error ("PHI node with location");
/* Per-argument checks: sharing, locations, virtual-PHI rules.  */
5165 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5167 tree arg
= gimple_phi_arg_def (phi
, i
);
5168 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5172 error ("incorrect sharing of tree nodes");
5173 debug_generic_expr (addr
);
5176 location_t loc
= gimple_phi_arg_location (phi
, i
);
5177 if (virtual_operand_p (gimple_phi_result (phi
))
5178 && loc
!= UNKNOWN_LOCATION
)
5180 error ("virtual PHI with argument locations");
5183 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5186 debug_generic_expr (addr
);
5189 err2
|= verify_location (&blocks
, loc
);
5193 debug_gimple_stmt (phi
);
/* Then the regular statements of the block.  */
5197 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5199 gimple
*stmt
= gsi_stmt (gsi
);
5201 struct walk_stmt_info wi
;
5205 visited_stmts
.add (stmt
);
5207 if (gimple_bb (stmt
) != bb
)
5209 error ("gimple_bb (stmt) is set to a wrong basic block");
5213 err2
|= verify_gimple_stmt (stmt
);
5214 err2
|= verify_location (&blocks
, gimple_location (stmt
));
/* Tree-sharing walk over the statement's operands.  */
5216 memset (&wi
, 0, sizeof (wi
));
5217 wi
.info
= (void *) &visited
;
5218 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5221 error ("incorrect sharing of tree nodes");
5222 debug_generic_expr (addr
);
/* Location/block walk over the statement's operands.  */
5226 memset (&wi
, 0, sizeof (wi
));
5227 wi
.info
= (void *) &blocks
;
5228 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5231 debug_generic_expr (addr
);
5235 /* ??? Instead of not checking these stmts at all the walker
5236 should know its context via wi. */
5237 if (!is_gimple_debug (stmt
)
5238 && !is_gimple_omp (stmt
))
5240 memset (&wi
, 0, sizeof (wi
));
5241 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5244 debug_generic_expr (addr
);
5245 inform (gimple_location (stmt
), "in statement");
5250 /* If the statement is marked as part of an EH region, then it is
5251 expected that the statement could throw. Verify that when we
5252 have optimizations that simplify statements such that we prove
5253 that they cannot throw, that we update other data structures
5255 lp_nr
= lookup_stmt_eh_lp (stmt
);
5258 if (!stmt_could_throw_p (stmt
))
5262 error ("statement marked for throw, but doesn%'t");
5266 else if (!gsi_one_before_end_p (gsi
))
5268 error ("statement marked for throw in middle of block");
5274 debug_gimple_stmt (stmt
);
/* Finally, cross-check the EH throw table against the statements we
   actually visited.  */
5279 eh_error_found
= false;
5280 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5282 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5285 if (err
|| eh_error_found
)
5286 internal_error ("verify_gimple failed");
5288 verify_histograms ();
5289 timevar_pop (TV_TREE_STMT_VERIFY
);
/* NOTE(review): this chunk is a lossy extraction -- leading numbers such
   as "5293" are the original file's line numbers, and many intervening
   lines (braces, declarations, error paths) are missing.  Code fragments
   are left byte-identical; only comments are added.  */
/* CFG consistency checker: verifies ENTRY/EXIT carry no IL, labels come
   first in their block and map back to it, no control flow appears
   mid-block, and outgoing edge flags agree with each block's terminating
   statement (GIMPLE_COND, goto, return, switch, EH dispatch).  */
5293 /* Verifies that the flow information is OK. */
5296 gimple_verify_flow_info (void)
5300 gimple_stmt_iterator gsi
;
/* ENTRY and EXIT are artificial blocks: they must have neither a
   statement sequence nor PHI nodes.  */
5305 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5306 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5308 error ("ENTRY_BLOCK has IL associated with it");
5312 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5313 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5315 error ("EXIT_BLOCK has IL associated with it");
/* No predecessor edge of EXIT may be a fallthru edge.  */
5319 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5320 if (e
->flags
& EDGE_FALLTHRU
)
5322 error ("fallthru to exit from bb %d", e
->src
->index
);
/* Per-basic-block checks.  */
5326 FOR_EACH_BB_FN (bb
, cfun
)
5328 bool found_ctrl_stmt
= false;
/* First loop: walk the leading GIMPLE_LABELs and validate each one.  */
5332 /* Skip labels on the start of basic block. */
5333 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5336 gimple
*prev_stmt
= stmt
;
5338 stmt
= gsi_stmt (gsi
);
5340 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5343 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
/* Nonlocal labels and EH landing-pad labels must be the very first
   label of their block.  */
5344 if (prev_stmt
&& DECL_NONLOCAL (label
))
5346 error ("nonlocal label ");
5347 print_generic_expr (stderr
, label
);
5348 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5353 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5355 error ("EH landing pad label ");
5356 print_generic_expr (stderr
, label
);
5357 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
/* The label->block map must point back at this block, and the label
   must belong to the current function.  */
5362 if (label_to_block (label
) != bb
)
5365 print_generic_expr (stderr
, label
);
5366 fprintf (stderr
, " to block does not match in bb %d",
5371 if (decl_function_context (label
) != current_function_decl
)
5374 print_generic_expr (stderr
, label
);
5375 fprintf (stderr
, " has incorrect context in bb %d",
5381 /* Verify that body of basic block BB is free of control flow. */
5382 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5384 gimple
*stmt
= gsi_stmt (gsi
);
/* A control statement seen earlier means everything after it is
   "in the middle" -- the block should have been split.  */
5386 if (found_ctrl_stmt
)
5388 error ("control flow in the middle of basic block %d",
5393 if (stmt_ends_bb_p (stmt
))
5394 found_ctrl_stmt
= true;
/* Labels past the leading run are also misplaced.  */
5396 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5399 print_generic_expr (stderr
, gimple_label_label (label_stmt
));
5400 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
/* Now inspect the block's last statement and the edge flags.  */
5405 gsi
= gsi_last_bb (bb
);
5406 if (gsi_end_p (gsi
))
5409 stmt
= gsi_stmt (gsi
);
5411 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5414 err
|= verify_eh_edges (stmt
);
/* Control statements never have fallthru successors.  */
5416 if (is_ctrl_stmt (stmt
))
5418 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5419 if (e
->flags
& EDGE_FALLTHRU
)
5421 error ("fallthru edge after a control statement in bb %d",
5427 if (gimple_code (stmt
) != GIMPLE_COND
)
5429 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5430 after anything else but if statement. */
5431 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5432 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5434 error ("true/false edge after a non-GIMPLE_COND in bb %d",
/* Statement-specific edge checks.  */
5440 switch (gimple_code (stmt
))
/* GIMPLE_COND: exactly one true and one false edge, neither of them
   fallthru/abnormal, and at most two successors.  */
5447 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5451 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5452 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5453 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5454 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5455 || EDGE_COUNT (bb
->succs
) >= 3)
5457 error ("wrong outgoing edge flags at end of bb %d",
/* Gotos: simple gotos must have been lowered away; what remains is a
   computed goto whose outgoing edges must all be abnormal.  */
5465 if (simple_goto_p (stmt
))
5467 error ("explicit goto at end of bb %d", bb
->index
);
5472 /* FIXME. We should double check that the labels in the
5473 destination blocks have their address taken. */
5474 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5475 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5476 | EDGE_FALSE_VALUE
))
5477 || !(e
->flags
& EDGE_ABNORMAL
))
5479 error ("wrong outgoing edge flags at end of bb %d",
/* Returns (other than __builtin_return): single bare successor edge,
   and it must go to EXIT.  */
5487 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5491 if (!single_succ_p (bb
)
5492 || (single_succ_edge (bb
)->flags
5493 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5494 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5496 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5499 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5501 error ("return edge does not point to exit in bb %d",
/* GIMPLE_SWITCH: checks use bb->aux as scratch -- 1 marks a case
   destination, 2 marks one that also has an edge.  */
5509 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5514 n
= gimple_switch_num_labels (switch_stmt
);
5516 /* Mark all the destination basic blocks. */
5517 for (i
= 0; i
< n
; ++i
)
5519 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5520 basic_block label_bb
= label_to_block (lab
);
5521 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5522 label_bb
->aux
= (void *)1;
5525 /* Verify that the case labels are sorted. */
5526 prev
= gimple_switch_label (switch_stmt
, 0);
5527 for (i
= 1; i
< n
; ++i
)
5529 tree c
= gimple_switch_label (switch_stmt
, i
);
5532 error ("found default case not at the start of "
5538 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5540 error ("case labels not sorted: ");
5541 print_generic_expr (stderr
, prev
);
5542 fprintf (stderr
," is greater than ");
5543 print_generic_expr (stderr
, c
);
5544 fprintf (stderr
," but comes before it.\n");
5549 /* VRP will remove the default case if it can prove it will
5550 never be executed. So do not verify there always exists
5551 a default case here. */
/* Every successor edge must correspond to a marked case block, be
   flag-free, and get re-marked with 2 for the coverage check below.  */
5553 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5557 error ("extra outgoing edge %d->%d",
5558 bb
->index
, e
->dest
->index
);
5562 e
->dest
->aux
= (void *)2;
5563 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5564 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5566 error ("wrong outgoing edge flags at end of bb %d",
5572 /* Check that we have all of them. */
5573 for (i
= 0; i
< n
; ++i
)
5575 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5576 basic_block label_bb
= label_to_block (lab
);
5578 if (label_bb
->aux
!= (void *)2)
5580 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
/* Clear the aux scratch marks again.  */
5585 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5586 e
->dest
->aux
= (void *)0;
5590 case GIMPLE_EH_DISPATCH
:
5591 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
/* Finally, cross-check the dominator tree when fast queries exist.  */
5599 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5600 verify_dominators (CDI_DOMINATORS
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
5606 /* Updates phi nodes after creating a forwarder block joined
5607 by edge FALLTHRU. */
5610 gimple_make_forwarder_block (edge fallthru
)
5614 basic_block dummy
, bb
;
/* DUMMY is the new forwarder (source of FALLTHRU), BB the old block.  */
5618 dummy
= fallthru
->src
;
5619 bb
= fallthru
->dest
;
/* With a single predecessor there is nothing to fix up.  */
5621 if (single_pred_p (bb
))
5624 /* If we redirected a branch we must create new PHI nodes at the
/* Move each PHI from DUMMY into BB: the old PHI keeps a fresh SSA copy
   of its result in DUMMY, and the new PHI in BB receives that result
   along the FALLTHRU edge.  */
5626 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5628 gphi
*phi
, *new_phi
;
5631 var
= gimple_phi_result (phi
);
5632 new_phi
= create_phi_node (var
, bb
);
5633 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5634 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5638 /* Add the arguments we have stored on edges. */
5639 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5644 flush_pending_stmts (e
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
5649 /* Return a non-special label in the head of basic block BLOCK.
5650 Create one if it doesn't exist. */
5653 gimple_block_label (basic_block bb
)
5655 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
/* Scan the leading labels for one that is not DECL_NONLOCAL; if found
   later in the run, hoist it to the front so it stays first.  */
5660 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5662 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5665 label
= gimple_label_label (stmt
);
5666 if (!DECL_NONLOCAL (label
))
5669 gsi_move_before (&i
, &s
);
/* No usable label: synthesize an artificial one at the block head.  */
5674 label
= create_artificial_label (UNKNOWN_LOCATION
);
5675 stmt
= gimple_build_label (label
);
5676 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
5681 /* Attempt to perform edge redirection by replacing a possibly complex
5682 jump instruction by a goto or by removing the jump completely.
5683 This can apply only if all edges now point to the same block. The
5684 parameters and return values are equivalent to
5685 redirect_edge_and_branch. */
5688 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5690 basic_block src
= e
->src
;
5691 gimple_stmt_iterator i
;
5694 /* We can replace or remove a complex jump only when we have exactly
5696 if (EDGE_COUNT (src
->succs
) != 2
5697 /* Verify that all targets will be TARGET. Specifically, the
5698 edge that is not E must also go to TARGET. */
5699 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5702 i
= gsi_last_bb (src
);
5706 stmt
= gsi_stmt (i
);
/* Drop the now-redundant COND/SWITCH and leave one fallthru edge.  */
5708 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5710 gsi_remove (&i
, true);
5711 e
= ssa_redirect_edge (e
, target
);
5712 e
->flags
= EDGE_FALLTHRU
;
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
5720 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5721 edge representing the redirected branch. */
5724 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5726 basic_block bb
= e
->src
;
5727 gimple_stmt_iterator gsi
;
/* Abnormal edges cannot be redirected; an already-correct edge and EH
   edges have dedicated handling.  */
5731 if (e
->flags
& EDGE_ABNORMAL
)
5734 if (e
->dest
== dest
)
5737 if (e
->flags
& EDGE_EH
)
5738 return redirect_eh_edge (e
, dest
);
/* First try the cheap route: replace/remove the jump entirely.  */
5740 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5742 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
/* Otherwise dispatch on the source block's last statement.  */
5747 gsi
= gsi_last_bb (bb
);
5748 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5750 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5753 /* For COND_EXPR, we only need to redirect the edge. */
5757 /* No non-abnormal edges should lead from a non-simple goto, and
5758 simple ones should be represented implicitly. */
/* GIMPLE_SWITCH: retarget every case label that led to E->dest at the
   (possibly fresh) label of DEST.  */
5763 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5764 tree label
= gimple_block_label (dest
);
5765 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5767 /* If we have a list of cases associated with E, then use it
5768 as it's a lot faster than walking the entire case vector. */
5771 edge e2
= find_edge (e
->src
, dest
);
5778 CASE_LABEL (cases
) = label
;
5779 cases
= CASE_CHAIN (cases
);
5782 /* If there was already an edge in the CFG, then we need
5783 to move all the cases associated with E to E2. */
5786 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5788 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5789 CASE_CHAIN (cases2
) = first
;
5791 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
/* Slow path: no cached case list, walk the whole label vector.  */
5795 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5797 for (i
= 0; i
< n
; i
++)
5799 tree elt
= gimple_switch_label (switch_stmt
, i
);
5800 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5801 CASE_LABEL (elt
) = label
;
/* GIMPLE_ASM with labels ("asm goto"): rewrite matching label
   operands to DEST's label.  */
5809 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5810 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5813 for (i
= 0; i
< n
; ++i
)
5815 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5816 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5819 label
= gimple_block_label (dest
);
5820 TREE_VALUE (cons
) = label
;
5824 /* If we didn't find any label matching the former edge in the
5825 asm labels, we must be redirecting the fallthrough
5827 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
))
;
/* Presumably a return-like terminator: drop it and fall through --
   TODO confirm against the full file; surrounding lines are missing.  */
5832 gsi_remove (&gsi
, true);
5833 e
->flags
|= EDGE_FALLTHRU
;
5836 case GIMPLE_OMP_RETURN
:
5837 case GIMPLE_OMP_CONTINUE
:
5838 case GIMPLE_OMP_SECTIONS_SWITCH
:
5839 case GIMPLE_OMP_FOR
:
5840 /* The edges from OMP constructs can be simply redirected. */
5843 case GIMPLE_EH_DISPATCH
:
5844 if (!(e
->flags
& EDGE_FALLTHRU
))
5845 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
/* GIMPLE_TRANSACTION: pick the label slot matching the edge kind.  */
5848 case GIMPLE_TRANSACTION
:
5849 if (e
->flags
& EDGE_TM_ABORT
)
5850 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
5851 gimple_block_label (dest
));
5852 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
5853 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
5854 gimple_block_label (dest
));
5856 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
5857 gimple_block_label (dest
));
5861 /* Otherwise it must be a fallthru edge, and we don't need to
5862 do anything besides redirecting it. */
5863 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5867 /* Update/insert PHI nodes as necessary. */
5869 /* Now update the edges in the CFG. */
5870 e
= ssa_redirect_edge (e
, dest
);
/* NOTE(review): lossy extraction -- code left byte-identical.
   Visible logic: abnormal/EH edges make the branch unremovable.  */
5875 /* Returns true if it is possible to remove edge E by redirecting
5876 it to the destination of the other edge from E->src. */
5879 gimple_can_remove_branch_p (const_edge e
)
5881 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
/* NOTE(review): lossy extraction -- code left byte-identical.
   Thin forwarder to gimple_redirect_edge_and_branch.  */
5887 /* Simple wrapper, as we can always redirect fallthru edges. */
5890 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5892 e
= gimple_redirect_edge_and_branch (e
, dest
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
5899 /* Splits basic block BB after statement STMT (but at least after the
5900 labels). If STMT is NULL, BB is split just after the labels. */
5903 gimple_split_block (basic_block bb
, void *stmt
)
5905 gimple_stmt_iterator gsi
;
5906 gimple_stmt_iterator gsi_tgt
;
/* The new block inherits all of BB's successor edges wholesale.  */
5912 new_bb
= create_empty_bb (bb
);
5914 /* Redirect the outgoing edges. */
5915 new_bb
->succs
= bb
->succs
;
5917 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5920 /* Get a stmt iterator pointing to the first stmt to move. */
5921 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
5922 gsi
= gsi_after_labels (bb
);
5925 gsi
= gsi_for_stmt ((gimple
*) stmt
);
5929 /* Move everything from GSI to the new basic block. */
5930 if (gsi_end_p (gsi
))
5933 /* Split the statement list - avoid re-creating new containers as this
5934 brings ugly quadratic memory consumption in the inliner.
5935 (We are still quadratic since we need to update stmt BB pointers,
5937 gsi_split_seq_before (&gsi
, &list
);
5938 set_bb_seq (new_bb
, list
);
/* Fix each moved statement's owning-block pointer.  */
5939 for (gsi_tgt
= gsi_start (list
);
5940 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5941 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
/* NOTE(review): lossy extraction -- code left byte-identical.
   Relinks BB after AFTER in the block chain; no-op when already there.  */
5947 /* Moves basic block BB after block AFTER. */
5950 gimple_move_block_after (basic_block bb
, basic_block after
)
5952 if (bb
->prev_bb
== after
)
5956 link_block (bb
, after
);
/* NOTE(review): lossy extraction -- code left byte-identical.
   Skips labels and a debug stmt, then reports whether BB is empty.  */
5962 /* Return TRUE if block BB has no executable statements, otherwise return
5966 gimple_empty_block_p (basic_block bb
)
5968 /* BB must have no executable statements. */
5969 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5972 if (gsi_end_p (gsi
))
5974 if (is_gimple_debug (gsi_stmt (gsi
)))
5975 gsi_next_nondebug (&gsi
);
5976 return gsi_end_p (gsi
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
5980 /* Split a basic block if it ends with a conditional branch and if the
5981 other part of the block is not empty. */
5984 gimple_split_block_before_cond_jump (basic_block bb
)
5986 gimple
*last
, *split_point
;
5987 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5988 if (gsi_end_p (gsi
))
5990 last
= gsi_stmt (gsi
);
/* Only COND and SWITCH terminators qualify for the split.  */
5991 if (gimple_code (last
) != GIMPLE_COND
5992 && gimple_code (last
) != GIMPLE_SWITCH
)
5995 split_point
= gsi_stmt (gsi
);
5996 return split_block (bb
, split_point
)->dest
;
/* NOTE(review): lossy extraction -- code left byte-identical.
   Body not visible here; signature ignores BB (ATTRIBUTE_UNUSED).  */
6000 /* Return true if basic_block can be duplicated. */
6003 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6008 /* Create a duplicate of the basic block BB. NOTE: This does not
6009 preserve SSA form. */
6012 gimple_duplicate_bb (basic_block bb
)
6015 gimple_stmt_iterator gsi_tgt
;
6017 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
6019 /* Copy the PHI nodes. We ignore PHI node arguments here because
6020 the incoming edges have not been setup yet. */
6021 for (gphi_iterator gpi
= gsi_start_phis (bb
);
6027 copy
= create_phi_node (NULL_TREE
, new_bb
);
6028 create_new_def_for (gimple_phi_result (phi
), copy
,
6029 gimple_phi_result_ptr (copy
));
6030 gimple_set_uid (copy
, gimple_uid (phi
));
/* Copy the non-PHI statements, skipping labels and label debug binds.  */
6033 gsi_tgt
= gsi_start_bb (new_bb
);
6034 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
6038 def_operand_p def_p
;
6039 ssa_op_iter op_iter
;
6041 gimple
*stmt
, *copy
;
6043 stmt
= gsi_stmt (gsi
);
6044 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6047 /* Don't duplicate label debug stmts. */
6048 if (gimple_debug_bind_p (stmt
)
6049 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
6053 /* Create a new copy of STMT and duplicate STMT's virtual
6055 copy
= gimple_copy (stmt
);
6056 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
6058 maybe_duplicate_eh_stmt (copy
, stmt
);
6059 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
6061 /* When copying around a stmt writing into a local non-user
6062 aggregate, make sure it won't share stack slot with other
6064 lhs
= gimple_get_lhs (stmt
);
6065 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
6067 tree base
= get_base_address (lhs
);
6069 && (VAR_P (base
) || TREE_CODE (base
) == RESULT_DECL
)
6070 && DECL_IGNORED_P (base
)
6071 && !TREE_STATIC (base
)
6072 && !DECL_EXTERNAL (base
)
6073 && (!VAR_P (base
) || !DECL_HAS_VALUE_EXPR_P (base
)))
6074 DECL_NONSHAREABLE (base
) = 1;
6077 /* Create new names for all the definitions created by COPY and
6078 add replacement mappings for each new name. */
6079 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6080 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6086 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6089 add_phi_args_after_copy_edge (edge e_copy
)
6091 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6094 gphi
*phi
, *phi_copy
;
6096 gphi_iterator psi
, psi_copy
;
6098 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
/* Map the copy edge back to the original blocks via the
   original/copy tables (BB_DUPLICATED marks duplicated blocks).  */
6101 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6103 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6104 dest
= get_bb_original (e_copy
->dest
);
6106 dest
= e_copy
->dest
;
6108 e
= find_edge (bb
, dest
);
6111 /* During loop unrolling the target of the latch edge is copied.
6112 In this case we are not looking for edge to dest, but to
6113 duplicated block whose original was dest. */
6114 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6116 if ((e
->dest
->flags
& BB_DUPLICATED
)
6117 && get_bb_original (e
->dest
) == dest
)
6121 gcc_assert (e
!= NULL
);
/* Walk the two PHI chains in lock-step and copy each argument (and
   its location) from the original edge to the copy edge.  */
6124 for (psi
= gsi_start_phis (e
->dest
),
6125 psi_copy
= gsi_start_phis (e_copy
->dest
);
6127 gsi_next (&psi
), gsi_next (&psi_copy
))
6130 phi_copy
= psi_copy
.phi ();
6131 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6132 add_phi_arg (phi_copy
, def
, e_copy
,
6133 gimple_phi_arg_location_from_edge (phi
, e
));
/* NOTE(review): lossy extraction -- code left byte-identical.
   Applies add_phi_args_after_copy_edge to each outgoing edge.  */
6138 /* Basic block BB_COPY was created by code duplication. Add phi node
6139 arguments for edges going out of BB_COPY. The blocks that were
6140 duplicated have BB_DUPLICATED set. */
6143 add_phi_args_after_copy_bb (basic_block bb_copy
)
6148 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6150 add_phi_args_after_copy_edge (e_copy
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6154 /* Blocks in REGION_COPY array of length N_REGION were created by
6155 duplication of basic blocks. Add phi node arguments for edges
6156 going from these blocks. If E_COPY is not NULL, also add
6157 phi node arguments for its destination.*/
6160 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
/* Temporarily mark the copies with BB_DUPLICATED so the per-edge helper
   can map copies back to originals, then clear the marks.  */
6165 for (i
= 0; i
< n_region
; i
++)
6166 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6168 for (i
= 0; i
< n_region
; i
++)
6169 add_phi_args_after_copy_bb (region_copy
[i
]);
6171 add_phi_args_after_copy_edge (e_copy
);
6173 for (i
= 0; i
< n_region
; i
++)
6174 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6177 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6178 important exit edge EXIT. By important we mean that no SSA name defined
6179 inside region is live over the other exit edges of the region. All entry
6180 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6181 to the duplicate of the region. Dominance and loop information is
6182 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6183 UPDATE_DOMINANCE is false then we assume that the caller will update the
6184 dominance information after calling this function. The new basic
6185 blocks are stored to REGION_COPY in the same order as they had in REGION,
6186 provided that REGION_COPY is not NULL.
6187 The function returns false if it is unable to copy the region,
6191 gimple_duplicate_sese_region (edge entry
, edge exit
,
6192 basic_block
*region
, unsigned n_region
,
6193 basic_block
*region_copy
,
6194 bool update_dominance
)
6197 bool free_region_copy
= false, copying_header
= false;
6198 struct loop
*loop
= entry
->dest
->loop_father
;
6200 vec
<basic_block
> doms
;
6202 int total_freq
= 0, entry_freq
= 0;
6203 profile_count total_count
= profile_count::uninitialized ();
6204 profile_count entry_count
= profile_count::uninitialized ();
6206 if (!can_copy_bbs_p (region
, n_region
))
6209 /* Some sanity checking. Note that we do not check for all possible
6210 missuses of the functions. I.e. if you ask to copy something weird,
6211 it will work, but the state of structures probably will not be
6213 for (i
= 0; i
< n_region
; i
++)
6215 /* We do not handle subloops, i.e. all the blocks must belong to the
6217 if (region
[i
]->loop_father
!= loop
)
6220 if (region
[i
] != entry
->dest
6221 && region
[i
] == loop
->header
)
6225 /* In case the function is used for loop header copying (which is the primary
6226 use), ensure that EXIT and its copy will be new latch and entry edges. */
6227 if (loop
->header
== entry
->dest
)
6229 copying_header
= true;
6231 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6234 for (i
= 0; i
< n_region
; i
++)
6235 if (region
[i
] != exit
->src
6236 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6240 initialize_original_copy_tables ();
6243 set_loop_copy (loop
, loop_outer (loop
));
6245 set_loop_copy (loop
, loop
);
/* Allocate REGION_COPY locally when the caller did not supply one.  */
6249 region_copy
= XNEWVEC (basic_block
, n_region
);
6250 free_region_copy
= true;
6253 /* Record blocks outside the region that are dominated by something
6255 if (update_dominance
)
6258 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
/* Capture profile counts/frequencies so both copies can be rescaled.  */
6261 if (entry
->dest
->count
.initialized_p ())
6263 total_count
= entry
->dest
->count
;
6264 entry_count
= entry
->count
;
6265 /* Fix up corner cases, to avoid division by zero or creation of negative
6267 if (entry_count
> total_count
)
6268 entry_count
= total_count
;
6270 if (!(total_count
> 0) || !(entry_count
> 0))
6272 total_freq
= entry
->dest
->frequency
;
6273 entry_freq
= EDGE_FREQUENCY (entry
);
6274 /* Fix up corner cases, to avoid division by zero or creation of negative
6276 if (total_freq
== 0)
6278 else if (entry_freq
> total_freq
)
6279 entry_freq
= total_freq
;
/* The actual duplication, followed by profile rescaling of both the
   original region and its copy.  */
6282 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6283 split_edge_bb_loc (entry
), update_dominance
);
6284 if (total_count
> 0 && entry_count
> 0)
6286 scale_bbs_frequencies_profile_count (region
, n_region
,
6287 total_count
- entry_count
,
6289 scale_bbs_frequencies_profile_count (region_copy
, n_region
, entry_count
,
6294 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6296 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
/* For loop-header copying, the exit edge becomes the new latch/entry.  */
6301 loop
->header
= exit
->dest
;
6302 loop
->latch
= exit
->src
;
6305 /* Redirect the entry and add the phi node arguments. */
6306 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6307 gcc_assert (redirected
!= NULL
);
6308 flush_pending_stmts (entry
);
6310 /* Concerning updating of dominators: We must recount dominators
6311 for entry block and its copy. Anything that is outside of the
6312 region, but was dominated by something inside needs recounting as
6314 if (update_dominance
)
6316 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6317 doms
.safe_push (get_bb_original (entry
->dest
));
6318 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6322 /* Add the other PHI node arguments. */
6323 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6325 if (free_region_copy
)
6328 free_original_copy_tables ();
/* NOTE(review): lossy extraction -- code left byte-identical.
   Linear membership scan of BBS; loop body line is missing here.  */
6332 /* Checks if BB is part of the region defined by N_REGION BBS. */
6334 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6338 for (n
= 0; n
< n_region
; n
++)
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6346 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6347 are stored to REGION_COPY in the same order in that they appear
6348 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6349 the region, EXIT an exit from it. The condition guarding EXIT
6350 is moved to ENTRY. Returns true if duplication succeeds, false
6376 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6377 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6378 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6381 bool free_region_copy
= false;
6382 struct loop
*loop
= exit
->dest
->loop_father
;
6383 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6384 basic_block switch_bb
, entry_bb
, nentry_bb
;
6385 vec
<basic_block
> doms
;
6386 int total_freq
= 0, exit_freq
= 0;
6387 profile_count total_count
= profile_count::uninitialized (),
6388 exit_count
= profile_count::uninitialized ();
6389 edge exits
[2], nexits
[2], e
;
6390 gimple_stmt_iterator gsi
;
6393 basic_block exit_bb
;
6397 struct loop
*target
, *aloop
, *cloop
;
/* EXIT->src must be a two-way branch; exits[1] is its other edge.  */
6399 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6401 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6403 if (!can_copy_bbs_p (region
, n_region
))
6406 initialize_original_copy_tables ();
6407 set_loop_copy (orig_loop
, loop
);
/* Duplicate any subloops fully contained in the region.  */
6410 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6412 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6414 cloop
= duplicate_loop (aloop
, target
);
6415 duplicate_subloops (aloop
, cloop
);
6421 region_copy
= XNEWVEC (basic_block
, n_region
);
6422 free_region_copy
= true;
6425 gcc_assert (!need_ssa_update_p (cfun
));
6427 /* Record blocks outside the region that are dominated by something
6429 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
/* Capture counts/frequencies for rescaling, with clamping.  */
6431 if (exit
->src
->count
> 0)
6433 total_count
= exit
->src
->count
;
6434 exit_count
= exit
->count
;
6435 /* Fix up corner cases, to avoid division by zero or creation of negative
6437 if (exit_count
> total_count
)
6438 exit_count
= total_count
;
6442 total_freq
= exit
->src
->frequency
;
6443 exit_freq
= EDGE_FREQUENCY (exit
);
6444 /* Fix up corner cases, to avoid division by zero or creation of negative
6446 if (total_freq
== 0)
6448 if (exit_freq
> total_freq
)
6449 exit_freq
= total_freq
;
6452 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6453 split_edge_bb_loc (exit
), true);
6454 if (total_count
.initialized_p ())
6456 scale_bbs_frequencies_profile_count (region
, n_region
,
6457 total_count
- exit_count
,
6459 scale_bbs_frequencies_profile_count (region_copy
, n_region
, exit_count
,
6464 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6466 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6469 /* Create the switch block, and put the exit condition to it. */
6470 entry_bb
= entry
->dest
;
6471 nentry_bb
= get_bb_copy (entry_bb
);
6472 if (!last_stmt (entry
->src
)
6473 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6474 switch_bb
= entry
->src
;
6476 switch_bb
= split_edge (entry
);
6477 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
/* Copy EXIT's guarding GIMPLE_COND into the switch block.  */
6479 gsi
= gsi_last_bb (switch_bb
);
6480 cond_stmt
= last_stmt (exit
->src
);
6481 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6482 cond_stmt
= gimple_copy (cond_stmt
);
6484 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6486 sorig
= single_succ_edge (switch_bb
);
6487 sorig
->flags
= exits
[1]->flags
;
6488 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6490 /* Register the new edge from SWITCH_BB in loop exit lists. */
6491 rescan_loop_exit (snew
, true, false);
6493 /* Add the PHI node arguments. */
6494 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6496 /* Get rid of now superfluous conditions and associated edges (and phi node
6498 exit_bb
= exit
->dest
;
6500 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6501 PENDING_STMT (e
) = NULL
;
6503 /* The latch of ORIG_LOOP was copied, and so was the backedge
6504 to the original header. We redirect this backedge to EXIT_BB. */
6505 for (i
= 0; i
< n_region
; i
++)
6506 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6508 gcc_assert (single_succ_edge (region_copy
[i
]));
6509 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6510 PENDING_STMT (e
) = NULL
;
/* Propagate PHI arguments across the redirected backedge.  */
6511 for (psi
= gsi_start_phis (exit_bb
);
6516 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6517 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6520 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6521 PENDING_STMT (e
) = NULL
;
6523 /* Anything that is outside of the region, but was dominated by something
6524 inside needs to update dominance info. */
6525 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6527 /* Update the SSA web. */
6528 update_ssa (TODO_update_ssa
);
6530 if (free_region_copy
)
6533 free_original_copy_tables ();
/* NOTE(review): lossy extraction -- code left byte-identical.
   Recursive dominator-tree walk collecting blocks into *BBS_P.  */
6537 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6538 adding blocks when the dominator traversal reaches EXIT. This
6539 function silently assumes that ENTRY strictly dominates EXIT. */
6542 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6543 vec
<basic_block
> *bbs_p
)
6547 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6549 son
= next_dom_son (CDI_DOMINATORS
, son
))
6551 bbs_p
->safe_push (son
);
6553 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6557 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6558 The duplicates are recorded in VARS_MAP. */
6561 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6564 tree t
= *tp
, new_t
;
6565 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
/* Already in the destination context: nothing to duplicate.  */
6567 if (DECL_CONTEXT (t
) == to_context
)
/* Memoize via VARS_MAP so each decl is duplicated only once.  */
6571 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6577 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6578 add_local_decl (f
, new_t
);
/* Non-VAR decls here must be CONST_DECLs; plain copy suffices.  */
6582 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6583 new_t
= copy_node (t
);
6585 DECL_CONTEXT (new_t
) = to_context
;
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6596 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6597 VARS_MAP maps old ssa names and var_decls to the new ones. */
6600 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6605 gcc_assert (!virtual_operand_p (name
));
/* Memoized lookup -- each SSA name is translated at most once.  */
6607 tree
*loc
= vars_map
->get (name
);
6611 tree decl
= SSA_NAME_VAR (name
);
/* Named SSA names: duplicate the underlying decl first, then build the
   new name in the destination function.  */
6614 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
6615 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6616 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6617 decl
, SSA_NAME_DEF_STMT (name
));
/* Anonymous SSA names are copied directly.  */
6620 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6621 name
, SSA_NAME_DEF_STMT (name
));
6623 /* Now that we've used the def stmt to define new_name, make sure it
6624 doesn't define name anymore. */
6625 SSA_NAME_DEF_STMT (name
) = NULL
;
6627 vars_map
->put (name
, new_name
);
/* Fields of the move_stmt_d walk-state record (struct header not visible
   in this chunk): decl/SSA-name remap table, label remap table, and the
   EH-region remap table used while moving statements between functions.  */
6641 hash_map
<tree
, tree
> *vars_map
;
6642 htab_t new_label_map
;
6643 hash_map
<void *, void *> *eh_map
;
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6647 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6648 contained in *TP if it has been ORIG_BLOCK previously and change the
6649 DECL_CONTEXT of every local variable referenced in *TP. */
6652 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6654 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6655 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
/* Expressions carrying a TREE_BLOCK: retarget ORIG_BLOCK (or any block
   when ORIG_BLOCK is NULL) to NEW_BLOCK; under checking, insist the
   block is nested inside ORIG_BLOCK otherwise.  */
6660 tree block
= TREE_BLOCK (t
);
6661 if (block
== NULL_TREE
)
6663 else if (block
== p
->orig_block
6664 || p
->orig_block
== NULL_TREE
)
6665 TREE_SET_BLOCK (t
, p
->new_block
);
6666 else if (flag_checking
)
6668 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6669 block
= BLOCK_SUPERCONTEXT (block
);
6670 gcc_assert (block
== p
->orig_block
);
/* Decls and SSA names: remap into the destination function.  */
6673 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6675 if (TREE_CODE (t
) == SSA_NAME
)
6676 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6677 else if (TREE_CODE (t
) == PARM_DECL
6678 && gimple_in_ssa_p (cfun
))
6679 *tp
= *(p
->vars_map
->get (t
));
6680 else if (TREE_CODE (t
) == LABEL_DECL
)
6682 if (p
->new_label_map
)
6684 struct tree_map in
, *out
;
6686 out
= (struct tree_map
*)
6687 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6692 DECL_CONTEXT (t
) = p
->to_context
;
6694 else if (p
->remap_decls_p
)
6696 /* Replace T with its duplicate. T should no longer appear in the
6697 parent function, so this looks wasteful; however, it may appear
6698 in referenced_vars, and more importantly, as virtual operands of
6699 statements, and in alias lists of other variables. It would be
6700 quite difficult to expunge it from all those places. ??? It might
6701 suffice to do this for addressable variables. */
6702 if ((VAR_P (t
) && !is_global_var (t
))
6703 || TREE_CODE (t
) == CONST_DECL
)
6704 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6708 else if (TYPE_P (t
))
/* NOTE(review): lossy extraction -- code left byte-identical.
   Source EH region number -> corresponding region number in the
   destination function, via p->eh_map.  */
6714 /* Helper for move_stmt_r. Given an EH region number for the source
6715 function, map that to the duplicate EH regio number in the dest. */
6718 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6720 eh_region old_r
, new_r
;
6722 old_r
= get_eh_region_from_number (old_nr
);
6723 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6725 return new_r
->index
;
/* NOTE(review): lossy extraction -- code left byte-identical.
   INTEGER_CST wrapper around move_stmt_eh_region_nr.  */
6728 /* Similar, but operate on INTEGER_CSTs. */
6731 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6735 old_nr
= tree_to_shwi (old_t_nr
);
6736 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6738 return build_int_cst (integer_type_node
, new_nr
);
/* NOTE(review): lossy extraction -- fragments keep original line numbers;
   some lines are missing.  Code left byte-identical, comments only.  */
6741 /* Like move_stmt_op, but for gimple statements.
6743 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6744 contained in the current statement in *GSI_P and change the
6745 DECL_CONTEXT of every local variable referenced in the current
6749 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6750 struct walk_stmt_info
*wi
)
6752 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6753 gimple
*stmt
= gsi_stmt (*gsi_p
);
6754 tree block
= gimple_block (stmt
);
6756 if (block
== p
->orig_block
6757 || (p
->orig_block
== NULL_TREE
6758 && block
!= NULL_TREE
))
6759 gimple_set_block (stmt
, p
->new_block
);
6761 switch (gimple_code (stmt
))
6764 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6766 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6767 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6768 switch (DECL_FUNCTION_CODE (fndecl
))
/* __builtin_eh_copy_values takes the region number as arg 1;
   eh_pointer/eh_filter take it as arg 0.  */
6770 case BUILT_IN_EH_COPY_VALUES
:
6771 r
= gimple_call_arg (stmt
, 1);
6772 r
= move_stmt_eh_region_tree_nr (r
, p
);
6773 gimple_call_set_arg (stmt
, 1, r
);
6776 case BUILT_IN_EH_POINTER
:
6777 case BUILT_IN_EH_FILTER
:
6778 r
= gimple_call_arg (stmt
, 0);
6779 r
= move_stmt_eh_region_tree_nr (r
, p
);
6780 gimple_call_set_arg (stmt
, 0, r
);
/* GIMPLE_RESX and GIMPLE_EH_DISPATCH carry explicit region numbers
   that must be remapped into the destination function.  */
6791 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6792 int r
= gimple_resx_region (resx_stmt
);
6793 r
= move_stmt_eh_region_nr (r
, p
);
6794 gimple_resx_set_region (resx_stmt
, r
);
6798 case GIMPLE_EH_DISPATCH
:
6800 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6801 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6802 r
= move_stmt_eh_region_nr (r
, p
);
6803 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6807 case GIMPLE_OMP_RETURN
:
6808 case GIMPLE_OMP_CONTINUE
:
6811 if (is_gimple_omp (stmt
))
6813 /* Do not remap variables inside OMP directives. Variables
6814 referenced in clauses and directive header belong to the
6815 parent function and should not be moved into the child
6817 bool save_remap_decls_p
= p
->remap_decls_p
;
6818 p
->remap_decls_p
= false;
6819 *handled_ops_p
= true;
6821 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6824 p
->remap_decls_p
= save_remap_decls_p
;
6832 /* Move basic block BB from function CFUN to function DEST_FN. The
6833 block is moved out of the original linked list and placed after
6834 block AFTER in the new list. Also, the block is removed from the
6835 original array of blocks and placed in DEST_FN's array of blocks.
6836 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6837 updated to reflect the moved edges.
6839 The local variables are remapped to new instances, VARS_MAP is used
6840 to record the mapping. */
6843 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6844 basic_block after
, bool update_edge_count_p
,
6845 struct move_stmt_d
*d
)
6847 struct control_flow_graph
*cfg
;
6850 gimple_stmt_iterator si
;
6851 unsigned old_len
, new_len
;
6853 /* Remove BB from dominance structures. */
6854 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6856 /* Move BB from its current loop to the copy in the new function. */
6859 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6861 bb
->loop_father
= new_loop
;
6864 /* Link BB to the new linked list. */
6865 move_block_after (bb
, after
);
6867 /* Update the edge count in the corresponding flowgraphs. */
6868 if (update_edge_count_p
)
6869 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6871 cfun
->cfg
->x_n_edges
--;
6872 dest_cfun
->cfg
->x_n_edges
++;
6875 /* Remove BB from the original basic block array. */
6876 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6877 cfun
->cfg
->x_n_basic_blocks
--;
6879 /* Grow DEST_CFUN's basic block array if needed. */
6880 cfg
= dest_cfun
->cfg
;
6881 cfg
->x_n_basic_blocks
++;
6882 if (bb
->index
>= cfg
->x_last_basic_block
)
6883 cfg
->x_last_basic_block
= bb
->index
+ 1;
6885 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6886 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6888 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6889 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6892 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6894 /* Remap the variables in phi nodes. */
6895 for (gphi_iterator psi
= gsi_start_phis (bb
);
6898 gphi
*phi
= psi
.phi ();
6900 tree op
= PHI_RESULT (phi
);
6904 if (virtual_operand_p (op
))
6906 /* Remove the phi nodes for virtual operands (alias analysis will be
6907 run for the new function, anyway). */
6908 remove_phi_node (&psi
, true);
6912 SET_PHI_RESULT (phi
,
6913 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6914 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6916 op
= USE_FROM_PTR (use
);
6917 if (TREE_CODE (op
) == SSA_NAME
)
6918 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6921 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6923 location_t locus
= gimple_phi_arg_location (phi
, i
);
6924 tree block
= LOCATION_BLOCK (locus
);
6926 if (locus
== UNKNOWN_LOCATION
)
6928 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6930 locus
= set_block (locus
, d
->new_block
);
6931 gimple_phi_arg_set_location (phi
, i
, locus
);
6938 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6940 gimple
*stmt
= gsi_stmt (si
);
6941 struct walk_stmt_info wi
;
6943 memset (&wi
, 0, sizeof (wi
));
6945 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6947 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
6949 tree label
= gimple_label_label (label_stmt
);
6950 int uid
= LABEL_DECL_UID (label
);
6952 gcc_assert (uid
> -1);
6954 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6955 if (old_len
<= (unsigned) uid
)
6957 new_len
= 3 * uid
/ 2 + 1;
6958 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6961 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6962 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6964 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6966 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6967 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6970 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6971 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6973 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6974 gimple_remove_stmt_histograms (cfun
, stmt
);
6976 /* We cannot leave any operands allocated from the operand caches of
6977 the current function. */
6978 free_stmt_operands (cfun
, stmt
);
6979 push_cfun (dest_cfun
);
6984 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6985 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6987 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6988 if (d
->orig_block
== NULL_TREE
6989 || block
== d
->orig_block
)
6990 e
->goto_locus
= set_block (e
->goto_locus
, d
->new_block
);
6994 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6995 the outermost EH region. Use REGION as the incoming base EH region. */
6998 find_outermost_region_in_block (struct function
*src_cfun
,
6999 basic_block bb
, eh_region region
)
7001 gimple_stmt_iterator si
;
7003 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7005 gimple
*stmt
= gsi_stmt (si
);
7006 eh_region stmt_region
;
7009 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
7010 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
7014 region
= stmt_region
;
7015 else if (stmt_region
!= region
)
7017 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
7018 gcc_assert (region
!= NULL
);
7027 new_label_mapper (tree decl
, void *data
)
7029 htab_t hash
= (htab_t
) data
;
7033 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
7035 m
= XNEW (struct tree_map
);
7036 m
->hash
= DECL_UID (decl
);
7037 m
->base
.from
= decl
;
7038 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
7039 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
7040 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
7041 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
7043 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
7044 gcc_assert (*slot
== NULL
);
7051 /* Tree walker to replace the decls used inside value expressions by
7055 replace_block_vars_by_duplicates_1 (tree
*tp
, int *walk_subtrees
, void *data
)
7057 struct replace_decls_d
*rd
= (struct replace_decls_d
*)data
;
7059 switch (TREE_CODE (*tp
))
7064 replace_by_duplicate_decl (tp
, rd
->vars_map
, rd
->to_context
);
7070 if (IS_TYPE_OR_DECL_P (*tp
))
7071 *walk_subtrees
= false;
7076 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7080 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
7085 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
7088 if (!VAR_P (t
) && TREE_CODE (t
) != CONST_DECL
)
7090 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
7093 if (VAR_P (*tp
) && DECL_HAS_VALUE_EXPR_P (*tp
))
7095 tree x
= DECL_VALUE_EXPR (*tp
);
7096 struct replace_decls_d rd
= { vars_map
, to_context
};
7098 walk_tree (&x
, replace_block_vars_by_duplicates_1
, &rd
, NULL
);
7099 SET_DECL_VALUE_EXPR (t
, x
);
7100 DECL_HAS_VALUE_EXPR_P (t
) = 1;
7102 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
7107 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
7108 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
7111 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7115 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
7118 /* Discard it from the old loop array. */
7119 (*get_loops (fn1
))[loop
->num
] = NULL
;
7121 /* Place it in the new loop array, assigning it a new number. */
7122 loop
->num
= number_of_loops (fn2
);
7123 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
7125 /* Recurse to children. */
7126 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
7127 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
7130 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7131 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7134 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
7139 bitmap bbs
= BITMAP_ALLOC (NULL
);
7142 gcc_assert (entry
!= NULL
);
7143 gcc_assert (entry
!= exit
);
7144 gcc_assert (bbs_p
!= NULL
);
7146 gcc_assert (bbs_p
->length () > 0);
7148 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7149 bitmap_set_bit (bbs
, bb
->index
);
7151 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
7152 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
7154 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7158 gcc_assert (single_pred_p (entry
));
7159 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
7162 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
7165 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
7170 gcc_assert (single_succ_p (exit
));
7171 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
7174 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
7177 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7184 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7187 gather_ssa_name_hash_map_from (tree
const &from
, tree
const &, void *data
)
7189 bitmap release_names
= (bitmap
)data
;
7191 if (TREE_CODE (from
) != SSA_NAME
)
7194 bitmap_set_bit (release_names
, SSA_NAME_VERSION (from
));
7198 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7199 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7200 single basic block in the original CFG and the new basic block is
7201 returned. DEST_CFUN must not have a CFG yet.
7203 Note that the region need not be a pure SESE region. Blocks inside
7204 the region may contain calls to abort/exit. The only restriction
7205 is that ENTRY_BB should be the only entry point and it must
7208 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7209 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7210 to the new function.
7212 All local variables referenced in the region are assumed to be in
7213 the corresponding BLOCK_VARS and unexpanded variable lists
7214 associated with DEST_CFUN.
7216 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7217 reimplement move_sese_region_to_fn by duplicating the region rather than
7221 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7222 basic_block exit_bb
, tree orig_block
)
7224 vec
<basic_block
> bbs
, dom_bbs
;
7225 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7226 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7227 struct function
*saved_cfun
= cfun
;
7228 int *entry_flag
, *exit_flag
;
7229 unsigned *entry_prob
, *exit_prob
;
7230 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7233 htab_t new_label_map
;
7234 hash_map
<void *, void *> *eh_map
;
7235 struct loop
*loop
= entry_bb
->loop_father
;
7236 struct loop
*loop0
= get_loop (saved_cfun
, 0);
7237 struct move_stmt_d d
;
7239 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7241 gcc_assert (entry_bb
!= exit_bb
7243 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7245 /* Collect all the blocks in the region. Manually add ENTRY_BB
7246 because it won't be added by dfs_enumerate_from. */
7248 bbs
.safe_push (entry_bb
);
7249 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7252 verify_sese (entry_bb
, exit_bb
, &bbs
);
7254 /* The blocks that used to be dominated by something in BBS will now be
7255 dominated by the new block. */
7256 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7260 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7261 the predecessor edges to ENTRY_BB and the successor edges to
7262 EXIT_BB so that we can re-attach them to the new basic block that
7263 will replace the region. */
7264 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7265 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7266 entry_flag
= XNEWVEC (int, num_entry_edges
);
7267 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
7269 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7271 entry_prob
[i
] = e
->probability
;
7272 entry_flag
[i
] = e
->flags
;
7273 entry_pred
[i
++] = e
->src
;
7279 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7280 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7281 exit_flag
= XNEWVEC (int, num_exit_edges
);
7282 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
7284 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7286 exit_prob
[i
] = e
->probability
;
7287 exit_flag
[i
] = e
->flags
;
7288 exit_succ
[i
++] = e
->dest
;
7300 /* Switch context to the child function to initialize DEST_FN's CFG. */
7301 gcc_assert (dest_cfun
->cfg
== NULL
);
7302 push_cfun (dest_cfun
);
7304 init_empty_tree_cfg ();
7306 /* Initialize EH information for the new function. */
7308 new_label_map
= NULL
;
7311 eh_region region
= NULL
;
7313 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7314 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
7316 init_eh_for_function ();
7319 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7320 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7321 new_label_mapper
, new_label_map
);
7325 /* Initialize an empty loop tree. */
7326 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7327 init_loops_structure (dest_cfun
, loops
, 1);
7328 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7329 set_loops_for_fn (dest_cfun
, loops
);
7331 /* Move the outlined loop tree part. */
7332 num_nodes
= bbs
.length ();
7333 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7335 if (bb
->loop_father
->header
== bb
)
7337 struct loop
*this_loop
= bb
->loop_father
;
7338 struct loop
*outer
= loop_outer (this_loop
);
7340 /* If the SESE region contains some bbs ending with
7341 a noreturn call, those are considered to belong
7342 to the outermost loop in saved_cfun, rather than
7343 the entry_bb's loop_father. */
7347 num_nodes
-= this_loop
->num_nodes
;
7348 flow_loop_tree_node_remove (bb
->loop_father
);
7349 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7350 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7353 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7356 /* Remove loop exits from the outlined region. */
7357 if (loops_for_fn (saved_cfun
)->exits
)
7358 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7360 struct loops
*l
= loops_for_fn (saved_cfun
);
7362 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7365 l
->exits
->clear_slot (slot
);
7370 /* Adjust the number of blocks in the tree root of the outlined part. */
7371 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7373 /* Setup a mapping to be used by move_block_to_fn. */
7374 loop
->aux
= current_loops
->tree_root
;
7375 loop0
->aux
= current_loops
->tree_root
;
7379 /* Move blocks from BBS into DEST_CFUN. */
7380 gcc_assert (bbs
.length () >= 2);
7381 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7382 hash_map
<tree
, tree
> vars_map
;
7384 memset (&d
, 0, sizeof (d
));
7385 d
.orig_block
= orig_block
;
7386 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7387 d
.from_context
= cfun
->decl
;
7388 d
.to_context
= dest_cfun
->decl
;
7389 d
.vars_map
= &vars_map
;
7390 d
.new_label_map
= new_label_map
;
7392 d
.remap_decls_p
= true;
7394 if (gimple_in_ssa_p (cfun
))
7395 for (tree arg
= DECL_ARGUMENTS (d
.to_context
); arg
; arg
= DECL_CHAIN (arg
))
7397 tree narg
= make_ssa_name_fn (dest_cfun
, arg
, gimple_build_nop ());
7398 set_ssa_default_def (dest_cfun
, arg
, narg
);
7399 vars_map
.put (arg
, narg
);
7402 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7404 /* No need to update edge counts on the last block. It has
7405 already been updated earlier when we detached the region from
7406 the original CFG. */
7407 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7413 /* Loop sizes are no longer correct, fix them up. */
7414 loop
->num_nodes
-= num_nodes
;
7415 for (struct loop
*outer
= loop_outer (loop
);
7416 outer
; outer
= loop_outer (outer
))
7417 outer
->num_nodes
-= num_nodes
;
7418 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7420 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7423 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7428 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7430 dest_cfun
->has_simduid_loops
= true;
7432 if (aloop
->force_vectorize
)
7433 dest_cfun
->has_force_vectorize_loops
= true;
7437 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7441 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7443 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7444 = BLOCK_SUBBLOCKS (orig_block
);
7445 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7446 block
; block
= BLOCK_CHAIN (block
))
7447 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7448 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7451 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7452 &vars_map
, dest_cfun
->decl
);
7455 htab_delete (new_label_map
);
7459 if (gimple_in_ssa_p (cfun
))
7461 /* We need to release ssa-names in a defined order, so first find them,
7462 and then iterate in ascending version order. */
7463 bitmap release_names
= BITMAP_ALLOC (NULL
);
7464 vars_map
.traverse
<void *, gather_ssa_name_hash_map_from
> (release_names
);
7467 EXECUTE_IF_SET_IN_BITMAP (release_names
, 0, i
, bi
)
7468 release_ssa_name (ssa_name (i
));
7469 BITMAP_FREE (release_names
);
7472 /* Rewire the entry and exit blocks. The successor to the entry
7473 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7474 the child function. Similarly, the predecessor of DEST_FN's
7475 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7476 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7477 various CFG manipulation function get to the right CFG.
7479 FIXME, this is silly. The CFG ought to become a parameter to
7481 push_cfun (dest_cfun
);
7482 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7484 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7487 /* Back in the original function, the SESE region has disappeared,
7488 create a new basic block in its place. */
7489 bb
= create_empty_bb (entry_pred
[0]);
7491 add_bb_to_loop (bb
, loop
);
7492 for (i
= 0; i
< num_entry_edges
; i
++)
7494 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7495 e
->probability
= entry_prob
[i
];
7498 for (i
= 0; i
< num_exit_edges
; i
++)
7500 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7501 e
->probability
= exit_prob
[i
];
7504 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7505 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7506 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7523 /* Dump default def DEF to file FILE using FLAGS and indentation
7527 dump_default_def (FILE *file
, tree def
, int spc
, dump_flags_t flags
)
7529 for (int i
= 0; i
< spc
; ++i
)
7530 fprintf (file
, " ");
7531 dump_ssaname_info_to_file (file
, def
, spc
);
7533 print_generic_expr (file
, TREE_TYPE (def
), flags
);
7534 fprintf (file
, " ");
7535 print_generic_expr (file
, def
, flags
);
7536 fprintf (file
, " = ");
7537 print_generic_expr (file
, SSA_NAME_VAR (def
), flags
);
7538 fprintf (file
, ";\n");
7541 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7545 dump_function_to_file (tree fndecl
, FILE *file
, dump_flags_t flags
)
7547 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7548 struct function
*dsf
;
7549 bool ignore_topmost_bind
= false, any_var
= false;
7552 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7553 && decl_is_tm_clone (fndecl
));
7554 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7556 if (DECL_ATTRIBUTES (fndecl
) != NULL_TREE
)
7558 fprintf (file
, "__attribute__((");
7562 for (chain
= DECL_ATTRIBUTES (fndecl
); chain
;
7563 first
= false, chain
= TREE_CHAIN (chain
))
7566 fprintf (file
, ", ");
7568 print_generic_expr (file
, get_attribute_name (chain
), dump_flags
);
7569 if (TREE_VALUE (chain
) != NULL_TREE
)
7571 fprintf (file
, " (");
7572 print_generic_expr (file
, TREE_VALUE (chain
), dump_flags
);
7573 fprintf (file
, ")");
7577 fprintf (file
, "))\n");
7580 current_function_decl
= fndecl
;
7581 if (flags
& TDF_GIMPLE
)
7583 print_generic_expr (file
, TREE_TYPE (TREE_TYPE (fndecl
)),
7584 dump_flags
| TDF_SLIM
);
7585 fprintf (file
, " __GIMPLE ()\n%s (", function_name (fun
));
7588 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7590 arg
= DECL_ARGUMENTS (fndecl
);
7593 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7594 fprintf (file
, " ");
7595 print_generic_expr (file
, arg
, dump_flags
);
7596 if (DECL_CHAIN (arg
))
7597 fprintf (file
, ", ");
7598 arg
= DECL_CHAIN (arg
);
7600 fprintf (file
, ")\n");
7602 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7603 if (dsf
&& (flags
& TDF_EH
))
7604 dump_eh_tree (file
, dsf
);
7606 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7608 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7609 current_function_decl
= old_current_fndecl
;
7613 /* When GIMPLE is lowered, the variables are no longer available in
7614 BIND_EXPRs, so display them separately. */
7615 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7618 ignore_topmost_bind
= true;
7620 fprintf (file
, "{\n");
7621 if (gimple_in_ssa_p (fun
)
7622 && (flags
& TDF_ALIAS
))
7624 for (arg
= DECL_ARGUMENTS (fndecl
); arg
!= NULL
;
7625 arg
= DECL_CHAIN (arg
))
7627 tree def
= ssa_default_def (fun
, arg
);
7629 dump_default_def (file
, def
, 2, flags
);
7632 tree res
= DECL_RESULT (fun
->decl
);
7633 if (res
!= NULL_TREE
7634 && DECL_BY_REFERENCE (res
))
7636 tree def
= ssa_default_def (fun
, res
);
7638 dump_default_def (file
, def
, 2, flags
);
7641 tree static_chain
= fun
->static_chain_decl
;
7642 if (static_chain
!= NULL_TREE
)
7644 tree def
= ssa_default_def (fun
, static_chain
);
7646 dump_default_def (file
, def
, 2, flags
);
7650 if (!vec_safe_is_empty (fun
->local_decls
))
7651 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7653 print_generic_decl (file
, var
, flags
);
7654 fprintf (file
, "\n");
7661 if (gimple_in_ssa_p (cfun
))
7662 FOR_EACH_SSA_NAME (ix
, name
, cfun
)
7664 if (!SSA_NAME_VAR (name
))
7666 fprintf (file
, " ");
7667 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7668 fprintf (file
, " ");
7669 print_generic_expr (file
, name
, flags
);
7670 fprintf (file
, ";\n");
7677 if (fun
&& fun
->decl
== fndecl
7679 && basic_block_info_for_fn (fun
))
7681 /* If the CFG has been built, emit a CFG-based dump. */
7682 if (!ignore_topmost_bind
)
7683 fprintf (file
, "{\n");
7685 if (any_var
&& n_basic_blocks_for_fn (fun
))
7686 fprintf (file
, "\n");
7688 FOR_EACH_BB_FN (bb
, fun
)
7689 dump_bb (file
, bb
, 2, flags
);
7691 fprintf (file
, "}\n");
7693 else if (fun
->curr_properties
& PROP_gimple_any
)
7695 /* The function is now in GIMPLE form but the CFG has not been
7696 built yet. Emit the single sequence of GIMPLE statements
7697 that make up its body. */
7698 gimple_seq body
= gimple_body (fndecl
);
7700 if (gimple_seq_first_stmt (body
)
7701 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7702 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7703 print_gimple_seq (file
, body
, 0, flags
);
7706 if (!ignore_topmost_bind
)
7707 fprintf (file
, "{\n");
7710 fprintf (file
, "\n");
7712 print_gimple_seq (file
, body
, 2, flags
);
7713 fprintf (file
, "}\n");
7720 /* Make a tree based dump. */
7721 chain
= DECL_SAVED_TREE (fndecl
);
7722 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7724 if (ignore_topmost_bind
)
7726 chain
= BIND_EXPR_BODY (chain
);
7734 if (!ignore_topmost_bind
)
7736 fprintf (file
, "{\n");
7737 /* No topmost bind, pretend it's ignored for later. */
7738 ignore_topmost_bind
= true;
7744 fprintf (file
, "\n");
7746 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7747 if (ignore_topmost_bind
)
7748 fprintf (file
, "}\n");
7751 if (flags
& TDF_ENUMERATE_LOCALS
)
7752 dump_enumerated_decls (file
, flags
);
7753 fprintf (file
, "\n\n");
7755 current_function_decl
= old_current_fndecl
;
7758 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7761 debug_function (tree fn
, dump_flags_t flags
)
7763 dump_function_to_file (fn
, stderr
, flags
);
7767 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7770 print_pred_bbs (FILE *file
, basic_block bb
)
7775 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7776 fprintf (file
, "bb_%d ", e
->src
->index
);
7780 /* Print on FILE the indexes for the successors of basic_block BB. */
7783 print_succ_bbs (FILE *file
, basic_block bb
)
7788 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7789 fprintf (file
, "bb_%d ", e
->dest
->index
);
7792 /* Print to FILE the basic block BB following the VERBOSITY level. */
7795 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7797 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7798 memset ((void *) s_indent
, ' ', (size_t) indent
);
7799 s_indent
[indent
] = '\0';
7801 /* Print basic_block's header. */
7804 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7805 print_pred_bbs (file
, bb
);
7806 fprintf (file
, "}, succs = {");
7807 print_succ_bbs (file
, bb
);
7808 fprintf (file
, "})\n");
7811 /* Print basic_block's body. */
7814 fprintf (file
, "%s {\n", s_indent
);
7815 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7816 fprintf (file
, "%s }\n", s_indent
);
7820 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7822 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7823 VERBOSITY level this outputs the contents of the loop, or just its
7827 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7835 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7836 memset ((void *) s_indent
, ' ', (size_t) indent
);
7837 s_indent
[indent
] = '\0';
7839 /* Print loop's header. */
7840 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7842 fprintf (file
, "header = %d", loop
->header
->index
);
7845 fprintf (file
, "deleted)\n");
7849 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7851 fprintf (file
, ", multiple latches");
7852 fprintf (file
, ", niter = ");
7853 print_generic_expr (file
, loop
->nb_iterations
);
7855 if (loop
->any_upper_bound
)
7857 fprintf (file
, ", upper_bound = ");
7858 print_decu (loop
->nb_iterations_upper_bound
, file
);
7860 if (loop
->any_likely_upper_bound
)
7862 fprintf (file
, ", likely_upper_bound = ");
7863 print_decu (loop
->nb_iterations_likely_upper_bound
, file
);
7866 if (loop
->any_estimate
)
7868 fprintf (file
, ", estimate = ");
7869 print_decu (loop
->nb_iterations_estimate
, file
);
7871 fprintf (file
, ")\n");
7873 /* Print loop's body. */
7876 fprintf (file
, "%s{\n", s_indent
);
7877 FOR_EACH_BB_FN (bb
, cfun
)
7878 if (bb
->loop_father
== loop
)
7879 print_loops_bb (file
, bb
, indent
, verbosity
);
7881 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7882 fprintf (file
, "%s}\n", s_indent
);
7886 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7887 spaces. Following VERBOSITY level this outputs the contents of the
7888 loop, or just its structure. */
7891 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7897 print_loop (file
, loop
, indent
, verbosity
);
7898 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7901 /* Follow a CFG edge from the entry point of the program, and on entry
7902 of a loop, pretty print the loop structure on FILE. */
7905 print_loops (FILE *file
, int verbosity
)
7909 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7910 fprintf (file
, "\nLoops in function: %s\n", current_function_name ());
7911 if (bb
&& bb
->loop_father
)
7912 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7918 debug (struct loop
&ref
)
7920 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7924 debug (struct loop
*ptr
)
7929 fprintf (stderr
, "<nil>\n");
7932 /* Dump a loop verbosely. */
7935 debug_verbose (struct loop
&ref
)
7937 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7941 debug_verbose (struct loop
*ptr
)
7946 fprintf (stderr
, "<nil>\n");
7950 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7953 debug_loops (int verbosity
)
7955 print_loops (stderr
, verbosity
);
7958 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7961 debug_loop (struct loop
*loop
, int verbosity
)
7963 print_loop (stderr
, loop
, 0, verbosity
);
7966 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7970 debug_loop_num (unsigned num
, int verbosity
)
7972 debug_loop (get_loop (cfun
, num
), verbosity
);
7975 /* Return true if BB ends with a call, possibly followed by some
7976 instructions that must stay with the call. Return false,
7980 gimple_block_ends_with_call_p (basic_block bb
)
7982 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7983 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7987 /* Return true if BB ends with a conditional branch. Return false,
7991 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7993 gimple
*stmt
= last_stmt (CONST_CAST_BB (bb
));
7994 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7998 /* Return true if statement T may terminate execution of BB in ways not
7999 explicitly represtented in the CFG. */
8002 stmt_can_terminate_bb_p (gimple
*t
)
8004 tree fndecl
= NULL_TREE
;
8007 /* Eh exception not handled internally terminates execution of the whole
8009 if (stmt_can_throw_external (t
))
8012 /* NORETURN and LONGJMP calls already have an edge to exit.
8013 CONST and PURE calls do not need one.
8014 We don't currently check for CONST and PURE here, although
8015 it would be a good idea, because those attributes are
8016 figured out from the RTL in mark_constant_function, and
8017 the counter incrementation code from -fprofile-arcs
8018 leads to different results from -fbranch-probabilities. */
8019 if (is_gimple_call (t
))
8021 fndecl
= gimple_call_fndecl (t
);
8022 call_flags
= gimple_call_flags (t
);
8025 if (is_gimple_call (t
)
8027 && DECL_BUILT_IN (fndecl
)
8028 && (call_flags
& ECF_NOTHROW
)
8029 && !(call_flags
& ECF_RETURNS_TWICE
)
8030 /* fork() doesn't really return twice, but the effect of
8031 wrapping it in __gcov_fork() which calls __gcov_flush()
8032 and clears the counters before forking has the same
8033 effect as returning twice. Force a fake edge. */
8034 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
8035 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
8038 if (is_gimple_call (t
))
8044 if (call_flags
& (ECF_PURE
| ECF_CONST
)
8045 && !(call_flags
& ECF_LOOPING_CONST_OR_PURE
))
8048 /* Function call may do longjmp, terminate program or do other things.
8049 Special case noreturn that have non-abnormal edges out as in this case
8050 the fact is sufficiently represented by lack of edges out of T. */
8051 if (!(call_flags
& ECF_NORETURN
))
8055 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8056 if ((e
->flags
& EDGE_FAKE
) == 0)
8060 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
8061 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
8068 /* Add fake edges to the function exit for any non constant and non
8069 noreturn calls (or noreturn calls with EH/abnormal edges),
8070 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8071 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8074 The goal is to expose cases in which entering a basic block does
8075 not imply that all subsequent instructions must be executed. */
8078 gimple_flow_call_edges_add (sbitmap blocks
)
8081 int blocks_split
= 0;
8082 int last_bb
= last_basic_block_for_fn (cfun
);
8083 bool check_last_block
= false;
8085 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8089 check_last_block
= true;
8091 check_last_block
= bitmap_bit_p (blocks
,
8092 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8094 /* In the last basic block, before epilogue generation, there will be
8095 a fallthru edge to EXIT. Special care is required if the last insn
8096 of the last basic block is a call because make_edge folds duplicate
8097 edges, which would result in the fallthru edge also being marked
8098 fake, which would result in the fallthru edge being removed by
8099 remove_fake_edges, which would result in an invalid CFG.
8101 Moreover, we can't elide the outgoing fake edge, since the block
8102 profiler needs to take this into account in order to solve the minimal
8103 spanning tree in the case that the call doesn't return.
8105 Handle this by adding a dummy instruction in a new last basic block. */
8106 if (check_last_block
)
8108 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8109 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8112 if (!gsi_end_p (gsi
))
8115 if (t
&& stmt_can_terminate_bb_p (t
))
8119 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8122 gsi_insert_on_edge (e
, gimple_build_nop ());
8123 gsi_commit_edge_inserts ();
8128 /* Now add fake edges to the function exit for any non constant
8129 calls since there is no way that we can determine if they will
8131 for (i
= 0; i
< last_bb
; i
++)
8133 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8134 gimple_stmt_iterator gsi
;
8135 gimple
*stmt
, *last_stmt
;
8140 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8143 gsi
= gsi_last_nondebug_bb (bb
);
8144 if (!gsi_end_p (gsi
))
8146 last_stmt
= gsi_stmt (gsi
);
8149 stmt
= gsi_stmt (gsi
);
8150 if (stmt_can_terminate_bb_p (stmt
))
8154 /* The handling above of the final block before the
8155 epilogue should be enough to verify that there is
8156 no edge to the exit block in CFG already.
8157 Calling make_edge in such case would cause us to
8158 mark that edge as fake and remove it later. */
8159 if (flag_checking
&& stmt
== last_stmt
)
8161 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8162 gcc_assert (e
== NULL
);
8165 /* Note that the following may create a new basic block
8166 and renumber the existing basic blocks. */
8167 if (stmt
!= last_stmt
)
8169 e
= split_block (bb
, stmt
);
8173 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8177 while (!gsi_end_p (gsi
));
8182 verify_flow_info ();
8184 return blocks_split
;
8187 /* Removes edge E and all the blocks dominated by it, and updates dominance
8188 information. The IL in E->src needs to be updated separately.
8189 If dominance info is not available, only the edge E is removed.*/
8192 remove_edge_and_dominated_blocks (edge e
)
8194 vec
<basic_block
> bbs_to_remove
= vNULL
;
8195 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
8198 bool none_removed
= false;
8200 basic_block bb
, dbb
;
8203 /* If we are removing a path inside a non-root loop that may change
8204 loop ownership of blocks or remove loops. Mark loops for fixup. */
8206 && loop_outer (e
->src
->loop_father
) != NULL
8207 && e
->src
->loop_father
== e
->dest
->loop_father
)
8208 loops_state_set (LOOPS_NEED_FIXUP
);
8210 if (!dom_info_available_p (CDI_DOMINATORS
))
8216 /* No updating is needed for edges to exit. */
8217 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8219 if (cfgcleanup_altered_bbs
)
8220 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8225 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8226 that is not dominated by E->dest, then this set is empty. Otherwise,
8227 all the basic blocks dominated by E->dest are removed.
8229 Also, to DF_IDOM we store the immediate dominators of the blocks in
8230 the dominance frontier of E (i.e., of the successors of the
8231 removed blocks, if there are any, and of E->dest otherwise). */
8232 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
8237 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
8239 none_removed
= true;
8244 auto_bitmap df
, df_idom
;
8246 bitmap_set_bit (df_idom
,
8247 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
8250 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
8251 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8253 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
8255 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
8256 bitmap_set_bit (df
, f
->dest
->index
);
8259 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8260 bitmap_clear_bit (df
, bb
->index
);
8262 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
8264 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8265 bitmap_set_bit (df_idom
,
8266 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
8270 if (cfgcleanup_altered_bbs
)
8272 /* Record the set of the altered basic blocks. */
8273 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8274 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
8277 /* Remove E and the cancelled blocks. */
8282 /* Walk backwards so as to get a chance to substitute all
8283 released DEFs into debug stmts. See
8284 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8286 for (i
= bbs_to_remove
.length (); i
-- > 0; )
8287 delete_basic_block (bbs_to_remove
[i
]);
8290 /* Update the dominance information. The immediate dominator may change only
8291 for blocks whose immediate dominator belongs to DF_IDOM:
8293 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8294 removal. Let Z the arbitrary block such that idom(Z) = Y and
8295 Z dominates X after the removal. Before removal, there exists a path P
8296 from Y to X that avoids Z. Let F be the last edge on P that is
8297 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8298 dominates W, and because of P, Z does not dominate W), and W belongs to
8299 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8300 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
8302 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8303 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
8305 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
8306 bbs_to_fix_dom
.safe_push (dbb
);
8309 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8311 bbs_to_remove
.release ();
8312 bbs_to_fix_dom
.release ();
8315 /* Purge dead EH edges from basic block BB. */
8318 gimple_purge_dead_eh_edges (basic_block bb
)
8320 bool changed
= false;
8323 gimple
*stmt
= last_stmt (bb
);
8325 if (stmt
&& stmt_can_throw_internal (stmt
))
8328 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8330 if (e
->flags
& EDGE_EH
)
8332 remove_edge_and_dominated_blocks (e
);
8342 /* Purge dead EH edges from basic block listed in BLOCKS. */
8345 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8347 bool changed
= false;
8351 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8353 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8355 /* Earlier gimple_purge_dead_eh_edges could have removed
8356 this basic block already. */
8357 gcc_assert (bb
|| changed
);
8359 changed
|= gimple_purge_dead_eh_edges (bb
);
8365 /* Purge dead abnormal call edges from basic block BB. */
8368 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8370 bool changed
= false;
8373 gimple
*stmt
= last_stmt (bb
);
8375 if (!cfun
->has_nonlocal_label
8376 && !cfun
->calls_setjmp
)
8379 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8382 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8384 if (e
->flags
& EDGE_ABNORMAL
)
8386 if (e
->flags
& EDGE_FALLTHRU
)
8387 e
->flags
&= ~EDGE_ABNORMAL
;
8389 remove_edge_and_dominated_blocks (e
);
8399 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8402 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8404 bool changed
= false;
8408 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8410 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8412 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8413 this basic block already. */
8414 gcc_assert (bb
|| changed
);
8416 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8422 /* This function is called whenever a new edge is created or
8426 gimple_execute_on_growing_pred (edge e
)
8428 basic_block bb
= e
->dest
;
8430 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8431 reserve_phi_args_for_new_edge (bb
);
8434 /* This function is called immediately before edge E is removed from
8435 the edge vector E->dest->preds. */
8438 gimple_execute_on_shrinking_pred (edge e
)
8440 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8441 remove_phi_args (e
);
8444 /*---------------------------------------------------------------------------
8445 Helper functions for Loop versioning
8446 ---------------------------------------------------------------------------*/
8448 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8449 of 'first'. Both of them are dominated by 'new_head' basic block. When
8450 'new_head' was created by 'second's incoming edge it received phi arguments
8451 on the edge by split_edge(). Later, additional edge 'e' was created to
8452 connect 'new_head' and 'first'. Now this routine adds phi args on this
8453 additional edge 'e' that new_head to second edge received as part of edge
8457 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8458 basic_block new_head
, edge e
)
8461 gphi_iterator psi1
, psi2
;
8463 edge e2
= find_edge (new_head
, second
);
8465 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8466 edge, we should always have an edge from NEW_HEAD to SECOND. */
8467 gcc_assert (e2
!= NULL
);
8469 /* Browse all 'second' basic block phi nodes and add phi args to
8470 edge 'e' for 'first' head. PHI args are always in correct order. */
8472 for (psi2
= gsi_start_phis (second
),
8473 psi1
= gsi_start_phis (first
);
8474 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8475 gsi_next (&psi2
), gsi_next (&psi1
))
8479 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8480 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8485 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8486 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8487 the destination of the ELSE part. */
8490 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8491 basic_block second_head ATTRIBUTE_UNUSED
,
8492 basic_block cond_bb
, void *cond_e
)
8494 gimple_stmt_iterator gsi
;
8495 gimple
*new_cond_expr
;
8496 tree cond_expr
= (tree
) cond_e
;
8499 /* Build new conditional expr */
8500 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8501 NULL_TREE
, NULL_TREE
);
8503 /* Add new cond in cond_bb. */
8504 gsi
= gsi_last_bb (cond_bb
);
8505 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8507 /* Adjust edges appropriately to connect new head with first head
8508 as well as second head. */
8509 e0
= single_succ_edge (cond_bb
);
8510 e0
->flags
&= ~EDGE_FALLTHRU
;
8511 e0
->flags
|= EDGE_FALSE_VALUE
;
8515 /* Do book-keeping of basic block BB for the profile consistency checker.
8516 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8517 then do post-pass accounting. Store the counting in RECORD. */
8519 gimple_account_profile_record (basic_block bb
, int after_pass
,
8520 struct profile_record
*record
)
8522 gimple_stmt_iterator i
;
8523 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8525 record
->size
[after_pass
]
8526 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8527 if (bb
->count
.initialized_p ())
8528 record
->time
[after_pass
]
8529 += estimate_num_insns (gsi_stmt (i
),
8530 &eni_time_weights
) * bb
->count
.to_gcov_type ();
8531 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8532 record
->time
[after_pass
]
8533 += estimate_num_insns (gsi_stmt (i
),
8534 &eni_time_weights
) * bb
->frequency
;
8538 struct cfg_hooks gimple_cfg_hooks
= {
8540 gimple_verify_flow_info
,
8541 gimple_dump_bb
, /* dump_bb */
8542 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8543 create_bb
, /* create_basic_block */
8544 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8545 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8546 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8547 remove_bb
, /* delete_basic_block */
8548 gimple_split_block
, /* split_block */
8549 gimple_move_block_after
, /* move_block_after */
8550 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8551 gimple_merge_blocks
, /* merge_blocks */
8552 gimple_predict_edge
, /* predict_edge */
8553 gimple_predicted_by_p
, /* predicted_by_p */
8554 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8555 gimple_duplicate_bb
, /* duplicate_block */
8556 gimple_split_edge
, /* split_edge */
8557 gimple_make_forwarder_block
, /* make_forward_block */
8558 NULL
, /* tidy_fallthru_edge */
8559 NULL
, /* force_nonfallthru */
8560 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8561 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8562 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8563 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8564 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8565 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8566 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8567 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8568 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8569 flush_pending_stmts
, /* flush_pending_stmts */
8570 gimple_empty_block_p
, /* block_empty_p */
8571 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8572 gimple_account_profile_record
,
8576 /* Split all critical edges. */
8579 split_critical_edges (void)
8585 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8586 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8587 mappings around the calls to split_edge. */
8588 start_recording_case_labels ();
8589 FOR_ALL_BB_FN (bb
, cfun
)
8591 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8593 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8595 /* PRE inserts statements to edges and expects that
8596 since split_critical_edges was done beforehand, committing edge
8597 insertions will not split more edges. In addition to critical
8598 edges we must split edges that have multiple successors and
8599 end by control flow statements, such as RESX.
8600 Go ahead and split them too. This matches the logic in
8601 gimple_find_edge_insert_loc. */
8602 else if ((!single_pred_p (e
->dest
)
8603 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8604 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8605 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8606 && !(e
->flags
& EDGE_ABNORMAL
))
8608 gimple_stmt_iterator gsi
;
8610 gsi
= gsi_last_bb (e
->src
);
8611 if (!gsi_end_p (gsi
)
8612 && stmt_ends_bb_p (gsi_stmt (gsi
))
8613 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8614 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8620 end_recording_case_labels ();
8626 const pass_data pass_data_split_crit_edges
=
8628 GIMPLE_PASS
, /* type */
8629 "crited", /* name */
8630 OPTGROUP_NONE
, /* optinfo_flags */
8631 TV_TREE_SPLIT_EDGES
, /* tv_id */
8632 PROP_cfg
, /* properties_required */
8633 PROP_no_crit_edges
, /* properties_provided */
8634 0, /* properties_destroyed */
8635 0, /* todo_flags_start */
8636 0, /* todo_flags_finish */
8639 class pass_split_crit_edges
: public gimple_opt_pass
8642 pass_split_crit_edges (gcc::context
*ctxt
)
8643 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8646 /* opt_pass methods: */
8647 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8649 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8650 }; // class pass_split_crit_edges
8655 make_pass_split_crit_edges (gcc::context
*ctxt
)
8657 return new pass_split_crit_edges (ctxt
);
8661 /* Insert COND expression which is GIMPLE_COND after STMT
8662 in basic block BB with appropriate basic block split
8663 and creation of a new conditionally executed basic block.
8664 Return created basic block. */
8666 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
)
8668 edge fall
= split_block (bb
, stmt
);
8669 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8672 /* Insert cond statement. */
8673 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8674 if (gsi_end_p (iter
))
8675 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8677 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8679 /* Create conditionally executed block. */
8680 new_bb
= create_empty_bb (bb
);
8681 make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8682 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8684 /* Fix edge for split bb. */
8685 fall
->flags
= EDGE_FALSE_VALUE
;
8687 /* Update dominance info. */
8688 if (dom_info_available_p (CDI_DOMINATORS
))
8690 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8691 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8694 /* Update loop info. */
8696 add_bb_to_loop (new_bb
, bb
->loop_father
);
8701 /* Build a ternary operation and gimplify it. Emit code before GSI.
8702 Return the gimple_val holding the result. */
8705 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8706 tree type
, tree a
, tree b
, tree c
)
8709 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8711 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8714 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8718 /* Build a binary operation and gimplify it. Emit code before GSI.
8719 Return the gimple_val holding the result. */
8722 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8723 tree type
, tree a
, tree b
)
8727 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8730 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8734 /* Build a unary operation and gimplify it. Emit code before GSI.
8735 Return the gimple_val holding the result. */
8738 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8743 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8746 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8752 /* Given a basic block B which ends with a conditional and has
8753 precisely two successors, determine which of the edges is taken if
8754 the conditional is true and which is taken if the conditional is
8755 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8758 extract_true_false_edges_from_block (basic_block b
,
8762 edge e
= EDGE_SUCC (b
, 0);
8764 if (e
->flags
& EDGE_TRUE_VALUE
)
8767 *false_edge
= EDGE_SUCC (b
, 1);
8772 *true_edge
= EDGE_SUCC (b
, 1);
8777 /* From a controlling predicate in the immediate dominator DOM of
8778 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8779 predicate evaluates to true and false and store them to
8780 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8781 they are non-NULL. Returns true if the edges can be determined,
8782 else return false. */
8785 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
8786 edge
*true_controlled_edge
,
8787 edge
*false_controlled_edge
)
8789 basic_block bb
= phiblock
;
8790 edge true_edge
, false_edge
, tem
;
8791 edge e0
= NULL
, e1
= NULL
;
8793 /* We have to verify that one edge into the PHI node is dominated
8794 by the true edge of the predicate block and the other edge
8795 dominated by the false edge. This ensures that the PHI argument
8796 we are going to take is completely determined by the path we
8797 take from the predicate block.
8798 We can only use BB dominance checks below if the destination of
8799 the true/false edges are dominated by their edge, thus only
8800 have a single predecessor. */
8801 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
8802 tem
= EDGE_PRED (bb
, 0);
8803 if (tem
== true_edge
8804 || (single_pred_p (true_edge
->dest
)
8805 && (tem
->src
== true_edge
->dest
8806 || dominated_by_p (CDI_DOMINATORS
,
8807 tem
->src
, true_edge
->dest
))))
8809 else if (tem
== false_edge
8810 || (single_pred_p (false_edge
->dest
)
8811 && (tem
->src
== false_edge
->dest
8812 || dominated_by_p (CDI_DOMINATORS
,
8813 tem
->src
, false_edge
->dest
))))
8817 tem
= EDGE_PRED (bb
, 1);
8818 if (tem
== true_edge
8819 || (single_pred_p (true_edge
->dest
)
8820 && (tem
->src
== true_edge
->dest
8821 || dominated_by_p (CDI_DOMINATORS
,
8822 tem
->src
, true_edge
->dest
))))
8824 else if (tem
== false_edge
8825 || (single_pred_p (false_edge
->dest
)
8826 && (tem
->src
== false_edge
->dest
8827 || dominated_by_p (CDI_DOMINATORS
,
8828 tem
->src
, false_edge
->dest
))))
8835 if (true_controlled_edge
)
8836 *true_controlled_edge
= e0
;
8837 if (false_controlled_edge
)
8838 *false_controlled_edge
= e1
;
8845 /* Emit return warnings. */
8849 const pass_data pass_data_warn_function_return
=
8851 GIMPLE_PASS
, /* type */
8852 "*warn_function_return", /* name */
8853 OPTGROUP_NONE
, /* optinfo_flags */
8854 TV_NONE
, /* tv_id */
8855 PROP_cfg
, /* properties_required */
8856 0, /* properties_provided */
8857 0, /* properties_destroyed */
8858 0, /* todo_flags_start */
8859 0, /* todo_flags_finish */
8862 class pass_warn_function_return
: public gimple_opt_pass
8865 pass_warn_function_return (gcc::context
*ctxt
)
8866 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8869 /* opt_pass methods: */
8870 virtual unsigned int execute (function
*);
8872 }; // class pass_warn_function_return
8875 pass_warn_function_return::execute (function
*fun
)
8877 source_location location
;
8882 if (!targetm
.warn_func_return (fun
->decl
))
8885 /* If we have a path to EXIT, then we do return. */
8886 if (TREE_THIS_VOLATILE (fun
->decl
)
8887 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8889 location
= UNKNOWN_LOCATION
;
8890 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8892 last
= last_stmt (e
->src
);
8893 if ((gimple_code (last
) == GIMPLE_RETURN
8894 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8895 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8898 if (location
== UNKNOWN_LOCATION
)
8899 location
= cfun
->function_end_locus
;
8900 warning_at (location
, 0, "%<noreturn%> function does return");
8903 /* If we see "return;" in some basic block, then we do reach the end
8904 without returning a value. */
8905 else if (warn_return_type
8906 && !TREE_NO_WARNING (fun
->decl
)
8907 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8908 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8910 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8912 gimple
*last
= last_stmt (e
->src
);
8913 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
8915 && gimple_return_retval (return_stmt
) == NULL
8916 && !gimple_no_warning_p (last
))
8918 location
= gimple_location (last
);
8919 if (location
== UNKNOWN_LOCATION
)
8920 location
= fun
->function_end_locus
;
8921 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8922 TREE_NO_WARNING (fun
->decl
) = 1;
8933 make_pass_warn_function_return (gcc::context
*ctxt
)
8935 return new pass_warn_function_return (ctxt
);
8938 /* Walk a gimplified function and warn for functions whose return value is
8939 ignored and attribute((warn_unused_result)) is set. This is done before
8940 inlining, so we don't have to worry about that. */
8943 do_warn_unused_result (gimple_seq seq
)
8946 gimple_stmt_iterator i
;
8948 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8950 gimple
*g
= gsi_stmt (i
);
8952 switch (gimple_code (g
))
8955 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
8958 do_warn_unused_result (gimple_try_eval (g
));
8959 do_warn_unused_result (gimple_try_cleanup (g
));
8962 do_warn_unused_result (gimple_catch_handler (
8963 as_a
<gcatch
*> (g
)));
8965 case GIMPLE_EH_FILTER
:
8966 do_warn_unused_result (gimple_eh_filter_failure (g
));
8970 if (gimple_call_lhs (g
))
8972 if (gimple_call_internal_p (g
))
8975 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8976 LHS. All calls whose value is ignored should be
8977 represented like this. Look for the attribute. */
8978 fdecl
= gimple_call_fndecl (g
);
8979 ftype
= gimple_call_fntype (g
);
8981 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8983 location_t loc
= gimple_location (g
);
8986 warning_at (loc
, OPT_Wunused_result
,
8987 "ignoring return value of %qD, "
8988 "declared with attribute warn_unused_result",
8991 warning_at (loc
, OPT_Wunused_result
,
8992 "ignoring return value of function "
8993 "declared with attribute warn_unused_result");
8998 /* Not a container, not a call, or a call whose value is used. */
9006 const pass_data pass_data_warn_unused_result
=
9008 GIMPLE_PASS
, /* type */
9009 "*warn_unused_result", /* name */
9010 OPTGROUP_NONE
, /* optinfo_flags */
9011 TV_NONE
, /* tv_id */
9012 PROP_gimple_any
, /* properties_required */
9013 0, /* properties_provided */
9014 0, /* properties_destroyed */
9015 0, /* todo_flags_start */
9016 0, /* todo_flags_finish */
9019 class pass_warn_unused_result
: public gimple_opt_pass
9022 pass_warn_unused_result (gcc::context
*ctxt
)
9023 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
9026 /* opt_pass methods: */
9027 virtual bool gate (function
*) { return flag_warn_unused_result
; }
9028 virtual unsigned int execute (function
*)
9030 do_warn_unused_result (gimple_body (current_function_decl
));
9034 }; // class pass_warn_unused_result
9039 make_pass_warn_unused_result (gcc::context
*ctxt
)
9041 return new pass_warn_unused_result (ctxt
);
9044 /* IPA passes, compilation of earlier functions or inlining
9045 might have changed some properties, such as marked functions nothrow,
9046 pure, const or noreturn.
9047 Remove redundant edges and basic blocks, and create new ones if necessary.
9049 This pass can't be executed as stand alone pass from pass manager, because
9050 in between inlining and this fixup the verify_flow_info would fail. */
9053 execute_fixup_cfg (void)
9056 gimple_stmt_iterator gsi
;
9060 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
9061 profile_count num
= node
->count
;
9062 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
9063 bool scale
= num
.initialized_p () && den
.initialized_p () && !(num
== den
);
9067 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= node
->count
;
9068 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
9069 = EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.apply_scale (num
, den
);
9071 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
9072 e
->count
= e
->count
.apply_scale (num
, den
);
9075 FOR_EACH_BB_FN (bb
, cfun
)
9078 bb
->count
= bb
->count
.apply_scale (num
, den
);
9079 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
9081 gimple
*stmt
= gsi_stmt (gsi
);
9082 tree decl
= is_gimple_call (stmt
)
9083 ? gimple_call_fndecl (stmt
)
9087 int flags
= gimple_call_flags (stmt
);
9088 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
9090 if (gimple_purge_dead_abnormal_call_edges (bb
))
9091 todo
|= TODO_cleanup_cfg
;
9093 if (gimple_in_ssa_p (cfun
))
9095 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9100 if (flags
& ECF_NORETURN
9101 && fixup_noreturn_call (stmt
))
9102 todo
|= TODO_cleanup_cfg
;
9105 /* Remove stores to variables we marked write-only.
9106 Keep access when store has side effect, i.e. in case when source
9108 if (gimple_store_p (stmt
)
9109 && !gimple_has_side_effects (stmt
))
9111 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9114 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9115 && varpool_node::get (lhs
)->writeonly
)
9117 unlink_stmt_vdef (stmt
);
9118 gsi_remove (&gsi
, true);
9119 release_defs (stmt
);
9120 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9124 /* For calls we can simply remove LHS when it is known
9125 to be write-only. */
9126 if (is_gimple_call (stmt
)
9127 && gimple_get_lhs (stmt
))
9129 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9132 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9133 && varpool_node::get (lhs
)->writeonly
)
9135 gimple_call_set_lhs (stmt
, NULL
);
9137 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9141 if (maybe_clean_eh_stmt (stmt
)
9142 && gimple_purge_dead_eh_edges (bb
))
9143 todo
|= TODO_cleanup_cfg
;
9148 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9149 e
->count
= e
->count
.apply_scale (num
, den
);
9151 /* If we have a basic block with no successors that does not
9152 end with a control statement or a noreturn call end it with
9153 a call to __builtin_unreachable. This situation can occur
9154 when inlining a noreturn call that does in fact return. */
9155 if (EDGE_COUNT (bb
->succs
) == 0)
9157 gimple
*stmt
= last_stmt (bb
);
9159 || (!is_ctrl_stmt (stmt
)
9160 && (!is_gimple_call (stmt
)
9161 || !gimple_call_noreturn_p (stmt
))))
9163 if (stmt
&& is_gimple_call (stmt
))
9164 gimple_call_set_ctrl_altering (stmt
, false);
9165 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9166 stmt
= gimple_build_call (fndecl
, 0);
9167 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9168 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
9169 if (!cfun
->after_inlining
)
9171 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
9173 = compute_call_stmt_bb_frequency (current_function_decl
,
9175 node
->create_edge (cgraph_node::get_create (fndecl
),
9176 call_stmt
, bb
->count
, freq
);
9182 compute_function_frequency ();
9185 && (todo
& TODO_cleanup_cfg
))
9186 loops_state_set (LOOPS_NEED_FIXUP
);
9193 const pass_data pass_data_fixup_cfg
=
9195 GIMPLE_PASS
, /* type */
9196 "fixup_cfg", /* name */
9197 OPTGROUP_NONE
, /* optinfo_flags */
9198 TV_NONE
, /* tv_id */
9199 PROP_cfg
, /* properties_required */
9200 0, /* properties_provided */
9201 0, /* properties_destroyed */
9202 0, /* todo_flags_start */
9203 0, /* todo_flags_finish */
9206 class pass_fixup_cfg
: public gimple_opt_pass
9209 pass_fixup_cfg (gcc::context
*ctxt
)
9210 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
9213 /* opt_pass methods: */
9214 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
9215 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
9217 }; // class pass_fixup_cfg
9222 make_pass_fixup_cfg (gcc::context
*ctxt
)
9224 return new pass_fixup_cfg (ctxt
);
9227 /* Garbage collection support for edge_def. */
9229 extern void gt_ggc_mx (tree
&);
9230 extern void gt_ggc_mx (gimple
*&);
9231 extern void gt_ggc_mx (rtx
&);
9232 extern void gt_ggc_mx (basic_block
&);
9235 gt_ggc_mx (rtx_insn
*& x
)
9238 gt_ggc_mx_rtx_def ((void *) x
);
/* GC marking routine for an edge_def E: mark its source and
   destination blocks, the pending insn sequence (the GIMPLE or RTL
   member of the union, depending on the current IR), and the BLOCK
   tree of the edge's goto_locus.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* Only one union member is live; pick it by the current IR kind.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
9254 /* PCH support for edge_def. */
9256 extern void gt_pch_nx (tree
&);
9257 extern void gt_pch_nx (gimple
*&);
9258 extern void gt_pch_nx (rtx
&);
9259 extern void gt_pch_nx (basic_block
&);
9262 gt_pch_nx (rtx_insn
*& x
)
9265 gt_pch_nx_rtx_def ((void *) x
);
/* PCH note routine for an edge_def E: register its source and
   destination blocks, the pending insn sequence (GIMPLE or RTL member
   of the union, depending on the current IR), and the BLOCK tree of
   the edge's goto_locus.  Mirrors gt_ggc_mx (edge_def *).  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* Only one union member is live; pick it by the current IR kind.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* PCH pointer-relocation routine for an edge_def E: apply OP (with
   COOKIE) to each pointer field that may need adjusting — the source
   and destination blocks, the live member of the insns union
   (selected by the current IR kind), and the BLOCK of the edge's
   goto_locus.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
9296 namespace selftest
{
/* Helper function for CFG selftests: create a dummy function decl
   named NAME (taking no arguments and nominally returning int), push
   it as cfun, and initialize an empty CFG for it (just ENTRY and
   EXIT, no edges).  Returns the new fndecl; callers are expected to
   pop_cfun when done.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  /* A freshly-initialized CFG has exactly ENTRY and EXIT, and no
     edges between them yet.  */
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
9319 /* These tests directly create CFGs.
9320 Compare with the static fns within tree-cfg.c:
9322 - make_blocks: calls create_basic_block (seq, bb);
/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  /* ENTRY + EXIT + the three new blocks, still no edges.  */
  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges: each block in the chain has exactly one
     predecessor and one successor; ENTRY has no preds, EXIT no
     succs.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  /* Undo the push_struct_function done by push_fndecl.  */
  pop_cfun ();
}
/* Verify a simple CFG of the form:
	     ENTRY
	       |
	       A
	      / \
	     /t  \f
	    B     C
	     \   /
	      \ /
	       D
	       |
	      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  /* ENTRY + EXIT + the four new blocks, still no edges.  */
  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges: A branches (true to B, false to C); both arms
     rejoin at D.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information: A dominates everything; the
     join block D is not dominated by either arm.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance: D post-dominates everything above
     the join.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Undo the push_struct_function done by push_fndecl.  */
  pop_cfun ();
}
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   ENTRY feeds the first subgraph node, which is also the only node
   with an edge to EXIT; so no block can assume it has a unique
   predecessor or successor.  */

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Size of the fully-connected subgraph.  */
  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges: ENTRY -> node 0 -> EXIT, plus every (i, j)
     pair within the subgraph, including self-edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     every other node.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block, the only way in from ENTRY.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Undo the push_struct_function done by push_fndecl.  */
  pop_cfun ();
}
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}
9566 } // namespace selftest
9568 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9571 - switch statement (a block with many out-edges)
9572 - something that jumps to itself
9575 #endif /* CHECKING_P */