1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
34 #include "fold-const.h"
35 #include "trans-mem.h"
36 #include "stor-layout.h"
37 #include "print-tree.h"
40 #include "hard-reg-set.h"
42 #include "dominance.h"
45 #include "basic-block.h"
47 #include "gimple-pretty-print.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-fold.h"
52 #include "gimple-expr.h"
55 #include "gimple-iterator.h"
56 #include "gimplify-me.h"
57 #include "gimple-walk.h"
58 #include "gimple-ssa.h"
59 #include "plugin-api.h"
63 #include "tree-phinodes.h"
64 #include "ssa-iterators.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "tree-ssa-loop-manip.h"
68 #include "tree-ssa-loop-niter.h"
69 #include "tree-into-ssa.h"
72 #include "statistics.h"
73 #include "insn-config.h"
84 #include "tree-dump.h"
85 #include "tree-pass.h"
86 #include "diagnostic-core.h"
89 #include "tree-ssa-propagate.h"
90 #include "value-prof.h"
91 #include "tree-inline.h"
93 #include "tree-ssa-live.h"
95 #include "tree-cfgcleanup.h"
96 #include "wide-int-print.h"
98 /* This file contains functions for building the Control Flow Graph (CFG)
99 for a function tree. */
101 /* Local declarations. */
103 /* Initial capacity for the basic block array. */
104 static const int initial_cfg_capacity
= 20;
106 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
107 which use a particular edge. The CASE_LABEL_EXPRs are chained together
108 via their CASE_CHAIN field, which we clear after we're done with the
109 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
111 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
112 update the case vector in response to edge redirections.
114 Right now this table is set up and torn down at key points in the
115 compilation process. It would be nice if we could make the table
116 more persistent. The key is getting notification of changes to
117 the CFG (particularly edge removal, creation and redirection). */
119 static hash_map
<edge
, tree
> *edge_to_cases
;
121 /* If we record edge_to_cases, this bitmap will hold indexes
122 of basic blocks that end in a GIMPLE_SWITCH which we touched
123 due to edge manipulations. */
125 static bitmap touched_switch_bbs
;
127 /* CFG statistics. */
130 long num_merged_labels
;
133 static struct cfg_stats_d cfg_stats
;
135 /* Hash table to store last discriminator assigned for each locus. */
136 struct locus_discrim_map
142 /* Hashtable helpers. */
144 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
146 typedef locus_discrim_map
*value_type
;
147 typedef locus_discrim_map
*compare_type
;
148 static inline hashval_t
hash (const locus_discrim_map
*);
149 static inline bool equal (const locus_discrim_map
*,
150 const locus_discrim_map
*);
153 /* Trivial hash function for a location_t. ITEM is a pointer to
154 a hash table entry that maps a location_t to a discriminator. */
157 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
159 return LOCATION_LINE (item
->locus
);
162 /* Equality function for the locus-to-discriminator map. A and B
163 point to the two hash table entries to compare. */
166 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
167 const locus_discrim_map
*b
)
169 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
172 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
174 /* Basic blocks and flowgraphs. */
175 static void make_blocks (gimple_seq
);
178 static void make_edges (void);
179 static void assign_discriminators (void);
180 static void make_cond_expr_edges (basic_block
);
181 static void make_gimple_switch_edges (gswitch
*, basic_block
);
182 static bool make_goto_expr_edges (basic_block
);
183 static void make_gimple_asm_edges (basic_block
);
184 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
185 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
187 /* Various helpers. */
188 static inline bool stmt_starts_bb_p (gimple
, gimple
);
189 static int gimple_verify_flow_info (void);
190 static void gimple_make_forwarder_block (edge
);
191 static gimple
first_non_label_stmt (basic_block
);
192 static bool verify_gimple_transaction (gtransaction
*);
193 static bool call_can_make_abnormal_goto (gimple
);
195 /* Flowgraph optimization and cleanup. */
196 static void gimple_merge_blocks (basic_block
, basic_block
);
197 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
198 static void remove_bb (basic_block
);
199 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
200 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
201 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
202 static tree
find_case_label_for_value (gswitch
*, tree
);
205 init_empty_tree_cfg_for_function (struct function
*fn
)
207 /* Initialize the basic block array. */
209 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
210 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
211 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
212 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
213 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
214 initial_cfg_capacity
);
216 /* Build a mapping of labels to their associated blocks. */
217 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
218 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
219 initial_cfg_capacity
);
221 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
222 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
224 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
225 = EXIT_BLOCK_PTR_FOR_FN (fn
);
226 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
227 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
231 init_empty_tree_cfg (void)
233 init_empty_tree_cfg_for_function (cfun
);
236 /*---------------------------------------------------------------------------
238 ---------------------------------------------------------------------------*/
240 /* Entry point to the CFG builder for trees. SEQ is the sequence of
241 statements to be added to the flowgraph. */
244 build_gimple_cfg (gimple_seq seq
)
246 /* Register specific gimple functions. */
247 gimple_register_cfg_hooks ();
249 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
251 init_empty_tree_cfg ();
255 /* Make sure there is always at least one block, even if it's empty. */
256 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
257 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
259 /* Adjust the size of the array. */
260 if (basic_block_info_for_fn (cfun
)->length ()
261 < (size_t) n_basic_blocks_for_fn (cfun
))
262 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
263 n_basic_blocks_for_fn (cfun
));
265 /* To speed up statement iterator walks, we first purge dead labels. */
266 cleanup_dead_labels ();
268 /* Group case nodes to reduce the number of edges.
269 We do this after cleaning up dead labels because otherwise we miss
270 a lot of obvious case merging opportunities. */
271 group_case_labels ();
273 /* Create the edges of the flowgraph. */
274 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
276 assign_discriminators ();
277 cleanup_dead_labels ();
278 delete discriminator_per_locus
;
279 discriminator_per_locus
= NULL
;
282 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
283 them and propagate the information to LOOP. We assume that the annotations
284 come immediately before the condition in BB, if any. */
287 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
289 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
290 gimple stmt
= gsi_stmt (gsi
);
292 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
295 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
297 stmt
= gsi_stmt (gsi
);
298 if (gimple_code (stmt
) != GIMPLE_CALL
)
300 if (!gimple_call_internal_p (stmt
)
301 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
304 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
306 case annot_expr_ivdep_kind
:
307 loop
->safelen
= INT_MAX
;
309 case annot_expr_no_vector_kind
:
310 loop
->dont_vectorize
= true;
312 case annot_expr_vector_kind
:
313 loop
->force_vectorize
= true;
314 cfun
->has_force_vectorize_loops
= true;
320 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
321 gimple_call_arg (stmt
, 0));
322 gsi_replace (&gsi
, stmt
, true);
326 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
327 them and propagate the information to the loop. We assume that the
328 annotations come immediately before the condition of the loop. */
331 replace_loop_annotate (void)
335 gimple_stmt_iterator gsi
;
338 FOR_EACH_LOOP (loop
, 0)
340 /* First look into the header. */
341 replace_loop_annotate_in_block (loop
->header
, loop
);
343 /* Then look into the latch, if any. */
345 replace_loop_annotate_in_block (loop
->latch
, loop
);
348 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
349 FOR_EACH_BB_FN (bb
, cfun
)
351 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
353 stmt
= gsi_stmt (gsi
);
354 if (gimple_code (stmt
) != GIMPLE_CALL
)
356 if (!gimple_call_internal_p (stmt
)
357 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
360 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
362 case annot_expr_ivdep_kind
:
363 case annot_expr_no_vector_kind
:
364 case annot_expr_vector_kind
:
370 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
371 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
372 gimple_call_arg (stmt
, 0));
373 gsi_replace (&gsi
, stmt
, true);
380 execute_build_cfg (void)
382 gimple_seq body
= gimple_body (current_function_decl
);
384 build_gimple_cfg (body
);
385 gimple_set_body (current_function_decl
, NULL
);
386 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
388 fprintf (dump_file
, "Scope blocks:\n");
389 dump_scope_blocks (dump_file
, dump_flags
);
392 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
393 replace_loop_annotate ();
399 const pass_data pass_data_build_cfg
=
401 GIMPLE_PASS
, /* type */
403 OPTGROUP_NONE
, /* optinfo_flags */
404 TV_TREE_CFG
, /* tv_id */
405 PROP_gimple_leh
, /* properties_required */
406 ( PROP_cfg
| PROP_loops
), /* properties_provided */
407 0, /* properties_destroyed */
408 0, /* todo_flags_start */
409 0, /* todo_flags_finish */
412 class pass_build_cfg
: public gimple_opt_pass
415 pass_build_cfg (gcc::context
*ctxt
)
416 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
419 /* opt_pass methods: */
420 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
422 }; // class pass_build_cfg
427 make_pass_build_cfg (gcc::context
*ctxt
)
429 return new pass_build_cfg (ctxt
);
433 /* Return true if T is a computed goto. */
436 computed_goto_p (gimple t
)
438 return (gimple_code (t
) == GIMPLE_GOTO
439 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
442 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
443 the other edge points to a bb with just __builtin_unreachable ().
444 I.e. return true for C->M edge in:
452 __builtin_unreachable ();
456 assert_unreachable_fallthru_edge_p (edge e
)
458 basic_block pred_bb
= e
->src
;
459 gimple last
= last_stmt (pred_bb
);
460 if (last
&& gimple_code (last
) == GIMPLE_COND
)
462 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
463 if (other_bb
== e
->dest
)
464 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
465 if (EDGE_COUNT (other_bb
->succs
) == 0)
467 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
472 stmt
= gsi_stmt (gsi
);
473 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
478 stmt
= gsi_stmt (gsi
);
480 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
487 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
488 could alter control flow except via eh. We initialize the flag at
489 CFG build time and only ever clear it later. */
492 gimple_call_initialize_ctrl_altering (gimple stmt
)
494 int flags
= gimple_call_flags (stmt
);
496 /* A call alters control flow if it can make an abnormal goto. */
497 if (call_can_make_abnormal_goto (stmt
)
498 /* A call also alters control flow if it does not return. */
499 || flags
& ECF_NORETURN
500 /* TM ending statements have backedges out of the transaction.
501 Return true so we split the basic block containing them.
502 Note that the TM_BUILTIN test is merely an optimization. */
503 || ((flags
& ECF_TM_BUILTIN
)
504 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
505 /* BUILT_IN_RETURN call is same as return statement. */
506 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
507 gimple_call_set_ctrl_altering (stmt
, true);
509 gimple_call_set_ctrl_altering (stmt
, false);
513 /* Insert SEQ after BB and build a flowgraph. */
516 make_blocks_1 (gimple_seq seq
, basic_block bb
)
518 gimple_stmt_iterator i
= gsi_start (seq
);
520 bool start_new_block
= true;
521 bool first_stmt_of_seq
= true;
523 while (!gsi_end_p (i
))
530 if (stmt
&& is_gimple_call (stmt
))
531 gimple_call_initialize_ctrl_altering (stmt
);
533 /* If the statement starts a new basic block or if we have determined
534 in a previous pass that we need to create a new block for STMT, do
536 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
538 if (!first_stmt_of_seq
)
539 gsi_split_seq_before (&i
, &seq
);
540 bb
= create_basic_block (seq
, bb
);
541 start_new_block
= false;
544 /* Now add STMT to BB and create the subgraphs for special statement
546 gimple_set_bb (stmt
, bb
);
548 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
550 if (stmt_ends_bb_p (stmt
))
552 /* If the stmt can make abnormal goto use a new temporary
553 for the assignment to the LHS. This makes sure the old value
554 of the LHS is available on the abnormal edge. Otherwise
555 we will end up with overlapping life-ranges for abnormal
557 if (gimple_has_lhs (stmt
)
558 && stmt_can_make_abnormal_goto (stmt
)
559 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
561 tree lhs
= gimple_get_lhs (stmt
);
562 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
563 gimple s
= gimple_build_assign (lhs
, tmp
);
564 gimple_set_location (s
, gimple_location (stmt
));
565 gimple_set_block (s
, gimple_block (stmt
));
566 gimple_set_lhs (stmt
, tmp
);
567 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
568 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
569 DECL_GIMPLE_REG_P (tmp
) = 1;
570 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
572 start_new_block
= true;
576 first_stmt_of_seq
= false;
581 /* Build a flowgraph for the sequence of stmts SEQ. */
584 make_blocks (gimple_seq seq
)
586 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
589 /* Create and return a new empty basic block after bb AFTER. */
592 create_bb (void *h
, void *e
, basic_block after
)
598 /* Create and initialize a new basic block. Since alloc_block uses
599 GC allocation that clears memory to allocate a basic block, we do
600 not have to clear the newly allocated basic block here. */
603 bb
->index
= last_basic_block_for_fn (cfun
);
605 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
607 /* Add the new block to the linked list of blocks. */
608 link_block (bb
, after
);
610 /* Grow the basic block array if needed. */
611 if ((size_t) last_basic_block_for_fn (cfun
)
612 == basic_block_info_for_fn (cfun
)->length ())
615 (last_basic_block_for_fn (cfun
)
616 + (last_basic_block_for_fn (cfun
) + 3) / 4);
617 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
620 /* Add the newly created block to the array. */
621 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
623 n_basic_blocks_for_fn (cfun
)++;
624 last_basic_block_for_fn (cfun
)++;
630 /*---------------------------------------------------------------------------
632 ---------------------------------------------------------------------------*/
634 /* Fold COND_EXPR_COND of each COND_EXPR. */
637 fold_cond_expr_cond (void)
641 FOR_EACH_BB_FN (bb
, cfun
)
643 gimple stmt
= last_stmt (bb
);
645 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
647 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
648 location_t loc
= gimple_location (stmt
);
652 fold_defer_overflow_warnings ();
653 cond
= fold_binary_loc (loc
, gimple_cond_code (cond_stmt
),
655 gimple_cond_lhs (cond_stmt
),
656 gimple_cond_rhs (cond_stmt
));
659 zerop
= integer_zerop (cond
);
660 onep
= integer_onep (cond
);
663 zerop
= onep
= false;
665 fold_undefer_overflow_warnings (zerop
|| onep
,
667 WARN_STRICT_OVERFLOW_CONDITIONAL
);
669 gimple_cond_make_false (cond_stmt
);
671 gimple_cond_make_true (cond_stmt
);
676 /* If basic block BB has an abnormal edge to a basic block
677 containing IFN_ABNORMAL_DISPATCHER internal call, return
678 that the dispatcher's basic block, otherwise return NULL. */
681 get_abnormal_succ_dispatcher (basic_block bb
)
686 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
687 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
689 gimple_stmt_iterator gsi
690 = gsi_start_nondebug_after_labels_bb (e
->dest
);
691 gimple g
= gsi_stmt (gsi
);
693 && is_gimple_call (g
)
694 && gimple_call_internal_p (g
)
695 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
701 /* Helper function for make_edges. Create a basic block with
702 with ABNORMAL_DISPATCHER internal call in it if needed, and
703 create abnormal edges from BBS to it and from it to FOR_BB
704 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
707 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
708 basic_block for_bb
, int *bb_to_omp_idx
,
709 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
711 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
712 unsigned int idx
= 0;
718 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
719 if (bb_to_omp_idx
[for_bb
->index
] != 0)
723 /* If the dispatcher has been created already, then there are basic
724 blocks with abnormal edges to it, so just make a new edge to
726 if (*dispatcher
== NULL
)
728 /* Check if there are any basic blocks that need to have
729 abnormal edges to this dispatcher. If there are none, return
731 if (bb_to_omp_idx
== NULL
)
733 if (bbs
->is_empty ())
738 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
739 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
745 /* Create the dispatcher bb. */
746 *dispatcher
= create_basic_block (NULL
, for_bb
);
749 /* Factor computed gotos into a common computed goto site. Also
750 record the location of that site so that we can un-factor the
751 gotos after we have converted back to normal form. */
752 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
754 /* Create the destination of the factored goto. Each original
755 computed goto will put its desired destination into this
756 variable and jump to the label we create immediately below. */
757 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
759 /* Build a label for the new block which will contain the
760 factored computed goto. */
761 tree factored_label_decl
762 = create_artificial_label (UNKNOWN_LOCATION
);
763 gimple factored_computed_goto_label
764 = gimple_build_label (factored_label_decl
);
765 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
767 /* Build our new computed goto. */
768 gimple factored_computed_goto
= gimple_build_goto (var
);
769 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
771 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
774 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
777 gsi
= gsi_last_bb (bb
);
778 gimple last
= gsi_stmt (gsi
);
780 gcc_assert (computed_goto_p (last
));
782 /* Copy the original computed goto's destination into VAR. */
784 = gimple_build_assign (var
, gimple_goto_dest (last
));
785 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
787 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
788 e
->goto_locus
= gimple_location (last
);
789 gsi_remove (&gsi
, true);
794 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
795 gimple g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
797 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
798 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
800 /* Create predecessor edges of the dispatcher. */
801 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
804 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
806 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
811 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
814 /* Creates outgoing edges for BB. Returns 1 when it ends with an
815 computed goto, returns 2 when it ends with a statement that
816 might return to this function via an nonlocal goto, otherwise
817 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
820 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
822 gimple last
= last_stmt (bb
);
823 bool fallthru
= false;
829 switch (gimple_code (last
))
832 if (make_goto_expr_edges (bb
))
838 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
839 e
->goto_locus
= gimple_location (last
);
844 make_cond_expr_edges (bb
);
848 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
852 make_eh_edges (last
);
855 case GIMPLE_EH_DISPATCH
:
856 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
860 /* If this function receives a nonlocal goto, then we need to
861 make edges from this call site to all the nonlocal goto
863 if (stmt_can_make_abnormal_goto (last
))
866 /* If this statement has reachable exception handlers, then
867 create abnormal edges to them. */
868 make_eh_edges (last
);
870 /* BUILTIN_RETURN is really a return statement. */
871 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
873 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
876 /* Some calls are known not to return. */
878 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
882 /* A GIMPLE_ASSIGN may throw internally and thus be considered
884 if (is_ctrl_altering_stmt (last
))
885 make_eh_edges (last
);
890 make_gimple_asm_edges (bb
);
895 fallthru
= make_gimple_omp_edges (bb
, pcur_region
, pomp_index
);
898 case GIMPLE_TRANSACTION
:
901 = gimple_transaction_label (as_a
<gtransaction
*> (last
));
903 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
909 gcc_assert (!stmt_ends_bb_p (last
));
915 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
920 /* Join all the blocks in the flowgraph. */
926 struct omp_region
*cur_region
= NULL
;
927 auto_vec
<basic_block
> ab_edge_goto
;
928 auto_vec
<basic_block
> ab_edge_call
;
929 int *bb_to_omp_idx
= NULL
;
930 int cur_omp_region_idx
= 0;
932 /* Create an edge from entry to the first block with executable
934 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
935 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
938 /* Traverse the basic block array placing edges. */
939 FOR_EACH_BB_FN (bb
, cfun
)
944 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
946 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
948 ab_edge_goto
.safe_push (bb
);
950 ab_edge_call
.safe_push (bb
);
952 if (cur_region
&& bb_to_omp_idx
== NULL
)
953 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
956 /* Computed gotos are hell to deal with, especially if there are
957 lots of them with a large number of destinations. So we factor
958 them to a common computed goto location before we build the
959 edge list. After we convert back to normal form, we will un-factor
960 the computed gotos since factoring introduces an unwanted jump.
961 For non-local gotos and abnormal edges from calls to calls that return
962 twice or forced labels, factor the abnormal edges too, by having all
963 abnormal edges from the calls go to a common artificial basic block
964 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
965 basic block to all forced labels and calls returning twice.
966 We do this per-OpenMP structured block, because those regions
967 are guaranteed to be single entry single exit by the standard,
968 so it is not allowed to enter or exit such regions abnormally this way,
969 thus all computed gotos, non-local gotos and setjmp/longjmp calls
970 must not transfer control across SESE region boundaries. */
971 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
973 gimple_stmt_iterator gsi
;
974 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
975 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
976 int count
= n_basic_blocks_for_fn (cfun
);
979 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
981 FOR_EACH_BB_FN (bb
, cfun
)
983 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
985 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
991 target
= gimple_label_label (label_stmt
);
993 /* Make an edge to every label block that has been marked as a
994 potential target for a computed goto or a non-local goto. */
995 if (FORCED_LABEL (target
))
996 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
997 &ab_edge_goto
, true);
998 if (DECL_NONLOCAL (target
))
1000 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1001 &ab_edge_call
, false);
1006 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1007 gsi_next_nondebug (&gsi
);
1008 if (!gsi_end_p (gsi
))
1010 /* Make an edge to every setjmp-like call. */
1011 gimple call_stmt
= gsi_stmt (gsi
);
1012 if (is_gimple_call (call_stmt
)
1013 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1014 || gimple_call_builtin_p (call_stmt
,
1015 BUILT_IN_SETJMP_RECEIVER
)))
1016 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1017 &ab_edge_call
, false);
1022 XDELETE (dispatcher_bbs
);
1025 XDELETE (bb_to_omp_idx
);
1027 free_omp_regions ();
1029 /* Fold COND_EXPR_COND of each COND_EXPR. */
1030 fold_cond_expr_cond ();
1033 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1034 needed. Returns true if new bbs were created.
1035 Note: This is transitional code, and should not be used for new code. We
1036 should be able to get rid of this by rewriting all target va-arg
1037 gimplification hooks to use an interface gimple_build_cond_value as described
1038 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1041 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1043 gimple stmt
= gsi_stmt (*gsi
);
1044 basic_block bb
= gimple_bb (stmt
);
1045 basic_block lastbb
, afterbb
;
1046 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1048 lastbb
= make_blocks_1 (seq
, bb
);
1049 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1051 e
= split_block (bb
, stmt
);
1052 /* Move e->dest to come after the new basic blocks. */
1054 unlink_block (afterbb
);
1055 link_block (afterbb
, lastbb
);
1056 redirect_edge_succ (e
, bb
->next_bb
);
1058 while (bb
!= afterbb
)
1060 struct omp_region
*cur_region
= NULL
;
1061 int cur_omp_region_idx
= 0;
1062 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1063 gcc_assert (!mer
&& !cur_region
);
1064 add_bb_to_loop (bb
, afterbb
->loop_father
);
1070 /* Find the next available discriminator value for LOCUS. The
1071 discriminator distinguishes among several basic blocks that
1072 share a common locus, allowing for more accurate sample-based
1076 next_discriminator_for_locus (location_t locus
)
1078 struct locus_discrim_map item
;
1079 struct locus_discrim_map
**slot
;
1082 item
.discriminator
= 0;
1083 slot
= discriminator_per_locus
->find_slot_with_hash (
1084 &item
, LOCATION_LINE (locus
), INSERT
);
1086 if (*slot
== HTAB_EMPTY_ENTRY
)
1088 *slot
= XNEW (struct locus_discrim_map
);
1090 (*slot
)->locus
= locus
;
1091 (*slot
)->discriminator
= 0;
1093 (*slot
)->discriminator
++;
1094 return (*slot
)->discriminator
;
1097 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1100 same_line_p (location_t locus1
, location_t locus2
)
1102 expanded_location from
, to
;
1104 if (locus1
== locus2
)
1107 from
= expand_location (locus1
);
1108 to
= expand_location (locus2
);
1110 if (from
.line
!= to
.line
)
1112 if (from
.file
== to
.file
)
1114 return (from
.file
!= NULL
1116 && filename_cmp (from
.file
, to
.file
) == 0);
1119 /* Assign discriminators to each basic block. */
1122 assign_discriminators (void)
1126 FOR_EACH_BB_FN (bb
, cfun
)
1130 gimple last
= last_stmt (bb
);
1131 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1133 if (locus
== UNKNOWN_LOCATION
)
1136 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1138 gimple first
= first_non_label_stmt (e
->dest
);
1139 gimple last
= last_stmt (e
->dest
);
1140 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1141 || (last
&& same_line_p (locus
, gimple_location (last
))))
1143 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1144 bb
->discriminator
= next_discriminator_for_locus (locus
);
1146 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1152 /* Create the edges for a GIMPLE_COND starting at block BB. */
1155 make_cond_expr_edges (basic_block bb
)
1157 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1158 gimple then_stmt
, else_stmt
;
1159 basic_block then_bb
, else_bb
;
1160 tree then_label
, else_label
;
1164 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1166 /* Entry basic blocks for each component. */
1167 then_label
= gimple_cond_true_label (entry
);
1168 else_label
= gimple_cond_false_label (entry
);
1169 then_bb
= label_to_block (then_label
);
1170 else_bb
= label_to_block (else_label
);
1171 then_stmt
= first_stmt (then_bb
);
1172 else_stmt
= first_stmt (else_bb
);
1174 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1175 e
->goto_locus
= gimple_location (then_stmt
);
1176 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1178 e
->goto_locus
= gimple_location (else_stmt
);
1180 /* We do not need the labels anymore. */
1181 gimple_cond_set_true_label (entry
, NULL_TREE
);
1182 gimple_cond_set_false_label (entry
, NULL_TREE
);
1186 /* Called for each element in the hash table (P) as we delete the
1187 edge to cases hash table.
1189 Clear all the TREE_CHAINs to prevent problems with copying of
1190 SWITCH_EXPRs and structure sharing rules, then free the hash table
1194 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1198 for (t
= value
; t
; t
= next
)
1200 next
= CASE_CHAIN (t
);
1201 CASE_CHAIN (t
) = NULL
;
1207 /* Start recording information mapping edges to case labels. */
1210 start_recording_case_labels (void)
1212 gcc_assert (edge_to_cases
== NULL
);
1213 edge_to_cases
= new hash_map
<edge
, tree
>;
1214 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1217 /* Return nonzero if we are recording information for case labels. */
1220 recording_case_labels_p (void)
1222 return (edge_to_cases
!= NULL
);
1225 /* Stop recording information mapping edges to case labels and
1226 remove any information we have recorded. */
1228 end_recording_case_labels (void)
1232 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1233 delete edge_to_cases
;
1234 edge_to_cases
= NULL
;
1235 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1237 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1240 gimple stmt
= last_stmt (bb
);
1241 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1242 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1245 BITMAP_FREE (touched_switch_bbs
);
1248 /* If we are inside a {start,end}_recording_cases block, then return
1249 a chain of CASE_LABEL_EXPRs from T which reference E.
1251 Otherwise return NULL. */
1254 get_cases_for_edge (edge e
, gswitch
*t
)
1259 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1260 chains available. Return NULL so the caller can detect this case. */
1261 if (!recording_case_labels_p ())
1264 slot
= edge_to_cases
->get (e
);
1268 /* If we did not find E in the hash table, then this must be the first
1269 time we have been queried for information about E & T. Add all the
1270 elements from T to the hash table then perform the query again. */
1272 n
= gimple_switch_num_labels (t
);
1273 for (i
= 0; i
< n
; i
++)
1275 tree elt
= gimple_switch_label (t
, i
);
1276 tree lab
= CASE_LABEL (elt
);
1277 basic_block label_bb
= label_to_block (lab
);
1278 edge this_edge
= find_edge (e
->src
, label_bb
);
1280 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1282 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1283 CASE_CHAIN (elt
) = s
;
1287 return *edge_to_cases
->get (e
);
1290 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1293 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1297 n
= gimple_switch_num_labels (entry
);
1299 for (i
= 0; i
< n
; ++i
)
1301 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1302 basic_block label_bb
= label_to_block (lab
);
1303 make_edge (bb
, label_bb
, 0);
1308 /* Return the basic block holding label DEST. */
1311 label_to_block_fn (struct function
*ifun
, tree dest
)
1313 int uid
= LABEL_DECL_UID (dest
);
1315 /* We would die hard when faced by an undefined label. Emit a label to
1316 the very first basic block. This will hopefully make even the dataflow
1317 and undefined variable warnings quite right. */
1318 if (seen_error () && uid
< 0)
1320 gimple_stmt_iterator gsi
=
1321 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1324 stmt
= gimple_build_label (dest
);
1325 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1326 uid
= LABEL_DECL_UID (dest
);
1328 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1330 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1333 /* Create edges for a goto statement at block BB. Returns true
1334 if abnormal edges should be created. */
1337 make_goto_expr_edges (basic_block bb
)
1339 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1340 gimple goto_t
= gsi_stmt (last
);
1342 /* A simple GOTO creates normal edges. */
1343 if (simple_goto_p (goto_t
))
1345 tree dest
= gimple_goto_dest (goto_t
);
1346 basic_block label_bb
= label_to_block (dest
);
1347 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1348 e
->goto_locus
= gimple_location (goto_t
);
1349 gsi_remove (&last
, true);
1353 /* A computed GOTO creates abnormal edges. */
1357 /* Create edges for an asm statement with labels at block BB. */
1360 make_gimple_asm_edges (basic_block bb
)
1362 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1363 int i
, n
= gimple_asm_nlabels (stmt
);
1365 for (i
= 0; i
< n
; ++i
)
1367 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1368 basic_block label_bb
= label_to_block (label
);
1369 make_edge (bb
, label_bb
, 0);
1373 /*---------------------------------------------------------------------------
1375 ---------------------------------------------------------------------------*/
1377 /* Cleanup useless labels in basic blocks. This is something we wish
1378 to do early because it allows us to group case labels before creating
1379 the edges for the CFG, and it speeds up block statement iterators in
1380 all passes later on.
1381 We rerun this pass after CFG is created, to get rid of the labels that
1382 are no longer referenced. After then we do not run it any more, since
1383 (almost) no new labels should be created. */
1385 /* A map from basic block index to the leading label of that block. */
1386 static struct label_record
1391 /* True if the label is referenced from somewhere. */
1395 /* Given LABEL return the first label in the same basic block. */
1398 main_block_label (tree label
)
1400 basic_block bb
= label_to_block (label
);
1401 tree main_label
= label_for_bb
[bb
->index
].label
;
1403 /* label_to_block possibly inserted undefined label into the chain. */
1406 label_for_bb
[bb
->index
].label
= label
;
1410 label_for_bb
[bb
->index
].used
= true;
1414 /* Clean up redundant labels within the exception tree. */
1417 cleanup_dead_labels_eh (void)
1424 if (cfun
->eh
== NULL
)
1427 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1428 if (lp
&& lp
->post_landing_pad
)
1430 lab
= main_block_label (lp
->post_landing_pad
);
1431 if (lab
!= lp
->post_landing_pad
)
1433 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1434 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1438 FOR_ALL_EH_REGION (r
)
1442 case ERT_MUST_NOT_THROW
:
1448 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1452 c
->label
= main_block_label (lab
);
1457 case ERT_ALLOWED_EXCEPTIONS
:
1458 lab
= r
->u
.allowed
.label
;
1460 r
->u
.allowed
.label
= main_block_label (lab
);
1466 /* Cleanup redundant labels. This is a three-step process:
1467 1) Find the leading label for each block.
1468 2) Redirect all references to labels to the leading labels.
1469 3) Cleanup all useless labels. */
1472 cleanup_dead_labels (void)
1475 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1477 /* Find a suitable label for each block. We use the first user-defined
1478 label if there is one, or otherwise just the first label we see. */
1479 FOR_EACH_BB_FN (bb
, cfun
)
1481 gimple_stmt_iterator i
;
1483 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1486 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1491 label
= gimple_label_label (label_stmt
);
1493 /* If we have not yet seen a label for the current block,
1494 remember this one and see if there are more labels. */
1495 if (!label_for_bb
[bb
->index
].label
)
1497 label_for_bb
[bb
->index
].label
= label
;
1501 /* If we did see a label for the current block already, but it
1502 is an artificially created label, replace it if the current
1503 label is a user defined label. */
1504 if (!DECL_ARTIFICIAL (label
)
1505 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1507 label_for_bb
[bb
->index
].label
= label
;
1513 /* Now redirect all jumps/branches to the selected label.
1514 First do so for each block ending in a control statement. */
1515 FOR_EACH_BB_FN (bb
, cfun
)
1517 gimple stmt
= last_stmt (bb
);
1518 tree label
, new_label
;
1523 switch (gimple_code (stmt
))
1527 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1528 label
= gimple_cond_true_label (cond_stmt
);
1531 new_label
= main_block_label (label
);
1532 if (new_label
!= label
)
1533 gimple_cond_set_true_label (cond_stmt
, new_label
);
1536 label
= gimple_cond_false_label (cond_stmt
);
1539 new_label
= main_block_label (label
);
1540 if (new_label
!= label
)
1541 gimple_cond_set_false_label (cond_stmt
, new_label
);
1548 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1549 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1551 /* Replace all destination labels. */
1552 for (i
= 0; i
< n
; ++i
)
1554 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1555 label
= CASE_LABEL (case_label
);
1556 new_label
= main_block_label (label
);
1557 if (new_label
!= label
)
1558 CASE_LABEL (case_label
) = new_label
;
1565 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1566 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1568 for (i
= 0; i
< n
; ++i
)
1570 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1571 tree label
= main_block_label (TREE_VALUE (cons
));
1572 TREE_VALUE (cons
) = label
;
1577 /* We have to handle gotos until they're removed, and we don't
1578 remove them until after we've created the CFG edges. */
1580 if (!computed_goto_p (stmt
))
1582 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1583 label
= gimple_goto_dest (goto_stmt
);
1584 new_label
= main_block_label (label
);
1585 if (new_label
!= label
)
1586 gimple_goto_set_dest (goto_stmt
, new_label
);
1590 case GIMPLE_TRANSACTION
:
1592 gtransaction
*trans_stmt
= as_a
<gtransaction
*> (stmt
);
1593 tree label
= gimple_transaction_label (trans_stmt
);
1596 tree new_label
= main_block_label (label
);
1597 if (new_label
!= label
)
1598 gimple_transaction_set_label (trans_stmt
, new_label
);
1608 /* Do the same for the exception region tree labels. */
1609 cleanup_dead_labels_eh ();
1611 /* Finally, purge dead labels. All user-defined labels and labels that
1612 can be the target of non-local gotos and labels which have their
1613 address taken are preserved. */
1614 FOR_EACH_BB_FN (bb
, cfun
)
1616 gimple_stmt_iterator i
;
1617 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1619 if (!label_for_this_bb
)
1622 /* If the main label of the block is unused, we may still remove it. */
1623 if (!label_for_bb
[bb
->index
].used
)
1624 label_for_this_bb
= NULL
;
1626 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1629 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1634 label
= gimple_label_label (label_stmt
);
1636 if (label
== label_for_this_bb
1637 || !DECL_ARTIFICIAL (label
)
1638 || DECL_NONLOCAL (label
)
1639 || FORCED_LABEL (label
))
1642 gsi_remove (&i
, true);
1646 free (label_for_bb
);
1649 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1650 the ones jumping to the same label.
1651 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1654 group_case_labels_stmt (gswitch
*stmt
)
1656 int old_size
= gimple_switch_num_labels (stmt
);
1657 int i
, j
, new_size
= old_size
;
1658 basic_block default_bb
= NULL
;
1660 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1662 /* Look for possible opportunities to merge cases. */
1664 while (i
< old_size
)
1666 tree base_case
, base_high
;
1667 basic_block base_bb
;
1669 base_case
= gimple_switch_label (stmt
, i
);
1671 gcc_assert (base_case
);
1672 base_bb
= label_to_block (CASE_LABEL (base_case
));
1674 /* Discard cases that have the same destination as the
1676 if (base_bb
== default_bb
)
1678 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1684 base_high
= CASE_HIGH (base_case
)
1685 ? CASE_HIGH (base_case
)
1686 : CASE_LOW (base_case
);
1689 /* Try to merge case labels. Break out when we reach the end
1690 of the label vector or when we cannot merge the next case
1691 label with the current one. */
1692 while (i
< old_size
)
1694 tree merge_case
= gimple_switch_label (stmt
, i
);
1695 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1696 wide_int bhp1
= wi::add (base_high
, 1);
1698 /* Merge the cases if they jump to the same place,
1699 and their ranges are consecutive. */
1700 if (merge_bb
== base_bb
1701 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1703 base_high
= CASE_HIGH (merge_case
) ?
1704 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1705 CASE_HIGH (base_case
) = base_high
;
1706 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1715 /* Compress the case labels in the label vector, and adjust the
1716 length of the vector. */
1717 for (i
= 0, j
= 0; i
< new_size
; i
++)
1719 while (! gimple_switch_label (stmt
, j
))
1721 gimple_switch_set_label (stmt
, i
,
1722 gimple_switch_label (stmt
, j
++));
1725 gcc_assert (new_size
<= old_size
);
1726 gimple_switch_set_num_labels (stmt
, new_size
);
1729 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1730 and scan the sorted vector of cases. Combine the ones jumping to the
1734 group_case_labels (void)
1738 FOR_EACH_BB_FN (bb
, cfun
)
1740 gimple stmt
= last_stmt (bb
);
1741 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1742 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1746 /* Checks whether we can merge block B into block A. */
1749 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1753 if (!single_succ_p (a
))
1756 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1759 if (single_succ (a
) != b
)
1762 if (!single_pred_p (b
))
1765 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1766 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1769 /* If A ends by a statement causing exceptions or something similar, we
1770 cannot merge the blocks. */
1771 stmt
= last_stmt (a
);
1772 if (stmt
&& stmt_ends_bb_p (stmt
))
1775 /* Do not allow a block with only a non-local label to be merged. */
1777 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1778 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1781 /* Examine the labels at the beginning of B. */
1782 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1786 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1789 lab
= gimple_label_label (label_stmt
);
1791 /* Do not remove user forced labels or for -O0 any user labels. */
1792 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1796 /* Protect simple loop latches. We only want to avoid merging
1797 the latch with the loop header or with a block in another
1798 loop in this case. */
1800 && b
->loop_father
->latch
== b
1801 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1802 && (b
->loop_father
->header
== a
1803 || b
->loop_father
!= a
->loop_father
))
1806 /* It must be possible to eliminate all phi nodes in B. If ssa form
1807 is not up-to-date and a name-mapping is registered, we cannot eliminate
1808 any phis. Symbols marked for renaming are never a problem though. */
1809 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1812 gphi
*phi
= gsi
.phi ();
1813 /* Technically only new names matter. */
1814 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1818 /* When not optimizing, don't merge if we'd lose goto_locus. */
1820 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1822 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1823 gimple_stmt_iterator prev
, next
;
1824 prev
= gsi_last_nondebug_bb (a
);
1825 next
= gsi_after_labels (b
);
1826 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1827 gsi_next_nondebug (&next
);
1828 if ((gsi_end_p (prev
)
1829 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1830 && (gsi_end_p (next
)
1831 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1838 /* Replaces all uses of NAME by VAL. */
1841 replace_uses_by (tree name
, tree val
)
1843 imm_use_iterator imm_iter
;
1848 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1850 /* Mark the block if we change the last stmt in it. */
1851 if (cfgcleanup_altered_bbs
1852 && stmt_ends_bb_p (stmt
))
1853 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1855 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1857 replace_exp (use
, val
);
1859 if (gimple_code (stmt
) == GIMPLE_PHI
)
1861 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1862 PHI_ARG_INDEX_FROM_USE (use
));
1863 if (e
->flags
& EDGE_ABNORMAL
1864 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1866 /* This can only occur for virtual operands, since
1867 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1868 would prevent replacement. */
1869 gcc_checking_assert (virtual_operand_p (name
));
1870 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1875 if (gimple_code (stmt
) != GIMPLE_PHI
)
1877 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1878 gimple orig_stmt
= stmt
;
1881 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1882 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1883 only change sth from non-invariant to invariant, and only
1884 when propagating constants. */
1885 if (is_gimple_min_invariant (val
))
1886 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1888 tree op
= gimple_op (stmt
, i
);
1889 /* Operands may be empty here. For example, the labels
1890 of a GIMPLE_COND are nulled out following the creation
1891 of the corresponding CFG edges. */
1892 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1893 recompute_tree_invariant_for_addr_expr (op
);
1896 if (fold_stmt (&gsi
))
1897 stmt
= gsi_stmt (gsi
);
1899 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1900 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1906 gcc_checking_assert (has_zero_uses (name
));
1908 /* Also update the trees stored in loop structures. */
1913 FOR_EACH_LOOP (loop
, 0)
1915 substitute_in_loop_info (loop
, name
, val
);
1920 /* Merge block B into block A. */
1923 gimple_merge_blocks (basic_block a
, basic_block b
)
1925 gimple_stmt_iterator last
, gsi
;
1929 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1931 /* Remove all single-valued PHI nodes from block B of the form
1932 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1933 gsi
= gsi_last_bb (a
);
1934 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1936 gimple phi
= gsi_stmt (psi
);
1937 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1939 bool may_replace_uses
= (virtual_operand_p (def
)
1940 || may_propagate_copy (def
, use
));
1942 /* In case we maintain loop closed ssa form, do not propagate arguments
1943 of loop exit phi nodes. */
1945 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1946 && !virtual_operand_p (def
)
1947 && TREE_CODE (use
) == SSA_NAME
1948 && a
->loop_father
!= b
->loop_father
)
1949 may_replace_uses
= false;
1951 if (!may_replace_uses
)
1953 gcc_assert (!virtual_operand_p (def
));
1955 /* Note that just emitting the copies is fine -- there is no problem
1956 with ordering of phi nodes. This is because A is the single
1957 predecessor of B, therefore results of the phi nodes cannot
1958 appear as arguments of the phi nodes. */
1959 copy
= gimple_build_assign (def
, use
);
1960 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1961 remove_phi_node (&psi
, false);
1965 /* If we deal with a PHI for virtual operands, we can simply
1966 propagate these without fussing with folding or updating
1968 if (virtual_operand_p (def
))
1970 imm_use_iterator iter
;
1971 use_operand_p use_p
;
1974 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1975 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1976 SET_USE (use_p
, use
);
1978 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1979 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1982 replace_uses_by (def
, use
);
1984 remove_phi_node (&psi
, true);
1988 /* Ensure that B follows A. */
1989 move_block_after (b
, a
);
1991 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1992 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1994 /* Remove labels from B and set gimple_bb to A for other statements. */
1995 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1997 gimple stmt
= gsi_stmt (gsi
);
1998 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2000 tree label
= gimple_label_label (label_stmt
);
2003 gsi_remove (&gsi
, false);
2005 /* Now that we can thread computed gotos, we might have
2006 a situation where we have a forced label in block B
2007 However, the label at the start of block B might still be
2008 used in other ways (think about the runtime checking for
2009 Fortran assigned gotos). So we can not just delete the
2010 label. Instead we move the label to the start of block A. */
2011 if (FORCED_LABEL (label
))
2013 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2014 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2016 /* Other user labels keep around in a form of a debug stmt. */
2017 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
2019 gimple dbg
= gimple_build_debug_bind (label
,
2022 gimple_debug_bind_reset_value (dbg
);
2023 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2026 lp_nr
= EH_LANDING_PAD_NR (label
);
2029 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2030 lp
->post_landing_pad
= NULL
;
2035 gimple_set_bb (stmt
, a
);
2040 /* When merging two BBs, if their counts are different, the larger count
2041 is selected as the new bb count. This is to handle inconsistent
2043 if (a
->loop_father
== b
->loop_father
)
2045 a
->count
= MAX (a
->count
, b
->count
);
2046 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
2049 /* Merge the sequences. */
2050 last
= gsi_last_bb (a
);
2051 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2052 set_bb_seq (b
, NULL
);
2054 if (cfgcleanup_altered_bbs
)
2055 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2059 /* Return the one of two successors of BB that is not reachable by a
2060 complex edge, if there is one. Else, return BB. We use
2061 this in optimizations that use post-dominators for their heuristics,
2062 to catch the cases in C++ where function calls are involved. */
2065 single_noncomplex_succ (basic_block bb
)
2068 if (EDGE_COUNT (bb
->succs
) != 2)
2071 e0
= EDGE_SUCC (bb
, 0);
2072 e1
= EDGE_SUCC (bb
, 1);
2073 if (e0
->flags
& EDGE_COMPLEX
)
2075 if (e1
->flags
& EDGE_COMPLEX
)
2081 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2084 notice_special_calls (gcall
*call
)
2086 int flags
= gimple_call_flags (call
);
2088 if (flags
& ECF_MAY_BE_ALLOCA
)
2089 cfun
->calls_alloca
= true;
2090 if (flags
& ECF_RETURNS_TWICE
)
2091 cfun
->calls_setjmp
= true;
2095 /* Clear flags set by notice_special_calls. Used by dead code removal
2096 to update the flags. */
2099 clear_special_calls (void)
2101 cfun
->calls_alloca
= false;
2102 cfun
->calls_setjmp
= false;
2105 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2108 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2110 /* Since this block is no longer reachable, we can just delete all
2111 of its PHI nodes. */
2112 remove_phi_nodes (bb
);
2114 /* Remove edges to BB's successors. */
2115 while (EDGE_COUNT (bb
->succs
) > 0)
2116 remove_edge (EDGE_SUCC (bb
, 0));
2120 /* Remove statements of basic block BB. */
2123 remove_bb (basic_block bb
)
2125 gimple_stmt_iterator i
;
2129 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2130 if (dump_flags
& TDF_DETAILS
)
2132 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2133 fprintf (dump_file
, "\n");
2139 struct loop
*loop
= bb
->loop_father
;
2141 /* If a loop gets removed, clean up the information associated
2143 if (loop
->latch
== bb
2144 || loop
->header
== bb
)
2145 free_numbers_of_iterations_estimates_loop (loop
);
2148 /* Remove all the instructions in the block. */
2149 if (bb_seq (bb
) != NULL
)
2151 /* Walk backwards so as to get a chance to substitute all
2152 released DEFs into debug stmts. See
2153 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2155 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2157 gimple stmt
= gsi_stmt (i
);
2158 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2160 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2161 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2164 gimple_stmt_iterator new_gsi
;
2166 /* A non-reachable non-local label may still be referenced.
2167 But it no longer needs to carry the extra semantics of
2169 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2171 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2172 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2175 new_bb
= bb
->prev_bb
;
2176 new_gsi
= gsi_start_bb (new_bb
);
2177 gsi_remove (&i
, false);
2178 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2182 /* Release SSA definitions if we are in SSA. Note that we
2183 may be called when not in SSA. For example,
2184 final_cleanup calls this function via
2185 cleanup_tree_cfg. */
2186 if (gimple_in_ssa_p (cfun
))
2187 release_defs (stmt
);
2189 gsi_remove (&i
, true);
2193 i
= gsi_last_bb (bb
);
2199 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2200 bb
->il
.gimple
.seq
= NULL
;
2201 bb
->il
.gimple
.phi_nodes
= NULL
;
2205 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2206 predicate VAL, return the edge that will be taken out of the block.
2207 If VAL does not match a unique edge, NULL is returned. */
2210 find_taken_edge (basic_block bb
, tree val
)
2214 stmt
= last_stmt (bb
);
2217 gcc_assert (is_ctrl_stmt (stmt
));
2222 if (!is_gimple_min_invariant (val
))
2225 if (gimple_code (stmt
) == GIMPLE_COND
)
2226 return find_taken_edge_cond_expr (bb
, val
);
2228 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2229 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2231 if (computed_goto_p (stmt
))
2233 /* Only optimize if the argument is a label, if the argument is
2234 not a label then we can not construct a proper CFG.
2236 It may be the case that we only need to allow the LABEL_REF to
2237 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2238 appear inside a LABEL_EXPR just to be safe. */
2239 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2240 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2241 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2248 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2249 statement, determine which of the outgoing edges will be taken out of the
2250 block. Return NULL if either edge may be taken. */
2253 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2258 dest
= label_to_block (val
);
2261 e
= find_edge (bb
, dest
);
2262 gcc_assert (e
!= NULL
);
2268 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2269 statement, determine which of the two edges will be taken out of the
2270 block. Return NULL if either edge may be taken. */
2273 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2275 edge true_edge
, false_edge
;
2277 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2279 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2280 return (integer_zerop (val
) ? false_edge
: true_edge
);
2283 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2284 statement, determine which edge will be taken out of the block. Return
2285 NULL if any edge may be taken. */
2288 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2291 basic_block dest_bb
;
2295 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2296 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2298 e
= find_edge (bb
, dest_bb
);
2304 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2305 We can make optimal use here of the fact that the case labels are
2306 sorted: We can do a binary search for a case matching VAL. */
2309 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2311 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2312 tree default_case
= gimple_switch_default_label (switch_stmt
);
2314 for (low
= 0, high
= n
; high
- low
> 1; )
2316 size_t i
= (high
+ low
) / 2;
2317 tree t
= gimple_switch_label (switch_stmt
, i
);
2320 /* Cache the result of comparing CASE_LOW and val. */
2321 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2328 if (CASE_HIGH (t
) == NULL
)
2330 /* A singe-valued case label. */
2336 /* A case range. We can only handle integer ranges. */
2337 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2342 return default_case
;
2346 /* Dump a basic block on stderr. */
2349 gimple_debug_bb (basic_block bb
)
2351 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2355 /* Dump basic block with index N on stderr. */
2358 gimple_debug_bb_n (int n
)
2360 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2361 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2365 /* Dump the CFG on stderr.
2367 FLAGS are the same used by the tree dumping functions
2368 (see TDF_* in dumpfile.h). */
2371 gimple_debug_cfg (int flags
)
2373 gimple_dump_cfg (stderr
, flags
);
2377 /* Dump the program showing basic block boundaries on the given FILE.
2379 FLAGS are the same used by the tree dumping functions (see TDF_* in
2383 gimple_dump_cfg (FILE *file
, int flags
)
2385 if (flags
& TDF_DETAILS
)
2387 dump_function_header (file
, current_function_decl
, flags
);
2388 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2389 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2390 last_basic_block_for_fn (cfun
));
2392 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2393 fprintf (file
, "\n");
2396 if (flags
& TDF_STATS
)
2397 dump_cfg_stats (file
);
2399 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2403 /* Dump CFG statistics on FILE. */
2406 dump_cfg_stats (FILE *file
)
2408 static long max_num_merged_labels
= 0;
2409 unsigned long size
, total
= 0;
2412 const char * const fmt_str
= "%-30s%-13s%12s\n";
2413 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2414 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2415 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2416 const char *funcname
= current_function_name ();
2418 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2420 fprintf (file
, "---------------------------------------------------------\n");
2421 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2422 fprintf (file
, fmt_str
, "", " instances ", "used ");
2423 fprintf (file
, "---------------------------------------------------------\n");
2425 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2427 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2428 SCALE (size
), LABEL (size
));
2431 FOR_EACH_BB_FN (bb
, cfun
)
2432 num_edges
+= EDGE_COUNT (bb
->succs
);
2433 size
= num_edges
* sizeof (struct edge_def
);
2435 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2437 fprintf (file
, "---------------------------------------------------------\n");
2438 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2440 fprintf (file
, "---------------------------------------------------------\n");
2441 fprintf (file
, "\n");
2443 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2444 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2446 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2447 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2449 fprintf (file
, "\n");
2453 /* Dump CFG statistics on stderr. Keep extern so that it's always
2454 linked in the final executable. */
2457 debug_cfg_stats (void)
2459 dump_cfg_stats (stderr
);
2462 /*---------------------------------------------------------------------------
2463 Miscellaneous helpers
2464 ---------------------------------------------------------------------------*/
2466 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2467 flow. Transfers of control flow associated with EH are excluded. */
2470 call_can_make_abnormal_goto (gimple t
)
2472 /* If the function has no non-local labels, then a call cannot make an
2473 abnormal transfer of control. */
2474 if (!cfun
->has_nonlocal_label
2475 && !cfun
->calls_setjmp
)
2478 /* Likewise if the call has no side effects. */
2479 if (!gimple_has_side_effects (t
))
2482 /* Likewise if the called function is leaf. */
2483 if (gimple_call_flags (t
) & ECF_LEAF
)
2490 /* Return true if T can make an abnormal transfer of control flow.
2491 Transfers of control flow associated with EH are excluded. */
2494 stmt_can_make_abnormal_goto (gimple t
)
2496 if (computed_goto_p (t
))
2498 if (is_gimple_call (t
))
2499 return call_can_make_abnormal_goto (t
);
2504 /* Return true if T represents a stmt that always transfers control. */
/* NOTE(review): the case labels of this switch were lost in this extraction;
   presumably they enumerate the control-transfer GIMPLE codes (conditional,
   switch, goto, return, resx) -- confirm against the complete source.  */
2507 is_ctrl_stmt (gimple t
)
2509 switch (gimple_code (t
))
2523 /* Return true if T is a statement that may alter the flow of control
2524 (e.g., a call to a non-returning function). */
2527 is_ctrl_altering_stmt (gimple t
)
2531 switch (gimple_code (t
))
2534 /* Per stmt call flag indicates whether the call could alter
2536 if (gimple_call_ctrl_altering_p (t
))
2540 case GIMPLE_EH_DISPATCH
:
2541 /* EH_DISPATCH branches to the individual catch handlers at
2542 this level of a try or allowed-exceptions region. It can
2543 fallthru to the next statement as well. */
2547 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2552 /* OpenMP directives alter control flow. */
2555 case GIMPLE_TRANSACTION
:
2556 /* A transaction start alters control flow. */
2563 /* If a statement can throw, it alters control flow. */
2564 return stmt_can_throw_internal (t
);
2568 /* Return true if T is a simple local goto. */
2571 simple_goto_p (gimple t
)
2573 return (gimple_code (t
) == GIMPLE_GOTO
2574 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2578 /* Return true if STMT should start a new basic block. PREV_STMT is
2579 the statement preceding STMT. It is used when STMT is a label or a
2580 case label. Labels should only start a new basic block if their
2581 previous statement wasn't a label. Otherwise, sequence of labels
2582 would generate unnecessary basic blocks that only contain a single
2586 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2591 /* Labels start a new basic block only if the preceding statement
2592 wasn't a label of the same type. This prevents the creation of
2593 consecutive blocks that have nothing but a single label. */
2594 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2596 /* Nonlocal and computed GOTO targets always start a new block. */
2597 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2598 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2601 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2603 if (DECL_NONLOCAL (gimple_label_label (
2604 as_a
<glabel
*> (prev_stmt
))))
2607 cfg_stats
.num_merged_labels
++;
2613 else if (gimple_code (stmt
) == GIMPLE_CALL
2614 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2615 /* setjmp acts similar to a nonlocal GOTO target and thus should
2616 start a new block. */
2623 /* Return true if T should end a basic block. */
2626 stmt_ends_bb_p (gimple t
)
2628 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2631 /* Remove block annotations and other data structures. */
2634 delete_tree_cfg_annotations (void)
2636 vec_free (label_to_block_map_for_fn (cfun
));
2640 /* Return the first statement in basic block BB. */
2643 first_stmt (basic_block bb
)
2645 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2648 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2656 /* Return the first non-label statement in basic block BB. */
2659 first_non_label_stmt (basic_block bb
)
2661 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2662 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2664 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2667 /* Return the last statement in basic block BB. */
2670 last_stmt (basic_block bb
)
2672 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2675 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2683 /* Return the last statement of an otherwise empty block. Return NULL
2684 if the block is totally empty, or if it contains more than one
2688 last_and_only_stmt (basic_block bb
)
2690 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2696 last
= gsi_stmt (i
);
2697 gsi_prev_nondebug (&i
);
2701 /* Empty statements should no longer appear in the instruction stream.
2702 Everything that might have appeared before should be deleted by
2703 remove_useless_stmts, and the optimizers should just gsi_remove
2704 instead of smashing with build_empty_stmt.
2706 Thus the only thing that should appear here in a block containing
2707 one executable statement is a label. */
2708 prev
= gsi_stmt (i
);
2709 if (gimple_code (prev
) == GIMPLE_LABEL
)
2715 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2718 reinstall_phi_args (edge new_edge
, edge old_edge
)
2724 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2728 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2729 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2730 i
++, gsi_next (&phis
))
2732 gphi
*phi
= phis
.phi ();
2733 tree result
= redirect_edge_var_map_result (vm
);
2734 tree arg
= redirect_edge_var_map_def (vm
);
2736 gcc_assert (result
== gimple_phi_result (phi
));
2738 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2741 redirect_edge_var_map_clear (old_edge
);
2744 /* Returns the basic block after which the new basic block created
2745 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2746 near its "logical" location. This is of most help to humans looking
2747 at debugging dumps. */
2750 split_edge_bb_loc (edge edge_in
)
2752 basic_block dest
= edge_in
->dest
;
2753 basic_block dest_prev
= dest
->prev_bb
;
2757 edge e
= find_edge (dest_prev
, dest
);
2758 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2759 return edge_in
->src
;
2764 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2765 Abort on abnormal edges. */
2768 gimple_split_edge (edge edge_in
)
2770 basic_block new_bb
, after_bb
, dest
;
2773 /* Abnormal edges cannot be split. */
2774 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2776 dest
= edge_in
->dest
;
2778 after_bb
= split_edge_bb_loc (edge_in
);
2780 new_bb
= create_empty_bb (after_bb
);
2781 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2782 new_bb
->count
= edge_in
->count
;
2783 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2784 new_edge
->probability
= REG_BR_PROB_BASE
;
2785 new_edge
->count
= edge_in
->count
;
2787 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2788 gcc_assert (e
== edge_in
);
2789 reinstall_phi_args (new_edge
, e
);
2795 /* Verify properties of the address expression T with base object BASE. */
2798 verify_address (tree t
, tree base
)
2801 bool old_side_effects
;
2803 bool new_side_effects
;
2805 old_constant
= TREE_CONSTANT (t
);
2806 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2808 recompute_tree_invariant_for_addr_expr (t
);
2809 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2810 new_constant
= TREE_CONSTANT (t
);
2812 if (old_constant
!= new_constant
)
2814 error ("constant not recomputed when ADDR_EXPR changed");
2817 if (old_side_effects
!= new_side_effects
)
2819 error ("side effects not recomputed when ADDR_EXPR changed");
2823 if (!(TREE_CODE (base
) == VAR_DECL
2824 || TREE_CODE (base
) == PARM_DECL
2825 || TREE_CODE (base
) == RESULT_DECL
))
2828 if (DECL_GIMPLE_REG_P (base
))
2830 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2837 /* Callback for walk_tree, check that all elements with address taken are
2838 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2839 inside a PHI node. */
2842 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2849 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2850 #define CHECK_OP(N, MSG) \
2851 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2852 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2854 switch (TREE_CODE (t
))
2857 if (SSA_NAME_IN_FREE_LIST (t
))
2859 error ("SSA name in freelist but still referenced");
2865 error ("INDIRECT_REF in gimple IL");
2869 x
= TREE_OPERAND (t
, 0);
2870 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2871 || !is_gimple_mem_ref_addr (x
))
2873 error ("invalid first operand of MEM_REF");
2876 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2877 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2879 error ("invalid offset operand of MEM_REF");
2880 return TREE_OPERAND (t
, 1);
2882 if (TREE_CODE (x
) == ADDR_EXPR
2883 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2889 x
= fold (ASSERT_EXPR_COND (t
));
2890 if (x
== boolean_false_node
)
2892 error ("ASSERT_EXPR with an always-false condition");
2898 error ("MODIFY_EXPR not expected while having tuples");
2905 gcc_assert (is_gimple_address (t
));
2907 /* Skip any references (they will be checked when we recurse down the
2908 tree) and ensure that any variable used as a prefix is marked
2910 for (x
= TREE_OPERAND (t
, 0);
2911 handled_component_p (x
);
2912 x
= TREE_OPERAND (x
, 0))
2915 if ((tem
= verify_address (t
, x
)))
2918 if (!(TREE_CODE (x
) == VAR_DECL
2919 || TREE_CODE (x
) == PARM_DECL
2920 || TREE_CODE (x
) == RESULT_DECL
))
2923 if (!TREE_ADDRESSABLE (x
))
2925 error ("address taken, but ADDRESSABLE bit not set");
2933 x
= COND_EXPR_COND (t
);
2934 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2936 error ("non-integral used in condition");
2939 if (!is_gimple_condexpr (x
))
2941 error ("invalid conditional operand");
2946 case NON_LVALUE_EXPR
:
2947 case TRUTH_NOT_EXPR
:
2951 case FIX_TRUNC_EXPR
:
2956 CHECK_OP (0, "invalid operand to unary operator");
2962 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2964 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2968 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2970 tree t0
= TREE_OPERAND (t
, 0);
2971 tree t1
= TREE_OPERAND (t
, 1);
2972 tree t2
= TREE_OPERAND (t
, 2);
2973 if (!tree_fits_uhwi_p (t1
)
2974 || !tree_fits_uhwi_p (t2
))
2976 error ("invalid position or size operand to BIT_FIELD_REF");
2979 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2980 && (TYPE_PRECISION (TREE_TYPE (t
))
2981 != tree_to_uhwi (t1
)))
2983 error ("integral result type precision does not match "
2984 "field size of BIT_FIELD_REF");
2987 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2988 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2989 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2990 != tree_to_uhwi (t1
)))
2992 error ("mode precision of non-integral result does not "
2993 "match field size of BIT_FIELD_REF");
2996 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2997 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2998 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
3000 error ("position plus size exceeds size of referenced object in "
3005 t
= TREE_OPERAND (t
, 0);
3010 case ARRAY_RANGE_REF
:
3011 case VIEW_CONVERT_EXPR
:
3012 /* We have a nest of references. Verify that each of the operands
3013 that determine where to reference is either a constant or a variable,
3014 verify that the base is valid, and then show we've already checked
3016 while (handled_component_p (t
))
3018 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3019 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3020 else if (TREE_CODE (t
) == ARRAY_REF
3021 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3023 CHECK_OP (1, "invalid array index");
3024 if (TREE_OPERAND (t
, 2))
3025 CHECK_OP (2, "invalid array lower bound");
3026 if (TREE_OPERAND (t
, 3))
3027 CHECK_OP (3, "invalid array stride");
3029 else if (TREE_CODE (t
) == BIT_FIELD_REF
3030 || TREE_CODE (t
) == REALPART_EXPR
3031 || TREE_CODE (t
) == IMAGPART_EXPR
)
3033 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3038 t
= TREE_OPERAND (t
, 0);
3041 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3043 error ("invalid reference prefix");
3050 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3051 POINTER_PLUS_EXPR. */
3052 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3054 error ("invalid operand to plus/minus, type is a pointer");
3057 CHECK_OP (0, "invalid operand to binary operator");
3058 CHECK_OP (1, "invalid operand to binary operator");
3061 case POINTER_PLUS_EXPR
:
3062 /* Check to make sure the first operand is a pointer or reference type. */
3063 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3065 error ("invalid operand to pointer plus, first operand is not a pointer");
3068 /* Check to make sure the second operand is a ptrofftype. */
3069 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
3071 error ("invalid operand to pointer plus, second operand is not an "
3072 "integer type of appropriate width");
3082 case UNORDERED_EXPR
:
3091 case TRUNC_DIV_EXPR
:
3093 case FLOOR_DIV_EXPR
:
3094 case ROUND_DIV_EXPR
:
3095 case TRUNC_MOD_EXPR
:
3097 case FLOOR_MOD_EXPR
:
3098 case ROUND_MOD_EXPR
:
3100 case EXACT_DIV_EXPR
:
3110 CHECK_OP (0, "invalid operand to binary operator");
3111 CHECK_OP (1, "invalid operand to binary operator");
3115 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3119 case CASE_LABEL_EXPR
:
3122 error ("invalid CASE_CHAIN");
3136 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3137 Returns true if there is an error, otherwise false. */
3140 verify_types_in_gimple_min_lval (tree expr
)
3144 if (is_gimple_id (expr
))
3147 if (TREE_CODE (expr
) != TARGET_MEM_REF
3148 && TREE_CODE (expr
) != MEM_REF
)
3150 error ("invalid expression for min lvalue");
3154 /* TARGET_MEM_REFs are strange beasts. */
3155 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3158 op
= TREE_OPERAND (expr
, 0);
3159 if (!is_gimple_val (op
))
3161 error ("invalid operand in indirect reference");
3162 debug_generic_stmt (op
);
3165 /* Memory references now generally can involve a value conversion. */
3170 /* Verify if EXPR is a valid GIMPLE reference expression. If
3171 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3172 if there is an error, otherwise false. */
3175 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3177 while (handled_component_p (expr
))
3179 tree op
= TREE_OPERAND (expr
, 0);
3181 if (TREE_CODE (expr
) == ARRAY_REF
3182 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3184 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3185 || (TREE_OPERAND (expr
, 2)
3186 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3187 || (TREE_OPERAND (expr
, 3)
3188 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3190 error ("invalid operands to array reference");
3191 debug_generic_stmt (expr
);
3196 /* Verify if the reference array element types are compatible. */
3197 if (TREE_CODE (expr
) == ARRAY_REF
3198 && !useless_type_conversion_p (TREE_TYPE (expr
),
3199 TREE_TYPE (TREE_TYPE (op
))))
3201 error ("type mismatch in array reference");
3202 debug_generic_stmt (TREE_TYPE (expr
));
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3206 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3207 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3208 TREE_TYPE (TREE_TYPE (op
))))
3210 error ("type mismatch in array range reference");
3211 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3212 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3216 if ((TREE_CODE (expr
) == REALPART_EXPR
3217 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3218 && !useless_type_conversion_p (TREE_TYPE (expr
),
3219 TREE_TYPE (TREE_TYPE (op
))))
3221 error ("type mismatch in real/imagpart reference");
3222 debug_generic_stmt (TREE_TYPE (expr
));
3223 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3227 if (TREE_CODE (expr
) == COMPONENT_REF
3228 && !useless_type_conversion_p (TREE_TYPE (expr
),
3229 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3231 error ("type mismatch in component reference");
3232 debug_generic_stmt (TREE_TYPE (expr
));
3233 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3237 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3239 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3240 that their operand is not an SSA name or an invariant when
3241 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3242 bug). Otherwise there is nothing to verify, gross mismatches at
3243 most invoke undefined behavior. */
3245 && (TREE_CODE (op
) == SSA_NAME
3246 || is_gimple_min_invariant (op
)))
3248 error ("conversion of an SSA_NAME on the left hand side");
3249 debug_generic_stmt (expr
);
3252 else if (TREE_CODE (op
) == SSA_NAME
3253 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3255 error ("conversion of register to a different size");
3256 debug_generic_stmt (expr
);
3259 else if (!handled_component_p (op
))
3266 if (TREE_CODE (expr
) == MEM_REF
)
3268 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3270 error ("invalid address operand in MEM_REF");
3271 debug_generic_stmt (expr
);
3274 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3275 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3277 error ("invalid offset operand in MEM_REF");
3278 debug_generic_stmt (expr
);
3282 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3284 if (!TMR_BASE (expr
)
3285 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3287 error ("invalid address operand in TARGET_MEM_REF");
3290 if (!TMR_OFFSET (expr
)
3291 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3292 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3294 error ("invalid offset operand in TARGET_MEM_REF");
3295 debug_generic_stmt (expr
);
3300 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3301 && verify_types_in_gimple_min_lval (expr
));
3304 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3305 list of pointer-to types that is trivially convertible to DEST. */
3308 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3312 if (!TYPE_POINTER_TO (src_obj
))
3315 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3316 if (useless_type_conversion_p (dest
, src
))
3322 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3323 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3326 valid_fixed_convert_types_p (tree type1
, tree type2
)
3328 return (FIXED_POINT_TYPE_P (type1
)
3329 && (INTEGRAL_TYPE_P (type2
)
3330 || SCALAR_FLOAT_TYPE_P (type2
)
3331 || FIXED_POINT_TYPE_P (type2
)));
3334 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3335 is a problem, otherwise false. */
3338 verify_gimple_call (gcall
*stmt
)
3340 tree fn
= gimple_call_fn (stmt
);
3341 tree fntype
, fndecl
;
3344 if (gimple_call_internal_p (stmt
))
3348 error ("gimple call has two targets");
3349 debug_generic_stmt (fn
);
3357 error ("gimple call has no target");
3362 if (fn
&& !is_gimple_call_addr (fn
))
3364 error ("invalid function in gimple call");
3365 debug_generic_stmt (fn
);
3370 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3371 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3372 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3374 error ("non-function in gimple call");
3378 fndecl
= gimple_call_fndecl (stmt
);
3380 && TREE_CODE (fndecl
) == FUNCTION_DECL
3381 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3382 && !DECL_PURE_P (fndecl
)
3383 && !TREE_READONLY (fndecl
))
3385 error ("invalid pure const state for function");
3389 tree lhs
= gimple_call_lhs (stmt
);
3391 && (!is_gimple_lvalue (lhs
)
3392 || verify_types_in_gimple_reference (lhs
, true)))
3394 error ("invalid LHS in gimple call");
3399 && gimple_call_ctrl_altering_p (stmt
)
3400 && gimple_call_noreturn_p (stmt
)
3401 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs
))) == INTEGER_CST
)
3403 error ("LHS in noreturn call");
3407 fntype
= gimple_call_fntype (stmt
);
3410 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3411 /* ??? At least C++ misses conversions at assignments from
3412 void * call results.
3413 ??? Java is completely off. Especially with functions
3414 returning java.lang.Object.
3415 For now simply allow arbitrary pointer type conversions. */
3416 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3417 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3419 error ("invalid conversion in gimple call");
3420 debug_generic_stmt (TREE_TYPE (lhs
));
3421 debug_generic_stmt (TREE_TYPE (fntype
));
3425 if (gimple_call_chain (stmt
)
3426 && !is_gimple_val (gimple_call_chain (stmt
)))
3428 error ("invalid static chain in gimple call");
3429 debug_generic_stmt (gimple_call_chain (stmt
));
3433 /* If there is a static chain argument, the call should either be
3434 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3435 if (gimple_call_chain (stmt
)
3437 && !DECL_STATIC_CHAIN (fndecl
))
3439 error ("static chain with function that doesn%'t use one");
3443 /* ??? The C frontend passes unpromoted arguments in case it
3444 didn't see a function declaration before the call. So for now
3445 leave the call arguments mostly unverified. Once we gimplify
3446 unit-at-a-time we have a chance to fix this. */
3448 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3450 tree arg
= gimple_call_arg (stmt
, i
);
3451 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3452 && !is_gimple_val (arg
))
3453 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3454 && !is_gimple_lvalue (arg
)))
3456 error ("invalid argument to gimple call");
3457 debug_generic_expr (arg
);
3465 /* Verifies the gimple comparison with the result type TYPE and
3466 the operands OP0 and OP1. */
3469 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3471 tree op0_type
= TREE_TYPE (op0
);
3472 tree op1_type
= TREE_TYPE (op1
);
3474 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3476 error ("invalid operands in gimple comparison");
3480 /* For comparisons we do not have the operations type as the
3481 effective type the comparison is carried out in. Instead
3482 we require that either the first operand is trivially
3483 convertible into the second, or the other way around.
3484 Because we special-case pointers to void we allow
3485 comparisons of pointers with the same mode as well. */
3486 if (!useless_type_conversion_p (op0_type
, op1_type
)
3487 && !useless_type_conversion_p (op1_type
, op0_type
)
3488 && (!POINTER_TYPE_P (op0_type
)
3489 || !POINTER_TYPE_P (op1_type
)
3490 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3492 error ("mismatching comparison operand types");
3493 debug_generic_expr (op0_type
);
3494 debug_generic_expr (op1_type
);
3498 /* The resulting type of a comparison may be an effective boolean type. */
3499 if (INTEGRAL_TYPE_P (type
)
3500 && (TREE_CODE (type
) == BOOLEAN_TYPE
3501 || TYPE_PRECISION (type
) == 1))
3503 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3504 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3506 error ("vector comparison returning a boolean");
3507 debug_generic_expr (op0_type
);
3508 debug_generic_expr (op1_type
);
3512 /* Or an integer vector type with the same size and element count
3513 as the comparison operand types. */
3514 else if (TREE_CODE (type
) == VECTOR_TYPE
3515 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3517 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3518 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3520 error ("non-vector operands in vector comparison");
3521 debug_generic_expr (op0_type
);
3522 debug_generic_expr (op1_type
);
3526 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3527 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3528 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3529 /* The result of a vector comparison is of signed
3531 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3533 error ("invalid vector comparison resulting type");
3534 debug_generic_expr (type
);
3540 error ("bogus comparison result type");
3541 debug_generic_expr (type
);
3548 /* Verify a gimple assignment statement STMT with an unary rhs.
3549 Returns true if anything is wrong. */
3552 verify_gimple_assign_unary (gassign
*stmt
)
3554 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3555 tree lhs
= gimple_assign_lhs (stmt
);
3556 tree lhs_type
= TREE_TYPE (lhs
);
3557 tree rhs1
= gimple_assign_rhs1 (stmt
);
3558 tree rhs1_type
= TREE_TYPE (rhs1
);
3560 if (!is_gimple_reg (lhs
))
3562 error ("non-register as LHS of unary operation");
3566 if (!is_gimple_val (rhs1
))
3568 error ("invalid operand in unary operation");
3572 /* First handle conversions. */
3577 /* Allow conversions from pointer type to integral type only if
3578 there is no sign or zero extension involved.
3579 For targets were the precision of ptrofftype doesn't match that
3580 of pointers we need to allow arbitrary conversions to ptrofftype. */
3581 if ((POINTER_TYPE_P (lhs_type
)
3582 && INTEGRAL_TYPE_P (rhs1_type
))
3583 || (POINTER_TYPE_P (rhs1_type
)
3584 && INTEGRAL_TYPE_P (lhs_type
)
3585 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3586 || ptrofftype_p (sizetype
))))
3589 /* Allow conversion from integral to offset type and vice versa. */
3590 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3591 && INTEGRAL_TYPE_P (rhs1_type
))
3592 || (INTEGRAL_TYPE_P (lhs_type
)
3593 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3596 /* Otherwise assert we are converting between types of the
3598 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3600 error ("invalid types in nop conversion");
3601 debug_generic_expr (lhs_type
);
3602 debug_generic_expr (rhs1_type
);
3609 case ADDR_SPACE_CONVERT_EXPR
:
3611 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3612 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3613 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3615 error ("invalid types in address space conversion");
3616 debug_generic_expr (lhs_type
);
3617 debug_generic_expr (rhs1_type
);
3624 case FIXED_CONVERT_EXPR
:
3626 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3627 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3629 error ("invalid types in fixed-point conversion");
3630 debug_generic_expr (lhs_type
);
3631 debug_generic_expr (rhs1_type
);
3640 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3641 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3642 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3644 error ("invalid types in conversion to floating point");
3645 debug_generic_expr (lhs_type
);
3646 debug_generic_expr (rhs1_type
);
3653 case FIX_TRUNC_EXPR
:
3655 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3656 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3657 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3659 error ("invalid types in conversion to integer");
3660 debug_generic_expr (lhs_type
);
3661 debug_generic_expr (rhs1_type
);
3667 case REDUC_MAX_EXPR
:
3668 case REDUC_MIN_EXPR
:
3669 case REDUC_PLUS_EXPR
:
3670 if (!VECTOR_TYPE_P (rhs1_type
)
3671 || !useless_type_conversion_p (lhs_type
, TREE_TYPE (rhs1_type
)))
3673 error ("reduction should convert from vector to element type");
3674 debug_generic_expr (lhs_type
);
3675 debug_generic_expr (rhs1_type
);
3680 case VEC_UNPACK_HI_EXPR
:
3681 case VEC_UNPACK_LO_EXPR
:
3682 case VEC_UNPACK_FLOAT_HI_EXPR
:
3683 case VEC_UNPACK_FLOAT_LO_EXPR
:
3698 /* For the remaining codes assert there is no conversion involved. */
3699 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3701 error ("non-trivial conversion in unary operation");
3702 debug_generic_expr (lhs_type
);
3703 debug_generic_expr (rhs1_type
);
3710 /* Verify a gimple assignment statement STMT with a binary rhs.
3711 Returns true if anything is wrong. */
3714 verify_gimple_assign_binary (gassign
*stmt
)
3716 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3717 tree lhs
= gimple_assign_lhs (stmt
);
3718 tree lhs_type
= TREE_TYPE (lhs
);
3719 tree rhs1
= gimple_assign_rhs1 (stmt
);
3720 tree rhs1_type
= TREE_TYPE (rhs1
);
3721 tree rhs2
= gimple_assign_rhs2 (stmt
);
3722 tree rhs2_type
= TREE_TYPE (rhs2
);
3724 if (!is_gimple_reg (lhs
))
3726 error ("non-register as LHS of binary operation");
3730 if (!is_gimple_val (rhs1
)
3731 || !is_gimple_val (rhs2
))
3733 error ("invalid operands in binary operation");
3737 /* First handle operations that involve different types. */
3742 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3743 || !(INTEGRAL_TYPE_P (rhs1_type
)
3744 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3745 || !(INTEGRAL_TYPE_P (rhs2_type
)
3746 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3748 error ("type mismatch in complex expression");
3749 debug_generic_expr (lhs_type
);
3750 debug_generic_expr (rhs1_type
);
3751 debug_generic_expr (rhs2_type
);
3763 /* Shifts and rotates are ok on integral types, fixed point
3764 types and integer vector types. */
3765 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3766 && !FIXED_POINT_TYPE_P (rhs1_type
)
3767 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3768 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3769 || (!INTEGRAL_TYPE_P (rhs2_type
)
3770 /* Vector shifts of vectors are also ok. */
3771 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3772 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3773 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3774 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3775 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3777 error ("type mismatch in shift expression");
3778 debug_generic_expr (lhs_type
);
3779 debug_generic_expr (rhs1_type
);
3780 debug_generic_expr (rhs2_type
);
3787 case WIDEN_LSHIFT_EXPR
:
3789 if (!INTEGRAL_TYPE_P (lhs_type
)
3790 || !INTEGRAL_TYPE_P (rhs1_type
)
3791 || TREE_CODE (rhs2
) != INTEGER_CST
3792 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3794 error ("type mismatch in widening vector shift expression");
3795 debug_generic_expr (lhs_type
);
3796 debug_generic_expr (rhs1_type
);
3797 debug_generic_expr (rhs2_type
);
3804 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3805 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3807 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3808 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3809 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3810 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3811 || TREE_CODE (rhs2
) != INTEGER_CST
3812 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3813 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3815 error ("type mismatch in widening vector shift expression");
3816 debug_generic_expr (lhs_type
);
3817 debug_generic_expr (rhs1_type
);
3818 debug_generic_expr (rhs2_type
);
3828 tree lhs_etype
= lhs_type
;
3829 tree rhs1_etype
= rhs1_type
;
3830 tree rhs2_etype
= rhs2_type
;
3831 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3833 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3834 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3836 error ("invalid non-vector operands to vector valued plus");
3839 lhs_etype
= TREE_TYPE (lhs_type
);
3840 rhs1_etype
= TREE_TYPE (rhs1_type
);
3841 rhs2_etype
= TREE_TYPE (rhs2_type
);
3843 if (POINTER_TYPE_P (lhs_etype
)
3844 || POINTER_TYPE_P (rhs1_etype
)
3845 || POINTER_TYPE_P (rhs2_etype
))
3847 error ("invalid (pointer) operands to plus/minus");
3851 /* Continue with generic binary expression handling. */
3855 case POINTER_PLUS_EXPR
:
3857 if (!POINTER_TYPE_P (rhs1_type
)
3858 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3859 || !ptrofftype_p (rhs2_type
))
3861 error ("type mismatch in pointer plus expression");
3862 debug_generic_stmt (lhs_type
);
3863 debug_generic_stmt (rhs1_type
);
3864 debug_generic_stmt (rhs2_type
);
3871 case TRUTH_ANDIF_EXPR
:
3872 case TRUTH_ORIF_EXPR
:
3873 case TRUTH_AND_EXPR
:
3875 case TRUTH_XOR_EXPR
:
3885 case UNORDERED_EXPR
:
3893 /* Comparisons are also binary, but the result type is not
3894 connected to the operand types. */
3895 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3897 case WIDEN_MULT_EXPR
:
3898 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3900 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3901 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3903 case WIDEN_SUM_EXPR
:
3904 case VEC_WIDEN_MULT_HI_EXPR
:
3905 case VEC_WIDEN_MULT_LO_EXPR
:
3906 case VEC_WIDEN_MULT_EVEN_EXPR
:
3907 case VEC_WIDEN_MULT_ODD_EXPR
:
3908 case VEC_PACK_TRUNC_EXPR
:
3909 case VEC_PACK_SAT_EXPR
:
3910 case VEC_PACK_FIX_TRUNC_EXPR
:
3915 case MULT_HIGHPART_EXPR
:
3916 case TRUNC_DIV_EXPR
:
3918 case FLOOR_DIV_EXPR
:
3919 case ROUND_DIV_EXPR
:
3920 case TRUNC_MOD_EXPR
:
3922 case FLOOR_MOD_EXPR
:
3923 case ROUND_MOD_EXPR
:
3925 case EXACT_DIV_EXPR
:
3931 /* Continue with generic binary expression handling. */
3938 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3939 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3941 error ("type mismatch in binary expression");
3942 debug_generic_stmt (lhs_type
);
3943 debug_generic_stmt (rhs1_type
);
3944 debug_generic_stmt (rhs2_type
);
3951 /* Verify a gimple assignment statement STMT with a ternary rhs.
3952 Returns true if anything is wrong. */
3955 verify_gimple_assign_ternary (gassign
*stmt
)
3957 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3958 tree lhs
= gimple_assign_lhs (stmt
);
3959 tree lhs_type
= TREE_TYPE (lhs
);
3960 tree rhs1
= gimple_assign_rhs1 (stmt
);
3961 tree rhs1_type
= TREE_TYPE (rhs1
);
3962 tree rhs2
= gimple_assign_rhs2 (stmt
);
3963 tree rhs2_type
= TREE_TYPE (rhs2
);
3964 tree rhs3
= gimple_assign_rhs3 (stmt
);
3965 tree rhs3_type
= TREE_TYPE (rhs3
);
3967 if (!is_gimple_reg (lhs
))
3969 error ("non-register as LHS of ternary operation");
3973 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3974 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3975 || !is_gimple_val (rhs2
)
3976 || !is_gimple_val (rhs3
))
3978 error ("invalid operands in ternary operation");
3982 /* First handle operations that involve different types. */
3985 case WIDEN_MULT_PLUS_EXPR
:
3986 case WIDEN_MULT_MINUS_EXPR
:
3987 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3988 && !FIXED_POINT_TYPE_P (rhs1_type
))
3989 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3990 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3991 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3992 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3994 error ("type mismatch in widening multiply-accumulate expression");
3995 debug_generic_expr (lhs_type
);
3996 debug_generic_expr (rhs1_type
);
3997 debug_generic_expr (rhs2_type
);
3998 debug_generic_expr (rhs3_type
);
4004 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4005 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4006 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4008 error ("type mismatch in fused multiply-add expression");
4009 debug_generic_expr (lhs_type
);
4010 debug_generic_expr (rhs1_type
);
4011 debug_generic_expr (rhs2_type
);
4012 debug_generic_expr (rhs3_type
);
4019 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4020 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4022 error ("type mismatch in conditional expression");
4023 debug_generic_expr (lhs_type
);
4024 debug_generic_expr (rhs2_type
);
4025 debug_generic_expr (rhs3_type
);
4031 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4032 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4034 error ("type mismatch in vector permute expression");
4035 debug_generic_expr (lhs_type
);
4036 debug_generic_expr (rhs1_type
);
4037 debug_generic_expr (rhs2_type
);
4038 debug_generic_expr (rhs3_type
);
4042 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4043 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4044 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4046 error ("vector types expected in vector permute expression");
4047 debug_generic_expr (lhs_type
);
4048 debug_generic_expr (rhs1_type
);
4049 debug_generic_expr (rhs2_type
);
4050 debug_generic_expr (rhs3_type
);
4054 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
4055 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
4056 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
4057 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
4058 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4060 error ("vectors with different element number found "
4061 "in vector permute expression");
4062 debug_generic_expr (lhs_type
);
4063 debug_generic_expr (rhs1_type
);
4064 debug_generic_expr (rhs2_type
);
4065 debug_generic_expr (rhs3_type
);
4069 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4070 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
4071 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4073 error ("invalid mask type in vector permute expression");
4074 debug_generic_expr (lhs_type
);
4075 debug_generic_expr (rhs1_type
);
4076 debug_generic_expr (rhs2_type
);
4077 debug_generic_expr (rhs3_type
);
4084 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4085 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4086 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4087 (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4088 > GET_MODE_BITSIZE (GET_MODE_INNER
4089 (TYPE_MODE (TREE_TYPE (lhs_type
)))))
4091 error ("type mismatch in sad expression");
4092 debug_generic_expr (lhs_type
);
4093 debug_generic_expr (rhs1_type
);
4094 debug_generic_expr (rhs2_type
);
4095 debug_generic_expr (rhs3_type
);
4099 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4100 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4101 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4103 error ("vector types expected in sad expression");
4104 debug_generic_expr (lhs_type
);
4105 debug_generic_expr (rhs1_type
);
4106 debug_generic_expr (rhs2_type
);
4107 debug_generic_expr (rhs3_type
);
4114 case REALIGN_LOAD_EXPR
:
4124 /* Verify a gimple assignment statement STMT with a single rhs.
4125 Returns true if anything is wrong. */
4128 verify_gimple_assign_single (gassign
*stmt
)
4130 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4131 tree lhs
= gimple_assign_lhs (stmt
);
4132 tree lhs_type
= TREE_TYPE (lhs
);
4133 tree rhs1
= gimple_assign_rhs1 (stmt
);
4134 tree rhs1_type
= TREE_TYPE (rhs1
);
4137 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4139 error ("non-trivial conversion at assignment");
4140 debug_generic_expr (lhs_type
);
4141 debug_generic_expr (rhs1_type
);
4145 if (gimple_clobber_p (stmt
)
4146 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4148 error ("non-decl/MEM_REF LHS in clobber statement");
4149 debug_generic_expr (lhs
);
4153 if (handled_component_p (lhs
)
4154 || TREE_CODE (lhs
) == MEM_REF
4155 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4156 res
|= verify_types_in_gimple_reference (lhs
, true);
4158 /* Special codes we cannot handle via their class. */
4163 tree op
= TREE_OPERAND (rhs1
, 0);
4164 if (!is_gimple_addressable (op
))
4166 error ("invalid operand in unary expression");
4170 /* Technically there is no longer a need for matching types, but
4171 gimple hygiene asks for this check. In LTO we can end up
4172 combining incompatible units and thus end up with addresses
4173 of globals that change their type to a common one. */
4175 && !types_compatible_p (TREE_TYPE (op
),
4176 TREE_TYPE (TREE_TYPE (rhs1
)))
4177 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4180 error ("type mismatch in address expression");
4181 debug_generic_stmt (TREE_TYPE (rhs1
));
4182 debug_generic_stmt (TREE_TYPE (op
));
4186 return verify_types_in_gimple_reference (op
, true);
4191 error ("INDIRECT_REF in gimple IL");
4197 case ARRAY_RANGE_REF
:
4198 case VIEW_CONVERT_EXPR
:
4201 case TARGET_MEM_REF
:
4203 if (!is_gimple_reg (lhs
)
4204 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4206 error ("invalid rhs for gimple memory store");
4207 debug_generic_stmt (lhs
);
4208 debug_generic_stmt (rhs1
);
4211 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4223 /* tcc_declaration */
4228 if (!is_gimple_reg (lhs
)
4229 && !is_gimple_reg (rhs1
)
4230 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4232 error ("invalid rhs for gimple memory store");
4233 debug_generic_stmt (lhs
);
4234 debug_generic_stmt (rhs1
);
4240 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4243 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4245 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4247 /* For vector CONSTRUCTORs we require that either it is empty
4248 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4249 (then the element count must be correct to cover the whole
4250 outer vector and index must be NULL on all elements, or it is
4251 a CONSTRUCTOR of scalar elements, where we as an exception allow
4252 smaller number of elements (assuming zero filling) and
4253 consecutive indexes as compared to NULL indexes (such
4254 CONSTRUCTORs can appear in the IL from FEs). */
4255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4257 if (elt_t
== NULL_TREE
)
4259 elt_t
= TREE_TYPE (elt_v
);
4260 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4262 tree elt_t
= TREE_TYPE (elt_v
);
4263 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4266 error ("incorrect type of vector CONSTRUCTOR"
4268 debug_generic_stmt (rhs1
);
4271 else if (CONSTRUCTOR_NELTS (rhs1
)
4272 * TYPE_VECTOR_SUBPARTS (elt_t
)
4273 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4275 error ("incorrect number of vector CONSTRUCTOR"
4277 debug_generic_stmt (rhs1
);
4281 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4284 error ("incorrect type of vector CONSTRUCTOR elements");
4285 debug_generic_stmt (rhs1
);
4288 else if (CONSTRUCTOR_NELTS (rhs1
)
4289 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4291 error ("incorrect number of vector CONSTRUCTOR elements");
4292 debug_generic_stmt (rhs1
);
4296 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4298 error ("incorrect type of vector CONSTRUCTOR elements");
4299 debug_generic_stmt (rhs1
);
4302 if (elt_i
!= NULL_TREE
4303 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4304 || TREE_CODE (elt_i
) != INTEGER_CST
4305 || compare_tree_int (elt_i
, i
) != 0))
4307 error ("vector CONSTRUCTOR with non-NULL element index");
4308 debug_generic_stmt (rhs1
);
4311 if (!is_gimple_val (elt_v
))
4313 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4314 debug_generic_stmt (rhs1
);
4319 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4321 error ("non-vector CONSTRUCTOR with elements");
4322 debug_generic_stmt (rhs1
);
4328 case WITH_SIZE_EXPR
:
4338 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4339 is a problem, otherwise false. */
4342 verify_gimple_assign (gassign
*stmt
)
4344 switch (gimple_assign_rhs_class (stmt
))
4346 case GIMPLE_SINGLE_RHS
:
4347 return verify_gimple_assign_single (stmt
);
4349 case GIMPLE_UNARY_RHS
:
4350 return verify_gimple_assign_unary (stmt
);
4352 case GIMPLE_BINARY_RHS
:
4353 return verify_gimple_assign_binary (stmt
);
4355 case GIMPLE_TERNARY_RHS
:
4356 return verify_gimple_assign_ternary (stmt
);
4363 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4364 is a problem, otherwise false. */
4367 verify_gimple_return (greturn
*stmt
)
4369 tree op
= gimple_return_retval (stmt
);
4370 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4372 /* We cannot test for present return values as we do not fix up missing
4373 return values from the original source. */
4377 if (!is_gimple_val (op
)
4378 && TREE_CODE (op
) != RESULT_DECL
)
4380 error ("invalid operand in return statement");
4381 debug_generic_stmt (op
);
4385 if ((TREE_CODE (op
) == RESULT_DECL
4386 && DECL_BY_REFERENCE (op
))
4387 || (TREE_CODE (op
) == SSA_NAME
4388 && SSA_NAME_VAR (op
)
4389 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4390 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4391 op
= TREE_TYPE (op
);
4393 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4395 error ("invalid conversion in return statement");
4396 debug_generic_stmt (restype
);
4397 debug_generic_stmt (TREE_TYPE (op
));
4405 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4406 is a problem, otherwise false. */
4409 verify_gimple_goto (ggoto
*stmt
)
4411 tree dest
= gimple_goto_dest (stmt
);
4413 /* ??? We have two canonical forms of direct goto destinations, a
4414 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4415 if (TREE_CODE (dest
) != LABEL_DECL
4416 && (!is_gimple_val (dest
)
4417 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4419 error ("goto destination is neither a label nor a pointer");
4426 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4427 is a problem, otherwise false. */
4430 verify_gimple_switch (gswitch
*stmt
)
4433 tree elt
, prev_upper_bound
= NULL_TREE
;
4434 tree index_type
, elt_type
= NULL_TREE
;
4436 if (!is_gimple_val (gimple_switch_index (stmt
)))
4438 error ("invalid operand to switch statement");
4439 debug_generic_stmt (gimple_switch_index (stmt
));
4443 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4444 if (! INTEGRAL_TYPE_P (index_type
))
4446 error ("non-integral type switch statement");
4447 debug_generic_expr (index_type
);
4451 elt
= gimple_switch_label (stmt
, 0);
4452 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4454 error ("invalid default case label in switch statement");
4455 debug_generic_expr (elt
);
4459 n
= gimple_switch_num_labels (stmt
);
4460 for (i
= 1; i
< n
; i
++)
4462 elt
= gimple_switch_label (stmt
, i
);
4464 if (! CASE_LOW (elt
))
4466 error ("invalid case label in switch statement");
4467 debug_generic_expr (elt
);
4471 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4473 error ("invalid case range in switch statement");
4474 debug_generic_expr (elt
);
4480 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4481 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4483 error ("type mismatch for case label in switch statement");
4484 debug_generic_expr (elt
);
4490 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4491 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4493 error ("type precision mismatch in switch statement");
4498 if (prev_upper_bound
)
4500 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4502 error ("case labels not sorted in switch statement");
4507 prev_upper_bound
= CASE_HIGH (elt
);
4508 if (! prev_upper_bound
)
4509 prev_upper_bound
= CASE_LOW (elt
);
4515 /* Verify a gimple debug statement STMT.
4516 Returns true if anything is wrong. */
4519 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4521 /* There isn't much that could be wrong in a gimple debug stmt. A
4522 gimple debug bind stmt, for example, maps a tree, that's usually
4523 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4524 component or member of an aggregate type, to another tree, that
4525 can be an arbitrary expression. These stmts expand into debug
4526 insns, and are converted to debug notes by var-tracking.c. */
4530 /* Verify a gimple label statement STMT.
4531 Returns true if anything is wrong. */
4534 verify_gimple_label (glabel
*stmt
)
4536 tree decl
= gimple_label_label (stmt
);
4540 if (TREE_CODE (decl
) != LABEL_DECL
)
4542 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4543 && DECL_CONTEXT (decl
) != current_function_decl
)
4545 error ("label's context is not the current function decl");
4549 uid
= LABEL_DECL_UID (decl
);
4552 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4554 error ("incorrect entry in label_to_block_map");
4558 uid
= EH_LANDING_PAD_NR (decl
);
4561 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4562 if (decl
!= lp
->post_landing_pad
)
4564 error ("incorrect setting of landing pad number");
4572 /* Verify a gimple cond statement STMT.
4573 Returns true if anything is wrong. */
4576 verify_gimple_cond (gcond
*stmt
)
4578 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4580 error ("invalid comparison code in gimple cond");
4583 if (!(!gimple_cond_true_label (stmt
)
4584 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4585 || !(!gimple_cond_false_label (stmt
)
4586 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4588 error ("invalid labels in gimple cond");
4592 return verify_gimple_comparison (boolean_type_node
,
4593 gimple_cond_lhs (stmt
),
4594 gimple_cond_rhs (stmt
));
4597 /* Verify the GIMPLE statement STMT. Returns true if there is an
4598 error, otherwise false. */
4601 verify_gimple_stmt (gimple stmt
)
4603 switch (gimple_code (stmt
))
4606 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4609 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4612 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4615 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4618 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4621 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4624 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4629 case GIMPLE_TRANSACTION
:
4630 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4632 /* Tuples that do not have tree operands. */
4634 case GIMPLE_PREDICT
:
4636 case GIMPLE_EH_DISPATCH
:
4637 case GIMPLE_EH_MUST_NOT_THROW
:
4641 /* OpenMP directives are validated by the FE and never operated
4642 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4643 non-gimple expressions when the main index variable has had
4644 its address taken. This does not affect the loop itself
4645 because the header of an GIMPLE_OMP_FOR is merely used to determine
4646 how to setup the parallel iteration. */
4650 return verify_gimple_debug (stmt
);
4657 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4658 and false otherwise. */
4661 verify_gimple_phi (gimple phi
)
4665 tree phi_result
= gimple_phi_result (phi
);
4670 error ("invalid PHI result");
4674 virtual_p
= virtual_operand_p (phi_result
);
4675 if (TREE_CODE (phi_result
) != SSA_NAME
4677 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4679 error ("invalid PHI result");
4683 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4685 tree t
= gimple_phi_arg_def (phi
, i
);
4689 error ("missing PHI def");
4693 /* Addressable variables do have SSA_NAMEs but they
4694 are not considered gimple values. */
4695 else if ((TREE_CODE (t
) == SSA_NAME
4696 && virtual_p
!= virtual_operand_p (t
))
4698 && (TREE_CODE (t
) != SSA_NAME
4699 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4701 && !is_gimple_val (t
)))
4703 error ("invalid PHI argument");
4704 debug_generic_expr (t
);
4707 #ifdef ENABLE_TYPES_CHECKING
4708 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4710 error ("incompatible types in PHI argument %u", i
);
4711 debug_generic_stmt (TREE_TYPE (phi_result
));
4712 debug_generic_stmt (TREE_TYPE (t
));
4721 /* Verify the GIMPLE statements inside the sequence STMTS. */
4724 verify_gimple_in_seq_2 (gimple_seq stmts
)
4726 gimple_stmt_iterator ittr
;
4729 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4731 gimple stmt
= gsi_stmt (ittr
);
4733 switch (gimple_code (stmt
))
4736 err
|= verify_gimple_in_seq_2 (
4737 gimple_bind_body (as_a
<gbind
*> (stmt
)));
4741 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4742 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4745 case GIMPLE_EH_FILTER
:
4746 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4749 case GIMPLE_EH_ELSE
:
4751 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
4752 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
4753 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
4758 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
4759 as_a
<gcatch
*> (stmt
)));
4762 case GIMPLE_TRANSACTION
:
4763 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4768 bool err2
= verify_gimple_stmt (stmt
);
4770 debug_gimple_stmt (stmt
);
4779 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4780 is a problem, otherwise false. */
4783 verify_gimple_transaction (gtransaction
*stmt
)
4785 tree lab
= gimple_transaction_label (stmt
);
4786 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4788 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4792 /* Verify the GIMPLE statements inside the statement list STMTS. */
4795 verify_gimple_in_seq (gimple_seq stmts
)
4797 timevar_push (TV_TREE_STMT_VERIFY
);
4798 if (verify_gimple_in_seq_2 (stmts
))
4799 internal_error ("verify_gimple failed");
4800 timevar_pop (TV_TREE_STMT_VERIFY
);
4803 /* Return true when the T can be shared. */
4806 tree_node_can_be_shared (tree t
)
4808 if (IS_TYPE_OR_DECL_P (t
)
4809 || is_gimple_min_invariant (t
)
4810 || TREE_CODE (t
) == SSA_NAME
4811 || t
== error_mark_node
4812 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4815 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4824 /* Called via walk_tree. Verify tree sharing. */
4827 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4829 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
4831 if (tree_node_can_be_shared (*tp
))
4833 *walk_subtrees
= false;
4837 if (visited
->add (*tp
))
4843 /* Called via walk_gimple_stmt. Verify tree sharing. */
4846 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4848 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4849 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4852 static bool eh_error_found
;
4854 verify_eh_throw_stmt_node (const gimple
&stmt
, const int &,
4855 hash_set
<gimple
> *visited
)
4857 if (!visited
->contains (stmt
))
4859 error ("dead STMT in EH table");
4860 debug_gimple_stmt (stmt
);
4861 eh_error_found
= true;
4866 /* Verify if the location LOCs block is in BLOCKS. */
4869 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
4871 tree block
= LOCATION_BLOCK (loc
);
4872 if (block
!= NULL_TREE
4873 && !blocks
->contains (block
))
4875 error ("location references block not in block tree");
4878 if (block
!= NULL_TREE
)
4879 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4883 /* Called via walk_tree. Verify that expressions have no blocks. */
4886 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4890 *walk_subtrees
= false;
4894 location_t loc
= EXPR_LOCATION (*tp
);
4895 if (LOCATION_BLOCK (loc
) != NULL
)
4901 /* Called via walk_tree. Verify locations of expressions. */
4904 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4906 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
4908 if (TREE_CODE (*tp
) == VAR_DECL
4909 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4911 tree t
= DECL_DEBUG_EXPR (*tp
);
4912 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4916 if ((TREE_CODE (*tp
) == VAR_DECL
4917 || TREE_CODE (*tp
) == PARM_DECL
4918 || TREE_CODE (*tp
) == RESULT_DECL
)
4919 && DECL_HAS_VALUE_EXPR_P (*tp
))
4921 tree t
= DECL_VALUE_EXPR (*tp
);
4922 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4929 *walk_subtrees
= false;
4933 location_t loc
= EXPR_LOCATION (*tp
);
4934 if (verify_location (blocks
, loc
))
4940 /* Called via walk_gimple_op. Verify locations of expressions. */
4943 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4945 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4946 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4949 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4952 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
4955 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4958 collect_subblocks (blocks
, t
);
4962 /* Verify the GIMPLE statements in the CFG of FN. */
4965 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
4970 timevar_push (TV_TREE_STMT_VERIFY
);
4971 hash_set
<void *> visited
;
4972 hash_set
<gimple
> visited_stmts
;
4974 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4975 hash_set
<tree
> blocks
;
4976 if (DECL_INITIAL (fn
->decl
))
4978 blocks
.add (DECL_INITIAL (fn
->decl
));
4979 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
4982 FOR_EACH_BB_FN (bb
, fn
)
4984 gimple_stmt_iterator gsi
;
4986 for (gphi_iterator gpi
= gsi_start_phis (bb
);
4990 gphi
*phi
= gpi
.phi ();
4994 visited_stmts
.add (phi
);
4996 if (gimple_bb (phi
) != bb
)
4998 error ("gimple_bb (phi) is set to a wrong basic block");
5002 err2
|= verify_gimple_phi (phi
);
5004 /* Only PHI arguments have locations. */
5005 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5007 error ("PHI node with location");
5011 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5013 tree arg
= gimple_phi_arg_def (phi
, i
);
5014 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5018 error ("incorrect sharing of tree nodes");
5019 debug_generic_expr (addr
);
5022 location_t loc
= gimple_phi_arg_location (phi
, i
);
5023 if (virtual_operand_p (gimple_phi_result (phi
))
5024 && loc
!= UNKNOWN_LOCATION
)
5026 error ("virtual PHI with argument locations");
5029 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5032 debug_generic_expr (addr
);
5035 err2
|= verify_location (&blocks
, loc
);
5039 debug_gimple_stmt (phi
);
5043 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5045 gimple stmt
= gsi_stmt (gsi
);
5047 struct walk_stmt_info wi
;
5051 visited_stmts
.add (stmt
);
5053 if (gimple_bb (stmt
) != bb
)
5055 error ("gimple_bb (stmt) is set to a wrong basic block");
5059 err2
|= verify_gimple_stmt (stmt
);
5060 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5062 memset (&wi
, 0, sizeof (wi
));
5063 wi
.info
= (void *) &visited
;
5064 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5067 error ("incorrect sharing of tree nodes");
5068 debug_generic_expr (addr
);
5072 memset (&wi
, 0, sizeof (wi
));
5073 wi
.info
= (void *) &blocks
;
5074 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5077 debug_generic_expr (addr
);
5081 /* ??? Instead of not checking these stmts at all the walker
5082 should know its context via wi. */
5083 if (!is_gimple_debug (stmt
)
5084 && !is_gimple_omp (stmt
))
5086 memset (&wi
, 0, sizeof (wi
));
5087 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5090 debug_generic_expr (addr
);
5091 inform (gimple_location (stmt
), "in statement");
5096 /* If the statement is marked as part of an EH region, then it is
5097 expected that the statement could throw. Verify that when we
5098 have optimizations that simplify statements such that we prove
5099 that they cannot throw, that we update other data structures
5101 lp_nr
= lookup_stmt_eh_lp (stmt
);
5104 if (!stmt_could_throw_p (stmt
))
5108 error ("statement marked for throw, but doesn%'t");
5112 else if (!gsi_one_before_end_p (gsi
))
5114 error ("statement marked for throw in middle of block");
5120 debug_gimple_stmt (stmt
);
5125 eh_error_found
= false;
5126 hash_map
<gimple
, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5128 eh_table
->traverse
<hash_set
<gimple
> *, verify_eh_throw_stmt_node
>
5131 if (err
|| eh_error_found
)
5132 internal_error ("verify_gimple failed");
5134 verify_histograms ();
5135 timevar_pop (TV_TREE_STMT_VERIFY
);
5139 /* Verifies that the flow information is OK. */
5142 gimple_verify_flow_info (void)
5146 gimple_stmt_iterator gsi
;
5151 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5152 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5154 error ("ENTRY_BLOCK has IL associated with it");
5158 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5159 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5161 error ("EXIT_BLOCK has IL associated with it");
5165 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5166 if (e
->flags
& EDGE_FALLTHRU
)
5168 error ("fallthru to exit from bb %d", e
->src
->index
);
5172 FOR_EACH_BB_FN (bb
, cfun
)
5174 bool found_ctrl_stmt
= false;
5178 /* Skip labels on the start of basic block. */
5179 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5182 gimple prev_stmt
= stmt
;
5184 stmt
= gsi_stmt (gsi
);
5186 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5189 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5190 if (prev_stmt
&& DECL_NONLOCAL (label
))
5192 error ("nonlocal label ");
5193 print_generic_expr (stderr
, label
, 0);
5194 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5199 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5201 error ("EH landing pad label ");
5202 print_generic_expr (stderr
, label
, 0);
5203 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5208 if (label_to_block (label
) != bb
)
5211 print_generic_expr (stderr
, label
, 0);
5212 fprintf (stderr
, " to block does not match in bb %d",
5217 if (decl_function_context (label
) != current_function_decl
)
5220 print_generic_expr (stderr
, label
, 0);
5221 fprintf (stderr
, " has incorrect context in bb %d",
5227 /* Verify that body of basic block BB is free of control flow. */
5228 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5230 gimple stmt
= gsi_stmt (gsi
);
5232 if (found_ctrl_stmt
)
5234 error ("control flow in the middle of basic block %d",
5239 if (stmt_ends_bb_p (stmt
))
5240 found_ctrl_stmt
= true;
5242 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5245 print_generic_expr (stderr
, gimple_label_label (label_stmt
), 0);
5246 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5251 gsi
= gsi_last_bb (bb
);
5252 if (gsi_end_p (gsi
))
5255 stmt
= gsi_stmt (gsi
);
5257 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5260 err
|= verify_eh_edges (stmt
);
5262 if (is_ctrl_stmt (stmt
))
5264 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5265 if (e
->flags
& EDGE_FALLTHRU
)
5267 error ("fallthru edge after a control statement in bb %d",
5273 if (gimple_code (stmt
) != GIMPLE_COND
)
5275 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5276 after anything else but if statement. */
5277 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5278 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5280 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5286 switch (gimple_code (stmt
))
5293 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5297 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5298 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5299 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5300 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5301 || EDGE_COUNT (bb
->succs
) >= 3)
5303 error ("wrong outgoing edge flags at end of bb %d",
5311 if (simple_goto_p (stmt
))
5313 error ("explicit goto at end of bb %d", bb
->index
);
5318 /* FIXME. We should double check that the labels in the
5319 destination blocks have their address taken. */
5320 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5321 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5322 | EDGE_FALSE_VALUE
))
5323 || !(e
->flags
& EDGE_ABNORMAL
))
5325 error ("wrong outgoing edge flags at end of bb %d",
5333 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5335 /* ... fallthru ... */
5337 if (!single_succ_p (bb
)
5338 || (single_succ_edge (bb
)->flags
5339 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5340 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5342 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5345 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5347 error ("return edge does not point to exit in bb %d",
5355 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5360 n
= gimple_switch_num_labels (switch_stmt
);
5362 /* Mark all the destination basic blocks. */
5363 for (i
= 0; i
< n
; ++i
)
5365 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5366 basic_block label_bb
= label_to_block (lab
);
5367 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5368 label_bb
->aux
= (void *)1;
5371 /* Verify that the case labels are sorted. */
5372 prev
= gimple_switch_label (switch_stmt
, 0);
5373 for (i
= 1; i
< n
; ++i
)
5375 tree c
= gimple_switch_label (switch_stmt
, i
);
5378 error ("found default case not at the start of "
5384 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5386 error ("case labels not sorted: ");
5387 print_generic_expr (stderr
, prev
, 0);
5388 fprintf (stderr
," is greater than ");
5389 print_generic_expr (stderr
, c
, 0);
5390 fprintf (stderr
," but comes before it.\n");
5395 /* VRP will remove the default case if it can prove it will
5396 never be executed. So do not verify there always exists
5397 a default case here. */
5399 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5403 error ("extra outgoing edge %d->%d",
5404 bb
->index
, e
->dest
->index
);
5408 e
->dest
->aux
= (void *)2;
5409 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5410 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5412 error ("wrong outgoing edge flags at end of bb %d",
5418 /* Check that we have all of them. */
5419 for (i
= 0; i
< n
; ++i
)
5421 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5422 basic_block label_bb
= label_to_block (lab
);
5424 if (label_bb
->aux
!= (void *)2)
5426 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5431 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5432 e
->dest
->aux
= (void *)0;
5436 case GIMPLE_EH_DISPATCH
:
5437 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5445 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5446 verify_dominators (CDI_DOMINATORS
);
5452 /* Updates phi nodes after creating a forwarder block joined
5453 by edge FALLTHRU. */
5456 gimple_make_forwarder_block (edge fallthru
)
5460 basic_block dummy
, bb
;
5464 dummy
= fallthru
->src
;
5465 bb
= fallthru
->dest
;
5467 if (single_pred_p (bb
))
5470 /* If we redirected a branch we must create new PHI nodes at the
5472 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5474 gphi
*phi
, *new_phi
;
5477 var
= gimple_phi_result (phi
);
5478 new_phi
= create_phi_node (var
, bb
);
5479 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5480 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5484 /* Add the arguments we have stored on edges. */
5485 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5490 flush_pending_stmts (e
);
5495 /* Return a non-special label in the head of basic block BLOCK.
5496 Create one if it doesn't exist. */
5499 gimple_block_label (basic_block bb
)
5501 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5506 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5508 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5511 label
= gimple_label_label (stmt
);
5512 if (!DECL_NONLOCAL (label
))
5515 gsi_move_before (&i
, &s
);
5520 label
= create_artificial_label (UNKNOWN_LOCATION
);
5521 stmt
= gimple_build_label (label
);
5522 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5527 /* Attempt to perform edge redirection by replacing a possibly complex
5528 jump instruction by a goto or by removing the jump completely.
5529 This can apply only if all edges now point to the same block. The
5530 parameters and return values are equivalent to
5531 redirect_edge_and_branch. */
5534 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5536 basic_block src
= e
->src
;
5537 gimple_stmt_iterator i
;
5540 /* We can replace or remove a complex jump only when we have exactly
5542 if (EDGE_COUNT (src
->succs
) != 2
5543 /* Verify that all targets will be TARGET. Specifically, the
5544 edge that is not E must also go to TARGET. */
5545 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5548 i
= gsi_last_bb (src
);
5552 stmt
= gsi_stmt (i
);
5554 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5556 gsi_remove (&i
, true);
5557 e
= ssa_redirect_edge (e
, target
);
5558 e
->flags
= EDGE_FALLTHRU
;
5566 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5567 edge representing the redirected branch. */
5570 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5572 basic_block bb
= e
->src
;
5573 gimple_stmt_iterator gsi
;
5577 if (e
->flags
& EDGE_ABNORMAL
)
5580 if (e
->dest
== dest
)
5583 if (e
->flags
& EDGE_EH
)
5584 return redirect_eh_edge (e
, dest
);
5586 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5588 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5593 gsi
= gsi_last_bb (bb
);
5594 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5596 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5599 /* For COND_EXPR, we only need to redirect the edge. */
5603 /* No non-abnormal edges should lead from a non-simple goto, and
5604 simple ones should be represented implicitly. */
5609 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5610 tree label
= gimple_block_label (dest
);
5611 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5613 /* If we have a list of cases associated with E, then use it
5614 as it's a lot faster than walking the entire case vector. */
5617 edge e2
= find_edge (e
->src
, dest
);
5624 CASE_LABEL (cases
) = label
;
5625 cases
= CASE_CHAIN (cases
);
5628 /* If there was already an edge in the CFG, then we need
5629 to move all the cases associated with E to E2. */
5632 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5634 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5635 CASE_CHAIN (cases2
) = first
;
5637 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5641 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5643 for (i
= 0; i
< n
; i
++)
5645 tree elt
= gimple_switch_label (switch_stmt
, i
);
5646 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5647 CASE_LABEL (elt
) = label
;
5655 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5656 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5659 for (i
= 0; i
< n
; ++i
)
5661 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5662 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5665 label
= gimple_block_label (dest
);
5666 TREE_VALUE (cons
) = label
;
5670 /* If we didn't find any label matching the former edge in the
5671 asm labels, we must be redirecting the fallthrough
5673 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5678 gsi_remove (&gsi
, true);
5679 e
->flags
|= EDGE_FALLTHRU
;
5682 case GIMPLE_OMP_RETURN
:
5683 case GIMPLE_OMP_CONTINUE
:
5684 case GIMPLE_OMP_SECTIONS_SWITCH
:
5685 case GIMPLE_OMP_FOR
:
5686 /* The edges from OMP constructs can be simply redirected. */
5689 case GIMPLE_EH_DISPATCH
:
5690 if (!(e
->flags
& EDGE_FALLTHRU
))
5691 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
5694 case GIMPLE_TRANSACTION
:
5695 /* The ABORT edge has a stored label associated with it, otherwise
5696 the edges are simply redirectable. */
5698 gimple_transaction_set_label (as_a
<gtransaction
*> (stmt
),
5699 gimple_block_label (dest
));
5703 /* Otherwise it must be a fallthru edge, and we don't need to
5704 do anything besides redirecting it. */
5705 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5709 /* Update/insert PHI nodes as necessary. */
5711 /* Now update the edges in the CFG. */
5712 e
= ssa_redirect_edge (e
, dest
);
5717 /* Returns true if it is possible to remove edge E by redirecting
5718 it to the destination of the other edge from E->src. */
5721 gimple_can_remove_branch_p (const_edge e
)
5723 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5729 /* Simple wrapper, as we can always redirect fallthru edges. */
5732 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5734 e
= gimple_redirect_edge_and_branch (e
, dest
);
5741 /* Splits basic block BB after statement STMT (but at least after the
5742 labels). If STMT is NULL, BB is split just after the labels. */
5745 gimple_split_block (basic_block bb
, void *stmt
)
5747 gimple_stmt_iterator gsi
;
5748 gimple_stmt_iterator gsi_tgt
;
5754 new_bb
= create_empty_bb (bb
);
5756 /* Redirect the outgoing edges. */
5757 new_bb
->succs
= bb
->succs
;
5759 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5762 /* Get a stmt iterator pointing to the first stmt to move. */
5763 if (!stmt
|| gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5764 gsi
= gsi_after_labels (bb
);
5767 gsi
= gsi_for_stmt ((gimple
) stmt
);
5771 /* Move everything from GSI to the new basic block. */
5772 if (gsi_end_p (gsi
))
5775 /* Split the statement list - avoid re-creating new containers as this
5776 brings ugly quadratic memory consumption in the inliner.
5777 (We are still quadratic since we need to update stmt BB pointers,
5779 gsi_split_seq_before (&gsi
, &list
);
5780 set_bb_seq (new_bb
, list
);
5781 for (gsi_tgt
= gsi_start (list
);
5782 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5783 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5789 /* Moves basic block BB after block AFTER. */
5792 gimple_move_block_after (basic_block bb
, basic_block after
)
5794 if (bb
->prev_bb
== after
)
5798 link_block (bb
, after
);
5804 /* Return TRUE if block BB has no executable statements, otherwise return
5808 gimple_empty_block_p (basic_block bb
)
5810 /* BB must have no executable statements. */
5811 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5814 if (gsi_end_p (gsi
))
5816 if (is_gimple_debug (gsi_stmt (gsi
)))
5817 gsi_next_nondebug (&gsi
);
5818 return gsi_end_p (gsi
);
5822 /* Split a basic block if it ends with a conditional branch and if the
5823 other part of the block is not empty. */
5826 gimple_split_block_before_cond_jump (basic_block bb
)
5828 gimple last
, split_point
;
5829 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5830 if (gsi_end_p (gsi
))
5832 last
= gsi_stmt (gsi
);
5833 if (gimple_code (last
) != GIMPLE_COND
5834 && gimple_code (last
) != GIMPLE_SWITCH
)
5836 gsi_prev_nondebug (&gsi
);
5837 split_point
= gsi_stmt (gsi
);
5838 return split_block (bb
, split_point
)->dest
;
5842 /* Return true if basic_block can be duplicated. */
5845 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5850 /* Create a duplicate of the basic block BB. NOTE: This does not
5851 preserve SSA form. */
5854 gimple_duplicate_bb (basic_block bb
)
5857 gimple_stmt_iterator gsi_tgt
;
5859 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5861 /* Copy the PHI nodes. We ignore PHI node arguments here because
5862 the incoming edges have not been setup yet. */
5863 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5869 copy
= create_phi_node (NULL_TREE
, new_bb
);
5870 create_new_def_for (gimple_phi_result (phi
), copy
,
5871 gimple_phi_result_ptr (copy
));
5872 gimple_set_uid (copy
, gimple_uid (phi
));
5875 gsi_tgt
= gsi_start_bb (new_bb
);
5876 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5880 def_operand_p def_p
;
5881 ssa_op_iter op_iter
;
5885 stmt
= gsi_stmt (gsi
);
5886 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5889 /* Don't duplicate label debug stmts. */
5890 if (gimple_debug_bind_p (stmt
)
5891 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5895 /* Create a new copy of STMT and duplicate STMT's virtual
5897 copy
= gimple_copy (stmt
);
5898 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5900 maybe_duplicate_eh_stmt (copy
, stmt
);
5901 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5903 /* When copying around a stmt writing into a local non-user
5904 aggregate, make sure it won't share stack slot with other
5906 lhs
= gimple_get_lhs (stmt
);
5907 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5909 tree base
= get_base_address (lhs
);
5911 && (TREE_CODE (base
) == VAR_DECL
5912 || TREE_CODE (base
) == RESULT_DECL
)
5913 && DECL_IGNORED_P (base
)
5914 && !TREE_STATIC (base
)
5915 && !DECL_EXTERNAL (base
)
5916 && (TREE_CODE (base
) != VAR_DECL
5917 || !DECL_HAS_VALUE_EXPR_P (base
)))
5918 DECL_NONSHAREABLE (base
) = 1;
5921 /* Create new names for all the definitions created by COPY and
5922 add replacement mappings for each new name. */
5923 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5924 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5930 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5933 add_phi_args_after_copy_edge (edge e_copy
)
5935 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5938 gphi
*phi
, *phi_copy
;
5940 gphi_iterator psi
, psi_copy
;
5942 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5945 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5947 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5948 dest
= get_bb_original (e_copy
->dest
);
5950 dest
= e_copy
->dest
;
5952 e
= find_edge (bb
, dest
);
5955 /* During loop unrolling the target of the latch edge is copied.
5956 In this case we are not looking for edge to dest, but to
5957 duplicated block whose original was dest. */
5958 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5960 if ((e
->dest
->flags
& BB_DUPLICATED
)
5961 && get_bb_original (e
->dest
) == dest
)
5965 gcc_assert (e
!= NULL
);
5968 for (psi
= gsi_start_phis (e
->dest
),
5969 psi_copy
= gsi_start_phis (e_copy
->dest
);
5971 gsi_next (&psi
), gsi_next (&psi_copy
))
5974 phi_copy
= psi_copy
.phi ();
5975 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5976 add_phi_arg (phi_copy
, def
, e_copy
,
5977 gimple_phi_arg_location_from_edge (phi
, e
));
5982 /* Basic block BB_COPY was created by code duplication. Add phi node
5983 arguments for edges going out of BB_COPY. The blocks that were
5984 duplicated have BB_DUPLICATED set. */
5987 add_phi_args_after_copy_bb (basic_block bb_copy
)
5992 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5994 add_phi_args_after_copy_edge (e_copy
);
5998 /* Blocks in REGION_COPY array of length N_REGION were created by
5999 duplication of basic blocks. Add phi node arguments for edges
6000 going from these blocks. If E_COPY is not NULL, also add
6001 phi node arguments for its destination.*/
6004 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6009 for (i
= 0; i
< n_region
; i
++)
6010 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6012 for (i
= 0; i
< n_region
; i
++)
6013 add_phi_args_after_copy_bb (region_copy
[i
]);
6015 add_phi_args_after_copy_edge (e_copy
);
6017 for (i
= 0; i
< n_region
; i
++)
6018 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6021 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6022 important exit edge EXIT. By important we mean that no SSA name defined
6023 inside region is live over the other exit edges of the region. All entry
6024 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6025 to the duplicate of the region. Dominance and loop information is
6026 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6027 UPDATE_DOMINANCE is false then we assume that the caller will update the
6028 dominance information after calling this function. The new basic
6029 blocks are stored to REGION_COPY in the same order as they had in REGION,
6030 provided that REGION_COPY is not NULL.
6031 The function returns false if it is unable to copy the region,
6035 gimple_duplicate_sese_region (edge entry
, edge exit
,
6036 basic_block
*region
, unsigned n_region
,
6037 basic_block
*region_copy
,
6038 bool update_dominance
)
6041 bool free_region_copy
= false, copying_header
= false;
6042 struct loop
*loop
= entry
->dest
->loop_father
;
6044 vec
<basic_block
> doms
;
6046 int total_freq
= 0, entry_freq
= 0;
6047 gcov_type total_count
= 0, entry_count
= 0;
6049 if (!can_copy_bbs_p (region
, n_region
))
6052 /* Some sanity checking. Note that we do not check for all possible
6053 missuses of the functions. I.e. if you ask to copy something weird,
6054 it will work, but the state of structures probably will not be
6056 for (i
= 0; i
< n_region
; i
++)
6058 /* We do not handle subloops, i.e. all the blocks must belong to the
6060 if (region
[i
]->loop_father
!= loop
)
6063 if (region
[i
] != entry
->dest
6064 && region
[i
] == loop
->header
)
6068 /* In case the function is used for loop header copying (which is the primary
6069 use), ensure that EXIT and its copy will be new latch and entry edges. */
6070 if (loop
->header
== entry
->dest
)
6072 copying_header
= true;
6074 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6077 for (i
= 0; i
< n_region
; i
++)
6078 if (region
[i
] != exit
->src
6079 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6083 initialize_original_copy_tables ();
6086 set_loop_copy (loop
, loop_outer (loop
));
6088 set_loop_copy (loop
, loop
);
6092 region_copy
= XNEWVEC (basic_block
, n_region
);
6093 free_region_copy
= true;
6096 /* Record blocks outside the region that are dominated by something
6098 if (update_dominance
)
6101 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6104 if (entry
->dest
->count
)
6106 total_count
= entry
->dest
->count
;
6107 entry_count
= entry
->count
;
6108 /* Fix up corner cases, to avoid division by zero or creation of negative
6110 if (entry_count
> total_count
)
6111 entry_count
= total_count
;
6115 total_freq
= entry
->dest
->frequency
;
6116 entry_freq
= EDGE_FREQUENCY (entry
);
6117 /* Fix up corner cases, to avoid division by zero or creation of negative
6119 if (total_freq
== 0)
6121 else if (entry_freq
> total_freq
)
6122 entry_freq
= total_freq
;
6125 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6126 split_edge_bb_loc (entry
), update_dominance
);
6129 scale_bbs_frequencies_gcov_type (region
, n_region
,
6130 total_count
- entry_count
,
6132 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
6137 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6139 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
6144 loop
->header
= exit
->dest
;
6145 loop
->latch
= exit
->src
;
6148 /* Redirect the entry and add the phi node arguments. */
6149 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6150 gcc_assert (redirected
!= NULL
);
6151 flush_pending_stmts (entry
);
6153 /* Concerning updating of dominators: We must recount dominators
6154 for entry block and its copy. Anything that is outside of the
6155 region, but was dominated by something inside needs recounting as
6157 if (update_dominance
)
6159 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6160 doms
.safe_push (get_bb_original (entry
->dest
));
6161 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6165 /* Add the other PHI node arguments. */
6166 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6168 if (free_region_copy
)
6171 free_original_copy_tables ();
6175 /* Checks if BB is part of the region defined by N_REGION BBS. */
6177 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6181 for (n
= 0; n
< n_region
; n
++)
6189 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6190 are stored to REGION_COPY in the same order in that they appear
6191 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6192 the region, EXIT an exit from it. The condition guarding EXIT
6193 is moved to ENTRY. Returns true if duplication succeeds, false
6219 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6220 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6221 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6224 bool free_region_copy
= false;
6225 struct loop
*loop
= exit
->dest
->loop_father
;
6226 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6227 basic_block switch_bb
, entry_bb
, nentry_bb
;
6228 vec
<basic_block
> doms
;
6229 int total_freq
= 0, exit_freq
= 0;
6230 gcov_type total_count
= 0, exit_count
= 0;
6231 edge exits
[2], nexits
[2], e
;
6232 gimple_stmt_iterator gsi
;
6235 basic_block exit_bb
;
6239 struct loop
*target
, *aloop
, *cloop
;
6241 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6243 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6245 if (!can_copy_bbs_p (region
, n_region
))
6248 initialize_original_copy_tables ();
6249 set_loop_copy (orig_loop
, loop
);
6252 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6254 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6256 cloop
= duplicate_loop (aloop
, target
);
6257 duplicate_subloops (aloop
, cloop
);
6263 region_copy
= XNEWVEC (basic_block
, n_region
);
6264 free_region_copy
= true;
6267 gcc_assert (!need_ssa_update_p (cfun
));
6269 /* Record blocks outside the region that are dominated by something
6271 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6273 if (exit
->src
->count
)
6275 total_count
= exit
->src
->count
;
6276 exit_count
= exit
->count
;
6277 /* Fix up corner cases, to avoid division by zero or creation of negative
6279 if (exit_count
> total_count
)
6280 exit_count
= total_count
;
6284 total_freq
= exit
->src
->frequency
;
6285 exit_freq
= EDGE_FREQUENCY (exit
);
6286 /* Fix up corner cases, to avoid division by zero or creation of negative
6288 if (total_freq
== 0)
6290 if (exit_freq
> total_freq
)
6291 exit_freq
= total_freq
;
6294 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6295 split_edge_bb_loc (exit
), true);
6298 scale_bbs_frequencies_gcov_type (region
, n_region
,
6299 total_count
- exit_count
,
6301 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6306 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6308 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6311 /* Create the switch block, and put the exit condition to it. */
6312 entry_bb
= entry
->dest
;
6313 nentry_bb
= get_bb_copy (entry_bb
);
6314 if (!last_stmt (entry
->src
)
6315 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6316 switch_bb
= entry
->src
;
6318 switch_bb
= split_edge (entry
);
6319 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6321 gsi
= gsi_last_bb (switch_bb
);
6322 cond_stmt
= last_stmt (exit
->src
);
6323 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6324 cond_stmt
= gimple_copy (cond_stmt
);
6326 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6328 sorig
= single_succ_edge (switch_bb
);
6329 sorig
->flags
= exits
[1]->flags
;
6330 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6332 /* Register the new edge from SWITCH_BB in loop exit lists. */
6333 rescan_loop_exit (snew
, true, false);
6335 /* Add the PHI node arguments. */
6336 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6338 /* Get rid of now superfluous conditions and associated edges (and phi node
6340 exit_bb
= exit
->dest
;
6342 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6343 PENDING_STMT (e
) = NULL
;
6345 /* The latch of ORIG_LOOP was copied, and so was the backedge
6346 to the original header. We redirect this backedge to EXIT_BB. */
6347 for (i
= 0; i
< n_region
; i
++)
6348 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6350 gcc_assert (single_succ_edge (region_copy
[i
]));
6351 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6352 PENDING_STMT (e
) = NULL
;
6353 for (psi
= gsi_start_phis (exit_bb
);
6358 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6359 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6362 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6363 PENDING_STMT (e
) = NULL
;
6365 /* Anything that is outside of the region, but was dominated by something
6366 inside needs to update dominance info. */
6367 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6369 /* Update the SSA web. */
6370 update_ssa (TODO_update_ssa
);
6372 if (free_region_copy
)
6375 free_original_copy_tables ();
6379 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6380 adding blocks when the dominator traversal reaches EXIT. This
6381 function silently assumes that ENTRY strictly dominates EXIT. */
6384 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6385 vec
<basic_block
> *bbs_p
)
6389 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6391 son
= next_dom_son (CDI_DOMINATORS
, son
))
6393 bbs_p
->safe_push (son
);
6395 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6399 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6400 The duplicates are recorded in VARS_MAP. */
6403 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6406 tree t
= *tp
, new_t
;
6407 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6409 if (DECL_CONTEXT (t
) == to_context
)
6413 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6419 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6420 add_local_decl (f
, new_t
);
6424 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6425 new_t
= copy_node (t
);
6427 DECL_CONTEXT (new_t
) = to_context
;
6438 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6439 VARS_MAP maps old ssa names and var_decls to the new ones. */
6442 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6447 gcc_assert (!virtual_operand_p (name
));
6449 tree
*loc
= vars_map
->get (name
);
6453 tree decl
= SSA_NAME_VAR (name
);
6456 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6457 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6458 decl
, SSA_NAME_DEF_STMT (name
));
6459 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6460 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6464 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6465 name
, SSA_NAME_DEF_STMT (name
));
6467 vars_map
->put (name
, new_name
);
6481 hash_map
<tree
, tree
> *vars_map
;
6482 htab_t new_label_map
;
6483 hash_map
<void *, void *> *eh_map
;
6487 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6488 contained in *TP if it has been ORIG_BLOCK previously and change the
6489 DECL_CONTEXT of every local variable referenced in *TP. */
6492 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6494 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6495 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6500 tree block
= TREE_BLOCK (t
);
6501 if (block
== p
->orig_block
6502 || (p
->orig_block
== NULL_TREE
6503 && block
!= NULL_TREE
))
6504 TREE_SET_BLOCK (t
, p
->new_block
);
6505 #ifdef ENABLE_CHECKING
6506 else if (block
!= NULL_TREE
)
6508 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6509 block
= BLOCK_SUPERCONTEXT (block
);
6510 gcc_assert (block
== p
->orig_block
);
6514 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6516 if (TREE_CODE (t
) == SSA_NAME
)
6517 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6518 else if (TREE_CODE (t
) == LABEL_DECL
)
6520 if (p
->new_label_map
)
6522 struct tree_map in
, *out
;
6524 out
= (struct tree_map
*)
6525 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6530 DECL_CONTEXT (t
) = p
->to_context
;
6532 else if (p
->remap_decls_p
)
6534 /* Replace T with its duplicate. T should no longer appear in the
6535 parent function, so this looks wasteful; however, it may appear
6536 in referenced_vars, and more importantly, as virtual operands of
6537 statements, and in alias lists of other variables. It would be
6538 quite difficult to expunge it from all those places. ??? It might
6539 suffice to do this for addressable variables. */
6540 if ((TREE_CODE (t
) == VAR_DECL
6541 && !is_global_var (t
))
6542 || TREE_CODE (t
) == CONST_DECL
)
6543 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6547 else if (TYPE_P (t
))
6553 /* Helper for move_stmt_r. Given an EH region number for the source
6554 function, map that to the duplicate EH regio number in the dest. */
6557 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6559 eh_region old_r
, new_r
;
6561 old_r
= get_eh_region_from_number (old_nr
);
6562 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6564 return new_r
->index
;
6567 /* Similar, but operate on INTEGER_CSTs. */
6570 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6574 old_nr
= tree_to_shwi (old_t_nr
);
6575 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6577 return build_int_cst (integer_type_node
, new_nr
);
6580 /* Like move_stmt_op, but for gimple statements.
6582 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6583 contained in the current statement in *GSI_P and change the
6584 DECL_CONTEXT of every local variable referenced in the current
6588 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6589 struct walk_stmt_info
*wi
)
6591 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6592 gimple stmt
= gsi_stmt (*gsi_p
);
6593 tree block
= gimple_block (stmt
);
6595 if (block
== p
->orig_block
6596 || (p
->orig_block
== NULL_TREE
6597 && block
!= NULL_TREE
))
6598 gimple_set_block (stmt
, p
->new_block
);
6600 switch (gimple_code (stmt
))
6603 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6605 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6606 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6607 switch (DECL_FUNCTION_CODE (fndecl
))
6609 case BUILT_IN_EH_COPY_VALUES
:
6610 r
= gimple_call_arg (stmt
, 1);
6611 r
= move_stmt_eh_region_tree_nr (r
, p
);
6612 gimple_call_set_arg (stmt
, 1, r
);
6615 case BUILT_IN_EH_POINTER
:
6616 case BUILT_IN_EH_FILTER
:
6617 r
= gimple_call_arg (stmt
, 0);
6618 r
= move_stmt_eh_region_tree_nr (r
, p
);
6619 gimple_call_set_arg (stmt
, 0, r
);
6630 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6631 int r
= gimple_resx_region (resx_stmt
);
6632 r
= move_stmt_eh_region_nr (r
, p
);
6633 gimple_resx_set_region (resx_stmt
, r
);
6637 case GIMPLE_EH_DISPATCH
:
6639 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6640 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6641 r
= move_stmt_eh_region_nr (r
, p
);
6642 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6646 case GIMPLE_OMP_RETURN
:
6647 case GIMPLE_OMP_CONTINUE
:
6650 if (is_gimple_omp (stmt
))
6652 /* Do not remap variables inside OMP directives. Variables
6653 referenced in clauses and directive header belong to the
6654 parent function and should not be moved into the child
6656 bool save_remap_decls_p
= p
->remap_decls_p
;
6657 p
->remap_decls_p
= false;
6658 *handled_ops_p
= true;
6660 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6663 p
->remap_decls_p
= save_remap_decls_p
;
6671 /* Move basic block BB from function CFUN to function DEST_FN. The
6672 block is moved out of the original linked list and placed after
6673 block AFTER in the new list. Also, the block is removed from the
6674 original array of blocks and placed in DEST_FN's array of blocks.
6675 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6676 updated to reflect the moved edges.
6678 The local variables are remapped to new instances, VARS_MAP is used
6679 to record the mapping. */
6682 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6683 basic_block after
, bool update_edge_count_p
,
6684 struct move_stmt_d
*d
)
6686 struct control_flow_graph
*cfg
;
6689 gimple_stmt_iterator si
;
6690 unsigned old_len
, new_len
;
6692 /* Remove BB from dominance structures. */
6693 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6695 /* Move BB from its current loop to the copy in the new function. */
6698 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6700 bb
->loop_father
= new_loop
;
6703 /* Link BB to the new linked list. */
6704 move_block_after (bb
, after
);
6706 /* Update the edge count in the corresponding flowgraphs. */
6707 if (update_edge_count_p
)
6708 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6710 cfun
->cfg
->x_n_edges
--;
6711 dest_cfun
->cfg
->x_n_edges
++;
6714 /* Remove BB from the original basic block array. */
6715 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6716 cfun
->cfg
->x_n_basic_blocks
--;
6718 /* Grow DEST_CFUN's basic block array if needed. */
6719 cfg
= dest_cfun
->cfg
;
6720 cfg
->x_n_basic_blocks
++;
6721 if (bb
->index
>= cfg
->x_last_basic_block
)
6722 cfg
->x_last_basic_block
= bb
->index
+ 1;
6724 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6725 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6727 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6728 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6731 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6733 /* Remap the variables in phi nodes. */
6734 for (gphi_iterator psi
= gsi_start_phis (bb
);
6737 gphi
*phi
= psi
.phi ();
6739 tree op
= PHI_RESULT (phi
);
6743 if (virtual_operand_p (op
))
6745 /* Remove the phi nodes for virtual operands (alias analysis will be
6746 run for the new function, anyway). */
6747 remove_phi_node (&psi
, true);
6751 SET_PHI_RESULT (phi
,
6752 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6753 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6755 op
= USE_FROM_PTR (use
);
6756 if (TREE_CODE (op
) == SSA_NAME
)
6757 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6760 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6762 location_t locus
= gimple_phi_arg_location (phi
, i
);
6763 tree block
= LOCATION_BLOCK (locus
);
6765 if (locus
== UNKNOWN_LOCATION
)
6767 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6769 if (d
->new_block
== NULL_TREE
)
6770 locus
= LOCATION_LOCUS (locus
);
6772 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6773 gimple_phi_arg_set_location (phi
, i
, locus
);
6780 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6782 gimple stmt
= gsi_stmt (si
);
6783 struct walk_stmt_info wi
;
6785 memset (&wi
, 0, sizeof (wi
));
6787 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6789 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
6791 tree label
= gimple_label_label (label_stmt
);
6792 int uid
= LABEL_DECL_UID (label
);
6794 gcc_assert (uid
> -1);
6796 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6797 if (old_len
<= (unsigned) uid
)
6799 new_len
= 3 * uid
/ 2 + 1;
6800 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6803 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6804 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6806 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6808 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6809 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6812 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6813 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6815 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6816 gimple_remove_stmt_histograms (cfun
, stmt
);
6818 /* We cannot leave any operands allocated from the operand caches of
6819 the current function. */
6820 free_stmt_operands (cfun
, stmt
);
6821 push_cfun (dest_cfun
);
6826 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6827 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6829 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6830 if (d
->orig_block
== NULL_TREE
6831 || block
== d
->orig_block
)
6832 e
->goto_locus
= d
->new_block
?
6833 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6834 LOCATION_LOCUS (e
->goto_locus
);
6838 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6839 the outermost EH region. Use REGION as the incoming base EH region. */
6842 find_outermost_region_in_block (struct function
*src_cfun
,
6843 basic_block bb
, eh_region region
)
6845 gimple_stmt_iterator si
;
6847 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6849 gimple stmt
= gsi_stmt (si
);
6850 eh_region stmt_region
;
6853 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6854 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6858 region
= stmt_region
;
6859 else if (stmt_region
!= region
)
6861 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6862 gcc_assert (region
!= NULL
);
6871 new_label_mapper (tree decl
, void *data
)
6873 htab_t hash
= (htab_t
) data
;
6877 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6879 m
= XNEW (struct tree_map
);
6880 m
->hash
= DECL_UID (decl
);
6881 m
->base
.from
= decl
;
6882 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6883 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6884 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6885 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6887 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6888 gcc_assert (*slot
== NULL
);
6895 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6899 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
6904 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6907 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6909 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6912 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6914 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6915 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6917 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6922 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6923 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6926 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6930 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6933 /* Discard it from the old loop array. */
6934 (*get_loops (fn1
))[loop
->num
] = NULL
;
6936 /* Place it in the new loop array, assigning it a new number. */
6937 loop
->num
= number_of_loops (fn2
);
6938 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6940 /* Recurse to children. */
6941 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6942 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6945 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6946 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6949 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
6954 bitmap bbs
= BITMAP_ALLOC (NULL
);
6957 gcc_assert (entry
!= NULL
);
6958 gcc_assert (entry
!= exit
);
6959 gcc_assert (bbs_p
!= NULL
);
6961 gcc_assert (bbs_p
->length () > 0);
6963 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
6964 bitmap_set_bit (bbs
, bb
->index
);
6966 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
6967 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
6969 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
6973 gcc_assert (single_pred_p (entry
));
6974 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
6977 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
6980 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
6985 gcc_assert (single_succ_p (exit
));
6986 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
6989 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
6992 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7000 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7001 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7002 single basic block in the original CFG and the new basic block is
7003 returned. DEST_CFUN must not have a CFG yet.
7005 Note that the region need not be a pure SESE region. Blocks inside
7006 the region may contain calls to abort/exit. The only restriction
7007 is that ENTRY_BB should be the only entry point and it must
7010 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7011 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7012 to the new function.
7014 All local variables referenced in the region are assumed to be in
7015 the corresponding BLOCK_VARS and unexpanded variable lists
7016 associated with DEST_CFUN. */
7019 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7020 basic_block exit_bb
, tree orig_block
)
7022 vec
<basic_block
> bbs
, dom_bbs
;
7023 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7024 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7025 struct function
*saved_cfun
= cfun
;
7026 int *entry_flag
, *exit_flag
;
7027 unsigned *entry_prob
, *exit_prob
;
7028 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7031 htab_t new_label_map
;
7032 hash_map
<void *, void *> *eh_map
;
7033 struct loop
*loop
= entry_bb
->loop_father
;
7034 struct loop
*loop0
= get_loop (saved_cfun
, 0);
7035 struct move_stmt_d d
;
7037 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7039 gcc_assert (entry_bb
!= exit_bb
7041 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7043 /* Collect all the blocks in the region. Manually add ENTRY_BB
7044 because it won't be added by dfs_enumerate_from. */
7046 bbs
.safe_push (entry_bb
);
7047 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7048 #ifdef ENABLE_CHECKING
7049 verify_sese (entry_bb
, exit_bb
, &bbs
);
7052 /* The blocks that used to be dominated by something in BBS will now be
7053 dominated by the new block. */
7054 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7058 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7059 the predecessor edges to ENTRY_BB and the successor edges to
7060 EXIT_BB so that we can re-attach them to the new basic block that
7061 will replace the region. */
7062 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7063 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7064 entry_flag
= XNEWVEC (int, num_entry_edges
);
7065 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
7067 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7069 entry_prob
[i
] = e
->probability
;
7070 entry_flag
[i
] = e
->flags
;
7071 entry_pred
[i
++] = e
->src
;
7077 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7078 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7079 exit_flag
= XNEWVEC (int, num_exit_edges
);
7080 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
7082 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7084 exit_prob
[i
] = e
->probability
;
7085 exit_flag
[i
] = e
->flags
;
7086 exit_succ
[i
++] = e
->dest
;
7098 /* Switch context to the child function to initialize DEST_FN's CFG. */
7099 gcc_assert (dest_cfun
->cfg
== NULL
);
7100 push_cfun (dest_cfun
);
7102 init_empty_tree_cfg ();
7104 /* Initialize EH information for the new function. */
7106 new_label_map
= NULL
;
7109 eh_region region
= NULL
;
7111 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7112 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
7114 init_eh_for_function ();
7117 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7118 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7119 new_label_mapper
, new_label_map
);
7123 /* Initialize an empty loop tree. */
7124 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7125 init_loops_structure (dest_cfun
, loops
, 1);
7126 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7127 set_loops_for_fn (dest_cfun
, loops
);
7129 /* Move the outlined loop tree part. */
7130 num_nodes
= bbs
.length ();
7131 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7133 if (bb
->loop_father
->header
== bb
)
7135 struct loop
*this_loop
= bb
->loop_father
;
7136 struct loop
*outer
= loop_outer (this_loop
);
7138 /* If the SESE region contains some bbs ending with
7139 a noreturn call, those are considered to belong
7140 to the outermost loop in saved_cfun, rather than
7141 the entry_bb's loop_father. */
7145 num_nodes
-= this_loop
->num_nodes
;
7146 flow_loop_tree_node_remove (bb
->loop_father
);
7147 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7148 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7151 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7154 /* Remove loop exits from the outlined region. */
7155 if (loops_for_fn (saved_cfun
)->exits
)
7156 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7158 struct loops
*l
= loops_for_fn (saved_cfun
);
7160 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7163 l
->exits
->clear_slot (slot
);
7168 /* Adjust the number of blocks in the tree root of the outlined part. */
7169 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7171 /* Setup a mapping to be used by move_block_to_fn. */
7172 loop
->aux
= current_loops
->tree_root
;
7173 loop0
->aux
= current_loops
->tree_root
;
7177 /* Move blocks from BBS into DEST_CFUN. */
7178 gcc_assert (bbs
.length () >= 2);
7179 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7180 hash_map
<tree
, tree
> vars_map
;
7182 memset (&d
, 0, sizeof (d
));
7183 d
.orig_block
= orig_block
;
7184 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7185 d
.from_context
= cfun
->decl
;
7186 d
.to_context
= dest_cfun
->decl
;
7187 d
.vars_map
= &vars_map
;
7188 d
.new_label_map
= new_label_map
;
7190 d
.remap_decls_p
= true;
7192 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7194 /* No need to update edge counts on the last block. It has
7195 already been updated earlier when we detached the region from
7196 the original CFG. */
7197 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7203 /* Loop sizes are no longer correct, fix them up. */
7204 loop
->num_nodes
-= num_nodes
;
7205 for (struct loop
*outer
= loop_outer (loop
);
7206 outer
; outer
= loop_outer (outer
))
7207 outer
->num_nodes
-= num_nodes
;
7208 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7210 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7213 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7218 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7220 dest_cfun
->has_simduid_loops
= true;
7222 if (aloop
->force_vectorize
)
7223 dest_cfun
->has_force_vectorize_loops
= true;
7227 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7231 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7233 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7234 = BLOCK_SUBBLOCKS (orig_block
);
7235 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7236 block
; block
= BLOCK_CHAIN (block
))
7237 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7238 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7241 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7242 &vars_map
, dest_cfun
->decl
);
7245 htab_delete (new_label_map
);
7249 /* Rewire the entry and exit blocks. The successor to the entry
7250 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7251 the child function. Similarly, the predecessor of DEST_FN's
7252 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7253 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7254 various CFG manipulation function get to the right CFG.
7256 FIXME, this is silly. The CFG ought to become a parameter to
7258 push_cfun (dest_cfun
);
7259 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7261 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7264 /* Back in the original function, the SESE region has disappeared,
7265 create a new basic block in its place. */
7266 bb
= create_empty_bb (entry_pred
[0]);
7268 add_bb_to_loop (bb
, loop
);
7269 for (i
= 0; i
< num_entry_edges
; i
++)
7271 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7272 e
->probability
= entry_prob
[i
];
7275 for (i
= 0; i
< num_exit_edges
; i
++)
7277 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7278 e
->probability
= exit_prob
[i
];
7281 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7282 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7283 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7301 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7305 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
7307 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7308 struct function
*dsf
;
7309 bool ignore_topmost_bind
= false, any_var
= false;
7312 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7313 && decl_is_tm_clone (fndecl
));
7314 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7316 current_function_decl
= fndecl
;
7317 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7319 arg
= DECL_ARGUMENTS (fndecl
);
7322 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7323 fprintf (file
, " ");
7324 print_generic_expr (file
, arg
, dump_flags
);
7325 if (flags
& TDF_VERBOSE
)
7326 print_node (file
, "", arg
, 4);
7327 if (DECL_CHAIN (arg
))
7328 fprintf (file
, ", ");
7329 arg
= DECL_CHAIN (arg
);
7331 fprintf (file
, ")\n");
7333 if (flags
& TDF_VERBOSE
)
7334 print_node (file
, "", fndecl
, 2);
7336 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7337 if (dsf
&& (flags
& TDF_EH
))
7338 dump_eh_tree (file
, dsf
);
7340 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7342 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7343 current_function_decl
= old_current_fndecl
;
7347 /* When GIMPLE is lowered, the variables are no longer available in
7348 BIND_EXPRs, so display them separately. */
7349 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7352 ignore_topmost_bind
= true;
7354 fprintf (file
, "{\n");
7355 if (!vec_safe_is_empty (fun
->local_decls
))
7356 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7358 print_generic_decl (file
, var
, flags
);
7359 if (flags
& TDF_VERBOSE
)
7360 print_node (file
, "", var
, 4);
7361 fprintf (file
, "\n");
7365 if (gimple_in_ssa_p (cfun
))
7366 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7368 tree name
= ssa_name (ix
);
7369 if (name
&& !SSA_NAME_VAR (name
))
7371 fprintf (file
, " ");
7372 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7373 fprintf (file
, " ");
7374 print_generic_expr (file
, name
, flags
);
7375 fprintf (file
, ";\n");
7382 if (fun
&& fun
->decl
== fndecl
7384 && basic_block_info_for_fn (fun
))
7386 /* If the CFG has been built, emit a CFG-based dump. */
7387 if (!ignore_topmost_bind
)
7388 fprintf (file
, "{\n");
7390 if (any_var
&& n_basic_blocks_for_fn (fun
))
7391 fprintf (file
, "\n");
7393 FOR_EACH_BB_FN (bb
, fun
)
7394 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7396 fprintf (file
, "}\n");
7398 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7400 /* The function is now in GIMPLE form but the CFG has not been
7401 built yet. Emit the single sequence of GIMPLE statements
7402 that make up its body. */
7403 gimple_seq body
= gimple_body (fndecl
);
7405 if (gimple_seq_first_stmt (body
)
7406 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7407 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7408 print_gimple_seq (file
, body
, 0, flags
);
7411 if (!ignore_topmost_bind
)
7412 fprintf (file
, "{\n");
7415 fprintf (file
, "\n");
7417 print_gimple_seq (file
, body
, 2, flags
);
7418 fprintf (file
, "}\n");
7425 /* Make a tree based dump. */
7426 chain
= DECL_SAVED_TREE (fndecl
);
7427 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7429 if (ignore_topmost_bind
)
7431 chain
= BIND_EXPR_BODY (chain
);
7439 if (!ignore_topmost_bind
)
7441 fprintf (file
, "{\n");
7442 /* No topmost bind, pretend it's ignored for later. */
7443 ignore_topmost_bind
= true;
7449 fprintf (file
, "\n");
7451 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7452 if (ignore_topmost_bind
)
7453 fprintf (file
, "}\n");
7456 if (flags
& TDF_ENUMERATE_LOCALS
)
7457 dump_enumerated_decls (file
, flags
);
7458 fprintf (file
, "\n\n");
7460 current_function_decl
= old_current_fndecl
;
7463 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7466 debug_function (tree fn
, int flags
)
7468 dump_function_to_file (fn
, stderr
, flags
);
7472 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7475 print_pred_bbs (FILE *file
, basic_block bb
)
7480 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7481 fprintf (file
, "bb_%d ", e
->src
->index
);
7485 /* Print on FILE the indexes for the successors of basic_block BB. */
7488 print_succ_bbs (FILE *file
, basic_block bb
)
7493 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7494 fprintf (file
, "bb_%d ", e
->dest
->index
);
7497 /* Print to FILE the basic block BB following the VERBOSITY level. */
7500 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7502 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7503 memset ((void *) s_indent
, ' ', (size_t) indent
);
7504 s_indent
[indent
] = '\0';
7506 /* Print basic_block's header. */
7509 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7510 print_pred_bbs (file
, bb
);
7511 fprintf (file
, "}, succs = {");
7512 print_succ_bbs (file
, bb
);
7513 fprintf (file
, "})\n");
7516 /* Print basic_block's body. */
7519 fprintf (file
, "%s {\n", s_indent
);
7520 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7521 fprintf (file
, "%s }\n", s_indent
);
7525 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7527 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7528 VERBOSITY level this outputs the contents of the loop, or just its
7532 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7540 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7541 memset ((void *) s_indent
, ' ', (size_t) indent
);
7542 s_indent
[indent
] = '\0';
7544 /* Print loop's header. */
7545 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7547 fprintf (file
, "header = %d", loop
->header
->index
);
7550 fprintf (file
, "deleted)\n");
7554 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7556 fprintf (file
, ", multiple latches");
7557 fprintf (file
, ", niter = ");
7558 print_generic_expr (file
, loop
->nb_iterations
, 0);
7560 if (loop
->any_upper_bound
)
7562 fprintf (file
, ", upper_bound = ");
7563 print_decu (loop
->nb_iterations_upper_bound
, file
);
7566 if (loop
->any_estimate
)
7568 fprintf (file
, ", estimate = ");
7569 print_decu (loop
->nb_iterations_estimate
, file
);
7571 fprintf (file
, ")\n");
7573 /* Print loop's body. */
7576 fprintf (file
, "%s{\n", s_indent
);
7577 FOR_EACH_BB_FN (bb
, cfun
)
7578 if (bb
->loop_father
== loop
)
7579 print_loops_bb (file
, bb
, indent
, verbosity
);
7581 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7582 fprintf (file
, "%s}\n", s_indent
);
7586 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7587 spaces. Following VERBOSITY level this outputs the contents of the
7588 loop, or just its structure. */
7591 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7597 print_loop (file
, loop
, indent
, verbosity
);
7598 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7601 /* Follow a CFG edge from the entry point of the program, and on entry
7602 of a loop, pretty print the loop structure on FILE. */
7605 print_loops (FILE *file
, int verbosity
)
7609 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7610 if (bb
&& bb
->loop_father
)
7611 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7617 debug (struct loop
&ref
)
7619 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7623 debug (struct loop
*ptr
)
7628 fprintf (stderr
, "<nil>\n");
7631 /* Dump a loop verbosely. */
7634 debug_verbose (struct loop
&ref
)
7636 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7640 debug_verbose (struct loop
*ptr
)
7645 fprintf (stderr
, "<nil>\n");
7649 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7652 debug_loops (int verbosity
)
7654 print_loops (stderr
, verbosity
);
7657 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7660 debug_loop (struct loop
*loop
, int verbosity
)
7662 print_loop (stderr
, loop
, 0, verbosity
);
7665 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7669 debug_loop_num (unsigned num
, int verbosity
)
7671 debug_loop (get_loop (cfun
, num
), verbosity
);
7674 /* Return true if BB ends with a call, possibly followed by some
7675 instructions that must stay with the call. Return false,
7679 gimple_block_ends_with_call_p (basic_block bb
)
7681 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7682 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7686 /* Return true if BB ends with a conditional branch. Return false,
7690 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7692 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7693 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7697 /* Return true if we need to add fake edge to exit at statement T.
7698 Helper function for gimple_flow_call_edges_add. */
7701 need_fake_edge_p (gimple t
)
7703 tree fndecl
= NULL_TREE
;
7706 /* NORETURN and LONGJMP calls already have an edge to exit.
7707 CONST and PURE calls do not need one.
7708 We don't currently check for CONST and PURE here, although
7709 it would be a good idea, because those attributes are
7710 figured out from the RTL in mark_constant_function, and
7711 the counter incrementation code from -fprofile-arcs
7712 leads to different results from -fbranch-probabilities. */
7713 if (is_gimple_call (t
))
7715 fndecl
= gimple_call_fndecl (t
);
7716 call_flags
= gimple_call_flags (t
);
7719 if (is_gimple_call (t
)
7721 && DECL_BUILT_IN (fndecl
)
7722 && (call_flags
& ECF_NOTHROW
)
7723 && !(call_flags
& ECF_RETURNS_TWICE
)
7724 /* fork() doesn't really return twice, but the effect of
7725 wrapping it in __gcov_fork() which calls __gcov_flush()
7726 and clears the counters before forking has the same
7727 effect as returning twice. Force a fake edge. */
7728 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7729 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7732 if (is_gimple_call (t
))
7738 if (!(call_flags
& ECF_NORETURN
))
7742 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7743 if ((e
->flags
& EDGE_FAKE
) == 0)
7747 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
7748 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
7755 /* Add fake edges to the function exit for any non constant and non
7756 noreturn calls (or noreturn calls with EH/abnormal edges),
7757 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7758 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7761 The goal is to expose cases in which entering a basic block does
7762 not imply that all subsequent instructions must be executed. */
7765 gimple_flow_call_edges_add (sbitmap blocks
)
7768 int blocks_split
= 0;
7769 int last_bb
= last_basic_block_for_fn (cfun
);
7770 bool check_last_block
= false;
7772 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7776 check_last_block
= true;
7778 check_last_block
= bitmap_bit_p (blocks
,
7779 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7781 /* In the last basic block, before epilogue generation, there will be
7782 a fallthru edge to EXIT. Special care is required if the last insn
7783 of the last basic block is a call because make_edge folds duplicate
7784 edges, which would result in the fallthru edge also being marked
7785 fake, which would result in the fallthru edge being removed by
7786 remove_fake_edges, which would result in an invalid CFG.
7788 Moreover, we can't elide the outgoing fake edge, since the block
7789 profiler needs to take this into account in order to solve the minimal
7790 spanning tree in the case that the call doesn't return.
7792 Handle this by adding a dummy instruction in a new last basic block. */
7793 if (check_last_block
)
7795 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7796 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7799 if (!gsi_end_p (gsi
))
7802 if (t
&& need_fake_edge_p (t
))
7806 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7809 gsi_insert_on_edge (e
, gimple_build_nop ());
7810 gsi_commit_edge_inserts ();
7815 /* Now add fake edges to the function exit for any non constant
7816 calls since there is no way that we can determine if they will
7818 for (i
= 0; i
< last_bb
; i
++)
7820 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7821 gimple_stmt_iterator gsi
;
7822 gimple stmt
, last_stmt
;
7827 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7830 gsi
= gsi_last_nondebug_bb (bb
);
7831 if (!gsi_end_p (gsi
))
7833 last_stmt
= gsi_stmt (gsi
);
7836 stmt
= gsi_stmt (gsi
);
7837 if (need_fake_edge_p (stmt
))
7841 /* The handling above of the final block before the
7842 epilogue should be enough to verify that there is
7843 no edge to the exit block in CFG already.
7844 Calling make_edge in such case would cause us to
7845 mark that edge as fake and remove it later. */
7846 #ifdef ENABLE_CHECKING
7847 if (stmt
== last_stmt
)
7849 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7850 gcc_assert (e
== NULL
);
7854 /* Note that the following may create a new basic block
7855 and renumber the existing basic blocks. */
7856 if (stmt
!= last_stmt
)
7858 e
= split_block (bb
, stmt
);
7862 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7866 while (!gsi_end_p (gsi
));
7871 verify_flow_info ();
7873 return blocks_split
;
7876 /* Removes edge E and all the blocks dominated by it, and updates dominance
7877 information. The IL in E->src needs to be updated separately.
7878 If dominance info is not available, only the edge E is removed.*/
7881 remove_edge_and_dominated_blocks (edge e
)
7883 vec
<basic_block
> bbs_to_remove
= vNULL
;
7884 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7888 bool none_removed
= false;
7890 basic_block bb
, dbb
;
7893 /* If we are removing a path inside a non-root loop that may change
7894 loop ownership of blocks or remove loops. Mark loops for fixup. */
7896 && loop_outer (e
->src
->loop_father
) != NULL
7897 && e
->src
->loop_father
== e
->dest
->loop_father
)
7898 loops_state_set (LOOPS_NEED_FIXUP
);
7900 if (!dom_info_available_p (CDI_DOMINATORS
))
7906 /* No updating is needed for edges to exit. */
7907 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7909 if (cfgcleanup_altered_bbs
)
7910 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7915 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7916 that is not dominated by E->dest, then this set is empty. Otherwise,
7917 all the basic blocks dominated by E->dest are removed.
7919 Also, to DF_IDOM we store the immediate dominators of the blocks in
7920 the dominance frontier of E (i.e., of the successors of the
7921 removed blocks, if there are any, and of E->dest otherwise). */
7922 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7927 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7929 none_removed
= true;
7934 df
= BITMAP_ALLOC (NULL
);
7935 df_idom
= BITMAP_ALLOC (NULL
);
7938 bitmap_set_bit (df_idom
,
7939 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7942 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7943 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7945 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7947 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7948 bitmap_set_bit (df
, f
->dest
->index
);
7951 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7952 bitmap_clear_bit (df
, bb
->index
);
7954 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7956 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7957 bitmap_set_bit (df_idom
,
7958 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7962 if (cfgcleanup_altered_bbs
)
7964 /* Record the set of the altered basic blocks. */
7965 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7966 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7969 /* Remove E and the cancelled blocks. */
7974 /* Walk backwards so as to get a chance to substitute all
7975 released DEFs into debug stmts. See
7976 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7978 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7979 delete_basic_block (bbs_to_remove
[i
]);
7982 /* Update the dominance information. The immediate dominator may change only
7983 for blocks whose immediate dominator belongs to DF_IDOM:
7985 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7986 removal. Let Z the arbitrary block such that idom(Z) = Y and
7987 Z dominates X after the removal. Before removal, there exists a path P
7988 from Y to X that avoids Z. Let F be the last edge on P that is
7989 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7990 dominates W, and because of P, Z does not dominate W), and W belongs to
7991 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7992 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7994 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7995 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7997 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7998 bbs_to_fix_dom
.safe_push (dbb
);
8001 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8004 BITMAP_FREE (df_idom
);
8005 bbs_to_remove
.release ();
8006 bbs_to_fix_dom
.release ();
8009 /* Purge dead EH edges from basic block BB. */
8012 gimple_purge_dead_eh_edges (basic_block bb
)
8014 bool changed
= false;
8017 gimple stmt
= last_stmt (bb
);
8019 if (stmt
&& stmt_can_throw_internal (stmt
))
8022 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8024 if (e
->flags
& EDGE_EH
)
8026 remove_edge_and_dominated_blocks (e
);
8036 /* Purge dead EH edges from basic block listed in BLOCKS. */
8039 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8041 bool changed
= false;
8045 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8047 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8049 /* Earlier gimple_purge_dead_eh_edges could have removed
8050 this basic block already. */
8051 gcc_assert (bb
|| changed
);
8053 changed
|= gimple_purge_dead_eh_edges (bb
);
8059 /* Purge dead abnormal call edges from basic block BB. */
8062 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8064 bool changed
= false;
8067 gimple stmt
= last_stmt (bb
);
8069 if (!cfun
->has_nonlocal_label
8070 && !cfun
->calls_setjmp
)
8073 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8076 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8078 if (e
->flags
& EDGE_ABNORMAL
)
8080 if (e
->flags
& EDGE_FALLTHRU
)
8081 e
->flags
&= ~EDGE_ABNORMAL
;
8083 remove_edge_and_dominated_blocks (e
);
8093 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8096 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8098 bool changed
= false;
8102 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8104 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8106 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8107 this basic block already. */
8108 gcc_assert (bb
|| changed
);
8110 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8116 /* This function is called whenever a new edge is created or
8120 gimple_execute_on_growing_pred (edge e
)
8122 basic_block bb
= e
->dest
;
8124 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8125 reserve_phi_args_for_new_edge (bb
);
8128 /* This function is called immediately before edge E is removed from
8129 the edge vector E->dest->preds. */
8132 gimple_execute_on_shrinking_pred (edge e
)
8134 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8135 remove_phi_args (e
);
8138 /*---------------------------------------------------------------------------
8139 Helper functions for Loop versioning
8140 ---------------------------------------------------------------------------*/
8142 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8143 of 'first'. Both of them are dominated by 'new_head' basic block. When
8144 'new_head' was created by 'second's incoming edge it received phi arguments
8145 on the edge by split_edge(). Later, additional edge 'e' was created to
8146 connect 'new_head' and 'first'. Now this routine adds phi args on this
8147 additional edge 'e' that new_head to second edge received as part of edge
8151 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8152 basic_block new_head
, edge e
)
8155 gphi_iterator psi1
, psi2
;
8157 edge e2
= find_edge (new_head
, second
);
8159 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8160 edge, we should always have an edge from NEW_HEAD to SECOND. */
8161 gcc_assert (e2
!= NULL
);
8163 /* Browse all 'second' basic block phi nodes and add phi args to
8164 edge 'e' for 'first' head. PHI args are always in correct order. */
8166 for (psi2
= gsi_start_phis (second
),
8167 psi1
= gsi_start_phis (first
);
8168 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8169 gsi_next (&psi2
), gsi_next (&psi1
))
8173 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8174 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8179 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8180 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8181 the destination of the ELSE part. */
8184 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8185 basic_block second_head ATTRIBUTE_UNUSED
,
8186 basic_block cond_bb
, void *cond_e
)
8188 gimple_stmt_iterator gsi
;
8189 gimple new_cond_expr
;
8190 tree cond_expr
= (tree
) cond_e
;
8193 /* Build new conditional expr */
8194 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8195 NULL_TREE
, NULL_TREE
);
8197 /* Add new cond in cond_bb. */
8198 gsi
= gsi_last_bb (cond_bb
);
8199 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8201 /* Adjust edges appropriately to connect new head with first head
8202 as well as second head. */
8203 e0
= single_succ_edge (cond_bb
);
8204 e0
->flags
&= ~EDGE_FALLTHRU
;
8205 e0
->flags
|= EDGE_FALSE_VALUE
;
8209 /* Do book-keeping of basic block BB for the profile consistency checker.
8210 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8211 then do post-pass accounting. Store the counting in RECORD. */
8213 gimple_account_profile_record (basic_block bb
, int after_pass
,
8214 struct profile_record
*record
)
8216 gimple_stmt_iterator i
;
8217 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8219 record
->size
[after_pass
]
8220 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8221 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
8222 record
->time
[after_pass
]
8223 += estimate_num_insns (gsi_stmt (i
),
8224 &eni_time_weights
) * bb
->count
;
8225 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8226 record
->time
[after_pass
]
8227 += estimate_num_insns (gsi_stmt (i
),
8228 &eni_time_weights
) * bb
->frequency
;
8232 struct cfg_hooks gimple_cfg_hooks
= {
8234 gimple_verify_flow_info
,
8235 gimple_dump_bb
, /* dump_bb */
8236 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8237 create_bb
, /* create_basic_block */
8238 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8239 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8240 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8241 remove_bb
, /* delete_basic_block */
8242 gimple_split_block
, /* split_block */
8243 gimple_move_block_after
, /* move_block_after */
8244 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8245 gimple_merge_blocks
, /* merge_blocks */
8246 gimple_predict_edge
, /* predict_edge */
8247 gimple_predicted_by_p
, /* predicted_by_p */
8248 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8249 gimple_duplicate_bb
, /* duplicate_block */
8250 gimple_split_edge
, /* split_edge */
8251 gimple_make_forwarder_block
, /* make_forward_block */
8252 NULL
, /* tidy_fallthru_edge */
8253 NULL
, /* force_nonfallthru */
8254 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8255 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8256 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8257 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8258 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8259 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8260 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8261 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8262 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8263 flush_pending_stmts
, /* flush_pending_stmts */
8264 gimple_empty_block_p
, /* block_empty_p */
8265 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8266 gimple_account_profile_record
,
8270 /* Split all critical edges. */
8273 split_critical_edges (void)
8279 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8280 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8281 mappings around the calls to split_edge. */
8282 start_recording_case_labels ();
8283 FOR_ALL_BB_FN (bb
, cfun
)
8285 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8287 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8289 /* PRE inserts statements to edges and expects that
8290 since split_critical_edges was done beforehand, committing edge
8291 insertions will not split more edges. In addition to critical
8292 edges we must split edges that have multiple successors and
8293 end by control flow statements, such as RESX.
8294 Go ahead and split them too. This matches the logic in
8295 gimple_find_edge_insert_loc. */
8296 else if ((!single_pred_p (e
->dest
)
8297 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8298 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8299 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8300 && !(e
->flags
& EDGE_ABNORMAL
))
8302 gimple_stmt_iterator gsi
;
8304 gsi
= gsi_last_bb (e
->src
);
8305 if (!gsi_end_p (gsi
)
8306 && stmt_ends_bb_p (gsi_stmt (gsi
))
8307 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8308 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8314 end_recording_case_labels ();
8320 const pass_data pass_data_split_crit_edges
=
8322 GIMPLE_PASS
, /* type */
8323 "crited", /* name */
8324 OPTGROUP_NONE
, /* optinfo_flags */
8325 TV_TREE_SPLIT_EDGES
, /* tv_id */
8326 PROP_cfg
, /* properties_required */
8327 PROP_no_crit_edges
, /* properties_provided */
8328 0, /* properties_destroyed */
8329 0, /* todo_flags_start */
8330 0, /* todo_flags_finish */
8333 class pass_split_crit_edges
: public gimple_opt_pass
8336 pass_split_crit_edges (gcc::context
*ctxt
)
8337 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8340 /* opt_pass methods: */
8341 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8343 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8344 }; // class pass_split_crit_edges
8349 make_pass_split_crit_edges (gcc::context
*ctxt
)
8351 return new pass_split_crit_edges (ctxt
);
8355 /* Insert COND expression which is GIMPLE_COND after STMT
8356 in basic block BB with appropriate basic block split
8357 and creation of a new conditionally executed basic block.
8358 Return created basic block. */
8360 insert_cond_bb (basic_block bb
, gimple stmt
, gimple cond
)
8362 edge fall
= split_block (bb
, stmt
);
8363 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8366 /* Insert cond statement. */
8367 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8368 if (gsi_end_p (iter
))
8369 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8371 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8373 /* Create conditionally executed block. */
8374 new_bb
= create_empty_bb (bb
);
8375 make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8376 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8378 /* Fix edge for split bb. */
8379 fall
->flags
= EDGE_FALSE_VALUE
;
8381 /* Update dominance info. */
8382 if (dom_info_available_p (CDI_DOMINATORS
))
8384 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8385 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8388 /* Update loop info. */
8390 add_bb_to_loop (new_bb
, bb
->loop_father
);
8395 /* Build a ternary operation and gimplify it. Emit code before GSI.
8396 Return the gimple_val holding the result. */
8399 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8400 tree type
, tree a
, tree b
, tree c
)
8403 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8405 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8408 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8412 /* Build a binary operation and gimplify it. Emit code before GSI.
8413 Return the gimple_val holding the result. */
8416 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8417 tree type
, tree a
, tree b
)
8421 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8424 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8428 /* Build a unary operation and gimplify it. Emit code before GSI.
8429 Return the gimple_val holding the result. */
8432 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8437 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8440 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8446 /* Given a basic block B which ends with a conditional and has
8447 precisely two successors, determine which of the edges is taken if
8448 the conditional is true and which is taken if the conditional is
8449 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8452 extract_true_false_edges_from_block (basic_block b
,
8456 edge e
= EDGE_SUCC (b
, 0);
8458 if (e
->flags
& EDGE_TRUE_VALUE
)
8461 *false_edge
= EDGE_SUCC (b
, 1);
8466 *true_edge
= EDGE_SUCC (b
, 1);
8470 /* Emit return warnings. */
8474 const pass_data pass_data_warn_function_return
=
8476 GIMPLE_PASS
, /* type */
8477 "*warn_function_return", /* name */
8478 OPTGROUP_NONE
, /* optinfo_flags */
8479 TV_NONE
, /* tv_id */
8480 PROP_cfg
, /* properties_required */
8481 0, /* properties_provided */
8482 0, /* properties_destroyed */
8483 0, /* todo_flags_start */
8484 0, /* todo_flags_finish */
8487 class pass_warn_function_return
: public gimple_opt_pass
8490 pass_warn_function_return (gcc::context
*ctxt
)
8491 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8494 /* opt_pass methods: */
8495 virtual unsigned int execute (function
*);
8497 }; // class pass_warn_function_return
8500 pass_warn_function_return::execute (function
*fun
)
8502 source_location location
;
8507 if (!targetm
.warn_func_return (fun
->decl
))
8510 /* If we have a path to EXIT, then we do return. */
8511 if (TREE_THIS_VOLATILE (fun
->decl
)
8512 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8514 location
= UNKNOWN_LOCATION
;
8515 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8517 last
= last_stmt (e
->src
);
8518 if ((gimple_code (last
) == GIMPLE_RETURN
8519 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8520 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8523 if (location
== UNKNOWN_LOCATION
)
8524 location
= cfun
->function_end_locus
;
8525 warning_at (location
, 0, "%<noreturn%> function does return");
8528 /* If we see "return;" in some basic block, then we do reach the end
8529 without returning a value. */
8530 else if (warn_return_type
8531 && !TREE_NO_WARNING (fun
->decl
)
8532 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8533 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8535 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8537 gimple last
= last_stmt (e
->src
);
8538 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
8540 && gimple_return_retval (return_stmt
) == NULL
8541 && !gimple_no_warning_p (last
))
8543 location
= gimple_location (last
);
8544 if (location
== UNKNOWN_LOCATION
)
8545 location
= fun
->function_end_locus
;
8546 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8547 TREE_NO_WARNING (fun
->decl
) = 1;
8558 make_pass_warn_function_return (gcc::context
*ctxt
)
8560 return new pass_warn_function_return (ctxt
);
8563 /* Walk a gimplified function and warn for functions whose return value is
8564 ignored and attribute((warn_unused_result)) is set. This is done before
8565 inlining, so we don't have to worry about that. */
8568 do_warn_unused_result (gimple_seq seq
)
8571 gimple_stmt_iterator i
;
8573 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8575 gimple g
= gsi_stmt (i
);
8577 switch (gimple_code (g
))
8580 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
8583 do_warn_unused_result (gimple_try_eval (g
));
8584 do_warn_unused_result (gimple_try_cleanup (g
));
8587 do_warn_unused_result (gimple_catch_handler (
8588 as_a
<gcatch
*> (g
)));
8590 case GIMPLE_EH_FILTER
:
8591 do_warn_unused_result (gimple_eh_filter_failure (g
));
8595 if (gimple_call_lhs (g
))
8597 if (gimple_call_internal_p (g
))
8600 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8601 LHS. All calls whose value is ignored should be
8602 represented like this. Look for the attribute. */
8603 fdecl
= gimple_call_fndecl (g
);
8604 ftype
= gimple_call_fntype (g
);
8606 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8608 location_t loc
= gimple_location (g
);
8611 warning_at (loc
, OPT_Wunused_result
,
8612 "ignoring return value of %qD, "
8613 "declared with attribute warn_unused_result",
8616 warning_at (loc
, OPT_Wunused_result
,
8617 "ignoring return value of function "
8618 "declared with attribute warn_unused_result");
8623 /* Not a container, not a call, or a call whose value is used. */
8631 const pass_data pass_data_warn_unused_result
=
8633 GIMPLE_PASS
, /* type */
8634 "*warn_unused_result", /* name */
8635 OPTGROUP_NONE
, /* optinfo_flags */
8636 TV_NONE
, /* tv_id */
8637 PROP_gimple_any
, /* properties_required */
8638 0, /* properties_provided */
8639 0, /* properties_destroyed */
8640 0, /* todo_flags_start */
8641 0, /* todo_flags_finish */
8644 class pass_warn_unused_result
: public gimple_opt_pass
8647 pass_warn_unused_result (gcc::context
*ctxt
)
8648 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8651 /* opt_pass methods: */
8652 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8653 virtual unsigned int execute (function
*)
8655 do_warn_unused_result (gimple_body (current_function_decl
));
8659 }; // class pass_warn_unused_result
8664 make_pass_warn_unused_result (gcc::context
*ctxt
)
8666 return new pass_warn_unused_result (ctxt
);
8669 /* IPA passes, compilation of earlier functions or inlining
8670 might have changed some properties, such as marked functions nothrow,
8671 pure, const or noreturn.
8672 Remove redundant edges and basic blocks, and create new ones if necessary.
8674 This pass can't be executed as stand alone pass from pass manager, because
8675 in between inlining and this fixup the verify_flow_info would fail. */
8678 execute_fixup_cfg (void)
8681 gimple_stmt_iterator gsi
;
8683 gcov_type count_scale
;
8688 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl
)->count
,
8689 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8691 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8692 cgraph_node::get (current_function_decl
)->count
;
8693 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8694 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8697 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8698 e
->count
= apply_scale (e
->count
, count_scale
);
8700 FOR_EACH_BB_FN (bb
, cfun
)
8702 bb
->count
= apply_scale (bb
->count
, count_scale
);
8703 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8705 gimple stmt
= gsi_stmt (gsi
);
8706 tree decl
= is_gimple_call (stmt
)
8707 ? gimple_call_fndecl (stmt
)
8711 int flags
= gimple_call_flags (stmt
);
8712 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8714 if (gimple_purge_dead_abnormal_call_edges (bb
))
8715 todo
|= TODO_cleanup_cfg
;
8717 if (gimple_in_ssa_p (cfun
))
8719 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8724 if (flags
& ECF_NORETURN
8725 && fixup_noreturn_call (stmt
))
8726 todo
|= TODO_cleanup_cfg
;
8729 /* Remove stores to variables we marked write-only.
8730 Keep access when store has side effect, i.e. in case when source
8732 if (gimple_store_p (stmt
)
8733 && !gimple_has_side_effects (stmt
))
8735 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8737 if (TREE_CODE (lhs
) == VAR_DECL
8738 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8739 && varpool_node::get (lhs
)->writeonly
)
8741 unlink_stmt_vdef (stmt
);
8742 gsi_remove (&gsi
, true);
8743 release_defs (stmt
);
8744 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8748 /* For calls we can simply remove LHS when it is known
8749 to be write-only. */
8750 if (is_gimple_call (stmt
)
8751 && gimple_get_lhs (stmt
))
8753 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8755 if (TREE_CODE (lhs
) == VAR_DECL
8756 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8757 && varpool_node::get (lhs
)->writeonly
)
8759 gimple_call_set_lhs (stmt
, NULL
);
8761 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8765 if (maybe_clean_eh_stmt (stmt
)
8766 && gimple_purge_dead_eh_edges (bb
))
8767 todo
|= TODO_cleanup_cfg
;
8771 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8772 e
->count
= apply_scale (e
->count
, count_scale
);
8774 /* If we have a basic block with no successors that does not
8775 end with a control statement or a noreturn call end it with
8776 a call to __builtin_unreachable. This situation can occur
8777 when inlining a noreturn call that does in fact return. */
8778 if (EDGE_COUNT (bb
->succs
) == 0)
8780 gimple stmt
= last_stmt (bb
);
8782 || (!is_ctrl_stmt (stmt
)
8783 && (!is_gimple_call (stmt
)
8784 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8786 if (stmt
&& is_gimple_call (stmt
))
8787 gimple_call_set_ctrl_altering (stmt
, false);
8788 stmt
= gimple_build_call
8789 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8790 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8791 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8795 if (count_scale
!= REG_BR_PROB_BASE
)
8796 compute_function_frequency ();
8799 && (todo
& TODO_cleanup_cfg
))
8800 loops_state_set (LOOPS_NEED_FIXUP
);
8807 const pass_data pass_data_fixup_cfg
=
8809 GIMPLE_PASS
, /* type */
8810 "fixup_cfg", /* name */
8811 OPTGROUP_NONE
, /* optinfo_flags */
8812 TV_NONE
, /* tv_id */
8813 PROP_cfg
, /* properties_required */
8814 0, /* properties_provided */
8815 0, /* properties_destroyed */
8816 0, /* todo_flags_start */
8817 0, /* todo_flags_finish */
8820 class pass_fixup_cfg
: public gimple_opt_pass
8823 pass_fixup_cfg (gcc::context
*ctxt
)
8824 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8827 /* opt_pass methods: */
8828 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8829 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
8831 }; // class pass_fixup_cfg
8836 make_pass_fixup_cfg (gcc::context
*ctxt
)
8838 return new pass_fixup_cfg (ctxt
);
8841 /* Garbage collection support for edge_def. */
8843 extern void gt_ggc_mx (tree
&);
8844 extern void gt_ggc_mx (gimple
&);
8845 extern void gt_ggc_mx (rtx
&);
8846 extern void gt_ggc_mx (basic_block
&);
8849 gt_ggc_mx (rtx_insn
*& x
)
8852 gt_ggc_mx_rtx_def ((void *) x
);
8856 gt_ggc_mx (edge_def
*e
)
8858 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8860 gt_ggc_mx (e
->dest
);
8861 if (current_ir_type () == IR_GIMPLE
)
8862 gt_ggc_mx (e
->insns
.g
);
8864 gt_ggc_mx (e
->insns
.r
);
8868 /* PCH support for edge_def. */
8870 extern void gt_pch_nx (tree
&);
8871 extern void gt_pch_nx (gimple
&);
8872 extern void gt_pch_nx (rtx
&);
8873 extern void gt_pch_nx (basic_block
&);
8876 gt_pch_nx (rtx_insn
*& x
)
8879 gt_pch_nx_rtx_def ((void *) x
);
8883 gt_pch_nx (edge_def
*e
)
8885 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8887 gt_pch_nx (e
->dest
);
8888 if (current_ir_type () == IR_GIMPLE
)
8889 gt_pch_nx (e
->insns
.g
);
8891 gt_pch_nx (e
->insns
.r
);
8896 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8898 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8899 op (&(e
->src
), cookie
);
8900 op (&(e
->dest
), cookie
);
8901 if (current_ir_type () == IR_GIMPLE
)
8902 op (&(e
->insns
.g
), cookie
);
8904 op (&(e
->insns
.r
), cookie
);
8905 op (&(block
), cookie
);