1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 #include "wide-int-print.h"
74 /* This file contains functions for building the Control Flow Graph (CFG)
75 for a function tree. */
77 /* Local declarations. */
79 /* Initial capacity for the basic block array. */
80 static const int initial_cfg_capacity
= 20;
82 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
83 which use a particular edge. The CASE_LABEL_EXPRs are chained together
84 via their CASE_CHAIN field, which we clear after we're done with the
85 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
87 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
88 update the case vector in response to edge redirections.
90 Right now this table is set up and torn down at key points in the
91 compilation process. It would be nice if we could make the table
92 more persistent. The key is getting notification of changes to
93 the CFG (particularly edge removal, creation and redirection). */
95 static struct pointer_map_t
*edge_to_cases
;
97 /* If we record edge_to_cases, this bitmap will hold indexes
98 of basic blocks that end in a GIMPLE_SWITCH which we touched
99 due to edge manipulations. */
101 static bitmap touched_switch_bbs
;
103 /* CFG statistics. */
106 long num_merged_labels
;
109 static struct cfg_stats_d cfg_stats
;
111 /* Nonzero if we found a computed goto while building basic blocks. */
112 static bool found_computed_goto
;
114 /* Hash table to store last discriminator assigned for each locus. */
115 struct locus_discrim_map
121 /* Hashtable helpers. */
123 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
125 typedef locus_discrim_map value_type
;
126 typedef locus_discrim_map compare_type
;
127 static inline hashval_t
hash (const value_type
*);
128 static inline bool equal (const value_type
*, const compare_type
*);
131 /* Trivial hash function for a location_t. ITEM is a pointer to
132 a hash table entry that maps a location_t to a discriminator. */
135 locus_discrim_hasher::hash (const value_type
*item
)
137 return LOCATION_LINE (item
->locus
);
140 /* Equality function for the locus-to-discriminator map. A and B
141 point to the two hash table entries to compare. */
144 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
146 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
149 static hash_table
<locus_discrim_hasher
> discriminator_per_locus
;
151 /* Basic blocks and flowgraphs. */
152 static void make_blocks (gimple_seq
);
153 static void factor_computed_gotos (void);
156 static void make_edges (void);
157 static void assign_discriminators (void);
158 static void make_cond_expr_edges (basic_block
);
159 static void make_gimple_switch_edges (basic_block
);
160 static void make_goto_expr_edges (basic_block
);
161 static void make_gimple_asm_edges (basic_block
);
162 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
163 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
164 static unsigned int split_critical_edges (void);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple
, gimple
);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge
);
170 static gimple
first_non_label_stmt (basic_block
);
171 static bool verify_gimple_transaction (gimple
);
173 /* Flowgraph optimization and cleanup. */
174 static void gimple_merge_blocks (basic_block
, basic_block
);
175 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
176 static void remove_bb (basic_block
);
177 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
178 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
179 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
180 static tree
find_case_label_for_value (gimple
, tree
);
183 init_empty_tree_cfg_for_function (struct function
*fn
)
185 /* Initialize the basic block array. */
187 profile_status_for_function (fn
) = PROFILE_ABSENT
;
188 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
189 last_basic_block_for_function (fn
) = NUM_FIXED_BLOCKS
;
190 vec_alloc (basic_block_info_for_function (fn
), initial_cfg_capacity
);
191 vec_safe_grow_cleared (basic_block_info_for_function (fn
),
192 initial_cfg_capacity
);
194 /* Build a mapping of labels to their associated blocks. */
195 vec_alloc (label_to_block_map_for_function (fn
), initial_cfg_capacity
);
196 vec_safe_grow_cleared (label_to_block_map_for_function (fn
),
197 initial_cfg_capacity
);
199 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, ENTRY_BLOCK
,
200 ENTRY_BLOCK_PTR_FOR_FN (fn
));
201 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, EXIT_BLOCK
,
202 EXIT_BLOCK_PTR_FOR_FN (fn
));
204 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
205 = EXIT_BLOCK_PTR_FOR_FN (fn
);
206 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
207 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
211 init_empty_tree_cfg (void)
213 init_empty_tree_cfg_for_function (cfun
);
216 /*---------------------------------------------------------------------------
218 ---------------------------------------------------------------------------*/
220 /* Entry point to the CFG builder for trees. SEQ is the sequence of
221 statements to be added to the flowgraph. */
224 build_gimple_cfg (gimple_seq seq
)
226 /* Register specific gimple functions. */
227 gimple_register_cfg_hooks ();
229 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
231 init_empty_tree_cfg ();
233 found_computed_goto
= 0;
236 /* Computed gotos are hell to deal with, especially if there are
237 lots of them with a large number of destinations. So we factor
238 them to a common computed goto location before we build the
239 edge list. After we convert back to normal form, we will un-factor
240 the computed gotos since factoring introduces an unwanted jump. */
241 if (found_computed_goto
)
242 factor_computed_gotos ();
244 /* Make sure there is always at least one block, even if it's empty. */
245 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
246 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
248 /* Adjust the size of the array. */
249 if (basic_block_info
->length () < (size_t) n_basic_blocks_for_fn (cfun
))
250 vec_safe_grow_cleared (basic_block_info
, n_basic_blocks_for_fn (cfun
));
252 /* To speed up statement iterator walks, we first purge dead labels. */
253 cleanup_dead_labels ();
255 /* Group case nodes to reduce the number of edges.
256 We do this after cleaning up dead labels because otherwise we miss
257 a lot of obvious case merging opportunities. */
258 group_case_labels ();
260 /* Create the edges of the flowgraph. */
261 discriminator_per_locus
.create (13);
263 assign_discriminators ();
264 cleanup_dead_labels ();
265 discriminator_per_locus
.dispose ();
269 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
270 it and set loop->safelen to INT_MAX. We assume that the annotation
271 comes immediately before the condition. */
274 replace_loop_annotate ()
278 gimple_stmt_iterator gsi
;
281 FOR_EACH_LOOP (loop
, 0)
283 gsi
= gsi_last_bb (loop
->header
);
284 stmt
= gsi_stmt (gsi
);
285 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
287 gsi_prev_nondebug (&gsi
);
290 stmt
= gsi_stmt (gsi
);
291 if (gimple_code (stmt
) != GIMPLE_CALL
)
293 if (!gimple_call_internal_p (stmt
)
294 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
296 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
297 != annot_expr_ivdep_kind
)
299 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
300 gimple_call_arg (stmt
, 0));
301 gsi_replace (&gsi
, stmt
, true);
302 loop
->safelen
= INT_MAX
;
306 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
309 gsi
= gsi_last_bb (bb
);
310 stmt
= gsi_stmt (gsi
);
311 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
312 gsi_prev_nondebug (&gsi
);
315 stmt
= gsi_stmt (gsi
);
316 if (gimple_code (stmt
) != GIMPLE_CALL
)
318 if (!gimple_call_internal_p (stmt
)
319 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
321 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
322 != annot_expr_ivdep_kind
)
324 warning_at (gimple_location (stmt
), 0, "ignoring %<GCC ivdep%> "
326 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
327 gimple_call_arg (stmt
, 0));
328 gsi_replace (&gsi
, stmt
, true);
334 execute_build_cfg (void)
336 gimple_seq body
= gimple_body (current_function_decl
);
338 build_gimple_cfg (body
);
339 gimple_set_body (current_function_decl
, NULL
);
340 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
342 fprintf (dump_file
, "Scope blocks:\n");
343 dump_scope_blocks (dump_file
, dump_flags
);
346 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
347 replace_loop_annotate ();
353 const pass_data pass_data_build_cfg
=
355 GIMPLE_PASS
, /* type */
357 OPTGROUP_NONE
, /* optinfo_flags */
358 false, /* has_gate */
359 true, /* has_execute */
360 TV_TREE_CFG
, /* tv_id */
361 PROP_gimple_leh
, /* properties_required */
362 ( PROP_cfg
| PROP_loops
), /* properties_provided */
363 0, /* properties_destroyed */
364 0, /* todo_flags_start */
365 TODO_verify_stmts
, /* todo_flags_finish */
368 class pass_build_cfg
: public gimple_opt_pass
371 pass_build_cfg (gcc::context
*ctxt
)
372 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
375 /* opt_pass methods: */
376 unsigned int execute () { return execute_build_cfg (); }
378 }; // class pass_build_cfg
383 make_pass_build_cfg (gcc::context
*ctxt
)
385 return new pass_build_cfg (ctxt
);
389 /* Return true if T is a computed goto. */
392 computed_goto_p (gimple t
)
394 return (gimple_code (t
) == GIMPLE_GOTO
395 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
398 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
399 the other edge points to a bb with just __builtin_unreachable ().
400 I.e. return true for C->M edge in:
408 __builtin_unreachable ();
412 assert_unreachable_fallthru_edge_p (edge e
)
414 basic_block pred_bb
= e
->src
;
415 gimple last
= last_stmt (pred_bb
);
416 if (last
&& gimple_code (last
) == GIMPLE_COND
)
418 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
419 if (other_bb
== e
->dest
)
420 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
421 if (EDGE_COUNT (other_bb
->succs
) == 0)
423 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
428 stmt
= gsi_stmt (gsi
);
429 if (is_gimple_debug (stmt
))
431 gsi_next_nondebug (&gsi
);
434 stmt
= gsi_stmt (gsi
);
436 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
443 /* Search the CFG for any computed gotos. If found, factor them to a
444 common computed goto site. Also record the location of that site so
445 that we can un-factor the gotos after we have converted back to
449 factor_computed_gotos (void)
452 tree factored_label_decl
= NULL
;
454 gimple factored_computed_goto_label
= NULL
;
455 gimple factored_computed_goto
= NULL
;
457 /* We know there are one or more computed gotos in this function.
458 Examine the last statement in each basic block to see if the block
459 ends with a computed goto. */
463 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
469 last
= gsi_stmt (gsi
);
471 /* Ignore the computed goto we create when we factor the original
473 if (last
== factored_computed_goto
)
476 /* If the last statement is a computed goto, factor it. */
477 if (computed_goto_p (last
))
481 /* The first time we find a computed goto we need to create
482 the factored goto block and the variable each original
483 computed goto will use for their goto destination. */
484 if (!factored_computed_goto
)
486 basic_block new_bb
= create_empty_bb (bb
);
487 gimple_stmt_iterator new_gsi
= gsi_start_bb (new_bb
);
489 /* Create the destination of the factored goto. Each original
490 computed goto will put its desired destination into this
491 variable and jump to the label we create immediately
493 var
= create_tmp_var (ptr_type_node
, "gotovar");
495 /* Build a label for the new block which will contain the
496 factored computed goto. */
497 factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION
);
498 factored_computed_goto_label
499 = gimple_build_label (factored_label_decl
);
500 gsi_insert_after (&new_gsi
, factored_computed_goto_label
,
503 /* Build our new computed goto. */
504 factored_computed_goto
= gimple_build_goto (var
);
505 gsi_insert_after (&new_gsi
, factored_computed_goto
, GSI_NEW_STMT
);
508 /* Copy the original computed goto's destination into VAR. */
509 assignment
= gimple_build_assign (var
, gimple_goto_dest (last
));
510 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
512 /* And re-vector the computed goto to the new destination. */
513 gimple_goto_set_dest (last
, factored_label_decl
);
519 /* Build a flowgraph for the sequence of stmts SEQ. */
522 make_blocks (gimple_seq seq
)
524 gimple_stmt_iterator i
= gsi_start (seq
);
526 bool start_new_block
= true;
527 bool first_stmt_of_seq
= true;
528 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
530 while (!gsi_end_p (i
))
537 /* If the statement starts a new basic block or if we have determined
538 in a previous pass that we need to create a new block for STMT, do
540 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
542 if (!first_stmt_of_seq
)
543 gsi_split_seq_before (&i
, &seq
);
544 bb
= create_basic_block (seq
, NULL
, bb
);
545 start_new_block
= false;
548 /* Now add STMT to BB and create the subgraphs for special statement
550 gimple_set_bb (stmt
, bb
);
552 if (computed_goto_p (stmt
))
553 found_computed_goto
= true;
555 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
557 if (stmt_ends_bb_p (stmt
))
559 /* If the stmt can make abnormal goto use a new temporary
560 for the assignment to the LHS. This makes sure the old value
561 of the LHS is available on the abnormal edge. Otherwise
562 we will end up with overlapping life-ranges for abnormal
564 if (gimple_has_lhs (stmt
)
565 && stmt_can_make_abnormal_goto (stmt
)
566 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
568 tree lhs
= gimple_get_lhs (stmt
);
569 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
570 gimple s
= gimple_build_assign (lhs
, tmp
);
571 gimple_set_location (s
, gimple_location (stmt
));
572 gimple_set_block (s
, gimple_block (stmt
));
573 gimple_set_lhs (stmt
, tmp
);
574 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
575 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
576 DECL_GIMPLE_REG_P (tmp
) = 1;
577 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
579 start_new_block
= true;
583 first_stmt_of_seq
= false;
588 /* Create and return a new empty basic block after bb AFTER. */
591 create_bb (void *h
, void *e
, basic_block after
)
597 /* Create and initialize a new basic block. Since alloc_block uses
598 GC allocation that clears memory to allocate a basic block, we do
599 not have to clear the newly allocated basic block here. */
602 bb
->index
= last_basic_block
;
604 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
606 /* Add the new block to the linked list of blocks. */
607 link_block (bb
, after
);
609 /* Grow the basic block array if needed. */
610 if ((size_t) last_basic_block
== basic_block_info
->length ())
612 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
613 vec_safe_grow_cleared (basic_block_info
, new_size
);
616 /* Add the newly created block to the array. */
617 SET_BASIC_BLOCK (last_basic_block
, bb
);
619 n_basic_blocks_for_fn (cfun
)++;
626 /*---------------------------------------------------------------------------
628 ---------------------------------------------------------------------------*/
630 /* Fold COND_EXPR_COND of each COND_EXPR. */
633 fold_cond_expr_cond (void)
639 gimple stmt
= last_stmt (bb
);
641 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
643 location_t loc
= gimple_location (stmt
);
647 fold_defer_overflow_warnings ();
648 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
649 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
652 zerop
= integer_zerop (cond
);
653 onep
= integer_onep (cond
);
656 zerop
= onep
= false;
658 fold_undefer_overflow_warnings (zerop
|| onep
,
660 WARN_STRICT_OVERFLOW_CONDITIONAL
);
662 gimple_cond_make_false (stmt
);
664 gimple_cond_make_true (stmt
);
669 /* Join all the blocks in the flowgraph. */
675 struct omp_region
*cur_region
= NULL
;
677 /* Create an edge from entry to the first block with executable
679 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), BASIC_BLOCK (NUM_FIXED_BLOCKS
),
682 /* Traverse the basic block array placing edges. */
685 gimple last
= last_stmt (bb
);
690 enum gimple_code code
= gimple_code (last
);
694 make_goto_expr_edges (bb
);
698 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
702 make_cond_expr_edges (bb
);
706 make_gimple_switch_edges (bb
);
710 make_eh_edges (last
);
713 case GIMPLE_EH_DISPATCH
:
714 fallthru
= make_eh_dispatch_edges (last
);
718 /* If this function receives a nonlocal goto, then we need to
719 make edges from this call site to all the nonlocal goto
721 if (stmt_can_make_abnormal_goto (last
))
722 make_abnormal_goto_edges (bb
, true);
724 /* If this statement has reachable exception handlers, then
725 create abnormal edges to them. */
726 make_eh_edges (last
);
728 /* BUILTIN_RETURN is really a return statement. */
729 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
730 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0), fallthru
=
732 /* Some calls are known not to return. */
734 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
738 /* A GIMPLE_ASSIGN may throw internally and thus be considered
740 if (is_ctrl_altering_stmt (last
))
741 make_eh_edges (last
);
746 make_gimple_asm_edges (bb
);
751 fallthru
= make_gimple_omp_edges (bb
, &cur_region
);
754 case GIMPLE_TRANSACTION
:
756 tree abort_label
= gimple_transaction_label (last
);
758 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
764 gcc_assert (!stmt_ends_bb_p (last
));
772 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
777 /* Fold COND_EXPR_COND of each COND_EXPR. */
778 fold_cond_expr_cond ();
781 /* Find the next available discriminator value for LOCUS. The
782 discriminator distinguishes among several basic blocks that
783 share a common locus, allowing for more accurate sample-based
787 next_discriminator_for_locus (location_t locus
)
789 struct locus_discrim_map item
;
790 struct locus_discrim_map
**slot
;
793 item
.discriminator
= 0;
794 slot
= discriminator_per_locus
.find_slot_with_hash (
795 &item
, LOCATION_LINE (locus
), INSERT
);
797 if (*slot
== HTAB_EMPTY_ENTRY
)
799 *slot
= XNEW (struct locus_discrim_map
);
801 (*slot
)->locus
= locus
;
802 (*slot
)->discriminator
= 0;
804 (*slot
)->discriminator
++;
805 return (*slot
)->discriminator
;
808 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
811 same_line_p (location_t locus1
, location_t locus2
)
813 expanded_location from
, to
;
815 if (locus1
== locus2
)
818 from
= expand_location (locus1
);
819 to
= expand_location (locus2
);
821 if (from
.line
!= to
.line
)
823 if (from
.file
== to
.file
)
825 return (from
.file
!= NULL
827 && filename_cmp (from
.file
, to
.file
) == 0);
830 /* Assign discriminators to each basic block. */
833 assign_discriminators (void)
841 gimple last
= last_stmt (bb
);
842 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
844 if (locus
== UNKNOWN_LOCATION
)
847 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
849 gimple first
= first_non_label_stmt (e
->dest
);
850 gimple last
= last_stmt (e
->dest
);
851 if ((first
&& same_line_p (locus
, gimple_location (first
)))
852 || (last
&& same_line_p (locus
, gimple_location (last
))))
854 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
855 bb
->discriminator
= next_discriminator_for_locus (locus
);
857 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
863 /* Create the edges for a GIMPLE_COND starting at block BB. */
866 make_cond_expr_edges (basic_block bb
)
868 gimple entry
= last_stmt (bb
);
869 gimple then_stmt
, else_stmt
;
870 basic_block then_bb
, else_bb
;
871 tree then_label
, else_label
;
875 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
877 /* Entry basic blocks for each component. */
878 then_label
= gimple_cond_true_label (entry
);
879 else_label
= gimple_cond_false_label (entry
);
880 then_bb
= label_to_block (then_label
);
881 else_bb
= label_to_block (else_label
);
882 then_stmt
= first_stmt (then_bb
);
883 else_stmt
= first_stmt (else_bb
);
885 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
886 e
->goto_locus
= gimple_location (then_stmt
);
887 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
889 e
->goto_locus
= gimple_location (else_stmt
);
891 /* We do not need the labels anymore. */
892 gimple_cond_set_true_label (entry
, NULL_TREE
);
893 gimple_cond_set_false_label (entry
, NULL_TREE
);
897 /* Called for each element in the hash table (P) as we delete the
898 edge to cases hash table.
900 Clear all the TREE_CHAINs to prevent problems with copying of
901 SWITCH_EXPRs and structure sharing rules, then free the hash table
905 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
906 void *data ATTRIBUTE_UNUSED
)
910 for (t
= (tree
) *value
; t
; t
= next
)
912 next
= CASE_CHAIN (t
);
913 CASE_CHAIN (t
) = NULL
;
920 /* Start recording information mapping edges to case labels. */
923 start_recording_case_labels (void)
925 gcc_assert (edge_to_cases
== NULL
);
926 edge_to_cases
= pointer_map_create ();
927 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
930 /* Return nonzero if we are recording information for case labels. */
933 recording_case_labels_p (void)
935 return (edge_to_cases
!= NULL
);
938 /* Stop recording information mapping edges to case labels and
939 remove any information we have recorded. */
941 end_recording_case_labels (void)
945 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
946 pointer_map_destroy (edge_to_cases
);
947 edge_to_cases
= NULL
;
948 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
950 basic_block bb
= BASIC_BLOCK (i
);
953 gimple stmt
= last_stmt (bb
);
954 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
955 group_case_labels_stmt (stmt
);
958 BITMAP_FREE (touched_switch_bbs
);
961 /* If we are inside a {start,end}_recording_cases block, then return
962 a chain of CASE_LABEL_EXPRs from T which reference E.
964 Otherwise return NULL. */
967 get_cases_for_edge (edge e
, gimple t
)
972 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
973 chains available. Return NULL so the caller can detect this case. */
974 if (!recording_case_labels_p ())
977 slot
= pointer_map_contains (edge_to_cases
, e
);
981 /* If we did not find E in the hash table, then this must be the first
982 time we have been queried for information about E & T. Add all the
983 elements from T to the hash table then perform the query again. */
985 n
= gimple_switch_num_labels (t
);
986 for (i
= 0; i
< n
; i
++)
988 tree elt
= gimple_switch_label (t
, i
);
989 tree lab
= CASE_LABEL (elt
);
990 basic_block label_bb
= label_to_block (lab
);
991 edge this_edge
= find_edge (e
->src
, label_bb
);
993 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
995 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
996 CASE_CHAIN (elt
) = (tree
) *slot
;
1000 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
1003 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1006 make_gimple_switch_edges (basic_block bb
)
1008 gimple entry
= last_stmt (bb
);
1011 n
= gimple_switch_num_labels (entry
);
1013 for (i
= 0; i
< n
; ++i
)
1015 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1016 basic_block label_bb
= label_to_block (lab
);
1017 make_edge (bb
, label_bb
, 0);
1022 /* Return the basic block holding label DEST. */
1025 label_to_block_fn (struct function
*ifun
, tree dest
)
1027 int uid
= LABEL_DECL_UID (dest
);
1029 /* We would die hard when faced by an undefined label. Emit a label to
1030 the very first basic block. This will hopefully make even the dataflow
1031 and undefined variable warnings quite right. */
1032 if (seen_error () && uid
< 0)
1034 gimple_stmt_iterator gsi
= gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS
));
1037 stmt
= gimple_build_label (dest
);
1038 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1039 uid
= LABEL_DECL_UID (dest
);
1041 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1043 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1046 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1047 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1050 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
1052 basic_block target_bb
;
1053 gimple_stmt_iterator gsi
;
1055 FOR_EACH_BB (target_bb
)
1057 for (gsi
= gsi_start_bb (target_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1059 gimple label_stmt
= gsi_stmt (gsi
);
1062 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
1065 target
= gimple_label_label (label_stmt
);
1067 /* Make an edge to every label block that has been marked as a
1068 potential target for a computed goto or a non-local goto. */
1069 if ((FORCED_LABEL (target
) && !for_call
)
1070 || (DECL_NONLOCAL (target
) && for_call
))
1072 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1076 if (!gsi_end_p (gsi
)
1077 && is_gimple_debug (gsi_stmt (gsi
)))
1078 gsi_next_nondebug (&gsi
);
1079 if (!gsi_end_p (gsi
))
1081 /* Make an edge to every setjmp-like call. */
1082 gimple call_stmt
= gsi_stmt (gsi
);
1083 if (is_gimple_call (call_stmt
)
1084 && (gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
))
1085 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1090 /* Create edges for a goto statement at block BB. */
1093 make_goto_expr_edges (basic_block bb
)
1095 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1096 gimple goto_t
= gsi_stmt (last
);
1098 /* A simple GOTO creates normal edges. */
1099 if (simple_goto_p (goto_t
))
1101 tree dest
= gimple_goto_dest (goto_t
);
1102 basic_block label_bb
= label_to_block (dest
);
1103 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1104 e
->goto_locus
= gimple_location (goto_t
);
1105 gsi_remove (&last
, true);
1109 /* A computed GOTO creates abnormal edges. */
1110 make_abnormal_goto_edges (bb
, false);
1113 /* Create edges for an asm statement with labels at block BB. */
1116 make_gimple_asm_edges (basic_block bb
)
1118 gimple stmt
= last_stmt (bb
);
1119 int i
, n
= gimple_asm_nlabels (stmt
);
1121 for (i
= 0; i
< n
; ++i
)
1123 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1124 basic_block label_bb
= label_to_block (label
);
1125 make_edge (bb
, label_bb
, 0);
1129 /*---------------------------------------------------------------------------
1131 ---------------------------------------------------------------------------*/
1133 /* Cleanup useless labels in basic blocks. This is something we wish
1134 to do early because it allows us to group case labels before creating
1135 the edges for the CFG, and it speeds up block statement iterators in
1136 all passes later on.
1137 We rerun this pass after CFG is created, to get rid of the labels that
1138 are no longer referenced. After then we do not run it any more, since
1139 (almost) no new labels should be created. */
1141 /* A map from basic block index to the leading label of that block. */
1142 static struct label_record
1147 /* True if the label is referenced from somewhere. */
1151 /* Given LABEL return the first label in the same basic block. */
1154 main_block_label (tree label
)
1156 basic_block bb
= label_to_block (label
);
1157 tree main_label
= label_for_bb
[bb
->index
].label
;
1159 /* label_to_block possibly inserted undefined label into the chain. */
1162 label_for_bb
[bb
->index
].label
= label
;
1166 label_for_bb
[bb
->index
].used
= true;
1170 /* Clean up redundant labels within the exception tree. */
1173 cleanup_dead_labels_eh (void)
1180 if (cfun
->eh
== NULL
)
1183 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1184 if (lp
&& lp
->post_landing_pad
)
1186 lab
= main_block_label (lp
->post_landing_pad
);
1187 if (lab
!= lp
->post_landing_pad
)
1189 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1190 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1194 FOR_ALL_EH_REGION (r
)
1198 case ERT_MUST_NOT_THROW
:
1204 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1208 c
->label
= main_block_label (lab
);
1213 case ERT_ALLOWED_EXCEPTIONS
:
1214 lab
= r
->u
.allowed
.label
;
1216 r
->u
.allowed
.label
= main_block_label (lab
);
1222 /* Cleanup redundant labels. This is a three-step process:
1223 1) Find the leading label for each block.
1224 2) Redirect all references to labels to the leading labels.
1225 3) Cleanup all useless labels. */
1228 cleanup_dead_labels (void)
1231 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block
);
1233 /* Find a suitable label for each block. We use the first user-defined
1234 label if there is one, or otherwise just the first label we see. */
1237 gimple_stmt_iterator i
;
1239 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1242 gimple stmt
= gsi_stmt (i
);
1244 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1247 label
= gimple_label_label (stmt
);
1249 /* If we have not yet seen a label for the current block,
1250 remember this one and see if there are more labels. */
1251 if (!label_for_bb
[bb
->index
].label
)
1253 label_for_bb
[bb
->index
].label
= label
;
1257 /* If we did see a label for the current block already, but it
1258 is an artificially created label, replace it if the current
1259 label is a user defined label. */
1260 if (!DECL_ARTIFICIAL (label
)
1261 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1263 label_for_bb
[bb
->index
].label
= label
;
1269 /* Now redirect all jumps/branches to the selected label.
1270 First do so for each block ending in a control statement. */
1273 gimple stmt
= last_stmt (bb
);
1274 tree label
, new_label
;
1279 switch (gimple_code (stmt
))
1282 label
= gimple_cond_true_label (stmt
);
1285 new_label
= main_block_label (label
);
1286 if (new_label
!= label
)
1287 gimple_cond_set_true_label (stmt
, new_label
);
1290 label
= gimple_cond_false_label (stmt
);
1293 new_label
= main_block_label (label
);
1294 if (new_label
!= label
)
1295 gimple_cond_set_false_label (stmt
, new_label
);
1301 size_t i
, n
= gimple_switch_num_labels (stmt
);
1303 /* Replace all destination labels. */
1304 for (i
= 0; i
< n
; ++i
)
1306 tree case_label
= gimple_switch_label (stmt
, i
);
1307 label
= CASE_LABEL (case_label
);
1308 new_label
= main_block_label (label
);
1309 if (new_label
!= label
)
1310 CASE_LABEL (case_label
) = new_label
;
1317 int i
, n
= gimple_asm_nlabels (stmt
);
1319 for (i
= 0; i
< n
; ++i
)
1321 tree cons
= gimple_asm_label_op (stmt
, i
);
1322 tree label
= main_block_label (TREE_VALUE (cons
));
1323 TREE_VALUE (cons
) = label
;
1328 /* We have to handle gotos until they're removed, and we don't
1329 remove them until after we've created the CFG edges. */
1331 if (!computed_goto_p (stmt
))
1333 label
= gimple_goto_dest (stmt
);
1334 new_label
= main_block_label (label
);
1335 if (new_label
!= label
)
1336 gimple_goto_set_dest (stmt
, new_label
);
1340 case GIMPLE_TRANSACTION
:
1342 tree label
= gimple_transaction_label (stmt
);
1345 tree new_label
= main_block_label (label
);
1346 if (new_label
!= label
)
1347 gimple_transaction_set_label (stmt
, new_label
);
1357 /* Do the same for the exception region tree labels. */
1358 cleanup_dead_labels_eh ();
1360 /* Finally, purge dead labels. All user-defined labels and labels that
1361 can be the target of non-local gotos and labels which have their
1362 address taken are preserved. */
1365 gimple_stmt_iterator i
;
1366 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1368 if (!label_for_this_bb
)
1371 /* If the main label of the block is unused, we may still remove it. */
1372 if (!label_for_bb
[bb
->index
].used
)
1373 label_for_this_bb
= NULL
;
1375 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1378 gimple stmt
= gsi_stmt (i
);
1380 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1383 label
= gimple_label_label (stmt
);
1385 if (label
== label_for_this_bb
1386 || !DECL_ARTIFICIAL (label
)
1387 || DECL_NONLOCAL (label
)
1388 || FORCED_LABEL (label
))
1391 gsi_remove (&i
, true);
1395 free (label_for_bb
);
1398 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1399 the ones jumping to the same label.
1400 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1403 group_case_labels_stmt (gimple stmt
)
1405 int old_size
= gimple_switch_num_labels (stmt
);
1406 int i
, j
, new_size
= old_size
;
1407 basic_block default_bb
= NULL
;
1409 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1411 /* Look for possible opportunities to merge cases. */
1413 while (i
< old_size
)
1415 tree base_case
, base_high
;
1416 basic_block base_bb
;
1418 base_case
= gimple_switch_label (stmt
, i
);
1420 gcc_assert (base_case
);
1421 base_bb
= label_to_block (CASE_LABEL (base_case
));
1423 /* Discard cases that have the same destination as the
1425 if (base_bb
== default_bb
)
1427 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1433 base_high
= CASE_HIGH (base_case
)
1434 ? CASE_HIGH (base_case
)
1435 : CASE_LOW (base_case
);
1438 /* Try to merge case labels. Break out when we reach the end
1439 of the label vector or when we cannot merge the next case
1440 label with the current one. */
1441 while (i
< old_size
)
1443 tree merge_case
= gimple_switch_label (stmt
, i
);
1444 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1445 wide_int bhp1
= wi::add (base_high
, 1);
1447 /* Merge the cases if they jump to the same place,
1448 and their ranges are consecutive. */
1449 if (merge_bb
== base_bb
1450 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1452 base_high
= CASE_HIGH (merge_case
) ?
1453 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1454 CASE_HIGH (base_case
) = base_high
;
1455 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1464 /* Compress the case labels in the label vector, and adjust the
1465 length of the vector. */
1466 for (i
= 0, j
= 0; i
< new_size
; i
++)
1468 while (! gimple_switch_label (stmt
, j
))
1470 gimple_switch_set_label (stmt
, i
,
1471 gimple_switch_label (stmt
, j
++));
1474 gcc_assert (new_size
<= old_size
);
1475 gimple_switch_set_num_labels (stmt
, new_size
);
1478 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1479 and scan the sorted vector of cases. Combine the ones jumping to the
1483 group_case_labels (void)
1489 gimple stmt
= last_stmt (bb
);
1490 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1491 group_case_labels_stmt (stmt
);
1495 /* Checks whether we can merge block B into block A. */
1498 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1501 gimple_stmt_iterator gsi
;
1503 if (!single_succ_p (a
))
1506 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1509 if (single_succ (a
) != b
)
1512 if (!single_pred_p (b
))
1515 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1518 /* If A ends by a statement causing exceptions or something similar, we
1519 cannot merge the blocks. */
1520 stmt
= last_stmt (a
);
1521 if (stmt
&& stmt_ends_bb_p (stmt
))
1524 /* Do not allow a block with only a non-local label to be merged. */
1526 && gimple_code (stmt
) == GIMPLE_LABEL
1527 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1530 /* Examine the labels at the beginning of B. */
1531 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1534 stmt
= gsi_stmt (gsi
);
1535 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1537 lab
= gimple_label_label (stmt
);
1539 /* Do not remove user forced labels or for -O0 any user labels. */
1540 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1544 /* Protect the loop latches. */
1545 if (current_loops
&& b
->loop_father
->latch
== b
)
1548 /* It must be possible to eliminate all phi nodes in B. If ssa form
1549 is not up-to-date and a name-mapping is registered, we cannot eliminate
1550 any phis. Symbols marked for renaming are never a problem though. */
1551 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1553 gimple phi
= gsi_stmt (gsi
);
1554 /* Technically only new names matter. */
1555 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1559 /* When not optimizing, don't merge if we'd lose goto_locus. */
1561 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1563 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1564 gimple_stmt_iterator prev
, next
;
1565 prev
= gsi_last_nondebug_bb (a
);
1566 next
= gsi_after_labels (b
);
1567 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1568 gsi_next_nondebug (&next
);
1569 if ((gsi_end_p (prev
)
1570 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1571 && (gsi_end_p (next
)
1572 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1579 /* Replaces all uses of NAME by VAL. */
1582 replace_uses_by (tree name
, tree val
)
1584 imm_use_iterator imm_iter
;
1589 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1591 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1593 replace_exp (use
, val
);
1595 if (gimple_code (stmt
) == GIMPLE_PHI
)
1597 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1598 if (e
->flags
& EDGE_ABNORMAL
)
1600 /* This can only occur for virtual operands, since
1601 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1602 would prevent replacement. */
1603 gcc_checking_assert (virtual_operand_p (name
));
1604 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1609 if (gimple_code (stmt
) != GIMPLE_PHI
)
1611 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1612 gimple orig_stmt
= stmt
;
1615 /* Mark the block if we changed the last stmt in it. */
1616 if (cfgcleanup_altered_bbs
1617 && stmt_ends_bb_p (stmt
))
1618 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1620 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1621 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1622 only change sth from non-invariant to invariant, and only
1623 when propagating constants. */
1624 if (is_gimple_min_invariant (val
))
1625 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1627 tree op
= gimple_op (stmt
, i
);
1628 /* Operands may be empty here. For example, the labels
1629 of a GIMPLE_COND are nulled out following the creation
1630 of the corresponding CFG edges. */
1631 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1632 recompute_tree_invariant_for_addr_expr (op
);
1635 if (fold_stmt (&gsi
))
1636 stmt
= gsi_stmt (gsi
);
1638 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1639 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1645 gcc_checking_assert (has_zero_uses (name
));
1647 /* Also update the trees stored in loop structures. */
1652 FOR_EACH_LOOP (loop
, 0)
1654 substitute_in_loop_info (loop
, name
, val
);
1659 /* Merge block B into block A. */
1662 gimple_merge_blocks (basic_block a
, basic_block b
)
1664 gimple_stmt_iterator last
, gsi
, psi
;
1667 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1669 /* Remove all single-valued PHI nodes from block B of the form
1670 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1671 gsi
= gsi_last_bb (a
);
1672 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1674 gimple phi
= gsi_stmt (psi
);
1675 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1677 bool may_replace_uses
= (virtual_operand_p (def
)
1678 || may_propagate_copy (def
, use
));
1680 /* In case we maintain loop closed ssa form, do not propagate arguments
1681 of loop exit phi nodes. */
1683 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1684 && !virtual_operand_p (def
)
1685 && TREE_CODE (use
) == SSA_NAME
1686 && a
->loop_father
!= b
->loop_father
)
1687 may_replace_uses
= false;
1689 if (!may_replace_uses
)
1691 gcc_assert (!virtual_operand_p (def
));
1693 /* Note that just emitting the copies is fine -- there is no problem
1694 with ordering of phi nodes. This is because A is the single
1695 predecessor of B, therefore results of the phi nodes cannot
1696 appear as arguments of the phi nodes. */
1697 copy
= gimple_build_assign (def
, use
);
1698 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1699 remove_phi_node (&psi
, false);
1703 /* If we deal with a PHI for virtual operands, we can simply
1704 propagate these without fussing with folding or updating
1706 if (virtual_operand_p (def
))
1708 imm_use_iterator iter
;
1709 use_operand_p use_p
;
1712 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1713 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1714 SET_USE (use_p
, use
);
1716 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1717 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1720 replace_uses_by (def
, use
);
1722 remove_phi_node (&psi
, true);
1726 /* Ensure that B follows A. */
1727 move_block_after (b
, a
);
1729 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1730 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1732 /* Remove labels from B and set gimple_bb to A for other statements. */
1733 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1735 gimple stmt
= gsi_stmt (gsi
);
1736 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1738 tree label
= gimple_label_label (stmt
);
1741 gsi_remove (&gsi
, false);
1743 /* Now that we can thread computed gotos, we might have
1744 a situation where we have a forced label in block B
1745 However, the label at the start of block B might still be
1746 used in other ways (think about the runtime checking for
1747 Fortran assigned gotos). So we can not just delete the
1748 label. Instead we move the label to the start of block A. */
1749 if (FORCED_LABEL (label
))
1751 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1752 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1754 /* Other user labels keep around in a form of a debug stmt. */
1755 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1757 gimple dbg
= gimple_build_debug_bind (label
,
1760 gimple_debug_bind_reset_value (dbg
);
1761 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1764 lp_nr
= EH_LANDING_PAD_NR (label
);
1767 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1768 lp
->post_landing_pad
= NULL
;
1773 gimple_set_bb (stmt
, a
);
1778 /* Merge the sequences. */
1779 last
= gsi_last_bb (a
);
1780 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1781 set_bb_seq (b
, NULL
);
1783 if (cfgcleanup_altered_bbs
)
1784 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1788 /* Return the one of two successors of BB that is not reachable by a
1789 complex edge, if there is one. Else, return BB. We use
1790 this in optimizations that use post-dominators for their heuristics,
1791 to catch the cases in C++ where function calls are involved. */
1794 single_noncomplex_succ (basic_block bb
)
1797 if (EDGE_COUNT (bb
->succs
) != 2)
1800 e0
= EDGE_SUCC (bb
, 0);
1801 e1
= EDGE_SUCC (bb
, 1);
1802 if (e0
->flags
& EDGE_COMPLEX
)
1804 if (e1
->flags
& EDGE_COMPLEX
)
1810 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1813 notice_special_calls (gimple call
)
1815 int flags
= gimple_call_flags (call
);
1817 if (flags
& ECF_MAY_BE_ALLOCA
)
1818 cfun
->calls_alloca
= true;
1819 if (flags
& ECF_RETURNS_TWICE
)
1820 cfun
->calls_setjmp
= true;
1824 /* Clear flags set by notice_special_calls. Used by dead code removal
1825 to update the flags. */
1828 clear_special_calls (void)
1830 cfun
->calls_alloca
= false;
1831 cfun
->calls_setjmp
= false;
1834 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1837 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1839 /* Since this block is no longer reachable, we can just delete all
1840 of its PHI nodes. */
1841 remove_phi_nodes (bb
);
1843 /* Remove edges to BB's successors. */
1844 while (EDGE_COUNT (bb
->succs
) > 0)
1845 remove_edge (EDGE_SUCC (bb
, 0));
1849 /* Remove statements of basic block BB. */
1852 remove_bb (basic_block bb
)
1854 gimple_stmt_iterator i
;
1858 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1859 if (dump_flags
& TDF_DETAILS
)
1861 dump_bb (dump_file
, bb
, 0, dump_flags
);
1862 fprintf (dump_file
, "\n");
1868 struct loop
*loop
= bb
->loop_father
;
1870 /* If a loop gets removed, clean up the information associated
1872 if (loop
->latch
== bb
1873 || loop
->header
== bb
)
1874 free_numbers_of_iterations_estimates_loop (loop
);
1877 /* Remove all the instructions in the block. */
1878 if (bb_seq (bb
) != NULL
)
1880 /* Walk backwards so as to get a chance to substitute all
1881 released DEFs into debug stmts. See
1882 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1884 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
1886 gimple stmt
= gsi_stmt (i
);
1887 if (gimple_code (stmt
) == GIMPLE_LABEL
1888 && (FORCED_LABEL (gimple_label_label (stmt
))
1889 || DECL_NONLOCAL (gimple_label_label (stmt
))))
1892 gimple_stmt_iterator new_gsi
;
1894 /* A non-reachable non-local label may still be referenced.
1895 But it no longer needs to carry the extra semantics of
1897 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
1899 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
1900 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
1903 new_bb
= bb
->prev_bb
;
1904 new_gsi
= gsi_start_bb (new_bb
);
1905 gsi_remove (&i
, false);
1906 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
1910 /* Release SSA definitions if we are in SSA. Note that we
1911 may be called when not in SSA. For example,
1912 final_cleanup calls this function via
1913 cleanup_tree_cfg. */
1914 if (gimple_in_ssa_p (cfun
))
1915 release_defs (stmt
);
1917 gsi_remove (&i
, true);
1921 i
= gsi_last_bb (bb
);
1927 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
1928 bb
->il
.gimple
.seq
= NULL
;
1929 bb
->il
.gimple
.phi_nodes
= NULL
;
1933 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1934 predicate VAL, return the edge that will be taken out of the block.
1935 If VAL does not match a unique edge, NULL is returned. */
1938 find_taken_edge (basic_block bb
, tree val
)
1942 stmt
= last_stmt (bb
);
1945 gcc_assert (is_ctrl_stmt (stmt
));
1950 if (!is_gimple_min_invariant (val
))
1953 if (gimple_code (stmt
) == GIMPLE_COND
)
1954 return find_taken_edge_cond_expr (bb
, val
);
1956 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
1957 return find_taken_edge_switch_expr (bb
, val
);
1959 if (computed_goto_p (stmt
))
1961 /* Only optimize if the argument is a label, if the argument is
1962 not a label then we can not construct a proper CFG.
1964 It may be the case that we only need to allow the LABEL_REF to
1965 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1966 appear inside a LABEL_EXPR just to be safe. */
1967 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
1968 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
1969 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
1976 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1977 statement, determine which of the outgoing edges will be taken out of the
1978 block. Return NULL if either edge may be taken. */
1981 find_taken_edge_computed_goto (basic_block bb
, tree val
)
1986 dest
= label_to_block (val
);
1989 e
= find_edge (bb
, dest
);
1990 gcc_assert (e
!= NULL
);
1996 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1997 statement, determine which of the two edges will be taken out of the
1998 block. Return NULL if either edge may be taken. */
2001 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2003 edge true_edge
, false_edge
;
2005 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2007 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2008 return (integer_zerop (val
) ? false_edge
: true_edge
);
2011 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2012 statement, determine which edge will be taken out of the block. Return
2013 NULL if any edge may be taken. */
2016 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2018 basic_block dest_bb
;
2023 switch_stmt
= last_stmt (bb
);
2024 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2025 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2027 e
= find_edge (bb
, dest_bb
);
2033 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2034 We can make optimal use here of the fact that the case labels are
2035 sorted: We can do a binary search for a case matching VAL. */
2038 find_case_label_for_value (gimple switch_stmt
, tree val
)
2040 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2041 tree default_case
= gimple_switch_default_label (switch_stmt
);
2043 for (low
= 0, high
= n
; high
- low
> 1; )
2045 size_t i
= (high
+ low
) / 2;
2046 tree t
= gimple_switch_label (switch_stmt
, i
);
2049 /* Cache the result of comparing CASE_LOW and val. */
2050 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2057 if (CASE_HIGH (t
) == NULL
)
2059 /* A singe-valued case label. */
2065 /* A case range. We can only handle integer ranges. */
2066 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2071 return default_case
;
2075 /* Dump a basic block on stderr. */
2078 gimple_debug_bb (basic_block bb
)
2080 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2084 /* Dump basic block with index N on stderr. */
2087 gimple_debug_bb_n (int n
)
2089 gimple_debug_bb (BASIC_BLOCK (n
));
2090 return BASIC_BLOCK (n
);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2106 /* Dump the program showing basic block boundaries on the given FILE.
2108 FLAGS are the same used by the tree dumping functions (see TDF_* in
2112 gimple_dump_cfg (FILE *file
, int flags
)
2114 if (flags
& TDF_DETAILS
)
2116 dump_function_header (file
, current_function_decl
, flags
);
2117 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2118 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2121 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2122 fprintf (file
, "\n");
2125 if (flags
& TDF_STATS
)
2126 dump_cfg_stats (file
);
2128 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2132 /* Dump CFG statistics on FILE. */
2135 dump_cfg_stats (FILE *file
)
2137 static long max_num_merged_labels
= 0;
2138 unsigned long size
, total
= 0;
2141 const char * const fmt_str
= "%-30s%-13s%12s\n";
2142 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2143 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2144 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2145 const char *funcname
= current_function_name ();
2147 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2149 fprintf (file
, "---------------------------------------------------------\n");
2150 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2151 fprintf (file
, fmt_str
, "", " instances ", "used ");
2152 fprintf (file
, "---------------------------------------------------------\n");
2154 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2156 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2157 SCALE (size
), LABEL (size
));
2161 num_edges
+= EDGE_COUNT (bb
->succs
);
2162 size
= num_edges
* sizeof (struct edge_def
);
2164 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2166 fprintf (file
, "---------------------------------------------------------\n");
2167 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2169 fprintf (file
, "---------------------------------------------------------\n");
2170 fprintf (file
, "\n");
2172 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2173 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2175 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2176 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2178 fprintf (file
, "\n");
2182 /* Dump CFG statistics on stderr. Keep extern so that it's always
2183 linked in the final executable. */
2186 debug_cfg_stats (void)
2188 dump_cfg_stats (stderr
);
2191 /*---------------------------------------------------------------------------
2192 Miscellaneous helpers
2193 ---------------------------------------------------------------------------*/
2195 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2196 flow. Transfers of control flow associated with EH are excluded. */
2199 call_can_make_abnormal_goto (gimple t
)
2201 /* If the function has no non-local labels, then a call cannot make an
2202 abnormal transfer of control. */
2203 if (!cfun
->has_nonlocal_label
2204 && !cfun
->calls_setjmp
)
2207 /* Likewise if the call has no side effects. */
2208 if (!gimple_has_side_effects (t
))
2211 /* Likewise if the called function is leaf. */
2212 if (gimple_call_flags (t
) & ECF_LEAF
)
2219 /* Return true if T can make an abnormal transfer of control flow.
2220 Transfers of control flow associated with EH are excluded. */
2223 stmt_can_make_abnormal_goto (gimple t
)
2225 if (computed_goto_p (t
))
2227 if (is_gimple_call (t
))
2228 return call_can_make_abnormal_goto (t
);
2233 /* Return true if T represents a stmt that always transfers control. */
2236 is_ctrl_stmt (gimple t
)
2238 switch (gimple_code (t
))
2252 /* Return true if T is a statement that may alter the flow of control
2253 (e.g., a call to a non-returning function). */
2256 is_ctrl_altering_stmt (gimple t
)
2260 switch (gimple_code (t
))
2264 int flags
= gimple_call_flags (t
);
2266 /* A call alters control flow if it can make an abnormal goto. */
2267 if (call_can_make_abnormal_goto (t
))
2270 /* A call also alters control flow if it does not return. */
2271 if (flags
& ECF_NORETURN
)
2274 /* TM ending statements have backedges out of the transaction.
2275 Return true so we split the basic block containing them.
2276 Note that the TM_BUILTIN test is merely an optimization. */
2277 if ((flags
& ECF_TM_BUILTIN
)
2278 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2281 /* BUILT_IN_RETURN call is same as return statement. */
2282 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2287 case GIMPLE_EH_DISPATCH
:
2288 /* EH_DISPATCH branches to the individual catch handlers at
2289 this level of a try or allowed-exceptions region. It can
2290 fallthru to the next statement as well. */
2294 if (gimple_asm_nlabels (t
) > 0)
2299 /* OpenMP directives alter control flow. */
2302 case GIMPLE_TRANSACTION
:
2303 /* A transaction start alters control flow. */
2310 /* If a statement can throw, it alters control flow. */
2311 return stmt_can_throw_internal (t
);
2315 /* Return true if T is a simple local goto. */
2318 simple_goto_p (gimple t
)
2320 return (gimple_code (t
) == GIMPLE_GOTO
2321 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2325 /* Return true if STMT should start a new basic block. PREV_STMT is
2326 the statement preceding STMT. It is used when STMT is a label or a
2327 case label. Labels should only start a new basic block if their
2328 previous statement wasn't a label. Otherwise, sequence of labels
2329 would generate unnecessary basic blocks that only contain a single
2333 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2338 /* Labels start a new basic block only if the preceding statement
2339 wasn't a label of the same type. This prevents the creation of
2340 consecutive blocks that have nothing but a single label. */
2341 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2343 /* Nonlocal and computed GOTO targets always start a new block. */
2344 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2345 || FORCED_LABEL (gimple_label_label (stmt
)))
2348 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2350 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2353 cfg_stats
.num_merged_labels
++;
2359 else if (gimple_code (stmt
) == GIMPLE_CALL
2360 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2361 /* setjmp acts similar to a nonlocal GOTO target and thus should
2362 start a new block. */
2369 /* Return true if T should end a basic block. */
2372 stmt_ends_bb_p (gimple t
)
2374 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2377 /* Remove block annotations and other data structures. */
2380 delete_tree_cfg_annotations (void)
2382 vec_free (label_to_block_map
);
2386 /* Return the first statement in basic block BB. */
2389 first_stmt (basic_block bb
)
2391 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2394 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2402 /* Return the first non-label statement in basic block BB. */
2405 first_non_label_stmt (basic_block bb
)
2407 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2408 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2410 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2413 /* Return the last statement in basic block BB. */
2416 last_stmt (basic_block bb
)
2418 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2421 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2429 /* Return the last statement of an otherwise empty block. Return NULL
2430 if the block is totally empty, or if it contains more than one
2434 last_and_only_stmt (basic_block bb
)
2436 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2442 last
= gsi_stmt (i
);
2443 gsi_prev_nondebug (&i
);
2447 /* Empty statements should no longer appear in the instruction stream.
2448 Everything that might have appeared before should be deleted by
2449 remove_useless_stmts, and the optimizers should just gsi_remove
2450 instead of smashing with build_empty_stmt.
2452 Thus the only thing that should appear here in a block containing
2453 one executable statement is a label. */
2454 prev
= gsi_stmt (i
);
2455 if (gimple_code (prev
) == GIMPLE_LABEL
)
2461 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2464 reinstall_phi_args (edge new_edge
, edge old_edge
)
2466 edge_var_map_vector
*v
;
2469 gimple_stmt_iterator phis
;
2471 v
= redirect_edge_var_map_vector (old_edge
);
2475 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2476 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2477 i
++, gsi_next (&phis
))
2479 gimple phi
= gsi_stmt (phis
);
2480 tree result
= redirect_edge_var_map_result (vm
);
2481 tree arg
= redirect_edge_var_map_def (vm
);
2483 gcc_assert (result
== gimple_phi_result (phi
));
2485 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2488 redirect_edge_var_map_clear (old_edge
);
2491 /* Returns the basic block after which the new basic block created
2492 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2493 near its "logical" location. This is of most help to humans looking
2494 at debugging dumps. */
2497 split_edge_bb_loc (edge edge_in
)
2499 basic_block dest
= edge_in
->dest
;
2500 basic_block dest_prev
= dest
->prev_bb
;
2504 edge e
= find_edge (dest_prev
, dest
);
2505 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2506 return edge_in
->src
;
2511 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2512 Abort on abnormal edges. */
2515 gimple_split_edge (edge edge_in
)
2517 basic_block new_bb
, after_bb
, dest
;
2520 /* Abnormal edges cannot be split. */
2521 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2523 dest
= edge_in
->dest
;
2525 after_bb
= split_edge_bb_loc (edge_in
);
2527 new_bb
= create_empty_bb (after_bb
);
2528 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2529 new_bb
->count
= edge_in
->count
;
2530 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2531 new_edge
->probability
= REG_BR_PROB_BASE
;
2532 new_edge
->count
= edge_in
->count
;
2534 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2535 gcc_assert (e
== edge_in
);
2536 reinstall_phi_args (new_edge
, e
);
2542 /* Verify properties of the address expression T with base object BASE. */
2545 verify_address (tree t
, tree base
)
2548 bool old_side_effects
;
2550 bool new_side_effects
;
2552 old_constant
= TREE_CONSTANT (t
);
2553 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2555 recompute_tree_invariant_for_addr_expr (t
);
2556 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2557 new_constant
= TREE_CONSTANT (t
);
2559 if (old_constant
!= new_constant
)
2561 error ("constant not recomputed when ADDR_EXPR changed");
2564 if (old_side_effects
!= new_side_effects
)
2566 error ("side effects not recomputed when ADDR_EXPR changed");
2570 if (!(TREE_CODE (base
) == VAR_DECL
2571 || TREE_CODE (base
) == PARM_DECL
2572 || TREE_CODE (base
) == RESULT_DECL
))
2575 if (DECL_GIMPLE_REG_P (base
))
2577 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2584 /* Callback for walk_tree, check that all elements with address taken are
2585 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2586 inside a PHI node. */
2589 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2596 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2597 #define CHECK_OP(N, MSG) \
2598 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2599 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2601 switch (TREE_CODE (t
))
2604 if (SSA_NAME_IN_FREE_LIST (t
))
2606 error ("SSA name in freelist but still referenced");
2612 error ("INDIRECT_REF in gimple IL");
2616 x
= TREE_OPERAND (t
, 0);
2617 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2618 || !is_gimple_mem_ref_addr (x
))
2620 error ("invalid first operand of MEM_REF");
2623 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2624 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2626 error ("invalid offset operand of MEM_REF");
2627 return TREE_OPERAND (t
, 1);
2629 if (TREE_CODE (x
) == ADDR_EXPR
2630 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2636 x
= fold (ASSERT_EXPR_COND (t
));
2637 if (x
== boolean_false_node
)
2639 error ("ASSERT_EXPR with an always-false condition");
2645 error ("MODIFY_EXPR not expected while having tuples");
2652 gcc_assert (is_gimple_address (t
));
2654 /* Skip any references (they will be checked when we recurse down the
2655 tree) and ensure that any variable used as a prefix is marked
2657 for (x
= TREE_OPERAND (t
, 0);
2658 handled_component_p (x
);
2659 x
= TREE_OPERAND (x
, 0))
2662 if ((tem
= verify_address (t
, x
)))
2665 if (!(TREE_CODE (x
) == VAR_DECL
2666 || TREE_CODE (x
) == PARM_DECL
2667 || TREE_CODE (x
) == RESULT_DECL
))
2670 if (!TREE_ADDRESSABLE (x
))
2672 error ("address taken, but ADDRESSABLE bit not set");
2680 x
= COND_EXPR_COND (t
);
2681 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2683 error ("non-integral used in condition");
2686 if (!is_gimple_condexpr (x
))
2688 error ("invalid conditional operand");
2693 case NON_LVALUE_EXPR
:
2694 case TRUTH_NOT_EXPR
:
2698 case FIX_TRUNC_EXPR
:
2703 CHECK_OP (0, "invalid operand to unary operator");
2709 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2711 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2715 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2717 tree t0
= TREE_OPERAND (t
, 0);
2718 tree t1
= TREE_OPERAND (t
, 1);
2719 tree t2
= TREE_OPERAND (t
, 2);
2720 if (!tree_fits_uhwi_p (t1
)
2721 || !tree_fits_uhwi_p (t2
))
2723 error ("invalid position or size operand to BIT_FIELD_REF");
2726 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2727 && (TYPE_PRECISION (TREE_TYPE (t
))
2728 != tree_to_uhwi (t1
)))
2730 error ("integral result type precision does not match "
2731 "field size of BIT_FIELD_REF");
2734 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2735 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2736 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2737 != tree_to_uhwi (t1
)))
2739 error ("mode precision of non-integral result does not "
2740 "match field size of BIT_FIELD_REF");
2743 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2744 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2745 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2747 error ("position plus size exceeds size of referenced object in "
2752 t
= TREE_OPERAND (t
, 0);
2757 case ARRAY_RANGE_REF
:
2758 case VIEW_CONVERT_EXPR
:
2759 /* We have a nest of references. Verify that each of the operands
2760 that determine where to reference is either a constant or a variable,
2761 verify that the base is valid, and then show we've already checked
2763 while (handled_component_p (t
))
2765 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2766 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2767 else if (TREE_CODE (t
) == ARRAY_REF
2768 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2770 CHECK_OP (1, "invalid array index");
2771 if (TREE_OPERAND (t
, 2))
2772 CHECK_OP (2, "invalid array lower bound");
2773 if (TREE_OPERAND (t
, 3))
2774 CHECK_OP (3, "invalid array stride");
2776 else if (TREE_CODE (t
) == BIT_FIELD_REF
2777 || TREE_CODE (t
) == REALPART_EXPR
2778 || TREE_CODE (t
) == IMAGPART_EXPR
)
2780 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2785 t
= TREE_OPERAND (t
, 0);
2788 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2790 error ("invalid reference prefix");
2797 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2798 POINTER_PLUS_EXPR. */
2799 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2801 error ("invalid operand to plus/minus, type is a pointer");
2804 CHECK_OP (0, "invalid operand to binary operator");
2805 CHECK_OP (1, "invalid operand to binary operator");
2808 case POINTER_PLUS_EXPR
:
2809 /* Check to make sure the first operand is a pointer or reference type. */
2810 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2812 error ("invalid operand to pointer plus, first operand is not a pointer");
2815 /* Check to make sure the second operand is a ptrofftype. */
2816 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2818 error ("invalid operand to pointer plus, second operand is not an "
2819 "integer type of appropriate width");
2829 case UNORDERED_EXPR
:
2838 case TRUNC_DIV_EXPR
:
2840 case FLOOR_DIV_EXPR
:
2841 case ROUND_DIV_EXPR
:
2842 case TRUNC_MOD_EXPR
:
2844 case FLOOR_MOD_EXPR
:
2845 case ROUND_MOD_EXPR
:
2847 case EXACT_DIV_EXPR
:
2857 CHECK_OP (0, "invalid operand to binary operator");
2858 CHECK_OP (1, "invalid operand to binary operator");
2862 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2866 case CASE_LABEL_EXPR
:
2869 error ("invalid CASE_CHAIN");
2883 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2884 Returns true if there is an error, otherwise false. */
2887 verify_types_in_gimple_min_lval (tree expr
)
2891 if (is_gimple_id (expr
))
2894 if (TREE_CODE (expr
) != TARGET_MEM_REF
2895 && TREE_CODE (expr
) != MEM_REF
)
2897 error ("invalid expression for min lvalue");
2901 /* TARGET_MEM_REFs are strange beasts. */
2902 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
2905 op
= TREE_OPERAND (expr
, 0);
2906 if (!is_gimple_val (op
))
2908 error ("invalid operand in indirect reference");
2909 debug_generic_stmt (op
);
2912 /* Memory references now generally can involve a value conversion. */
2917 /* Verify if EXPR is a valid GIMPLE reference expression. If
2918 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2919 if there is an error, otherwise false. */
2922 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
2924 while (handled_component_p (expr
))
2926 tree op
= TREE_OPERAND (expr
, 0);
2928 if (TREE_CODE (expr
) == ARRAY_REF
2929 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
2931 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
2932 || (TREE_OPERAND (expr
, 2)
2933 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
2934 || (TREE_OPERAND (expr
, 3)
2935 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
2937 error ("invalid operands to array reference");
2938 debug_generic_stmt (expr
);
2943 /* Verify if the reference array element types are compatible. */
2944 if (TREE_CODE (expr
) == ARRAY_REF
2945 && !useless_type_conversion_p (TREE_TYPE (expr
),
2946 TREE_TYPE (TREE_TYPE (op
))))
2948 error ("type mismatch in array reference");
2949 debug_generic_stmt (TREE_TYPE (expr
));
2950 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2953 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
2954 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
2955 TREE_TYPE (TREE_TYPE (op
))))
2957 error ("type mismatch in array range reference");
2958 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
2959 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2963 if ((TREE_CODE (expr
) == REALPART_EXPR
2964 || TREE_CODE (expr
) == IMAGPART_EXPR
)
2965 && !useless_type_conversion_p (TREE_TYPE (expr
),
2966 TREE_TYPE (TREE_TYPE (op
))))
2968 error ("type mismatch in real/imagpart reference");
2969 debug_generic_stmt (TREE_TYPE (expr
));
2970 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2974 if (TREE_CODE (expr
) == COMPONENT_REF
2975 && !useless_type_conversion_p (TREE_TYPE (expr
),
2976 TREE_TYPE (TREE_OPERAND (expr
, 1))))
2978 error ("type mismatch in component reference");
2979 debug_generic_stmt (TREE_TYPE (expr
));
2980 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
2984 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
2986 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2987 that their operand is not an SSA name or an invariant when
2988 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2989 bug). Otherwise there is nothing to verify, gross mismatches at
2990 most invoke undefined behavior. */
2992 && (TREE_CODE (op
) == SSA_NAME
2993 || is_gimple_min_invariant (op
)))
2995 error ("conversion of an SSA_NAME on the left hand side");
2996 debug_generic_stmt (expr
);
2999 else if (TREE_CODE (op
) == SSA_NAME
3000 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3002 error ("conversion of register to a different size");
3003 debug_generic_stmt (expr
);
3006 else if (!handled_component_p (op
))
3013 if (TREE_CODE (expr
) == MEM_REF
)
3015 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3017 error ("invalid address operand in MEM_REF");
3018 debug_generic_stmt (expr
);
3021 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3022 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3024 error ("invalid offset operand in MEM_REF");
3025 debug_generic_stmt (expr
);
3029 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3031 if (!TMR_BASE (expr
)
3032 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3034 error ("invalid address operand in TARGET_MEM_REF");
3037 if (!TMR_OFFSET (expr
)
3038 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3039 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3041 error ("invalid offset operand in TARGET_MEM_REF");
3042 debug_generic_stmt (expr
);
3047 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3048 && verify_types_in_gimple_min_lval (expr
));
3051 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3052 list of pointer-to types that is trivially convertible to DEST. */
3055 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3059 if (!TYPE_POINTER_TO (src_obj
))
3062 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3063 if (useless_type_conversion_p (dest
, src
))
3069 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3070 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3073 valid_fixed_convert_types_p (tree type1
, tree type2
)
3075 return (FIXED_POINT_TYPE_P (type1
)
3076 && (INTEGRAL_TYPE_P (type2
)
3077 || SCALAR_FLOAT_TYPE_P (type2
)
3078 || FIXED_POINT_TYPE_P (type2
)));
3081 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3082 is a problem, otherwise false. */
3085 verify_gimple_call (gimple stmt
)
3087 tree fn
= gimple_call_fn (stmt
);
3088 tree fntype
, fndecl
;
3091 if (gimple_call_internal_p (stmt
))
3095 error ("gimple call has two targets");
3096 debug_generic_stmt (fn
);
3104 error ("gimple call has no target");
3109 if (fn
&& !is_gimple_call_addr (fn
))
3111 error ("invalid function in gimple call");
3112 debug_generic_stmt (fn
);
3117 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3118 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3119 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3121 error ("non-function in gimple call");
3125 fndecl
= gimple_call_fndecl (stmt
);
3127 && TREE_CODE (fndecl
) == FUNCTION_DECL
3128 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3129 && !DECL_PURE_P (fndecl
)
3130 && !TREE_READONLY (fndecl
))
3132 error ("invalid pure const state for function");
3136 if (gimple_call_lhs (stmt
)
3137 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3138 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3140 error ("invalid LHS in gimple call");
3144 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3146 error ("LHS in noreturn call");
3150 fntype
= gimple_call_fntype (stmt
);
3152 && gimple_call_lhs (stmt
)
3153 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3155 /* ??? At least C++ misses conversions at assignments from
3156 void * call results.
3157 ??? Java is completely off. Especially with functions
3158 returning java.lang.Object.
3159 For now simply allow arbitrary pointer type conversions. */
3160 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3161 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3163 error ("invalid conversion in gimple call");
3164 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3165 debug_generic_stmt (TREE_TYPE (fntype
));
3169 if (gimple_call_chain (stmt
)
3170 && !is_gimple_val (gimple_call_chain (stmt
)))
3172 error ("invalid static chain in gimple call");
3173 debug_generic_stmt (gimple_call_chain (stmt
));
3177 /* If there is a static chain argument, this should not be an indirect
3178 call, and the decl should have DECL_STATIC_CHAIN set. */
3179 if (gimple_call_chain (stmt
))
3181 if (!gimple_call_fndecl (stmt
))
3183 error ("static chain in indirect gimple call");
3186 fn
= TREE_OPERAND (fn
, 0);
3188 if (!DECL_STATIC_CHAIN (fn
))
3190 error ("static chain with function that doesn%'t use one");
3195 /* ??? The C frontend passes unpromoted arguments in case it
3196 didn't see a function declaration before the call. So for now
3197 leave the call arguments mostly unverified. Once we gimplify
3198 unit-at-a-time we have a chance to fix this. */
3200 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3202 tree arg
= gimple_call_arg (stmt
, i
);
3203 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3204 && !is_gimple_val (arg
))
3205 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3206 && !is_gimple_lvalue (arg
)))
3208 error ("invalid argument to gimple call");
3209 debug_generic_expr (arg
);
3217 /* Verifies the gimple comparison with the result type TYPE and
3218 the operands OP0 and OP1. */
3221 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3223 tree op0_type
= TREE_TYPE (op0
);
3224 tree op1_type
= TREE_TYPE (op1
);
3226 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3228 error ("invalid operands in gimple comparison");
3232 /* For comparisons we do not have the operations type as the
3233 effective type the comparison is carried out in. Instead
3234 we require that either the first operand is trivially
3235 convertible into the second, or the other way around.
3236 Because we special-case pointers to void we allow
3237 comparisons of pointers with the same mode as well. */
3238 if (!useless_type_conversion_p (op0_type
, op1_type
)
3239 && !useless_type_conversion_p (op1_type
, op0_type
)
3240 && (!POINTER_TYPE_P (op0_type
)
3241 || !POINTER_TYPE_P (op1_type
)
3242 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3244 error ("mismatching comparison operand types");
3245 debug_generic_expr (op0_type
);
3246 debug_generic_expr (op1_type
);
3250 /* The resulting type of a comparison may be an effective boolean type. */
3251 if (INTEGRAL_TYPE_P (type
)
3252 && (TREE_CODE (type
) == BOOLEAN_TYPE
3253 || TYPE_PRECISION (type
) == 1))
3255 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3256 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3258 error ("vector comparison returning a boolean");
3259 debug_generic_expr (op0_type
);
3260 debug_generic_expr (op1_type
);
3264 /* Or an integer vector type with the same size and element count
3265 as the comparison operand types. */
3266 else if (TREE_CODE (type
) == VECTOR_TYPE
3267 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3269 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3270 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3272 error ("non-vector operands in vector comparison");
3273 debug_generic_expr (op0_type
);
3274 debug_generic_expr (op1_type
);
3278 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3279 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3280 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3281 /* The result of a vector comparison is of signed
3283 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3285 error ("invalid vector comparison resulting type");
3286 debug_generic_expr (type
);
3292 error ("bogus comparison result type");
3293 debug_generic_expr (type
);
3300 /* Verify a gimple assignment statement STMT with an unary rhs.
3301 Returns true if anything is wrong. */
3304 verify_gimple_assign_unary (gimple stmt
)
3306 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3307 tree lhs
= gimple_assign_lhs (stmt
);
3308 tree lhs_type
= TREE_TYPE (lhs
);
3309 tree rhs1
= gimple_assign_rhs1 (stmt
);
3310 tree rhs1_type
= TREE_TYPE (rhs1
);
3312 if (!is_gimple_reg (lhs
))
3314 error ("non-register as LHS of unary operation");
3318 if (!is_gimple_val (rhs1
))
3320 error ("invalid operand in unary operation");
3324 /* First handle conversions. */
3329 /* Allow conversions from pointer type to integral type only if
3330 there is no sign or zero extension involved.
3331 For targets were the precision of ptrofftype doesn't match that
3332 of pointers we need to allow arbitrary conversions to ptrofftype. */
3333 if ((POINTER_TYPE_P (lhs_type
)
3334 && INTEGRAL_TYPE_P (rhs1_type
))
3335 || (POINTER_TYPE_P (rhs1_type
)
3336 && INTEGRAL_TYPE_P (lhs_type
)
3337 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3338 || ptrofftype_p (sizetype
))))
3341 /* Allow conversion from integral to offset type and vice versa. */
3342 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3343 && INTEGRAL_TYPE_P (rhs1_type
))
3344 || (INTEGRAL_TYPE_P (lhs_type
)
3345 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3348 /* Otherwise assert we are converting between types of the
3350 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3352 error ("invalid types in nop conversion");
3353 debug_generic_expr (lhs_type
);
3354 debug_generic_expr (rhs1_type
);
3361 case ADDR_SPACE_CONVERT_EXPR
:
3363 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3364 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3365 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3367 error ("invalid types in address space conversion");
3368 debug_generic_expr (lhs_type
);
3369 debug_generic_expr (rhs1_type
);
3376 case FIXED_CONVERT_EXPR
:
3378 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3379 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3381 error ("invalid types in fixed-point conversion");
3382 debug_generic_expr (lhs_type
);
3383 debug_generic_expr (rhs1_type
);
3392 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3393 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3394 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3396 error ("invalid types in conversion to floating point");
3397 debug_generic_expr (lhs_type
);
3398 debug_generic_expr (rhs1_type
);
3405 case FIX_TRUNC_EXPR
:
3407 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3408 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3409 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3411 error ("invalid types in conversion to integer");
3412 debug_generic_expr (lhs_type
);
3413 debug_generic_expr (rhs1_type
);
3420 case VEC_UNPACK_HI_EXPR
:
3421 case VEC_UNPACK_LO_EXPR
:
3422 case REDUC_MAX_EXPR
:
3423 case REDUC_MIN_EXPR
:
3424 case REDUC_PLUS_EXPR
:
3425 case VEC_UNPACK_FLOAT_HI_EXPR
:
3426 case VEC_UNPACK_FLOAT_LO_EXPR
:
3434 case NON_LVALUE_EXPR
:
3442 /* For the remaining codes assert there is no conversion involved. */
3443 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3445 error ("non-trivial conversion in unary operation");
3446 debug_generic_expr (lhs_type
);
3447 debug_generic_expr (rhs1_type
);
3454 /* Verify a gimple assignment statement STMT with a binary rhs.
3455 Returns true if anything is wrong. */
3458 verify_gimple_assign_binary (gimple stmt
)
3460 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3461 tree lhs
= gimple_assign_lhs (stmt
);
3462 tree lhs_type
= TREE_TYPE (lhs
);
3463 tree rhs1
= gimple_assign_rhs1 (stmt
);
3464 tree rhs1_type
= TREE_TYPE (rhs1
);
3465 tree rhs2
= gimple_assign_rhs2 (stmt
);
3466 tree rhs2_type
= TREE_TYPE (rhs2
);
3468 if (!is_gimple_reg (lhs
))
3470 error ("non-register as LHS of binary operation");
3474 if (!is_gimple_val (rhs1
)
3475 || !is_gimple_val (rhs2
))
3477 error ("invalid operands in binary operation");
3481 /* First handle operations that involve different types. */
3486 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3487 || !(INTEGRAL_TYPE_P (rhs1_type
)
3488 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3489 || !(INTEGRAL_TYPE_P (rhs2_type
)
3490 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3492 error ("type mismatch in complex expression");
3493 debug_generic_expr (lhs_type
);
3494 debug_generic_expr (rhs1_type
);
3495 debug_generic_expr (rhs2_type
);
3507 /* Shifts and rotates are ok on integral types, fixed point
3508 types and integer vector types. */
3509 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3510 && !FIXED_POINT_TYPE_P (rhs1_type
)
3511 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3512 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3513 || (!INTEGRAL_TYPE_P (rhs2_type
)
3514 /* Vector shifts of vectors are also ok. */
3515 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3516 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3517 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3518 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3519 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3521 error ("type mismatch in shift expression");
3522 debug_generic_expr (lhs_type
);
3523 debug_generic_expr (rhs1_type
);
3524 debug_generic_expr (rhs2_type
);
3531 case VEC_LSHIFT_EXPR
:
3532 case VEC_RSHIFT_EXPR
:
3534 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3535 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3536 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3537 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3538 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3539 || (!INTEGRAL_TYPE_P (rhs2_type
)
3540 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3541 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3542 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3544 error ("type mismatch in vector shift expression");
3545 debug_generic_expr (lhs_type
);
3546 debug_generic_expr (rhs1_type
);
3547 debug_generic_expr (rhs2_type
);
3550 /* For shifting a vector of non-integral components we
3551 only allow shifting by a constant multiple of the element size. */
3552 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3553 && (TREE_CODE (rhs2
) != INTEGER_CST
3554 || !div_if_zero_remainder (rhs2
,
3555 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3557 error ("non-element sized vector shift of floating point vector");
3564 case WIDEN_LSHIFT_EXPR
:
3566 if (!INTEGRAL_TYPE_P (lhs_type
)
3567 || !INTEGRAL_TYPE_P (rhs1_type
)
3568 || TREE_CODE (rhs2
) != INTEGER_CST
3569 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3571 error ("type mismatch in widening vector shift expression");
3572 debug_generic_expr (lhs_type
);
3573 debug_generic_expr (rhs1_type
);
3574 debug_generic_expr (rhs2_type
);
3581 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3582 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3584 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3585 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3586 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3587 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3588 || TREE_CODE (rhs2
) != INTEGER_CST
3589 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3590 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3592 error ("type mismatch in widening vector shift expression");
3593 debug_generic_expr (lhs_type
);
3594 debug_generic_expr (rhs1_type
);
3595 debug_generic_expr (rhs2_type
);
3605 tree lhs_etype
= lhs_type
;
3606 tree rhs1_etype
= rhs1_type
;
3607 tree rhs2_etype
= rhs2_type
;
3608 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3610 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3611 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3613 error ("invalid non-vector operands to vector valued plus");
3616 lhs_etype
= TREE_TYPE (lhs_type
);
3617 rhs1_etype
= TREE_TYPE (rhs1_type
);
3618 rhs2_etype
= TREE_TYPE (rhs2_type
);
3620 if (POINTER_TYPE_P (lhs_etype
)
3621 || POINTER_TYPE_P (rhs1_etype
)
3622 || POINTER_TYPE_P (rhs2_etype
))
3624 error ("invalid (pointer) operands to plus/minus");
3628 /* Continue with generic binary expression handling. */
3632 case POINTER_PLUS_EXPR
:
3634 if (!POINTER_TYPE_P (rhs1_type
)
3635 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3636 || !ptrofftype_p (rhs2_type
))
3638 error ("type mismatch in pointer plus expression");
3639 debug_generic_stmt (lhs_type
);
3640 debug_generic_stmt (rhs1_type
);
3641 debug_generic_stmt (rhs2_type
);
3648 case TRUTH_ANDIF_EXPR
:
3649 case TRUTH_ORIF_EXPR
:
3650 case TRUTH_AND_EXPR
:
3652 case TRUTH_XOR_EXPR
:
3662 case UNORDERED_EXPR
:
3670 /* Comparisons are also binary, but the result type is not
3671 connected to the operand types. */
3672 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3674 case WIDEN_MULT_EXPR
:
3675 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3677 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3678 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3680 case WIDEN_SUM_EXPR
:
3681 case VEC_WIDEN_MULT_HI_EXPR
:
3682 case VEC_WIDEN_MULT_LO_EXPR
:
3683 case VEC_WIDEN_MULT_EVEN_EXPR
:
3684 case VEC_WIDEN_MULT_ODD_EXPR
:
3685 case VEC_PACK_TRUNC_EXPR
:
3686 case VEC_PACK_SAT_EXPR
:
3687 case VEC_PACK_FIX_TRUNC_EXPR
:
3692 case MULT_HIGHPART_EXPR
:
3693 case TRUNC_DIV_EXPR
:
3695 case FLOOR_DIV_EXPR
:
3696 case ROUND_DIV_EXPR
:
3697 case TRUNC_MOD_EXPR
:
3699 case FLOOR_MOD_EXPR
:
3700 case ROUND_MOD_EXPR
:
3702 case EXACT_DIV_EXPR
:
3708 /* Continue with generic binary expression handling. */
3715 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3716 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3718 error ("type mismatch in binary expression");
3719 debug_generic_stmt (lhs_type
);
3720 debug_generic_stmt (rhs1_type
);
3721 debug_generic_stmt (rhs2_type
);
3728 /* Verify a gimple assignment statement STMT with a ternary rhs.
3729 Returns true if anything is wrong. */
3732 verify_gimple_assign_ternary (gimple stmt
)
3734 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3735 tree lhs
= gimple_assign_lhs (stmt
);
3736 tree lhs_type
= TREE_TYPE (lhs
);
3737 tree rhs1
= gimple_assign_rhs1 (stmt
);
3738 tree rhs1_type
= TREE_TYPE (rhs1
);
3739 tree rhs2
= gimple_assign_rhs2 (stmt
);
3740 tree rhs2_type
= TREE_TYPE (rhs2
);
3741 tree rhs3
= gimple_assign_rhs3 (stmt
);
3742 tree rhs3_type
= TREE_TYPE (rhs3
);
3744 if (!is_gimple_reg (lhs
))
3746 error ("non-register as LHS of ternary operation");
3750 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3751 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3752 || !is_gimple_val (rhs2
)
3753 || !is_gimple_val (rhs3
))
3755 error ("invalid operands in ternary operation");
3759 /* First handle operations that involve different types. */
3762 case WIDEN_MULT_PLUS_EXPR
:
3763 case WIDEN_MULT_MINUS_EXPR
:
3764 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3765 && !FIXED_POINT_TYPE_P (rhs1_type
))
3766 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3767 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3768 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3769 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3771 error ("type mismatch in widening multiply-accumulate expression");
3772 debug_generic_expr (lhs_type
);
3773 debug_generic_expr (rhs1_type
);
3774 debug_generic_expr (rhs2_type
);
3775 debug_generic_expr (rhs3_type
);
3781 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3782 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3783 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3785 error ("type mismatch in fused multiply-add expression");
3786 debug_generic_expr (lhs_type
);
3787 debug_generic_expr (rhs1_type
);
3788 debug_generic_expr (rhs2_type
);
3789 debug_generic_expr (rhs3_type
);
3796 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3797 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3799 error ("type mismatch in conditional expression");
3800 debug_generic_expr (lhs_type
);
3801 debug_generic_expr (rhs2_type
);
3802 debug_generic_expr (rhs3_type
);
3808 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3809 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3811 error ("type mismatch in vector permute expression");
3812 debug_generic_expr (lhs_type
);
3813 debug_generic_expr (rhs1_type
);
3814 debug_generic_expr (rhs2_type
);
3815 debug_generic_expr (rhs3_type
);
3819 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3820 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3821 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3823 error ("vector types expected in vector permute expression");
3824 debug_generic_expr (lhs_type
);
3825 debug_generic_expr (rhs1_type
);
3826 debug_generic_expr (rhs2_type
);
3827 debug_generic_expr (rhs3_type
);
3831 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3832 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3833 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3834 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3835 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3837 error ("vectors with different element number found "
3838 "in vector permute expression");
3839 debug_generic_expr (lhs_type
);
3840 debug_generic_expr (rhs1_type
);
3841 debug_generic_expr (rhs2_type
);
3842 debug_generic_expr (rhs3_type
);
3846 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3847 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3848 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3850 error ("invalid mask type in vector permute expression");
3851 debug_generic_expr (lhs_type
);
3852 debug_generic_expr (rhs1_type
);
3853 debug_generic_expr (rhs2_type
);
3854 debug_generic_expr (rhs3_type
);
3861 case REALIGN_LOAD_EXPR
:
3871 /* Verify a gimple assignment statement STMT with a single rhs.
3872 Returns true if anything is wrong. */
3875 verify_gimple_assign_single (gimple stmt
)
3877 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3878 tree lhs
= gimple_assign_lhs (stmt
);
3879 tree lhs_type
= TREE_TYPE (lhs
);
3880 tree rhs1
= gimple_assign_rhs1 (stmt
);
3881 tree rhs1_type
= TREE_TYPE (rhs1
);
3884 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3886 error ("non-trivial conversion at assignment");
3887 debug_generic_expr (lhs_type
);
3888 debug_generic_expr (rhs1_type
);
3892 if (gimple_clobber_p (stmt
)
3893 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
3895 error ("non-decl/MEM_REF LHS in clobber statement");
3896 debug_generic_expr (lhs
);
3900 if (handled_component_p (lhs
))
3901 res
|= verify_types_in_gimple_reference (lhs
, true);
3903 /* Special codes we cannot handle via their class. */
3908 tree op
= TREE_OPERAND (rhs1
, 0);
3909 if (!is_gimple_addressable (op
))
3911 error ("invalid operand in unary expression");
3915 /* Technically there is no longer a need for matching types, but
3916 gimple hygiene asks for this check. In LTO we can end up
3917 combining incompatible units and thus end up with addresses
3918 of globals that change their type to a common one. */
3920 && !types_compatible_p (TREE_TYPE (op
),
3921 TREE_TYPE (TREE_TYPE (rhs1
)))
3922 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
3925 error ("type mismatch in address expression");
3926 debug_generic_stmt (TREE_TYPE (rhs1
));
3927 debug_generic_stmt (TREE_TYPE (op
));
3931 return verify_types_in_gimple_reference (op
, true);
3936 error ("INDIRECT_REF in gimple IL");
3942 case ARRAY_RANGE_REF
:
3943 case VIEW_CONVERT_EXPR
:
3946 case TARGET_MEM_REF
:
3948 if (!is_gimple_reg (lhs
)
3949 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3951 error ("invalid rhs for gimple memory store");
3952 debug_generic_stmt (lhs
);
3953 debug_generic_stmt (rhs1
);
3956 return res
|| verify_types_in_gimple_reference (rhs1
, false);
3968 /* tcc_declaration */
3973 if (!is_gimple_reg (lhs
)
3974 && !is_gimple_reg (rhs1
)
3975 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3977 error ("invalid rhs for gimple memory store");
3978 debug_generic_stmt (lhs
);
3979 debug_generic_stmt (rhs1
);
3985 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
3988 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
3990 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
3992 /* For vector CONSTRUCTORs we require that either it is empty
3993 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3994 (then the element count must be correct to cover the whole
3995 outer vector and index must be NULL on all elements, or it is
3996 a CONSTRUCTOR of scalar elements, where we as an exception allow
3997 smaller number of elements (assuming zero filling) and
3998 consecutive indexes as compared to NULL indexes (such
3999 CONSTRUCTORs can appear in the IL from FEs). */
4000 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4002 if (elt_t
== NULL_TREE
)
4004 elt_t
= TREE_TYPE (elt_v
);
4005 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4007 tree elt_t
= TREE_TYPE (elt_v
);
4008 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4011 error ("incorrect type of vector CONSTRUCTOR"
4013 debug_generic_stmt (rhs1
);
4016 else if (CONSTRUCTOR_NELTS (rhs1
)
4017 * TYPE_VECTOR_SUBPARTS (elt_t
)
4018 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4020 error ("incorrect number of vector CONSTRUCTOR"
4022 debug_generic_stmt (rhs1
);
4026 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4029 error ("incorrect type of vector CONSTRUCTOR elements");
4030 debug_generic_stmt (rhs1
);
4033 else if (CONSTRUCTOR_NELTS (rhs1
)
4034 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4036 error ("incorrect number of vector CONSTRUCTOR elements");
4037 debug_generic_stmt (rhs1
);
4041 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4043 error ("incorrect type of vector CONSTRUCTOR elements");
4044 debug_generic_stmt (rhs1
);
4047 if (elt_i
!= NULL_TREE
4048 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4049 || TREE_CODE (elt_i
) != INTEGER_CST
4050 || compare_tree_int (elt_i
, i
) != 0))
4052 error ("vector CONSTRUCTOR with non-NULL element index");
4053 debug_generic_stmt (rhs1
);
4061 case WITH_SIZE_EXPR
:
4071 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4072 is a problem, otherwise false. */
4075 verify_gimple_assign (gimple stmt
)
4077 switch (gimple_assign_rhs_class (stmt
))
4079 case GIMPLE_SINGLE_RHS
:
4080 return verify_gimple_assign_single (stmt
);
4082 case GIMPLE_UNARY_RHS
:
4083 return verify_gimple_assign_unary (stmt
);
4085 case GIMPLE_BINARY_RHS
:
4086 return verify_gimple_assign_binary (stmt
);
4088 case GIMPLE_TERNARY_RHS
:
4089 return verify_gimple_assign_ternary (stmt
);
4096 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4097 is a problem, otherwise false. */
4100 verify_gimple_return (gimple stmt
)
4102 tree op
= gimple_return_retval (stmt
);
4103 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4105 /* We cannot test for present return values as we do not fix up missing
4106 return values from the original source. */
4110 if (!is_gimple_val (op
)
4111 && TREE_CODE (op
) != RESULT_DECL
)
4113 error ("invalid operand in return statement");
4114 debug_generic_stmt (op
);
4118 if ((TREE_CODE (op
) == RESULT_DECL
4119 && DECL_BY_REFERENCE (op
))
4120 || (TREE_CODE (op
) == SSA_NAME
4121 && SSA_NAME_VAR (op
)
4122 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4123 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4124 op
= TREE_TYPE (op
);
4126 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4128 error ("invalid conversion in return statement");
4129 debug_generic_stmt (restype
);
4130 debug_generic_stmt (TREE_TYPE (op
));
4138 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4139 is a problem, otherwise false. */
4142 verify_gimple_goto (gimple stmt
)
4144 tree dest
= gimple_goto_dest (stmt
);
4146 /* ??? We have two canonical forms of direct goto destinations, a
4147 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4148 if (TREE_CODE (dest
) != LABEL_DECL
4149 && (!is_gimple_val (dest
)
4150 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4152 error ("goto destination is neither a label nor a pointer");
4159 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4160 is a problem, otherwise false. */
4163 verify_gimple_switch (gimple stmt
)
4166 tree elt
, prev_upper_bound
= NULL_TREE
;
4167 tree index_type
, elt_type
= NULL_TREE
;
4169 if (!is_gimple_val (gimple_switch_index (stmt
)))
4171 error ("invalid operand to switch statement");
4172 debug_generic_stmt (gimple_switch_index (stmt
));
4176 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4177 if (! INTEGRAL_TYPE_P (index_type
))
4179 error ("non-integral type switch statement");
4180 debug_generic_expr (index_type
);
4184 elt
= gimple_switch_label (stmt
, 0);
4185 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4187 error ("invalid default case label in switch statement");
4188 debug_generic_expr (elt
);
4192 n
= gimple_switch_num_labels (stmt
);
4193 for (i
= 1; i
< n
; i
++)
4195 elt
= gimple_switch_label (stmt
, i
);
4197 if (! CASE_LOW (elt
))
4199 error ("invalid case label in switch statement");
4200 debug_generic_expr (elt
);
4204 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4206 error ("invalid case range in switch statement");
4207 debug_generic_expr (elt
);
4213 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4214 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4216 error ("type mismatch for case label in switch statement");
4217 debug_generic_expr (elt
);
4223 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4224 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4226 error ("type precision mismatch in switch statement");
4231 if (prev_upper_bound
)
4233 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4235 error ("case labels not sorted in switch statement");
4240 prev_upper_bound
= CASE_HIGH (elt
);
4241 if (! prev_upper_bound
)
4242 prev_upper_bound
= CASE_LOW (elt
);
4248 /* Verify a gimple debug statement STMT.
4249 Returns true if anything is wrong. */
4252 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4254 /* There isn't much that could be wrong in a gimple debug stmt. A
4255 gimple debug bind stmt, for example, maps a tree, that's usually
4256 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4257 component or member of an aggregate type, to another tree, that
4258 can be an arbitrary expression. These stmts expand into debug
4259 insns, and are converted to debug notes by var-tracking.c. */
4263 /* Verify a gimple label statement STMT.
4264 Returns true if anything is wrong. */
4267 verify_gimple_label (gimple stmt
)
4269 tree decl
= gimple_label_label (stmt
);
4273 if (TREE_CODE (decl
) != LABEL_DECL
)
4275 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4276 && DECL_CONTEXT (decl
) != current_function_decl
)
4278 error ("label's context is not the current function decl");
4282 uid
= LABEL_DECL_UID (decl
);
4284 && (uid
== -1 || (*label_to_block_map
)[uid
] != gimple_bb (stmt
)))
4286 error ("incorrect entry in label_to_block_map");
4290 uid
= EH_LANDING_PAD_NR (decl
);
4293 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4294 if (decl
!= lp
->post_landing_pad
)
4296 error ("incorrect setting of landing pad number");
4304 /* Verify the GIMPLE statement STMT. Returns true if there is an
4305 error, otherwise false. */
4308 verify_gimple_stmt (gimple stmt
)
4310 switch (gimple_code (stmt
))
4313 return verify_gimple_assign (stmt
);
4316 return verify_gimple_label (stmt
);
4319 return verify_gimple_call (stmt
);
4322 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4324 error ("invalid comparison code in gimple cond");
4327 if (!(!gimple_cond_true_label (stmt
)
4328 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4329 || !(!gimple_cond_false_label (stmt
)
4330 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4332 error ("invalid labels in gimple cond");
4336 return verify_gimple_comparison (boolean_type_node
,
4337 gimple_cond_lhs (stmt
),
4338 gimple_cond_rhs (stmt
));
4341 return verify_gimple_goto (stmt
);
4344 return verify_gimple_switch (stmt
);
4347 return verify_gimple_return (stmt
);
4352 case GIMPLE_TRANSACTION
:
4353 return verify_gimple_transaction (stmt
);
4355 /* Tuples that do not have tree operands. */
4357 case GIMPLE_PREDICT
:
4359 case GIMPLE_EH_DISPATCH
:
4360 case GIMPLE_EH_MUST_NOT_THROW
:
4364 /* OpenMP directives are validated by the FE and never operated
4365 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4366 non-gimple expressions when the main index variable has had
4367 its address taken. This does not affect the loop itself
4368 because the header of an GIMPLE_OMP_FOR is merely used to determine
4369 how to setup the parallel iteration. */
4373 return verify_gimple_debug (stmt
);
4380 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4381 and false otherwise. */
4384 verify_gimple_phi (gimple phi
)
4388 tree phi_result
= gimple_phi_result (phi
);
4393 error ("invalid PHI result");
4397 virtual_p
= virtual_operand_p (phi_result
);
4398 if (TREE_CODE (phi_result
) != SSA_NAME
4400 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4402 error ("invalid PHI result");
4406 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4408 tree t
= gimple_phi_arg_def (phi
, i
);
4412 error ("missing PHI def");
4416 /* Addressable variables do have SSA_NAMEs but they
4417 are not considered gimple values. */
4418 else if ((TREE_CODE (t
) == SSA_NAME
4419 && virtual_p
!= virtual_operand_p (t
))
4421 && (TREE_CODE (t
) != SSA_NAME
4422 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4424 && !is_gimple_val (t
)))
4426 error ("invalid PHI argument");
4427 debug_generic_expr (t
);
4430 #ifdef ENABLE_TYPES_CHECKING
4431 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4433 error ("incompatible types in PHI argument %u", i
);
4434 debug_generic_stmt (TREE_TYPE (phi_result
));
4435 debug_generic_stmt (TREE_TYPE (t
));
4444 /* Verify the GIMPLE statements inside the sequence STMTS. */
4447 verify_gimple_in_seq_2 (gimple_seq stmts
)
4449 gimple_stmt_iterator ittr
;
4452 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4454 gimple stmt
= gsi_stmt (ittr
);
4456 switch (gimple_code (stmt
))
4459 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4463 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4464 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4467 case GIMPLE_EH_FILTER
:
4468 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4471 case GIMPLE_EH_ELSE
:
4472 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4473 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4477 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4480 case GIMPLE_TRANSACTION
:
4481 err
|= verify_gimple_transaction (stmt
);
4486 bool err2
= verify_gimple_stmt (stmt
);
4488 debug_gimple_stmt (stmt
);
4497 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4498 is a problem, otherwise false. */
4501 verify_gimple_transaction (gimple stmt
)
4503 tree lab
= gimple_transaction_label (stmt
);
4504 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4506 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4510 /* Verify the GIMPLE statements inside the statement list STMTS. */
4513 verify_gimple_in_seq (gimple_seq stmts
)
4515 timevar_push (TV_TREE_STMT_VERIFY
);
4516 if (verify_gimple_in_seq_2 (stmts
))
4517 internal_error ("verify_gimple failed");
4518 timevar_pop (TV_TREE_STMT_VERIFY
);
4521 /* Return true when the T can be shared. */
4524 tree_node_can_be_shared (tree t
)
4526 if (IS_TYPE_OR_DECL_P (t
)
4527 || is_gimple_min_invariant (t
)
4528 || TREE_CODE (t
) == SSA_NAME
4529 || t
== error_mark_node
4530 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4533 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4542 /* Called via walk_tree. Verify tree sharing. */
4545 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4547 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4549 if (tree_node_can_be_shared (*tp
))
4551 *walk_subtrees
= false;
4555 if (pointer_set_insert (visited
, *tp
))
4561 /* Called via walk_gimple_stmt. Verify tree sharing. */
4564 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4566 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4567 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4570 static bool eh_error_found
;
4572 verify_eh_throw_stmt_node (void **slot
, void *data
)
4574 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4575 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4577 if (!pointer_set_contains (visited
, node
->stmt
))
4579 error ("dead STMT in EH table");
4580 debug_gimple_stmt (node
->stmt
);
4581 eh_error_found
= true;
4586 /* Verify if the location LOCs block is in BLOCKS. */
4589 verify_location (pointer_set_t
*blocks
, location_t loc
)
4591 tree block
= LOCATION_BLOCK (loc
);
4592 if (block
!= NULL_TREE
4593 && !pointer_set_contains (blocks
, block
))
4595 error ("location references block not in block tree");
4598 if (block
!= NULL_TREE
)
4599 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4603 /* Called via walk_tree. Verify that expressions have no blocks. */
4606 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4610 *walk_subtrees
= false;
4614 location_t loc
= EXPR_LOCATION (*tp
);
4615 if (LOCATION_BLOCK (loc
) != NULL
)
4621 /* Called via walk_tree. Verify locations of expressions. */
4624 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4626 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4628 if (TREE_CODE (*tp
) == VAR_DECL
4629 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4631 tree t
= DECL_DEBUG_EXPR (*tp
);
4632 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4636 if ((TREE_CODE (*tp
) == VAR_DECL
4637 || TREE_CODE (*tp
) == PARM_DECL
4638 || TREE_CODE (*tp
) == RESULT_DECL
)
4639 && DECL_HAS_VALUE_EXPR_P (*tp
))
4641 tree t
= DECL_VALUE_EXPR (*tp
);
4642 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4649 *walk_subtrees
= false;
4653 location_t loc
= EXPR_LOCATION (*tp
);
4654 if (verify_location (blocks
, loc
))
4660 /* Called via walk_gimple_op. Verify locations of expressions. */
4663 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4665 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4666 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4669 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4672 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4675 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4677 pointer_set_insert (blocks
, t
);
4678 collect_subblocks (blocks
, t
);
4682 /* Verify the GIMPLE statements in the CFG of FN. */
4685 verify_gimple_in_cfg (struct function
*fn
)
4689 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4691 timevar_push (TV_TREE_STMT_VERIFY
);
4692 visited
= pointer_set_create ();
4693 visited_stmts
= pointer_set_create ();
4695 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4696 blocks
= pointer_set_create ();
4697 if (DECL_INITIAL (fn
->decl
))
4699 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4700 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4703 FOR_EACH_BB_FN (bb
, fn
)
4705 gimple_stmt_iterator gsi
;
4707 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4709 gimple phi
= gsi_stmt (gsi
);
4713 pointer_set_insert (visited_stmts
, phi
);
4715 if (gimple_bb (phi
) != bb
)
4717 error ("gimple_bb (phi) is set to a wrong basic block");
4721 err2
|= verify_gimple_phi (phi
);
4723 /* Only PHI arguments have locations. */
4724 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4726 error ("PHI node with location");
4730 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4732 tree arg
= gimple_phi_arg_def (phi
, i
);
4733 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4737 error ("incorrect sharing of tree nodes");
4738 debug_generic_expr (addr
);
4741 location_t loc
= gimple_phi_arg_location (phi
, i
);
4742 if (virtual_operand_p (gimple_phi_result (phi
))
4743 && loc
!= UNKNOWN_LOCATION
)
4745 error ("virtual PHI with argument locations");
4748 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4751 debug_generic_expr (addr
);
4754 err2
|= verify_location (blocks
, loc
);
4758 debug_gimple_stmt (phi
);
4762 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4764 gimple stmt
= gsi_stmt (gsi
);
4766 struct walk_stmt_info wi
;
4770 pointer_set_insert (visited_stmts
, stmt
);
4772 if (gimple_bb (stmt
) != bb
)
4774 error ("gimple_bb (stmt) is set to a wrong basic block");
4778 err2
|= verify_gimple_stmt (stmt
);
4779 err2
|= verify_location (blocks
, gimple_location (stmt
));
4781 memset (&wi
, 0, sizeof (wi
));
4782 wi
.info
= (void *) visited
;
4783 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4786 error ("incorrect sharing of tree nodes");
4787 debug_generic_expr (addr
);
4791 memset (&wi
, 0, sizeof (wi
));
4792 wi
.info
= (void *) blocks
;
4793 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4796 debug_generic_expr (addr
);
4800 /* ??? Instead of not checking these stmts at all the walker
4801 should know its context via wi. */
4802 if (!is_gimple_debug (stmt
)
4803 && !is_gimple_omp (stmt
))
4805 memset (&wi
, 0, sizeof (wi
));
4806 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4809 debug_generic_expr (addr
);
4810 inform (gimple_location (stmt
), "in statement");
4815 /* If the statement is marked as part of an EH region, then it is
4816 expected that the statement could throw. Verify that when we
4817 have optimizations that simplify statements such that we prove
4818 that they cannot throw, that we update other data structures
4820 lp_nr
= lookup_stmt_eh_lp (stmt
);
4823 if (!stmt_could_throw_p (stmt
))
4825 error ("statement marked for throw, but doesn%'t");
4829 && !gsi_one_before_end_p (gsi
)
4830 && stmt_can_throw_internal (stmt
))
4832 error ("statement marked for throw in middle of block");
4838 debug_gimple_stmt (stmt
);
4843 eh_error_found
= false;
4844 if (get_eh_throw_stmt_table (cfun
))
4845 htab_traverse (get_eh_throw_stmt_table (cfun
),
4846 verify_eh_throw_stmt_node
,
4849 if (err
|| eh_error_found
)
4850 internal_error ("verify_gimple failed");
4852 pointer_set_destroy (visited
);
4853 pointer_set_destroy (visited_stmts
);
4854 pointer_set_destroy (blocks
);
4855 verify_histograms ();
4856 timevar_pop (TV_TREE_STMT_VERIFY
);
4860 /* Verifies that the flow information is OK. */
4863 gimple_verify_flow_info (void)
4867 gimple_stmt_iterator gsi
;
4872 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4873 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4875 error ("ENTRY_BLOCK has IL associated with it");
4879 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4880 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4882 error ("EXIT_BLOCK has IL associated with it");
4886 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
4887 if (e
->flags
& EDGE_FALLTHRU
)
4889 error ("fallthru to exit from bb %d", e
->src
->index
);
4895 bool found_ctrl_stmt
= false;
4899 /* Skip labels on the start of basic block. */
4900 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4903 gimple prev_stmt
= stmt
;
4905 stmt
= gsi_stmt (gsi
);
4907 if (gimple_code (stmt
) != GIMPLE_LABEL
)
4910 label
= gimple_label_label (stmt
);
4911 if (prev_stmt
&& DECL_NONLOCAL (label
))
4913 error ("nonlocal label ");
4914 print_generic_expr (stderr
, label
, 0);
4915 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4920 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
4922 error ("EH landing pad label ");
4923 print_generic_expr (stderr
, label
, 0);
4924 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4929 if (label_to_block (label
) != bb
)
4932 print_generic_expr (stderr
, label
, 0);
4933 fprintf (stderr
, " to block does not match in bb %d",
4938 if (decl_function_context (label
) != current_function_decl
)
4941 print_generic_expr (stderr
, label
, 0);
4942 fprintf (stderr
, " has incorrect context in bb %d",
4948 /* Verify that body of basic block BB is free of control flow. */
4949 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
4951 gimple stmt
= gsi_stmt (gsi
);
4953 if (found_ctrl_stmt
)
4955 error ("control flow in the middle of basic block %d",
4960 if (stmt_ends_bb_p (stmt
))
4961 found_ctrl_stmt
= true;
4963 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4966 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
4967 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
4972 gsi
= gsi_last_bb (bb
);
4973 if (gsi_end_p (gsi
))
4976 stmt
= gsi_stmt (gsi
);
4978 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4981 err
|= verify_eh_edges (stmt
);
4983 if (is_ctrl_stmt (stmt
))
4985 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4986 if (e
->flags
& EDGE_FALLTHRU
)
4988 error ("fallthru edge after a control statement in bb %d",
4994 if (gimple_code (stmt
) != GIMPLE_COND
)
4996 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4997 after anything else but if statement. */
4998 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4999 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5001 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5007 switch (gimple_code (stmt
))
5014 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5018 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5019 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5020 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5021 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5022 || EDGE_COUNT (bb
->succs
) >= 3)
5024 error ("wrong outgoing edge flags at end of bb %d",
5032 if (simple_goto_p (stmt
))
5034 error ("explicit goto at end of bb %d", bb
->index
);
5039 /* FIXME. We should double check that the labels in the
5040 destination blocks have their address taken. */
5041 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5042 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5043 | EDGE_FALSE_VALUE
))
5044 || !(e
->flags
& EDGE_ABNORMAL
))
5046 error ("wrong outgoing edge flags at end of bb %d",
5054 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5056 /* ... fallthru ... */
5058 if (!single_succ_p (bb
)
5059 || (single_succ_edge (bb
)->flags
5060 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5061 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5063 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5066 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5068 error ("return edge does not point to exit in bb %d",
5080 n
= gimple_switch_num_labels (stmt
);
5082 /* Mark all the destination basic blocks. */
5083 for (i
= 0; i
< n
; ++i
)
5085 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5086 basic_block label_bb
= label_to_block (lab
);
5087 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5088 label_bb
->aux
= (void *)1;
5091 /* Verify that the case labels are sorted. */
5092 prev
= gimple_switch_label (stmt
, 0);
5093 for (i
= 1; i
< n
; ++i
)
5095 tree c
= gimple_switch_label (stmt
, i
);
5098 error ("found default case not at the start of "
5104 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5106 error ("case labels not sorted: ");
5107 print_generic_expr (stderr
, prev
, 0);
5108 fprintf (stderr
," is greater than ");
5109 print_generic_expr (stderr
, c
, 0);
5110 fprintf (stderr
," but comes before it.\n");
5115 /* VRP will remove the default case if it can prove it will
5116 never be executed. So do not verify there always exists
5117 a default case here. */
5119 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5123 error ("extra outgoing edge %d->%d",
5124 bb
->index
, e
->dest
->index
);
5128 e
->dest
->aux
= (void *)2;
5129 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5130 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5132 error ("wrong outgoing edge flags at end of bb %d",
5138 /* Check that we have all of them. */
5139 for (i
= 0; i
< n
; ++i
)
5141 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5142 basic_block label_bb
= label_to_block (lab
);
5144 if (label_bb
->aux
!= (void *)2)
5146 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5151 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5152 e
->dest
->aux
= (void *)0;
5156 case GIMPLE_EH_DISPATCH
:
5157 err
|= verify_eh_dispatch_edge (stmt
);
5165 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5166 verify_dominators (CDI_DOMINATORS
);
5172 /* Updates phi nodes after creating a forwarder block joined
5173 by edge FALLTHRU. */
5176 gimple_make_forwarder_block (edge fallthru
)
5180 basic_block dummy
, bb
;
5182 gimple_stmt_iterator gsi
;
5184 dummy
= fallthru
->src
;
5185 bb
= fallthru
->dest
;
5187 if (single_pred_p (bb
))
5190 /* If we redirected a branch we must create new PHI nodes at the
5192 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5194 gimple phi
, new_phi
;
5196 phi
= gsi_stmt (gsi
);
5197 var
= gimple_phi_result (phi
);
5198 new_phi
= create_phi_node (var
, bb
);
5199 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5200 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5204 /* Add the arguments we have stored on edges. */
5205 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5210 flush_pending_stmts (e
);
5215 /* Return a non-special label in the head of basic block BLOCK.
5216 Create one if it doesn't exist. */
5219 gimple_block_label (basic_block bb
)
5221 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5226 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5228 stmt
= gsi_stmt (i
);
5229 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5231 label
= gimple_label_label (stmt
);
5232 if (!DECL_NONLOCAL (label
))
5235 gsi_move_before (&i
, &s
);
5240 label
= create_artificial_label (UNKNOWN_LOCATION
);
5241 stmt
= gimple_build_label (label
);
5242 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5247 /* Attempt to perform edge redirection by replacing a possibly complex
5248 jump instruction by a goto or by removing the jump completely.
5249 This can apply only if all edges now point to the same block. The
5250 parameters and return values are equivalent to
5251 redirect_edge_and_branch. */
5254 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5256 basic_block src
= e
->src
;
5257 gimple_stmt_iterator i
;
5260 /* We can replace or remove a complex jump only when we have exactly
5262 if (EDGE_COUNT (src
->succs
) != 2
5263 /* Verify that all targets will be TARGET. Specifically, the
5264 edge that is not E must also go to TARGET. */
5265 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5268 i
= gsi_last_bb (src
);
5272 stmt
= gsi_stmt (i
);
5274 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5276 gsi_remove (&i
, true);
5277 e
= ssa_redirect_edge (e
, target
);
5278 e
->flags
= EDGE_FALLTHRU
;
5286 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5287 edge representing the redirected branch. */
5290 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5292 basic_block bb
= e
->src
;
5293 gimple_stmt_iterator gsi
;
5297 if (e
->flags
& EDGE_ABNORMAL
)
5300 if (e
->dest
== dest
)
5303 if (e
->flags
& EDGE_EH
)
5304 return redirect_eh_edge (e
, dest
);
5306 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5308 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5313 gsi
= gsi_last_bb (bb
);
5314 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5316 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5319 /* For COND_EXPR, we only need to redirect the edge. */
5323 /* No non-abnormal edges should lead from a non-simple goto, and
5324 simple ones should be represented implicitly. */
5329 tree label
= gimple_block_label (dest
);
5330 tree cases
= get_cases_for_edge (e
, stmt
);
5332 /* If we have a list of cases associated with E, then use it
5333 as it's a lot faster than walking the entire case vector. */
5336 edge e2
= find_edge (e
->src
, dest
);
5343 CASE_LABEL (cases
) = label
;
5344 cases
= CASE_CHAIN (cases
);
5347 /* If there was already an edge in the CFG, then we need
5348 to move all the cases associated with E to E2. */
5351 tree cases2
= get_cases_for_edge (e2
, stmt
);
5353 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5354 CASE_CHAIN (cases2
) = first
;
5356 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5360 size_t i
, n
= gimple_switch_num_labels (stmt
);
5362 for (i
= 0; i
< n
; i
++)
5364 tree elt
= gimple_switch_label (stmt
, i
);
5365 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5366 CASE_LABEL (elt
) = label
;
5374 int i
, n
= gimple_asm_nlabels (stmt
);
5377 for (i
= 0; i
< n
; ++i
)
5379 tree cons
= gimple_asm_label_op (stmt
, i
);
5380 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5383 label
= gimple_block_label (dest
);
5384 TREE_VALUE (cons
) = label
;
5388 /* If we didn't find any label matching the former edge in the
5389 asm labels, we must be redirecting the fallthrough
5391 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5396 gsi_remove (&gsi
, true);
5397 e
->flags
|= EDGE_FALLTHRU
;
5400 case GIMPLE_OMP_RETURN
:
5401 case GIMPLE_OMP_CONTINUE
:
5402 case GIMPLE_OMP_SECTIONS_SWITCH
:
5403 case GIMPLE_OMP_FOR
:
5404 /* The edges from OMP constructs can be simply redirected. */
5407 case GIMPLE_EH_DISPATCH
:
5408 if (!(e
->flags
& EDGE_FALLTHRU
))
5409 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5412 case GIMPLE_TRANSACTION
:
5413 /* The ABORT edge has a stored label associated with it, otherwise
5414 the edges are simply redirectable. */
5416 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5420 /* Otherwise it must be a fallthru edge, and we don't need to
5421 do anything besides redirecting it. */
5422 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5426 /* Update/insert PHI nodes as necessary. */
5428 /* Now update the edges in the CFG. */
5429 e
= ssa_redirect_edge (e
, dest
);
5434 /* Returns true if it is possible to remove edge E by redirecting
5435 it to the destination of the other edge from E->src. */
5438 gimple_can_remove_branch_p (const_edge e
)
5440 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5446 /* Simple wrapper, as we can always redirect fallthru edges. */
5449 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5451 e
= gimple_redirect_edge_and_branch (e
, dest
);
5458 /* Splits basic block BB after statement STMT (but at least after the
5459 labels). If STMT is NULL, BB is split just after the labels. */
5462 gimple_split_block (basic_block bb
, void *stmt
)
5464 gimple_stmt_iterator gsi
;
5465 gimple_stmt_iterator gsi_tgt
;
5472 new_bb
= create_empty_bb (bb
);
5474 /* Redirect the outgoing edges. */
5475 new_bb
->succs
= bb
->succs
;
5477 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5480 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5483 /* Move everything from GSI to the new basic block. */
5484 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5486 act
= gsi_stmt (gsi
);
5487 if (gimple_code (act
) == GIMPLE_LABEL
)
5500 if (gsi_end_p (gsi
))
5503 /* Split the statement list - avoid re-creating new containers as this
5504 brings ugly quadratic memory consumption in the inliner.
5505 (We are still quadratic since we need to update stmt BB pointers,
5507 gsi_split_seq_before (&gsi
, &list
);
5508 set_bb_seq (new_bb
, list
);
5509 for (gsi_tgt
= gsi_start (list
);
5510 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5511 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5517 /* Moves basic block BB after block AFTER. */
5520 gimple_move_block_after (basic_block bb
, basic_block after
)
5522 if (bb
->prev_bb
== after
)
5526 link_block (bb
, after
);
5532 /* Return TRUE if block BB has no executable statements, otherwise return
5536 gimple_empty_block_p (basic_block bb
)
5538 /* BB must have no executable statements. */
5539 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5542 if (gsi_end_p (gsi
))
5544 if (is_gimple_debug (gsi_stmt (gsi
)))
5545 gsi_next_nondebug (&gsi
);
5546 return gsi_end_p (gsi
);
5550 /* Split a basic block if it ends with a conditional branch and if the
5551 other part of the block is not empty. */
5554 gimple_split_block_before_cond_jump (basic_block bb
)
5556 gimple last
, split_point
;
5557 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5558 if (gsi_end_p (gsi
))
5560 last
= gsi_stmt (gsi
);
5561 if (gimple_code (last
) != GIMPLE_COND
5562 && gimple_code (last
) != GIMPLE_SWITCH
)
5564 gsi_prev_nondebug (&gsi
);
5565 split_point
= gsi_stmt (gsi
);
5566 return split_block (bb
, split_point
)->dest
;
5570 /* Return true if basic_block can be duplicated. */
5573 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
/* NOTE(review): this block is a lossy extraction of gimple_duplicate_bb --
   many original source lines (braces, declarations, returns) are missing and
   single statements are split across physical lines.  Code is kept
   byte-identical below.  Purpose (from the surviving comments): create a
   duplicate of basic block BB, copying PHI nodes and statements and creating
   new SSA definitions; SSA form is NOT preserved.  */
5578 /* Create a duplicate of the basic block BB.  NOTE: This does not
5579 preserve SSA form. */
5582 gimple_duplicate_bb (basic_block bb
)
5585 gimple_stmt_iterator gsi
, gsi_tgt
;
5586 gimple_seq phis
= phi_nodes (bb
);
5587 gimple phi
, stmt
, copy
;
/* The new block is placed just before the exit block of the function.  */
5589 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5591 /* Copy the PHI nodes.  We ignore PHI node arguments here because
5592 the incoming edges have not been setup yet. */
5593 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5595 phi
= gsi_stmt (gsi
);
5596 copy
= create_phi_node (NULL_TREE
, new_bb
);
5597 create_new_def_for (gimple_phi_result (phi
), copy
,
5598 gimple_phi_result_ptr (copy
));
5599 gimple_set_uid (copy
, gimple_uid (phi
));
/* Copy the ordinary statements, skipping labels (and label debug binds).  */
5602 gsi_tgt
= gsi_start_bb (new_bb
);
5603 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5605 def_operand_p def_p
;
5606 ssa_op_iter op_iter
;
5609 stmt
= gsi_stmt (gsi
);
5610 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5613 /* Don't duplicate label debug stmts. */
5614 if (gimple_debug_bind_p (stmt
)
5615 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5619 /* Create a new copy of STMT and duplicate STMT's virtual
5621 copy
= gimple_copy (stmt
);
5622 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5624 maybe_duplicate_eh_stmt (copy
, stmt
);
5625 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5627 /* When copying around a stmt writing into a local non-user
5628 aggregate, make sure it won't share stack slot with other
5630 lhs
= gimple_get_lhs (stmt
);
5631 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5633 tree base
= get_base_address (lhs
);
5635 && (TREE_CODE (base
) == VAR_DECL
5636 || TREE_CODE (base
) == RESULT_DECL
)
5637 && DECL_IGNORED_P (base
)
5638 && !TREE_STATIC (base
)
5639 && !DECL_EXTERNAL (base
)
5640 && (TREE_CODE (base
) != VAR_DECL
5641 || !DECL_HAS_VALUE_EXPR_P (base
)))
5642 DECL_NONSHAREABLE (base
) = 1;
5645 /* Create new names for all the definitions created by COPY and
5646 add replacement mappings for each new name. */
5647 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5648 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
/* NOTE(review): lossy extraction of add_phi_args_after_copy_edge -- original
   lines are missing and statements are split across physical lines; code is
   kept byte-identical.  Purpose: after block duplication, walk the PHI nodes
   of E_COPY->dest and add arguments for E_COPY, taken from the corresponding
   original edge E (mapping duplicated blocks back via get_bb_original).  */
5654 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
5657 add_phi_args_after_copy_edge (edge e_copy
)
5659 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5662 gimple phi
, phi_copy
;
5664 gimple_stmt_iterator psi
, psi_copy
;
/* Nothing to do if the destination has no PHI nodes.  */
5666 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
/* Map the copied source/destination blocks back to their originals.  */
5669 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5671 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5672 dest
= get_bb_original (e_copy
->dest
);
5674 dest
= e_copy
->dest
;
5676 e
= find_edge (bb
, dest
);
5679 /* During loop unrolling the target of the latch edge is copied.
5680 In this case we are not looking for edge to dest, but to
5681 duplicated block whose original was dest. */
5682 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5684 if ((e
->dest
->flags
& BB_DUPLICATED
)
5685 && get_bb_original (e
->dest
) == dest
)
5689 gcc_assert (e
!= NULL
);
/* Walk original and copied PHI chains in lock step.  */
5692 for (psi
= gsi_start_phis (e
->dest
),
5693 psi_copy
= gsi_start_phis (e_copy
->dest
);
5695 gsi_next (&psi
), gsi_next (&psi_copy
))
5697 phi
= gsi_stmt (psi
);
5698 phi_copy
= gsi_stmt (psi_copy
);
5699 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5700 add_phi_arg (phi_copy
, def
, e_copy
,
5701 gimple_phi_arg_location_from_edge (phi
, e
));
5706 /* Basic block BB_COPY was created by code duplication. Add phi node
5707 arguments for edges going out of BB_COPY. The blocks that were
5708 duplicated have BB_DUPLICATED set. */
5711 add_phi_args_after_copy_bb (basic_block bb_copy
)
5716 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5718 add_phi_args_after_copy_edge (e_copy
);
5722 /* Blocks in REGION_COPY array of length N_REGION were created by
5723 duplication of basic blocks. Add phi node arguments for edges
5724 going from these blocks. If E_COPY is not NULL, also add
5725 phi node arguments for its destination.*/
5728 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5733 for (i
= 0; i
< n_region
; i
++)
5734 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5736 for (i
= 0; i
< n_region
; i
++)
5737 add_phi_args_after_copy_bb (region_copy
[i
]);
5739 add_phi_args_after_copy_edge (e_copy
);
5741 for (i
= 0; i
< n_region
; i
++)
5742 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
/* NOTE(review): lossy extraction of gimple_duplicate_sese_region -- many
   original lines (returns, braces, some arguments) are missing and single
   statements are split across physical lines; code is kept byte-identical.
   Purpose (from the surviving comments): duplicate an SESE region of
   N_REGION blocks with single important exit EXIT, redirect ENTRY to the
   copy, scale profile counts/frequencies between original and copy, and
   optionally update dominance info.  Primary client is loop header
   copying.  */
5745 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5746 important exit edge EXIT.  By important we mean that no SSA name defined
5747 inside region is live over the other exit edges of the region.  All entry
5748 edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
5749 to the duplicate of the region.  Dominance and loop information is
5750 updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
5751 UPDATE_DOMINANCE is false then we assume that the caller will update the
5752 dominance information after calling this function.  The new basic
5753 blocks are stored to REGION_COPY in the same order as they had in REGION,
5754 provided that REGION_COPY is not NULL.
5755 The function returns false if it is unable to copy the region,
5759 gimple_duplicate_sese_region (edge entry
, edge exit
,
5760 basic_block
*region
, unsigned n_region
,
5761 basic_block
*region_copy
,
5762 bool update_dominance
)
5765 bool free_region_copy
= false, copying_header
= false;
5766 struct loop
*loop
= entry
->dest
->loop_father
;
5768 vec
<basic_block
> doms
;
5770 int total_freq
= 0, entry_freq
= 0;
5771 gcov_type total_count
= 0, entry_count
= 0;
5773 if (!can_copy_bbs_p (region
, n_region
))
5776 /* Some sanity checking.  Note that we do not check for all possible
5777 missuses of the functions.  I.e. if you ask to copy something weird,
5778 it will work, but the state of structures probably will not be
5780 for (i
= 0; i
< n_region
; i
++)
5782 /* We do not handle subloops, i.e. all the blocks must belong to the
5784 if (region
[i
]->loop_father
!= loop
)
5787 if (region
[i
] != entry
->dest
5788 && region
[i
] == loop
->header
)
5792 set_loop_copy (loop
, loop
);
5794 /* In case the function is used for loop header copying (which is the primary
5795 use), ensure that EXIT and its copy will be new latch and entry edges. */
5796 if (loop
->header
== entry
->dest
)
5798 copying_header
= true;
5799 set_loop_copy (loop
, loop_outer (loop
));
5801 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5804 for (i
= 0; i
< n_region
; i
++)
5805 if (region
[i
] != exit
->src
5806 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5812 region_copy
= XNEWVEC (basic_block
, n_region
);
5813 free_region_copy
= true;
5816 initialize_original_copy_tables ();
5818 /* Record blocks outside the region that are dominated by something
5820 if (update_dominance
)
5823 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
/* Capture profile counts/frequencies so the two copies can be scaled.  */
5826 if (entry
->dest
->count
)
5828 total_count
= entry
->dest
->count
;
5829 entry_count
= entry
->count
;
5830 /* Fix up corner cases, to avoid division by zero or creation of negative
5832 if (entry_count
> total_count
)
5833 entry_count
= total_count
;
5837 total_freq
= entry
->dest
->frequency
;
5838 entry_freq
= EDGE_FREQUENCY (entry
);
5839 /* Fix up corner cases, to avoid division by zero or creation of negative
5841 if (total_freq
== 0)
5843 else if (entry_freq
> total_freq
)
5844 entry_freq
= total_freq
;
5847 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5848 split_edge_bb_loc (entry
), update_dominance
);
5851 scale_bbs_frequencies_gcov_type (region
, n_region
,
5852 total_count
- entry_count
,
5854 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5859 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5861 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
/* When copying the loop header, the exit edge becomes the new latch/entry.  */
5866 loop
->header
= exit
->dest
;
5867 loop
->latch
= exit
->src
;
5870 /* Redirect the entry and add the phi node arguments. */
5871 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5872 gcc_assert (redirected
!= NULL
);
5873 flush_pending_stmts (entry
);
5875 /* Concerning updating of dominators:  We must recount dominators
5876 for entry block and its copy.  Anything that is outside of the
5877 region, but was dominated by something inside needs recounting as
5879 if (update_dominance
)
5881 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
5882 doms
.safe_push (get_bb_original (entry
->dest
));
5883 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5887 /* Add the other PHI node arguments. */
5888 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
5890 if (free_region_copy
)
5893 free_original_copy_tables ();
5897 /* Checks if BB is part of the region defined by N_REGION BBS. */
5899 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
5903 for (n
= 0; n
< n_region
; n
++)
/* NOTE(review): lossy extraction of gimple_duplicate_sese_tail -- many
   original lines are missing (returns, braces, several statements) and
   single statements are split across physical lines; code is kept
   byte-identical.  Purpose (from the surviving comments): duplicate the
   REGION, move the condition guarding EXIT into a new switch block at
   ENTRY, rewire the now-superfluous exit edges and PHIs, fix dominators
   and update the SSA web.  */
5911 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
5912 are stored to REGION_COPY in the same order in that they appear
5913 in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
5914 the region, EXIT an exit from it.  The condition guarding EXIT
5915 is moved to ENTRY.  Returns true if duplication succeeds, false
5941 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
5942 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
5943 basic_block
*region_copy ATTRIBUTE_UNUSED
)
5946 bool free_region_copy
= false;
5947 struct loop
*loop
= exit
->dest
->loop_father
;
5948 struct loop
*orig_loop
= entry
->dest
->loop_father
;
5949 basic_block switch_bb
, entry_bb
, nentry_bb
;
5950 vec
<basic_block
> doms
;
5951 int total_freq
= 0, exit_freq
= 0;
5952 gcov_type total_count
= 0, exit_count
= 0;
5953 edge exits
[2], nexits
[2], e
;
5954 gimple_stmt_iterator gsi
;
5957 basic_block exit_bb
;
5958 gimple_stmt_iterator psi
;
5961 struct loop
*target
, *aloop
, *cloop
;
/* EXIT->src must end in a two-way branch; record both outgoing edges.  */
5963 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
5965 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
5967 if (!can_copy_bbs_p (region
, n_region
))
5970 initialize_original_copy_tables ();
5971 set_loop_copy (orig_loop
, loop
);
/* Duplicate any subloops whose header lies inside the region.  */
5974 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
5976 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
5978 cloop
= duplicate_loop (aloop
, target
);
5979 duplicate_subloops (aloop
, cloop
);
5985 region_copy
= XNEWVEC (basic_block
, n_region
);
5986 free_region_copy
= true;
5989 gcc_assert (!need_ssa_update_p (cfun
));
5991 /* Record blocks outside the region that are dominated by something
5993 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
/* Capture profile counts/frequencies for scaling the two copies.  */
5995 if (exit
->src
->count
)
5997 total_count
= exit
->src
->count
;
5998 exit_count
= exit
->count
;
5999 /* Fix up corner cases, to avoid division by zero or creation of negative
6001 if (exit_count
> total_count
)
6002 exit_count
= total_count
;
6006 total_freq
= exit
->src
->frequency
;
6007 exit_freq
= EDGE_FREQUENCY (exit
);
6008 /* Fix up corner cases, to avoid division by zero or creation of negative
6010 if (total_freq
== 0)
6012 if (exit_freq
> total_freq
)
6013 exit_freq
= total_freq
;
6016 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6017 split_edge_bb_loc (exit
), true);
6020 scale_bbs_frequencies_gcov_type (region
, n_region
,
6021 total_count
- exit_count
,
6023 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6028 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6030 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6033 /* Create the switch block, and put the exit condition to it. */
6034 entry_bb
= entry
->dest
;
6035 nentry_bb
= get_bb_copy (entry_bb
);
6036 if (!last_stmt (entry
->src
)
6037 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6038 switch_bb
= entry
->src
;
6040 switch_bb
= split_edge (entry
);
6041 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6043 gsi
= gsi_last_bb (switch_bb
);
6044 cond_stmt
= last_stmt (exit
->src
);
6045 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6046 cond_stmt
= gimple_copy (cond_stmt
);
6048 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6050 sorig
= single_succ_edge (switch_bb
);
6051 sorig
->flags
= exits
[1]->flags
;
6052 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6054 /* Register the new edge from SWITCH_BB in loop exit lists. */
6055 rescan_loop_exit (snew
, true, false);
6057 /* Add the PHI node arguments. */
6058 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6060 /* Get rid of now superfluous conditions and associated edges (and phi node
6062 exit_bb
= exit
->dest
;
6064 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6065 PENDING_STMT (e
) = NULL
;
6067 /* The latch of ORIG_LOOP was copied, and so was the backedge
6068 to the original header.  We redirect this backedge to EXIT_BB. */
6069 for (i
= 0; i
< n_region
; i
++)
6070 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6072 gcc_assert (single_succ_edge (region_copy
[i
]));
6073 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6074 PENDING_STMT (e
) = NULL
;
6075 for (psi
= gsi_start_phis (exit_bb
);
6079 phi
= gsi_stmt (psi
);
6080 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6081 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6084 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6085 PENDING_STMT (e
) = NULL
;
6087 /* Anything that is outside of the region, but was dominated by something
6088 inside needs to update dominance info. */
6089 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6091 /* Update the SSA web. */
6092 update_ssa (TODO_update_ssa
);
6094 if (free_region_copy
)
6097 free_original_copy_tables ();
6101 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6102 adding blocks when the dominator traversal reaches EXIT. This
6103 function silently assumes that ENTRY strictly dominates EXIT. */
6106 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6107 vec
<basic_block
> *bbs_p
)
6111 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6113 son
= next_dom_son (CDI_DOMINATORS
, son
))
6115 bbs_p
->safe_push (son
);
6117 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
/* NOTE(review): lossy extraction of replace_by_duplicate_decl -- original
   lines (braces, some conditionals, return paths) are missing and single
   statements are split across physical lines; code is kept byte-identical.
   Purpose: replace *TP with a duplicate decl belonging to TO_CONTEXT,
   memoizing the old->new mapping in VARS_MAP.  VAR_DECLs are copied via
   copy_var_decl and registered as locals of the destination function;
   otherwise a CONST_DECL is expected and copied with copy_node.  */
6121 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6122 The duplicates are recorded in VARS_MAP. */
6125 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6128 tree t
= *tp
, new_t
;
6129 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
/* Decls already owned by the destination context need no duplicate.  */
6132 if (DECL_CONTEXT (t
) == to_context
)
6135 loc
= pointer_map_contains (vars_map
, t
);
6139 loc
= pointer_map_insert (vars_map
, t
);
6143 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6144 add_local_decl (f
, new_t
);
6148 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6149 new_t
= copy_node (t
);
6151 DECL_CONTEXT (new_t
) = to_context
;
/* Cache hit: reuse the previously created duplicate.  */
6156 new_t
= (tree
) *loc
;
/* NOTE(review): lossy extraction of replace_ssa_name -- original lines are
   missing and statements are split across physical lines; code is kept
   byte-identical.  Purpose: create (or reuse from VARS_MAP) an SSA name in
   TO_CONTEXT equivalent to NAME, duplicating its underlying decl when it
   has one and preserving default-def status.  */
6162 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6163 VARS_MAP maps old ssa names and var_decls to the new ones. */
6166 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
/* Virtual operands are not moved; they are recomputed for the new fn.  */
6172 gcc_assert (!virtual_operand_p (name
));
6174 loc
= pointer_map_contains (vars_map
, name
);
6178 tree decl
= SSA_NAME_VAR (name
);
6181 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6182 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6183 decl
, SSA_NAME_DEF_STMT (name
));
6184 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6185 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
/* Anonymous SSA names are copied without a backing decl.  */
6189 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6190 name
, SSA_NAME_DEF_STMT (name
));
6192 loc
= pointer_map_insert (vars_map
, name
);
/* Cache hit: reuse the previously created name.  */
6196 new_name
= (tree
) *loc
;
/* NOTE(review): fragment of a struct body -- the struct's declaration line
   is not visible in this extraction.  Judging by the accesses in
   move_stmt_op / move_stmt_eh_region_nr below (p->vars_map,
   p->new_label_map, p->eh_map via struct move_stmt_d *), these are fields
   of struct move_stmt_d: the decl map, the label remap table and the EH
   region map used while moving statements between functions -- TODO
   confirm against the full source.  */
6207 struct pointer_map_t
*vars_map
;
6208 htab_t new_label_map
;
6209 struct pointer_map_t
*eh_map
;
/* NOTE(review): lossy extraction of move_stmt_op -- original lines (braces,
   some conditions, returns) are missing and statements are split across
   physical lines; code is kept byte-identical.  Purpose: walk_tree callback
   used by move_block_to_fn; retargets TREE_BLOCK from ORIG_BLOCK to
   NEW_BLOCK, remaps SSA names and labels, and replaces movable local
   VAR_DECLs/CONST_DECLs with duplicates in the destination function.  */
6213 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6214 contained in *TP if it has been ORIG_BLOCK previously and change the
6215 DECL_CONTEXT of every local variable referenced in *TP. */
6218 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6220 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6221 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6226 tree block
= TREE_BLOCK (t
);
6227 if (block
== p
->orig_block
6228 || (p
->orig_block
== NULL_TREE
6229 && block
!= NULL_TREE
))
6230 TREE_SET_BLOCK (t
, p
->new_block
);
6231 #ifdef ENABLE_CHECKING
6232 else if (block
!= NULL_TREE
)
/* Checking build: BLOCK must be a subblock of ORIG_BLOCK.  */
6234 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6235 block
= BLOCK_SUPERCONTEXT (block
);
6236 gcc_assert (block
== p
->orig_block
);
6240 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6242 if (TREE_CODE (t
) == SSA_NAME
)
6243 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6244 else if (TREE_CODE (t
) == LABEL_DECL
)
6246 if (p
->new_label_map
)
6248 struct tree_map in
, *out
;
6250 out
= (struct tree_map
*)
6251 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6256 DECL_CONTEXT (t
) = p
->to_context
;
6258 else if (p
->remap_decls_p
)
6260 /* Replace T with its duplicate.  T should no longer appear in the
6261 parent function, so this looks wasteful; however, it may appear
6262 in referenced_vars, and more importantly, as virtual operands of
6263 statements, and in alias lists of other variables.  It would be
6264 quite difficult to expunge it from all those places.  ??? It might
6265 suffice to do this for addressable variables. */
6266 if ((TREE_CODE (t
) == VAR_DECL
6267 && !is_global_var (t
))
6268 || TREE_CODE (t
) == CONST_DECL
)
6269 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6273 else if (TYPE_P (t
))
6279 /* Helper for move_stmt_r. Given an EH region number for the source
6280 function, map that to the duplicate EH regio number in the dest. */
6283 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6285 eh_region old_r
, new_r
;
6288 old_r
= get_eh_region_from_number (old_nr
);
6289 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6290 new_r
= (eh_region
) *slot
;
6292 return new_r
->index
;
6295 /* Similar, but operate on INTEGER_CSTs. */
6298 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6302 old_nr
= tree_to_shwi (old_t_nr
);
6303 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6305 return build_int_cst (integer_type_node
, new_nr
);
/* NOTE(review): lossy extraction of move_stmt_r -- original lines (case
   labels, braces, defaults) are missing and statements are split across
   physical lines; code is kept byte-identical.  Purpose: walk_gimple_stmt
   callback; retargets GIMPLE_BLOCK, remaps EH region numbers embedded in
   calls/resx/eh_dispatch, and suppresses decl remapping inside OMP
   directive headers.  */
6308 /* Like move_stmt_op, but for gimple statements.
6310 Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6311 contained in the current statement in *GSI_P and change the
6312 DECL_CONTEXT of every local variable referenced in the current
6316 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6317 struct walk_stmt_info
*wi
)
6319 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6320 gimple stmt
= gsi_stmt (*gsi_p
);
6321 tree block
= gimple_block (stmt
);
6323 if (block
== p
->orig_block
6324 || (p
->orig_block
== NULL_TREE
6325 && block
!= NULL_TREE
))
6326 gimple_set_block (stmt
, p
->new_block
);
6328 switch (gimple_code (stmt
))
6331 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6333 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6334 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6335 switch (DECL_FUNCTION_CODE (fndecl
))
6337 case BUILT_IN_EH_COPY_VALUES
:
6338 r
= gimple_call_arg (stmt
, 1);
6339 r
= move_stmt_eh_region_tree_nr (r
, p
);
6340 gimple_call_set_arg (stmt
, 1, r
);
6343 case BUILT_IN_EH_POINTER
:
6344 case BUILT_IN_EH_FILTER
:
6345 r
= gimple_call_arg (stmt
, 0);
6346 r
= move_stmt_eh_region_tree_nr (r
, p
);
6347 gimple_call_set_arg (stmt
, 0, r
);
/* GIMPLE_RESX: remap the raw region number.  */
6358 int r
= gimple_resx_region (stmt
);
6359 r
= move_stmt_eh_region_nr (r
, p
);
6360 gimple_resx_set_region (stmt
, r
);
6364 case GIMPLE_EH_DISPATCH
:
6366 int r
= gimple_eh_dispatch_region (stmt
);
6367 r
= move_stmt_eh_region_nr (r
, p
);
6368 gimple_eh_dispatch_set_region (stmt
, r
);
6372 case GIMPLE_OMP_RETURN
:
6373 case GIMPLE_OMP_CONTINUE
:
6376 if (is_gimple_omp (stmt
))
6378 /* Do not remap variables inside OMP directives.  Variables
6379 referenced in clauses and directive header belong to the
6380 parent function and should not be moved into the child
6382 bool save_remap_decls_p
= p
->remap_decls_p
;
6383 p
->remap_decls_p
= false;
6384 *handled_ops_p
= true;
6386 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6389 p
->remap_decls_p
= save_remap_decls_p
;
/* NOTE(review): lossy extraction of move_block_to_fn -- many original lines
   (braces, else branches, loop headers) are missing and statements are
   split across physical lines; code is kept byte-identical.  Purpose: move
   basic block BB out of the current function's CFG (dominators, linked
   list, block array, EH tables, histograms) into DEST_CFUN's CFG, remapping
   PHI results/args, statement operands, labels and goto locations via the
   mapping in D.  */
6397 /* Move basic block BB from function CFUN to function DEST_FN.  The
6398 block is moved out of the original linked list and placed after
6399 block AFTER in the new list.  Also, the block is removed from the
6400 original array of blocks and placed in DEST_FN's array of blocks.
6401 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6402 updated to reflect the moved edges.
6404 The local variables are remapped to new instances, VARS_MAP is used
6405 to record the mapping. */
6408 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6409 basic_block after
, bool update_edge_count_p
,
6410 struct move_stmt_d
*d
)
6412 struct control_flow_graph
*cfg
;
6415 gimple_stmt_iterator si
;
6416 unsigned old_len
, new_len
;
6418 /* Remove BB from dominance structures. */
6419 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6421 /* Move BB from its current loop to the copy in the new function. */
6424 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6426 bb
->loop_father
= new_loop
;
6429 /* Link BB to the new linked list. */
6430 move_block_after (bb
, after
);
6432 /* Update the edge count in the corresponding flowgraphs. */
6433 if (update_edge_count_p
)
6434 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6436 cfun
->cfg
->x_n_edges
--;
6437 dest_cfun
->cfg
->x_n_edges
++;
6440 /* Remove BB from the original basic block array. */
6441 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6442 cfun
->cfg
->x_n_basic_blocks
--;
6444 /* Grow DEST_CFUN's basic block array if needed. */
6445 cfg
= dest_cfun
->cfg
;
6446 cfg
->x_n_basic_blocks
++;
6447 if (bb
->index
>= cfg
->x_last_basic_block
)
6448 cfg
->x_last_basic_block
= bb
->index
+ 1;
6450 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6451 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
/* Grow by a quarter beyond the needed size to amortize reallocation.  */
6453 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6454 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6457 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6459 /* Remap the variables in phi nodes. */
6460 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6462 gimple phi
= gsi_stmt (si
);
6464 tree op
= PHI_RESULT (phi
);
6468 if (virtual_operand_p (op
))
6470 /* Remove the phi nodes for virtual operands (alias analysis will be
6471 run for the new function, anyway). */
6472 remove_phi_node (&si
, true);
6476 SET_PHI_RESULT (phi
,
6477 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6478 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6480 op
= USE_FROM_PTR (use
);
6481 if (TREE_CODE (op
) == SSA_NAME
)
6482 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
/* Retarget PHI argument locations to the new lexical block.  */
6485 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6487 location_t locus
= gimple_phi_arg_location (phi
, i
);
6488 tree block
= LOCATION_BLOCK (locus
);
6490 if (locus
== UNKNOWN_LOCATION
)
6492 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6494 if (d
->new_block
== NULL_TREE
)
6495 locus
= LOCATION_LOCUS (locus
);
6497 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6498 gimple_phi_arg_set_location (phi
, i
, locus
);
/* Remap the operands of every ordinary statement, and move labels, EH
   info and profile histograms over to DEST_CFUN.  */
6505 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6507 gimple stmt
= gsi_stmt (si
);
6508 struct walk_stmt_info wi
;
6510 memset (&wi
, 0, sizeof (wi
));
6512 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6514 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6516 tree label
= gimple_label_label (stmt
);
6517 int uid
= LABEL_DECL_UID (label
);
6519 gcc_assert (uid
> -1);
6521 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6522 if (old_len
<= (unsigned) uid
)
6524 new_len
= 3 * uid
/ 2 + 1;
6525 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6528 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6529 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6531 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6533 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6534 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6537 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6538 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6540 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6541 gimple_remove_stmt_histograms (cfun
, stmt
);
6543 /* We cannot leave any operands allocated from the operand caches of
6544 the current function. */
6545 free_stmt_operands (cfun
, stmt
);
6546 push_cfun (dest_cfun
);
/* Retarget goto locations on outgoing edges to the new block.  */
6551 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6552 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6554 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6555 if (d
->orig_block
== NULL_TREE
6556 || block
== d
->orig_block
)
6557 e
->goto_locus
= d
->new_block
?
6558 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6559 LOCATION_LOCUS (e
->goto_locus
);
/* NOTE(review): lossy extraction of find_outermost_region_in_block --
   original lines (braces, some conditionals, the return) are missing and
   statements are split across physical lines; code is kept byte-identical.
   Purpose: scan BB's statements in SRC_CFUN and widen REGION to the
   outermost EH region that covers them all.  */
6563 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6564 the outermost EH region.  Use REGION as the incoming base EH region. */
6567 find_outermost_region_in_block (struct function
*src_cfun
,
6568 basic_block bb
, eh_region region
)
6570 gimple_stmt_iterator si
;
6572 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6574 gimple stmt
= gsi_stmt (si
);
6575 eh_region stmt_region
;
6578 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6579 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
/* First region found becomes the base; later ones widen it.  */
6583 region
= stmt_region
;
6584 else if (stmt_region
!= region
)
6586 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6587 gcc_assert (region
!= NULL
);
6596 new_label_mapper (tree decl
, void *data
)
6598 htab_t hash
= (htab_t
) data
;
6602 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6604 m
= XNEW (struct tree_map
);
6605 m
->hash
= DECL_UID (decl
);
6606 m
->base
.from
= decl
;
6607 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6608 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6609 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6610 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6612 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6613 gcc_assert (*slot
== NULL
);
/* NOTE(review): lossy extraction of replace_block_vars_by_duplicates --
   original lines (braces, some statements) are missing and statements are
   split across physical lines; code is kept byte-identical.  Purpose:
   rewrite BLOCK_VARS of BLOCK (and, recursively, of its subblocks) to the
   duplicates recorded in VARS_MAP, carrying over DECL_VALUE_EXPR and the
   chain links.  */
6620 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6624 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6629 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
/* Only VAR_DECLs and CONST_DECLs are duplicated; skip the rest.  */
6632 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6634 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6637 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6639 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6640 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6642 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
/* Recurse into nested lexical blocks.  */
6647 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6648 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6651 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6655 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6658 /* Discard it from the old loop array. */
6659 (*get_loops (fn1
))[loop
->num
] = NULL
;
6661 /* Place it in the new loop array, assigning it a new number. */
6662 loop
->num
= number_of_loops (fn2
);
6663 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6665 /* Recurse to children. */
6666 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6667 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6670 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6671 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6672 single basic block in the original CFG and the new basic block is
6673 returned. DEST_CFUN must not have a CFG yet.
6675 Note that the region need not be a pure SESE region. Blocks inside
6676 the region may contain calls to abort/exit. The only restriction
6677 is that ENTRY_BB should be the only entry point and it must
6680 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6681 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6682 to the new function.
6684 All local variables referenced in the region are assumed to be in
6685 the corresponding BLOCK_VARS and unexpanded variable lists
6686 associated with DEST_CFUN. */
6689 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6690 basic_block exit_bb
, tree orig_block
)
6692 vec
<basic_block
> bbs
, dom_bbs
;
6693 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6694 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6695 struct function
*saved_cfun
= cfun
;
6696 int *entry_flag
, *exit_flag
;
6697 unsigned *entry_prob
, *exit_prob
;
6698 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6701 htab_t new_label_map
;
6702 struct pointer_map_t
*vars_map
, *eh_map
;
6703 struct loop
*loop
= entry_bb
->loop_father
;
6704 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6705 struct move_stmt_d d
;
6707 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6709 gcc_assert (entry_bb
!= exit_bb
6711 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6713 /* Collect all the blocks in the region. Manually add ENTRY_BB
6714 because it won't be added by dfs_enumerate_from. */
6716 bbs
.safe_push (entry_bb
);
6717 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6719 /* The blocks that used to be dominated by something in BBS will now be
6720 dominated by the new block. */
6721 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6725 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6726 the predecessor edges to ENTRY_BB and the successor edges to
6727 EXIT_BB so that we can re-attach them to the new basic block that
6728 will replace the region. */
6729 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6730 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6731 entry_flag
= XNEWVEC (int, num_entry_edges
);
6732 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6734 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6736 entry_prob
[i
] = e
->probability
;
6737 entry_flag
[i
] = e
->flags
;
6738 entry_pred
[i
++] = e
->src
;
6744 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6745 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6746 exit_flag
= XNEWVEC (int, num_exit_edges
);
6747 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6749 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6751 exit_prob
[i
] = e
->probability
;
6752 exit_flag
[i
] = e
->flags
;
6753 exit_succ
[i
++] = e
->dest
;
6765 /* Switch context to the child function to initialize DEST_FN's CFG. */
6766 gcc_assert (dest_cfun
->cfg
== NULL
);
6767 push_cfun (dest_cfun
);
6769 init_empty_tree_cfg ();
6771 /* Initialize EH information for the new function. */
6773 new_label_map
= NULL
;
6776 eh_region region
= NULL
;
6778 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6779 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6781 init_eh_for_function ();
6784 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6785 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6786 new_label_mapper
, new_label_map
);
6790 /* Initialize an empty loop tree. */
6791 struct loops
*loops
= ggc_alloc_cleared_loops ();
6792 init_loops_structure (dest_cfun
, loops
, 1);
6793 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6794 set_loops_for_fn (dest_cfun
, loops
);
6796 /* Move the outlined loop tree part. */
6797 num_nodes
= bbs
.length ();
6798 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6800 if (bb
->loop_father
->header
== bb
)
6802 struct loop
*this_loop
= bb
->loop_father
;
6803 struct loop
*outer
= loop_outer (this_loop
);
6805 /* If the SESE region contains some bbs ending with
6806 a noreturn call, those are considered to belong
6807 to the outermost loop in saved_cfun, rather than
6808 the entry_bb's loop_father. */
6812 num_nodes
-= this_loop
->num_nodes
;
6813 flow_loop_tree_node_remove (bb
->loop_father
);
6814 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6815 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6818 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6821 /* Remove loop exits from the outlined region. */
6822 if (loops_for_fn (saved_cfun
)->exits
)
6823 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6825 void **slot
= htab_find_slot_with_hash
6826 (loops_for_fn (saved_cfun
)->exits
, e
,
6827 htab_hash_pointer (e
), NO_INSERT
);
6829 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6834 /* Adjust the number of blocks in the tree root of the outlined part. */
6835 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6837 /* Setup a mapping to be used by move_block_to_fn. */
6838 loop
->aux
= current_loops
->tree_root
;
6839 loop0
->aux
= current_loops
->tree_root
;
6843 /* Move blocks from BBS into DEST_CFUN. */
6844 gcc_assert (bbs
.length () >= 2);
6845 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6846 vars_map
= pointer_map_create ();
6848 memset (&d
, 0, sizeof (d
));
6849 d
.orig_block
= orig_block
;
6850 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6851 d
.from_context
= cfun
->decl
;
6852 d
.to_context
= dest_cfun
->decl
;
6853 d
.vars_map
= vars_map
;
6854 d
.new_label_map
= new_label_map
;
6856 d
.remap_decls_p
= true;
6858 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6860 /* No need to update edge counts on the last block. It has
6861 already been updated earlier when we detached the region from
6862 the original CFG. */
6863 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
6869 /* Loop sizes are no longer correct, fix them up. */
6870 loop
->num_nodes
-= num_nodes
;
6871 for (struct loop
*outer
= loop_outer (loop
);
6872 outer
; outer
= loop_outer (outer
))
6873 outer
->num_nodes
-= num_nodes
;
6874 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
6876 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vect_loops
)
6879 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
6884 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
6886 dest_cfun
->has_simduid_loops
= true;
6888 if (aloop
->force_vect
)
6889 dest_cfun
->has_force_vect_loops
= true;
6893 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6897 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6899 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6900 = BLOCK_SUBBLOCKS (orig_block
);
6901 for (block
= BLOCK_SUBBLOCKS (orig_block
);
6902 block
; block
= BLOCK_CHAIN (block
))
6903 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
6904 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
6907 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
6908 vars_map
, dest_cfun
->decl
);
6911 htab_delete (new_label_map
);
6913 pointer_map_destroy (eh_map
);
6914 pointer_map_destroy (vars_map
);
6916 /* Rewire the entry and exit blocks. The successor to the entry
6917 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6918 the child function. Similarly, the predecessor of DEST_FN's
6919 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6920 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6921 various CFG manipulation function get to the right CFG.
6923 FIXME, this is silly. The CFG ought to become a parameter to
6925 push_cfun (dest_cfun
);
6926 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
6928 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
6931 /* Back in the original function, the SESE region has disappeared,
6932 create a new basic block in its place. */
6933 bb
= create_empty_bb (entry_pred
[0]);
6935 add_bb_to_loop (bb
, loop
);
6936 for (i
= 0; i
< num_entry_edges
; i
++)
6938 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
6939 e
->probability
= entry_prob
[i
];
6942 for (i
= 0; i
< num_exit_edges
; i
++)
6944 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
6945 e
->probability
= exit_prob
[i
];
6948 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
6949 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
6950 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
6968 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6972 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
6974 tree arg
, var
, old_current_fndecl
= current_function_decl
;
6975 struct function
*dsf
;
6976 bool ignore_topmost_bind
= false, any_var
= false;
6979 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
6980 && decl_is_tm_clone (fndecl
));
6981 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
6983 current_function_decl
= fndecl
;
6984 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
6986 arg
= DECL_ARGUMENTS (fndecl
);
6989 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
6990 fprintf (file
, " ");
6991 print_generic_expr (file
, arg
, dump_flags
);
6992 if (flags
& TDF_VERBOSE
)
6993 print_node (file
, "", arg
, 4);
6994 if (DECL_CHAIN (arg
))
6995 fprintf (file
, ", ");
6996 arg
= DECL_CHAIN (arg
);
6998 fprintf (file
, ")\n");
7000 if (flags
& TDF_VERBOSE
)
7001 print_node (file
, "", fndecl
, 2);
7003 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7004 if (dsf
&& (flags
& TDF_EH
))
7005 dump_eh_tree (file
, dsf
);
7007 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7009 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7010 current_function_decl
= old_current_fndecl
;
7014 /* When GIMPLE is lowered, the variables are no longer available in
7015 BIND_EXPRs, so display them separately. */
7016 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7019 ignore_topmost_bind
= true;
7021 fprintf (file
, "{\n");
7022 if (!vec_safe_is_empty (fun
->local_decls
))
7023 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7025 print_generic_decl (file
, var
, flags
);
7026 if (flags
& TDF_VERBOSE
)
7027 print_node (file
, "", var
, 4);
7028 fprintf (file
, "\n");
7032 if (gimple_in_ssa_p (cfun
))
7033 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7035 tree name
= ssa_name (ix
);
7036 if (name
&& !SSA_NAME_VAR (name
))
7038 fprintf (file
, " ");
7039 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7040 fprintf (file
, " ");
7041 print_generic_expr (file
, name
, flags
);
7042 fprintf (file
, ";\n");
7049 if (fun
&& fun
->decl
== fndecl
7051 && basic_block_info_for_function (fun
))
7053 /* If the CFG has been built, emit a CFG-based dump. */
7054 if (!ignore_topmost_bind
)
7055 fprintf (file
, "{\n");
7057 if (any_var
&& n_basic_blocks_for_fn (fun
))
7058 fprintf (file
, "\n");
7060 FOR_EACH_BB_FN (bb
, fun
)
7061 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7063 fprintf (file
, "}\n");
7065 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7067 /* The function is now in GIMPLE form but the CFG has not been
7068 built yet. Emit the single sequence of GIMPLE statements
7069 that make up its body. */
7070 gimple_seq body
= gimple_body (fndecl
);
7072 if (gimple_seq_first_stmt (body
)
7073 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7074 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7075 print_gimple_seq (file
, body
, 0, flags
);
7078 if (!ignore_topmost_bind
)
7079 fprintf (file
, "{\n");
7082 fprintf (file
, "\n");
7084 print_gimple_seq (file
, body
, 2, flags
);
7085 fprintf (file
, "}\n");
7092 /* Make a tree based dump. */
7093 chain
= DECL_SAVED_TREE (fndecl
);
7094 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7096 if (ignore_topmost_bind
)
7098 chain
= BIND_EXPR_BODY (chain
);
7106 if (!ignore_topmost_bind
)
7107 fprintf (file
, "{\n");
7112 fprintf (file
, "\n");
7114 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7115 if (ignore_topmost_bind
)
7116 fprintf (file
, "}\n");
7119 if (flags
& TDF_ENUMERATE_LOCALS
)
7120 dump_enumerated_decls (file
, flags
);
7121 fprintf (file
, "\n\n");
7123 current_function_decl
= old_current_fndecl
;
7126 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7129 debug_function (tree fn
, int flags
)
7131 dump_function_to_file (fn
, stderr
, flags
);
7135 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7138 print_pred_bbs (FILE *file
, basic_block bb
)
7143 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7144 fprintf (file
, "bb_%d ", e
->src
->index
);
7148 /* Print on FILE the indexes for the successors of basic_block BB. */
7151 print_succ_bbs (FILE *file
, basic_block bb
)
7156 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7157 fprintf (file
, "bb_%d ", e
->dest
->index
);
7160 /* Print to FILE the basic block BB following the VERBOSITY level. */
7163 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7165 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7166 memset ((void *) s_indent
, ' ', (size_t) indent
);
7167 s_indent
[indent
] = '\0';
7169 /* Print basic_block's header. */
7172 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7173 print_pred_bbs (file
, bb
);
7174 fprintf (file
, "}, succs = {");
7175 print_succ_bbs (file
, bb
);
7176 fprintf (file
, "})\n");
7179 /* Print basic_block's body. */
7182 fprintf (file
, "%s {\n", s_indent
);
7183 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7184 fprintf (file
, "%s }\n", s_indent
);
7188 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7190 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7191 VERBOSITY level this outputs the contents of the loop, or just its
7195 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7203 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7204 memset ((void *) s_indent
, ' ', (size_t) indent
);
7205 s_indent
[indent
] = '\0';
7207 /* Print loop's header. */
7208 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7210 fprintf (file
, "header = %d", loop
->header
->index
);
7213 fprintf (file
, "deleted)\n");
7217 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7219 fprintf (file
, ", multiple latches");
7220 fprintf (file
, ", niter = ");
7221 print_generic_expr (file
, loop
->nb_iterations
, 0);
7223 if (loop
->any_upper_bound
)
7225 fprintf (file
, ", upper_bound = ");
7226 print_decu (loop
->nb_iterations_upper_bound
, file
);
7229 if (loop
->any_estimate
)
7231 fprintf (file
, ", estimate = ");
7232 print_decu (loop
->nb_iterations_estimate
, file
);
7234 fprintf (file
, ")\n");
7236 /* Print loop's body. */
7239 fprintf (file
, "%s{\n", s_indent
);
7241 if (bb
->loop_father
== loop
)
7242 print_loops_bb (file
, bb
, indent
, verbosity
);
7244 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7245 fprintf (file
, "%s}\n", s_indent
);
7249 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7250 spaces. Following VERBOSITY level this outputs the contents of the
7251 loop, or just its structure. */
7254 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7260 print_loop (file
, loop
, indent
, verbosity
);
7261 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7264 /* Follow a CFG edge from the entry point of the program, and on entry
7265 of a loop, pretty print the loop structure on FILE. */
7268 print_loops (FILE *file
, int verbosity
)
7272 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7273 if (bb
&& bb
->loop_father
)
7274 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7280 debug (struct loop
&ref
)
7282 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7286 debug (struct loop
*ptr
)
7291 fprintf (stderr
, "<nil>\n");
7294 /* Dump a loop verbosely. */
7297 debug_verbose (struct loop
&ref
)
7299 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7303 debug_verbose (struct loop
*ptr
)
7308 fprintf (stderr
, "<nil>\n");
7312 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7315 debug_loops (int verbosity
)
7317 print_loops (stderr
, verbosity
);
7320 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7323 debug_loop (struct loop
*loop
, int verbosity
)
7325 print_loop (stderr
, loop
, 0, verbosity
);
7328 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7332 debug_loop_num (unsigned num
, int verbosity
)
7334 debug_loop (get_loop (cfun
, num
), verbosity
);
7337 /* Return true if BB ends with a call, possibly followed by some
7338 instructions that must stay with the call. Return false,
7342 gimple_block_ends_with_call_p (basic_block bb
)
7344 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7345 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7349 /* Return true if BB ends with a conditional branch. Return false,
7353 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7355 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7356 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7360 /* Return true if we need to add fake edge to exit at statement T.
7361 Helper function for gimple_flow_call_edges_add. */
7364 need_fake_edge_p (gimple t
)
7366 tree fndecl
= NULL_TREE
;
7369 /* NORETURN and LONGJMP calls already have an edge to exit.
7370 CONST and PURE calls do not need one.
7371 We don't currently check for CONST and PURE here, although
7372 it would be a good idea, because those attributes are
7373 figured out from the RTL in mark_constant_function, and
7374 the counter incrementation code from -fprofile-arcs
7375 leads to different results from -fbranch-probabilities. */
7376 if (is_gimple_call (t
))
7378 fndecl
= gimple_call_fndecl (t
);
7379 call_flags
= gimple_call_flags (t
);
7382 if (is_gimple_call (t
)
7384 && DECL_BUILT_IN (fndecl
)
7385 && (call_flags
& ECF_NOTHROW
)
7386 && !(call_flags
& ECF_RETURNS_TWICE
)
7387 /* fork() doesn't really return twice, but the effect of
7388 wrapping it in __gcov_fork() which calls __gcov_flush()
7389 and clears the counters before forking has the same
7390 effect as returning twice. Force a fake edge. */
7391 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7392 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7395 if (is_gimple_call (t
))
7401 if (!(call_flags
& ECF_NORETURN
))
7405 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7406 if ((e
->flags
& EDGE_FAKE
) == 0)
7410 if (gimple_code (t
) == GIMPLE_ASM
7411 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7418 /* Add fake edges to the function exit for any non constant and non
7419 noreturn calls (or noreturn calls with EH/abnormal edges),
7420 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7421 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7424 The goal is to expose cases in which entering a basic block does
7425 not imply that all subsequent instructions must be executed. */
7428 gimple_flow_call_edges_add (sbitmap blocks
)
7431 int blocks_split
= 0;
7432 int last_bb
= last_basic_block
;
7433 bool check_last_block
= false;
7435 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7439 check_last_block
= true;
7441 check_last_block
= bitmap_bit_p (blocks
,
7442 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7444 /* In the last basic block, before epilogue generation, there will be
7445 a fallthru edge to EXIT. Special care is required if the last insn
7446 of the last basic block is a call because make_edge folds duplicate
7447 edges, which would result in the fallthru edge also being marked
7448 fake, which would result in the fallthru edge being removed by
7449 remove_fake_edges, which would result in an invalid CFG.
7451 Moreover, we can't elide the outgoing fake edge, since the block
7452 profiler needs to take this into account in order to solve the minimal
7453 spanning tree in the case that the call doesn't return.
7455 Handle this by adding a dummy instruction in a new last basic block. */
7456 if (check_last_block
)
7458 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7459 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7462 if (!gsi_end_p (gsi
))
7465 if (t
&& need_fake_edge_p (t
))
7469 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7472 gsi_insert_on_edge (e
, gimple_build_nop ());
7473 gsi_commit_edge_inserts ();
7478 /* Now add fake edges to the function exit for any non constant
7479 calls since there is no way that we can determine if they will
7481 for (i
= 0; i
< last_bb
; i
++)
7483 basic_block bb
= BASIC_BLOCK (i
);
7484 gimple_stmt_iterator gsi
;
7485 gimple stmt
, last_stmt
;
7490 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7493 gsi
= gsi_last_nondebug_bb (bb
);
7494 if (!gsi_end_p (gsi
))
7496 last_stmt
= gsi_stmt (gsi
);
7499 stmt
= gsi_stmt (gsi
);
7500 if (need_fake_edge_p (stmt
))
7504 /* The handling above of the final block before the
7505 epilogue should be enough to verify that there is
7506 no edge to the exit block in CFG already.
7507 Calling make_edge in such case would cause us to
7508 mark that edge as fake and remove it later. */
7509 #ifdef ENABLE_CHECKING
7510 if (stmt
== last_stmt
)
7512 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7513 gcc_assert (e
== NULL
);
7517 /* Note that the following may create a new basic block
7518 and renumber the existing basic blocks. */
7519 if (stmt
!= last_stmt
)
7521 e
= split_block (bb
, stmt
);
7525 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7529 while (!gsi_end_p (gsi
));
7534 verify_flow_info ();
7536 return blocks_split
;
7539 /* Removes edge E and all the blocks dominated by it, and updates dominance
7540 information. The IL in E->src needs to be updated separately.
7541 If dominance info is not available, only the edge E is removed.*/
7544 remove_edge_and_dominated_blocks (edge e
)
7546 vec
<basic_block
> bbs_to_remove
= vNULL
;
7547 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7551 bool none_removed
= false;
7553 basic_block bb
, dbb
;
7556 if (!dom_info_available_p (CDI_DOMINATORS
))
7562 /* No updating is needed for edges to exit. */
7563 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7565 if (cfgcleanup_altered_bbs
)
7566 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7571 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7572 that is not dominated by E->dest, then this set is empty. Otherwise,
7573 all the basic blocks dominated by E->dest are removed.
7575 Also, to DF_IDOM we store the immediate dominators of the blocks in
7576 the dominance frontier of E (i.e., of the successors of the
7577 removed blocks, if there are any, and of E->dest otherwise). */
7578 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7583 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7585 none_removed
= true;
7590 df
= BITMAP_ALLOC (NULL
);
7591 df_idom
= BITMAP_ALLOC (NULL
);
7594 bitmap_set_bit (df_idom
,
7595 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7598 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7599 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7601 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7603 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7604 bitmap_set_bit (df
, f
->dest
->index
);
7607 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7608 bitmap_clear_bit (df
, bb
->index
);
7610 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7612 bb
= BASIC_BLOCK (i
);
7613 bitmap_set_bit (df_idom
,
7614 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7618 if (cfgcleanup_altered_bbs
)
7620 /* Record the set of the altered basic blocks. */
7621 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7622 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7625 /* Remove E and the cancelled blocks. */
7630 /* Walk backwards so as to get a chance to substitute all
7631 released DEFs into debug stmts. See
7632 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7634 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7635 delete_basic_block (bbs_to_remove
[i
]);
7638 /* Update the dominance information. The immediate dominator may change only
7639 for blocks whose immediate dominator belongs to DF_IDOM:
7641 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7642 removal. Let Z the arbitrary block such that idom(Z) = Y and
7643 Z dominates X after the removal. Before removal, there exists a path P
7644 from Y to X that avoids Z. Let F be the last edge on P that is
7645 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7646 dominates W, and because of P, Z does not dominate W), and W belongs to
7647 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7648 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7650 bb
= BASIC_BLOCK (i
);
7651 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7653 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7654 bbs_to_fix_dom
.safe_push (dbb
);
7657 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7660 BITMAP_FREE (df_idom
);
7661 bbs_to_remove
.release ();
7662 bbs_to_fix_dom
.release ();
7665 /* Purge dead EH edges from basic block BB. */
7668 gimple_purge_dead_eh_edges (basic_block bb
)
7670 bool changed
= false;
7673 gimple stmt
= last_stmt (bb
);
7675 if (stmt
&& stmt_can_throw_internal (stmt
))
7678 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7680 if (e
->flags
& EDGE_EH
)
7682 remove_edge_and_dominated_blocks (e
);
7692 /* Purge dead EH edges from basic block listed in BLOCKS. */
7695 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7697 bool changed
= false;
7701 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7703 basic_block bb
= BASIC_BLOCK (i
);
7705 /* Earlier gimple_purge_dead_eh_edges could have removed
7706 this basic block already. */
7707 gcc_assert (bb
|| changed
);
7709 changed
|= gimple_purge_dead_eh_edges (bb
);
7715 /* Purge dead abnormal call edges from basic block BB. */
7718 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7720 bool changed
= false;
7723 gimple stmt
= last_stmt (bb
);
7725 if (!cfun
->has_nonlocal_label
7726 && !cfun
->calls_setjmp
)
7729 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7732 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7734 if (e
->flags
& EDGE_ABNORMAL
)
7736 if (e
->flags
& EDGE_FALLTHRU
)
7737 e
->flags
&= ~EDGE_ABNORMAL
;
7739 remove_edge_and_dominated_blocks (e
);
7749 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7752 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7754 bool changed
= false;
7758 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7760 basic_block bb
= BASIC_BLOCK (i
);
7762 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7763 this basic block already. */
7764 gcc_assert (bb
|| changed
);
7766 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7772 /* This function is called whenever a new edge is created or
7776 gimple_execute_on_growing_pred (edge e
)
7778 basic_block bb
= e
->dest
;
7780 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7781 reserve_phi_args_for_new_edge (bb
);
7784 /* This function is called immediately before edge E is removed from
7785 the edge vector E->dest->preds. */
7788 gimple_execute_on_shrinking_pred (edge e
)
7790 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7791 remove_phi_args (e
);
7794 /*---------------------------------------------------------------------------
7795 Helper functions for Loop versioning
7796 ---------------------------------------------------------------------------*/
7798 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7799 of 'first'. Both of them are dominated by 'new_head' basic block. When
7800 'new_head' was created by 'second's incoming edge it received phi arguments
7801 on the edge by split_edge(). Later, additional edge 'e' was created to
7802 connect 'new_head' and 'first'. Now this routine adds phi args on this
7803 additional edge 'e' that new_head to second edge received as part of edge
7807 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7808 basic_block new_head
, edge e
)
7811 gimple_stmt_iterator psi1
, psi2
;
7813 edge e2
= find_edge (new_head
, second
);
7815 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7816 edge, we should always have an edge from NEW_HEAD to SECOND. */
7817 gcc_assert (e2
!= NULL
);
7819 /* Browse all 'second' basic block phi nodes and add phi args to
7820 edge 'e' for 'first' head. PHI args are always in correct order. */
7822 for (psi2
= gsi_start_phis (second
),
7823 psi1
= gsi_start_phis (first
);
7824 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7825 gsi_next (&psi2
), gsi_next (&psi1
))
7827 phi1
= gsi_stmt (psi1
);
7828 phi2
= gsi_stmt (psi2
);
7829 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7830 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7835 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7836 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7837 the destination of the ELSE part. */
7840 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7841 basic_block second_head ATTRIBUTE_UNUSED
,
7842 basic_block cond_bb
, void *cond_e
)
7844 gimple_stmt_iterator gsi
;
7845 gimple new_cond_expr
;
7846 tree cond_expr
= (tree
) cond_e
;
7849 /* Build new conditional expr */
7850 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
7851 NULL_TREE
, NULL_TREE
);
7853 /* Add new cond in cond_bb. */
7854 gsi
= gsi_last_bb (cond_bb
);
7855 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
7857 /* Adjust edges appropriately to connect new head with first head
7858 as well as second head. */
7859 e0
= single_succ_edge (cond_bb
);
7860 e0
->flags
&= ~EDGE_FALLTHRU
;
7861 e0
->flags
|= EDGE_FALSE_VALUE
;
7865 /* Do book-keeping of basic block BB for the profile consistency checker.
7866 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7867 then do post-pass accounting. Store the counting in RECORD. */
7869 gimple_account_profile_record (basic_block bb
, int after_pass
,
7870 struct profile_record
*record
)
7872 gimple_stmt_iterator i
;
7873 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
7875 record
->size
[after_pass
]
7876 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
7877 if (profile_status
== PROFILE_READ
)
7878 record
->time
[after_pass
]
7879 += estimate_num_insns (gsi_stmt (i
),
7880 &eni_time_weights
) * bb
->count
;
7881 else if (profile_status
== PROFILE_GUESSED
)
7882 record
->time
[after_pass
]
7883 += estimate_num_insns (gsi_stmt (i
),
7884 &eni_time_weights
) * bb
->frequency
;
7888 struct cfg_hooks gimple_cfg_hooks
= {
7890 gimple_verify_flow_info
,
7891 gimple_dump_bb
, /* dump_bb */
7892 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
7893 create_bb
, /* create_basic_block */
7894 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
7895 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
7896 gimple_can_remove_branch_p
, /* can_remove_branch_p */
7897 remove_bb
, /* delete_basic_block */
7898 gimple_split_block
, /* split_block */
7899 gimple_move_block_after
, /* move_block_after */
7900 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
7901 gimple_merge_blocks
, /* merge_blocks */
7902 gimple_predict_edge
, /* predict_edge */
7903 gimple_predicted_by_p
, /* predicted_by_p */
7904 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
7905 gimple_duplicate_bb
, /* duplicate_block */
7906 gimple_split_edge
, /* split_edge */
7907 gimple_make_forwarder_block
, /* make_forward_block */
7908 NULL
, /* tidy_fallthru_edge */
7909 NULL
, /* force_nonfallthru */
7910 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
7911 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
7912 gimple_flow_call_edges_add
, /* flow_call_edges_add */
7913 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
7914 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
7915 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
7916 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
7917 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
7918 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
7919 flush_pending_stmts
, /* flush_pending_stmts */
7920 gimple_empty_block_p
, /* block_empty_p */
7921 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
7922 gimple_account_profile_record
,
7926 /* Split all critical edges. */
7929 split_critical_edges (void)
7935 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7936 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7937 mappings around the calls to split_edge. */
7938 start_recording_case_labels ();
7941 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7943 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
7945 /* PRE inserts statements to edges and expects that
7946 since split_critical_edges was done beforehand, committing edge
7947 insertions will not split more edges. In addition to critical
7948 edges we must split edges that have multiple successors and
7949 end by control flow statements, such as RESX.
7950 Go ahead and split them too. This matches the logic in
7951 gimple_find_edge_insert_loc. */
7952 else if ((!single_pred_p (e
->dest
)
7953 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
7954 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7955 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
7956 && !(e
->flags
& EDGE_ABNORMAL
))
7958 gimple_stmt_iterator gsi
;
7960 gsi
= gsi_last_bb (e
->src
);
7961 if (!gsi_end_p (gsi
)
7962 && stmt_ends_bb_p (gsi_stmt (gsi
))
7963 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
7964 && !gimple_call_builtin_p (gsi_stmt (gsi
),
7970 end_recording_case_labels ();
7976 const pass_data pass_data_split_crit_edges
=
7978 GIMPLE_PASS
, /* type */
7979 "crited", /* name */
7980 OPTGROUP_NONE
, /* optinfo_flags */
7981 false, /* has_gate */
7982 true, /* has_execute */
7983 TV_TREE_SPLIT_EDGES
, /* tv_id */
7984 PROP_cfg
, /* properties_required */
7985 PROP_no_crit_edges
, /* properties_provided */
7986 0, /* properties_destroyed */
7987 0, /* todo_flags_start */
7988 TODO_verify_flow
, /* todo_flags_finish */
7991 class pass_split_crit_edges
: public gimple_opt_pass
7994 pass_split_crit_edges (gcc::context
*ctxt
)
7995 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
7998 /* opt_pass methods: */
7999 unsigned int execute () { return split_critical_edges (); }
8001 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8002 }; // class pass_split_crit_edges
8007 make_pass_split_crit_edges (gcc::context
*ctxt
)
8009 return new pass_split_crit_edges (ctxt
);
8013 /* Build a ternary operation and gimplify it. Emit code before GSI.
8014 Return the gimple_val holding the result. */
8017 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8018 tree type
, tree a
, tree b
, tree c
)
8021 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8023 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8026 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8030 /* Build a binary operation and gimplify it. Emit code before GSI.
8031 Return the gimple_val holding the result. */
8034 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8035 tree type
, tree a
, tree b
)
8039 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8042 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8046 /* Build a unary operation and gimplify it. Emit code before GSI.
8047 Return the gimple_val holding the result. */
8050 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8055 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8058 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8064 /* Emit return warnings. */
8067 execute_warn_function_return (void)
8069 source_location location
;
8074 if (!targetm
.warn_func_return (cfun
->decl
))
8077 /* If we have a path to EXIT, then we do return. */
8078 if (TREE_THIS_VOLATILE (cfun
->decl
)
8079 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
) > 0)
8081 location
= UNKNOWN_LOCATION
;
8082 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
8084 last
= last_stmt (e
->src
);
8085 if ((gimple_code (last
) == GIMPLE_RETURN
8086 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8087 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8090 if (location
== UNKNOWN_LOCATION
)
8091 location
= cfun
->function_end_locus
;
8092 warning_at (location
, 0, "%<noreturn%> function does return");
8095 /* If we see "return;" in some basic block, then we do reach the end
8096 without returning a value. */
8097 else if (warn_return_type
8098 && !TREE_NO_WARNING (cfun
->decl
)
8099 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
) > 0
8100 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun
->decl
))))
8102 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
8104 gimple last
= last_stmt (e
->src
);
8105 if (gimple_code (last
) == GIMPLE_RETURN
8106 && gimple_return_retval (last
) == NULL
8107 && !gimple_no_warning_p (last
))
8109 location
= gimple_location (last
);
8110 if (location
== UNKNOWN_LOCATION
)
8111 location
= cfun
->function_end_locus
;
8112 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8113 TREE_NO_WARNING (cfun
->decl
) = 1;
8122 /* Given a basic block B which ends with a conditional and has
8123 precisely two successors, determine which of the edges is taken if
8124 the conditional is true and which is taken if the conditional is
8125 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8128 extract_true_false_edges_from_block (basic_block b
,
8132 edge e
= EDGE_SUCC (b
, 0);
8134 if (e
->flags
& EDGE_TRUE_VALUE
)
8137 *false_edge
= EDGE_SUCC (b
, 1);
8142 *true_edge
= EDGE_SUCC (b
, 1);
8148 const pass_data pass_data_warn_function_return
=
8150 GIMPLE_PASS
, /* type */
8151 "*warn_function_return", /* name */
8152 OPTGROUP_NONE
, /* optinfo_flags */
8153 false, /* has_gate */
8154 true, /* has_execute */
8155 TV_NONE
, /* tv_id */
8156 PROP_cfg
, /* properties_required */
8157 0, /* properties_provided */
8158 0, /* properties_destroyed */
8159 0, /* todo_flags_start */
8160 0, /* todo_flags_finish */
8163 class pass_warn_function_return
: public gimple_opt_pass
8166 pass_warn_function_return (gcc::context
*ctxt
)
8167 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8170 /* opt_pass methods: */
8171 unsigned int execute () { return execute_warn_function_return (); }
8173 }; // class pass_warn_function_return
8178 make_pass_warn_function_return (gcc::context
*ctxt
)
8180 return new pass_warn_function_return (ctxt
);
8183 /* Walk a gimplified function and warn for functions whose return value is
8184 ignored and attribute((warn_unused_result)) is set. This is done before
8185 inlining, so we don't have to worry about that. */
8188 do_warn_unused_result (gimple_seq seq
)
8191 gimple_stmt_iterator i
;
8193 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8195 gimple g
= gsi_stmt (i
);
8197 switch (gimple_code (g
))
8200 do_warn_unused_result (gimple_bind_body (g
));
8203 do_warn_unused_result (gimple_try_eval (g
));
8204 do_warn_unused_result (gimple_try_cleanup (g
));
8207 do_warn_unused_result (gimple_catch_handler (g
));
8209 case GIMPLE_EH_FILTER
:
8210 do_warn_unused_result (gimple_eh_filter_failure (g
));
8214 if (gimple_call_lhs (g
))
8216 if (gimple_call_internal_p (g
))
8219 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8220 LHS. All calls whose value is ignored should be
8221 represented like this. Look for the attribute. */
8222 fdecl
= gimple_call_fndecl (g
);
8223 ftype
= gimple_call_fntype (g
);
8225 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8227 location_t loc
= gimple_location (g
);
8230 warning_at (loc
, OPT_Wunused_result
,
8231 "ignoring return value of %qD, "
8232 "declared with attribute warn_unused_result",
8235 warning_at (loc
, OPT_Wunused_result
,
8236 "ignoring return value of function "
8237 "declared with attribute warn_unused_result");
8242 /* Not a container, not a call, or a call whose value is used. */
8249 run_warn_unused_result (void)
8251 do_warn_unused_result (gimple_body (current_function_decl
));
8256 gate_warn_unused_result (void)
8258 return flag_warn_unused_result
;
8263 const pass_data pass_data_warn_unused_result
=
8265 GIMPLE_PASS
, /* type */
8266 "*warn_unused_result", /* name */
8267 OPTGROUP_NONE
, /* optinfo_flags */
8268 true, /* has_gate */
8269 true, /* has_execute */
8270 TV_NONE
, /* tv_id */
8271 PROP_gimple_any
, /* properties_required */
8272 0, /* properties_provided */
8273 0, /* properties_destroyed */
8274 0, /* todo_flags_start */
8275 0, /* todo_flags_finish */
8278 class pass_warn_unused_result
: public gimple_opt_pass
8281 pass_warn_unused_result (gcc::context
*ctxt
)
8282 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8285 /* opt_pass methods: */
8286 bool gate () { return gate_warn_unused_result (); }
8287 unsigned int execute () { return run_warn_unused_result (); }
8289 }; // class pass_warn_unused_result
8294 make_pass_warn_unused_result (gcc::context
*ctxt
)
8296 return new pass_warn_unused_result (ctxt
);
8299 /* IPA passes, compilation of earlier functions or inlining
8300 might have changed some properties, such as marked functions nothrow,
8301 pure, const or noreturn.
8302 Remove redundant edges and basic blocks, and create new ones if necessary.
8304 This pass can't be executed as stand alone pass from pass manager, because
8305 in between inlining and this fixup the verify_flow_info would fail. */
8308 execute_fixup_cfg (void)
8311 gimple_stmt_iterator gsi
;
8312 int todo
= gimple_in_ssa_p (cfun
) ? TODO_verify_ssa
: 0;
8313 gcov_type count_scale
;
8318 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl
)->count
,
8319 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8321 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8322 cgraph_get_node (current_function_decl
)->count
;
8323 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8324 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8327 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8328 e
->count
= apply_scale (e
->count
, count_scale
);
8332 bb
->count
= apply_scale (bb
->count
, count_scale
);
8333 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8335 gimple stmt
= gsi_stmt (gsi
);
8336 tree decl
= is_gimple_call (stmt
)
8337 ? gimple_call_fndecl (stmt
)
8341 int flags
= gimple_call_flags (stmt
);
8342 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8344 if (gimple_purge_dead_abnormal_call_edges (bb
))
8345 todo
|= TODO_cleanup_cfg
;
8347 if (gimple_in_ssa_p (cfun
))
8349 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8354 if (flags
& ECF_NORETURN
8355 && fixup_noreturn_call (stmt
))
8356 todo
|= TODO_cleanup_cfg
;
8359 if (maybe_clean_eh_stmt (stmt
)
8360 && gimple_purge_dead_eh_edges (bb
))
8361 todo
|= TODO_cleanup_cfg
;
8364 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8365 e
->count
= apply_scale (e
->count
, count_scale
);
8367 /* If we have a basic block with no successors that does not
8368 end with a control statement or a noreturn call end it with
8369 a call to __builtin_unreachable. This situation can occur
8370 when inlining a noreturn call that does in fact return. */
8371 if (EDGE_COUNT (bb
->succs
) == 0)
8373 gimple stmt
= last_stmt (bb
);
8375 || (!is_ctrl_stmt (stmt
)
8376 && (!is_gimple_call (stmt
)
8377 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8379 stmt
= gimple_build_call
8380 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8381 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8382 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8386 if (count_scale
!= REG_BR_PROB_BASE
)
8387 compute_function_frequency ();
8389 /* We just processed all calls. */
8390 if (cfun
->gimple_df
)
8391 vec_free (MODIFIED_NORETURN_CALLS (cfun
));
8393 /* Dump a textual representation of the flowgraph. */
8395 gimple_dump_cfg (dump_file
, dump_flags
);
8398 && (todo
& TODO_cleanup_cfg
))
8399 loops_state_set (LOOPS_NEED_FIXUP
);
8406 const pass_data pass_data_fixup_cfg
=
8408 GIMPLE_PASS
, /* type */
8409 "*free_cfg_annotations", /* name */
8410 OPTGROUP_NONE
, /* optinfo_flags */
8411 false, /* has_gate */
8412 true, /* has_execute */
8413 TV_NONE
, /* tv_id */
8414 PROP_cfg
, /* properties_required */
8415 0, /* properties_provided */
8416 0, /* properties_destroyed */
8417 0, /* todo_flags_start */
8418 0, /* todo_flags_finish */
8421 class pass_fixup_cfg
: public gimple_opt_pass
8424 pass_fixup_cfg (gcc::context
*ctxt
)
8425 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8428 /* opt_pass methods: */
8429 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8430 unsigned int execute () { return execute_fixup_cfg (); }
8432 }; // class pass_fixup_cfg
8437 make_pass_fixup_cfg (gcc::context
*ctxt
)
8439 return new pass_fixup_cfg (ctxt
);
8442 /* Garbage collection support for edge_def. */
8444 extern void gt_ggc_mx (tree
&);
8445 extern void gt_ggc_mx (gimple
&);
8446 extern void gt_ggc_mx (rtx
&);
8447 extern void gt_ggc_mx (basic_block
&);
8450 gt_ggc_mx (edge_def
*e
)
8452 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8454 gt_ggc_mx (e
->dest
);
8455 if (current_ir_type () == IR_GIMPLE
)
8456 gt_ggc_mx (e
->insns
.g
);
8458 gt_ggc_mx (e
->insns
.r
);
8462 /* PCH support for edge_def. */
8464 extern void gt_pch_nx (tree
&);
8465 extern void gt_pch_nx (gimple
&);
8466 extern void gt_pch_nx (rtx
&);
8467 extern void gt_pch_nx (basic_block
&);
8470 gt_pch_nx (edge_def
*e
)
8472 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8474 gt_pch_nx (e
->dest
);
8475 if (current_ir_type () == IR_GIMPLE
)
8476 gt_pch_nx (e
->insns
.g
);
8478 gt_pch_nx (e
->insns
.r
);
8483 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8485 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8486 op (&(e
->src
), cookie
);
8487 op (&(e
->dest
), cookie
);
8488 if (current_ir_type () == IR_GIMPLE
)
8489 op (&(e
->insns
.g
), cookie
);
8491 op (&(e
->insns
.r
), cookie
);
8492 op (&(block
), cookie
);