* recog.c (split_all_insns_noflow): New.
* rtl.h (split_all_insns_noflow): Declare.
* ia64.c (ia64_reorg): Use split_all_insns_noflow.
* m68hc11.c (m68hc11_reorg): Likewise.
* sh.c (machine_dependent_reorg): Likewise.
* toplev.c (rest_of_compilation): Likewise for last split_all_insns
call.
From-SVN: r44312
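
For context, a minimal sketch of how a target's reorg hook ends up using the new
entry point after this change; the hook name below is hypothetical, and the
optimize guard mirrors the ia64.c and sh.c hunks further down. The sketch is
illustrative only, not part of the patch:

    /* Hypothetical target reorg hook, shown only to illustrate the new
       call.  No CFG/flow information is available at this point, so the
       flow-free variant is used instead of split_all_insns (0).  */
    static void
    example_machine_dependent_reorg ()
    {
      /* ... target-specific rewriting of the insn chain ... */
      if (optimize == 0)
        split_all_insns_noflow ();
    }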
+Tue Jul 24 20:32:44 CEST 2001 Jan Hubicka <jh@suse.cz>
+
+ * recog.c (split_all_insns_noflow): New.
+ * rtl.h (split_all_insns_noflow): Declare.
+ * ia64.c (ia64_reorg): Use split_all_insns_noflow.
+ * m68hc11.c (m68hc11_reorg): Likewise.
+ * sh.c (machine_dependent_reorg): Likewise.
+ * toplev.c (rest_of_compilation): Likewise for last split_all_insns
+ call.
+
2001-07-18 Andrew Haley <aph@cambridge.redhat.com>
* config/sh/sh.md (ashlsi3_std splitter): Split only after reload.
{
/* If optimizing, we'll have split before scheduling. */
if (optimize == 0)
- split_all_insns (0);
+ split_all_insns_noflow ();
/* Make sure the CFG and global_live_at_start are correct
for emit_predicate_relation_info. */
/* Force a split of all splittable insns. This is necessary for the
Z register replacement mechanism because we end up with basic insns. */
- split_all_insns (0);
+ split_all_insns_noflow ();
split_done = 1;
z_replacement_completed = 1;
split after Z register replacement. This gives more opportunities
for peephole (in particular for consecutive xgdx/xgdy). */
if (optimize > 0)
- split_all_insns (0);
+ split_all_insns_noflow ();
/* Once insns are split after z_replacement_completed == 2,
we must not re-run the life_analysis. The xgdx/xgdy patterns
optimizing, they'll have already been split. Otherwise, make
sure we don't split them too late. */
if (! optimize)
- split_all_insns (0);
+ split_all_insns_noflow ();
/* If relaxing, generate pseudo-ops to associate function calls with
the symbols they call. It does no harm to not generate these
sbitmap_free (blocks);
}
+
+/* Same as split_all_insns, but do not expect CFG to be available.
+ Used by machine dependent reorg passes. */
+
+void
+split_all_insns_noflow ()
+{
+ rtx next, insn;
+
+ for (insn = get_insns (); insn; insn = next)
+ {
+ next = NEXT_INSN (insn);
+ split_insn (insn);
+ }
+ return;
+}
\f
#ifdef HAVE_peephole2
struct peep2_insn_data
extern rtx get_first_nonparm_insn PARAMS ((void));
extern void split_all_insns PARAMS ((int));
+extern void split_all_insns_noflow PARAMS ((void));
#define MAX_SAVED_CONST_INT 64
extern rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
timevar_push (TV_FLOW2);
open_dump_file (DFI_flow2, decl);
- find_basic_blocks (insns, max_reg_num (), rtl_dump_file);
+#ifdef ENABLE_CHECKING
+ verify_flow_info ();
+#endif
/* If optimizing, then go ahead and split insns now. */
if (optimize > 0)
scheduling to operate in the epilogue. */
thread_prologue_and_epilogue_insns (insns);
+ compute_bb_for_insn (get_max_uid ());
+
if (optimize)
{
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP);
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
timevar_push (TV_SHORTEN_BRANCH);
- split_all_insns (0);
+ split_all_insns_noflow ();
timevar_pop (TV_SHORTEN_BRANCH);
#endif