From: John Wehle <john@feith.com>
Date: Tue, 29 Aug 2000 19:15:26 +0000 (+0000)
Subject: loop.c (prescan_loop): Don't check unknown_address_altered when deciding if insert_lo...
X-Git-Tag: prereleases/libstdc++-2.92~4371
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=20bd7bfa3b23a701c35980cf74dd7cc39e3ccc30;p=thirdparty%2Fgcc.git

loop.c (prescan_loop): Don't check unknown_address_altered when deciding if insert_loop_mem is safe.

	* loop.c (prescan_loop): Don't check unknown_address_altered
	when deciding if insert_loop_mem is safe.  Add BLKmode MEMs
	to loop_store_mems as necessary.
	(loop_invariant_p): Don't check unknown_address_altered
	or unknown_constant_address_altered.

From-SVN: r36045
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 0a91a6cf2f1e..991af6cc155a 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+Tue Aug 29 15:17:54 EDT 2000  John Wehle  (john@feith.com)
+
+	* loop.c (prescan_loop): Don't check unknown_address_altered
+	when deciding if insert_loop_mem is safe.  Add BLKmode MEMs
+	to loop_store_mems as necessary.
+	(loop_invariant_p): Don't check unknown_address_altered
+	or unknown_constant_address_altered.
+
 2000-08-29  J. David Anglin
 
 	* vax.md (sltu, sgeu): Delete sltu and sgeu insn patterns.
diff --git a/gcc/loop.c b/gcc/loop.c
index 04fe7ccbab0d..5c70055d2655 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -2451,11 +2451,9 @@ prescan_loop (loop)
     }
 
   /* Now, rescan the loop, setting up the LOOP_MEMS array.  */
-  if (/* We can't tell what MEMs are aliased by what.  */
-      ! unknown_address_altered
-      /* An exception thrown by a called function might land us
+  if (/* An exception thrown by a called function might land us
 	 anywhere.  */
-      && ! loop_info->has_call
+      ! loop_info->has_call
       /* We don't want loads for MEMs moved to a location before the
 	 one at which their stack memory becomes allocated.  (Note
 	 that this is not a problem for malloc, etc., since those
@@ -2467,6 +2465,23 @@ prescan_loop (loop)
   for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
        insn = NEXT_INSN (insn))
     for_each_rtx (&insn, insert_loop_mem, 0);
+
+  /* BLKmode MEMs are added to LOOP_STORE_MEM as necessary so
+     that loop_invariant_p and load_mems can use true_dependence
+     to determine what is really clobbered.  */
+  if (unknown_address_altered)
+    {
+      rtx mem = gen_rtx_MEM (BLKmode, const0_rtx);
+
+      loop_store_mems = gen_rtx_EXPR_LIST (VOIDmode, mem, loop_store_mems);
+    }
+  if (unknown_constant_address_altered)
+    {
+      rtx mem = gen_rtx_MEM (BLKmode, const0_rtx);
+
+      RTX_UNCHANGING_P (mem) = 1;
+      loop_store_mems = gen_rtx_EXPR_LIST (VOIDmode, mem, loop_store_mems);
+    }
 }
 
 /* LOOP->CONT_DOMINATOR is now the last label between the loop start
@@ -3136,9 +3151,8 @@ note_set_pseudo_multiple_uses (x, y, data)
 
    The value is 2 if we refer to something only conditionally invariant.
 
-   If `unknown_address_altered' is nonzero, no memory ref is invariant.
-   Otherwise, a memory ref is invariant if it does not conflict with
-   anything stored in `loop_store_mems'.  */
+   A memory ref is invariant if it is not volatile and does not conflict
+   with anything stored in `loop_store_mems'.  */
 
 int
 loop_invariant_p (loop, x)
@@ -3207,14 +3221,6 @@ loop_invariant_p (loop, x)
   if (MEM_VOLATILE_P (x))
     return 0;
 
-  /* If we had a subroutine call, any location in memory could
-     have been clobbered.  We used to test here for volatile and
-     readonly, but true_dependence knows how to do that better
-     than we do.  */
-  if (RTX_UNCHANGING_P (x)
-      ? unknown_constant_address_altered : unknown_address_altered)
-    return 0;
-
   /* See if there is any dependence between a store and this load.  */
   mem_list_entry = loop_store_mems;
   while (mem_list_entry)
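
The idea behind the patch, sketched below as a standalone C model (not GCC internals; the names mem_ref, store_entry, may_conflict, and ref_is_loop_invariant are hypothetical stand-ins for rtx MEMs, loop_store_mems, true_dependence, and loop_invariant_p): instead of a global unknown_address_altered flag that forces loop_invariant_p to reject every memory reference, prescan_loop pushes a wildcard BLKmode MEM onto loop_store_mems, so the ordinary per-reference dependence walk handles the "anything may have been stored" case; marking the second wildcard RTX_UNCHANGING_P restricts it to read-only references, preserving the old unknown_constant_address_altered distinction.

#include <stdio.h>
#include <string.h>

/* Hypothetical, simplified model of a memory reference: a symbolic
   address plus the two flags that matter for the idea in the patch.
   Real GCC works on rtx MEMs, not on this struct.  */
struct mem_ref {
  const char *addr;   /* symbolic address; NULL means "unknown/any address" */
  int unchanging;     /* models RTX_UNCHANGING_P (read-only reference) */
  int is_volatile;    /* models MEM_VOLATILE_P */
};

/* Singly linked store list, standing in for loop_store_mems.  */
struct store_entry {
  struct mem_ref mem;
  struct store_entry *next;
};

/* Crude stand-in for true_dependence: a store and a load conflict
   unless their addresses are known to differ, except that a store not
   marked "unchanging" never clobbers an unchanging (read-only) load.
   A NULL address acts like the patch's BLKmode MEM with address
   const0_rtx: it may alias anything.  */
static int
may_conflict (const struct mem_ref *store, const struct mem_ref *load)
{
  if (load->unchanging && !store->unchanging)
    return 0;                        /* read-only data is never clobbered */
  if (store->addr == NULL || load->addr == NULL)
    return 1;                        /* wildcard entry: assume a conflict */
  return strcmp (store->addr, load->addr) == 0;
}

/* The shape of loop_invariant_p after the patch: no global
   "unknown address altered" escape hatch; every decision goes
   through the store list.  */
static int
ref_is_loop_invariant (const struct mem_ref *load,
                       const struct store_entry *stores)
{
  if (load->is_volatile)
    return 0;
  for (; stores != NULL; stores = stores->next)
    if (may_conflict (&stores->mem, load))
      return 0;
  return 1;
}

int
main (void)
{
  /* A call inside the loop means "anything may have been stored";
     the patch models this by pushing a wildcard entry onto the store
     list rather than setting a flag checked by loop_invariant_p.  */
  struct store_entry wildcard = { { NULL, 0, 0 }, NULL };
  struct store_entry store_a = { { "a", 0, 0 }, &wildcard };

  struct mem_ref load_b  = { "b", 0, 0 };   /* ordinary load */
  struct mem_ref load_ro = { "c", 1, 0 };   /* read-only (unchanging) load */

  printf ("load b invariant:  %d\n", ref_is_loop_invariant (&load_b, &store_a));
  printf ("load c invariant:  %d\n", ref_is_loop_invariant (&load_ro, &store_a));
  return 0;
}

Run as is, the model reports the ordinary load b as not invariant (the wildcard store may alias it) while the read-only load c stays invariant; adding a second wildcard entry with unchanging set would also disqualify c, mirroring the distinction the removed RTX_UNCHANGING_P test in loop_invariant_p used to make.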