+2014-08-21 Marc Glisse <marc.glisse@inria.fr>
+
+ PR tree-optimization/62112
+ * gimple-iterator.c (gsi_replace): Return whether EH cleanup is needed.
+ * gimple-iterator.h (gsi_replace): Return bool.
+ * tree-ssa-alias.c (ref_may_alias_global_p_1): New helper, code
+ moved from ref_may_alias_global_p.
+ (ref_may_alias_global_p, refs_may_alias_p, ref_maybe_used_by_stmt_p):
+ New overloads.
+ (ref_maybe_used_by_call_p): Take ao_ref* instead of tree.
+ (stmt_kills_ref_p_1): Rename...
+ (stmt_kills_ref_p): ... to this.
+ * tree-ssa-alias.h (ref_may_alias_global_p, ref_maybe_used_by_stmt_p,
+ stmt_kills_ref_p): Declare.
+ * tree-ssa-dse.c (dse_possible_dead_store_p): New argument, use it.
+ Move the self-assignment case...
+ (dse_optimize_stmt): ... here. Handle builtin calls. Remove dead code.
+
2014-08-21 David Malcolm <dmalcolm@redhat.com>
* rtl.h (try_split): Strengthen return type from rtx to rtx_insn *.
/* Replace the statement pointed-to by GSI to STMT. If UPDATE_EH_INFO
is true, the exception handling information of the original
statement is moved to the new statement. Assignments must only be
- replaced with assignments to the same LHS. */
+ replaced with assignments to the same LHS. Returns whether EH edge
+ cleanup is required. */
-void
+bool
gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
{
gimple orig_stmt = gsi_stmt (*gsi);
+ bool require_eh_edge_purge = false;
if (stmt == orig_stmt)
- return;
+ return false;
gcc_assert (!gimple_has_lhs (orig_stmt) || !gimple_has_lhs (stmt)
|| gimple_get_lhs (orig_stmt) == gimple_get_lhs (stmt));
/* Preserve EH region information from the original statement, if
requested by the caller. */
if (update_eh_info)
- maybe_clean_or_replace_eh_stmt (orig_stmt, stmt);
+ require_eh_edge_purge = maybe_clean_or_replace_eh_stmt (orig_stmt, stmt);
gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
gsi_set_stmt (gsi, stmt);
gimple_set_modified (stmt, true);
update_modified_stmt (stmt);
+ return require_eh_edge_purge;
}
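
A minimal sketch (not part of the patch) of how a caller is expected to consume the new return value; gsi, new_stmt and need_eh_cleanup stand in for the caller's own iterator, replacement statement and cleanup bitmap, mirroring the tree-ssa-dse.c hunk further down:

  /* Sketch only: gsi, new_stmt and need_eh_cleanup come from the caller.  */
  basic_block bb = gimple_bb (gsi_stmt (gsi));
  if (gsi_replace (&gsi, new_stmt, true))
    /* The replacement may have made an EH edge dead; remember the block.  */
    bitmap_set_bit (need_eh_cleanup, bb->index);

  /* After the pass has finished rewriting statements:  */
  if (!bitmap_empty_p (need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);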
extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator);
extern void gsi_set_stmt (gimple_stmt_iterator *, gimple);
extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *);
-extern void gsi_replace (gimple_stmt_iterator *, gimple, bool);
+extern bool gsi_replace (gimple_stmt_iterator *, gimple, bool);
extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, bool);
extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple,
enum gsi_iterator_update);
+2014-08-21 Marc Glisse <marc.glisse@inria.fr>
+
+ PR tree-optimization/62112
+ * gcc.dg/tree-ssa/pr62112-1.c: New file.
+ * gcc.dg/tree-ssa/pr62112-2.c: Likewise.
+ * gcc.c-torture/execute/pr35472.c: Add noclone attribute.
+ * gcc.c-torture/execute/20071219-1.c: Likewise.
+
2014-08-20 Bill Schmidt <wschmidt@linux.vnet.ibm.com>
* testsuite/gcc.target/powerpc/builtins-1.c: New test.
struct S *p;
-void __attribute__((noinline))
+void __attribute__((noinline,noclone))
foo (struct S *x, int set)
{
int i;
p = x;
}
-void __attribute__((noinline))
+void __attribute__((noinline,noclone))
test1 (void)
{
struct S a;
foo (&b, 0);
}
-void __attribute__((noinline))
+void __attribute__((noinline,noclone))
test2 (void)
{
struct S a;
foo (&b, 0);
}
-void __attribute__((noinline))
+void __attribute__((noinline,noclone))
test3 (void)
{
struct S a;
extern void *memset (void *s, int c, __SIZE_TYPE__ n);
struct S { int i[16]; };
struct S *p;
-void __attribute__((noinline))
+void __attribute__((noinline,noclone))
foo(struct S *a, struct S *b) { a->i[0] = -1; p = b; }
void test (void)
{
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O1 -fdump-tree-dse1-details" } */
+
+void f(){
+ char*p=__builtin_malloc(42);
+ __builtin_memset(p,3,10);
+ __builtin_memset(p,7,33);
+}
+char*g;
+void h(){
+ char*p=__builtin_malloc(42);
+ g=__builtin_memset(p,3,10);
+ __builtin_free(p);
+}
+char*i(){
+ char*p=__builtin_malloc(42);
+ __builtin_memset(p,3,10);
+ __builtin_memset(p,7,33);
+ return p;
+}
+
+/* { dg-final { scan-tree-dump-times "Deleted dead call" 4 "dse1" } } */
+/* { dg-final { cleanup-tree-dump "dse1" } } */
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O1 -fdump-tree-dse1-details" } */
+
+char*g;
+char* f(){
+ char*p=__builtin_malloc(42);
+ __builtin_memset(p,3,33);
+ __builtin_memset(p,7,10);
+ return p;
+}
+void h(){
+ char*p=__builtin_malloc(42);
+ g=__builtin_memset(p,3,10);
+}
+
+/* { dg-final { scan-tree-dump-not "Deleted dead" "dse1" } } */
+/* { dg-final { cleanup-tree-dump "dse1" } } */
return true;
}
-/* Return true whether REF may refer to global memory. */
+/* Returns whether a memory reference with base BASE may refer to global
+ memory. */
-bool
-ref_may_alias_global_p (tree ref)
+static bool
+ref_may_alias_global_p_1 (tree base)
{
- tree base = get_base_address (ref);
if (DECL_P (base))
return is_global_var (base);
else if (TREE_CODE (base) == MEM_REF
return true;
}
+/* Return whether the memory reference REF, given as an ao_ref, may refer
+ to global memory. */
+bool
+ref_may_alias_global_p (ao_ref *ref)
+{
+ tree base = ao_ref_base (ref);
+ return ref_may_alias_global_p_1 (base);
+}
+
+/* Likewise, with the reference given as a tree. */
+bool
+ref_may_alias_global_p (tree ref)
+{
+ tree base = get_base_address (ref);
+ return ref_may_alias_global_p_1 (base);
+}
+
/* Return true whether STMT may clobber global memory. */
bool
#endif
}
+/* Return whether the references REF1 (a tree) and REF2 (an ao_ref) may
+ alias. */
+static bool
+refs_may_alias_p (tree ref1, ao_ref *ref2)
+{
+ ao_ref r1;
+ ao_ref_init (&r1, ref1);
+ return refs_may_alias_p_1 (&r1, ref2, true);
+}
+
bool
refs_may_alias_p (tree ref1, tree ref2)
{
}
static bool
-ref_maybe_used_by_call_p (gimple call, tree ref)
+ref_maybe_used_by_call_p (gimple call, ao_ref *ref)
{
- ao_ref r;
bool res;
- ao_ref_init (&r, ref);
- res = ref_maybe_used_by_call_p_1 (call, &r);
+ res = ref_maybe_used_by_call_p_1 (call, ref);
if (res)
++alias_stats.ref_maybe_used_by_call_p_may_alias;
else
true, otherwise return false. */
bool
-ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
+ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
{
if (is_gimple_assign (stmt))
{
else if (gimple_code (stmt) == GIMPLE_RETURN)
{
tree retval = gimple_return_retval (stmt);
- tree base;
if (retval
&& TREE_CODE (retval) != SSA_NAME
&& !is_gimple_min_invariant (retval)
&& refs_may_alias_p (retval, ref))
return true;
/* If ref escapes the function then the return acts as a use. */
- base = get_base_address (ref);
+ tree base = ao_ref_base (ref);
if (!base)
;
else if (DECL_P (base))
return true;
}
+/* Likewise, with the reference given as a tree. */
+bool
+ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
+{
+ ao_ref r;
+ ao_ref_init (&r, ref);
+ return ref_maybe_used_by_stmt_p (stmt, &r);
+}
+
/* If the call in statement CALL may clobber the memory reference REF
return true, otherwise return false. */
/* If STMT kills the memory reference REF return true, otherwise
return false. */
-static bool
-stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
+bool
+stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{
if (!ao_ref_base (ref))
return false;
{
ao_ref r;
ao_ref_init (&r, ref);
- return stmt_kills_ref_p_1 (stmt, &r);
+ return stmt_kills_ref_p (stmt, &r);
}
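
For orientation, a hypothetical helper (illustration only, not in the patch) showing the calling convention the new ao_ref overloads enable: build the ao_ref once, then reuse it for every oracle query, as dse_possible_dead_store_p does below. The name store_is_dead_wrt and its exact logic are made up for the example:

  /* Sketch only.  STMT is a candidate dead store, USE_STMT a statement
     dominated by it.  */
  static bool
  store_is_dead_wrt (gimple stmt, gimple use_stmt)
  {
    ao_ref ref;
    /* Build the ao_ref once...  */
    ao_ref_init (&ref, gimple_assign_lhs (stmt));
    /* ...then reuse it for each query against the new overloads.  */
    if (ref_maybe_used_by_stmt_p (use_stmt, &ref))
      return false;	/* The stored value may still be read.  */
    if (stmt_kills_ref_p (use_stmt, &ref))
      return true;	/* A later store fully overwrites it.  */
    /* Otherwise mimic DSE's end-of-walk heuristic: memory that cannot be
       reached from outside the function may be treated as dead.  */
    return !ref_may_alias_global_p (&ref);
  }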
extern bool ptr_deref_may_alias_global_p (tree);
extern bool ptr_derefs_may_alias_p (tree, tree);
extern bool ref_may_alias_global_p (tree);
+extern bool ref_may_alias_global_p (ao_ref *);
extern bool refs_may_alias_p (tree, tree);
extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool);
extern bool refs_anti_dependent_p (tree, tree);
extern bool refs_output_dependent_p (tree, tree);
extern bool ref_maybe_used_by_stmt_p (gimple, tree);
+extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *);
extern bool stmt_may_clobber_global_p (gimple);
extern bool stmt_may_clobber_ref_p (gimple, tree);
extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
extern bool call_may_clobber_ref_p (gimple, tree);
extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *);
extern bool stmt_kills_ref_p (gimple, tree);
+extern bool stmt_kills_ref_p (gimple, ao_ref *);
extern tree get_continuation_for_phi (gimple, ao_ref *,
unsigned int *, bitmap *, bool,
void *(*)(ao_ref *, tree, void *, bool),
/* A helper of dse_optimize_stmt.
- Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
- may prove STMT to be dead.
+ Given a GIMPLE_ASSIGN or builtin call in STMT that writes to REF, find
+ a candidate statement *USE_STMT that may prove STMT to be dead.
Return TRUE if the above conditions are met, otherwise FALSE. */
static bool
-dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
+dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
{
gimple temp;
unsigned cnt = 0;
*use_stmt = NULL;
- /* Self-assignments are zombies. */
- if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
- {
- *use_stmt = stmt;
- return true;
- }
-
/* Find the first dominated statement that clobbers (part of) the
memory stmt stores to with no intermediate statement that may use
part of the memory stmt stores. That is, find a store that may
temp = use_stmt;
}
/* If the statement is a use the store is not dead. */
- else if (ref_maybe_used_by_stmt_p (use_stmt,
- gimple_assign_lhs (stmt)))
+ else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
{
fail = true;
BREAK_FROM_IMM_USE_STMT (ui);
just pretend the stmt makes itself dead. Otherwise fail. */
if (!temp)
{
- if (stmt_may_clobber_global_p (stmt))
+ if (ref_may_alias_global_p (ref))
return false;
temp = stmt;
}
}
/* Continue walking until we reach a kill. */
- while (!stmt_kills_ref_p (temp, gimple_assign_lhs (stmt)));
+ while (!stmt_kills_ref_p (temp, ref));
*use_stmt = temp;
if (!gimple_vdef (stmt))
return;
- /* We know we have virtual definitions. If this is a GIMPLE_ASSIGN
- that's not also a function call, then record it into our table. */
- if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
- return;
-
/* Don't return early on *this_2(D) ={v} {CLOBBER}. */
if (gimple_has_volatile_ops (stmt)
&& (!gimple_clobber_p (stmt)
|| TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
return;
+ /* We know we have virtual definitions. We can handle assignments and
+ some builtin calls. */
+ if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
+ {
+ switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
+ {
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMSET:
+ {
+ gimple use_stmt;
+ ao_ref ref;
+ tree size = NULL_TREE;
+ if (gimple_call_num_args (stmt) == 3)
+ size = gimple_call_arg (stmt, 2);
+ tree ptr = gimple_call_arg (stmt, 0);
+ ao_ref_init_from_ptr_and_size (&ref, ptr, size);
+ if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
+ return;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, " Deleted dead call '");
+ print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
+ fprintf (dump_file, "'\n");
+ }
+
+ tree lhs = gimple_call_lhs (stmt);
+ if (lhs)
+ {
+ gimple new_stmt = gimple_build_assign (lhs, ptr);
+ unlink_stmt_vdef (stmt);
+ if (gsi_replace (gsi, new_stmt, true))
+ bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
+ }
+ else
+ {
+ /* Then we need to fix the operand of the consuming stmt. */
+ unlink_stmt_vdef (stmt);
+
+ /* Remove the dead store. */
+ if (gsi_remove (gsi, true))
+ bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
+ }
+ break;
+ }
+ default:
+ return;
+ }
+ }
+
if (is_gimple_assign (stmt))
{
gimple use_stmt;
- if (!dse_possible_dead_store_p (stmt, &use_stmt))
- return;
+ /* Self-assignments are zombies. */
+ if (operand_equal_p (gimple_assign_rhs1 (stmt),
+ gimple_assign_lhs (stmt), 0))
+ use_stmt = stmt;
+ else
+ {
+ ao_ref ref;
+ ao_ref_init (&ref, gimple_assign_lhs (stmt));
+ if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
+ return;
+ }
/* Now we know that use_stmt kills the LHS of stmt. */
&& !gimple_clobber_p (use_stmt))
return;
- basic_block bb;
-
- /* If use_stmt is or might be a nop assignment, e.g. for
- struct { ... } S a, b, *p; ...
- b = a; b = b;
- or
- b = a; b = *p; where p might be &b,
- or
- *p = a; *p = b; where p might be &b,
- or
- *p = *u; *p = *v; where p might be v, then USE_STMT
- acts as a use as well as definition, so store in STMT
- is not dead. */
- if (stmt != use_stmt
- && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
- return;
-
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " Deleted dead store '");
unlink_stmt_vdef (stmt);
/* Remove the dead store. */
- bb = gimple_bb (stmt);
+ basic_block bb = gimple_bb (stmt);
if (gsi_remove (gsi, true))
bitmap_set_bit (need_eh_cleanup, bb->index);
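
As a concrete illustration (restating the h() case from pr62112-1.c above, not new patch content): when a dead memset still has its return value used, the call is not dropped but replaced by a copy of its pointer argument, which is what the gimple_build_assign (lhs, ptr) branch produces.

  char *g;

  void
  h (void)
  {
    char *p = __builtin_malloc (42);
    /* Nothing ever reads the bytes written here: the buffer is freed
       before any load, so only the returned pointer is live.  */
    g = __builtin_memset (p, 3, 10);
    __builtin_free (p);
  }

  /* After the dse1 pass the call is rewritten to the equivalent of
     "g = p;".  */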