--- /dev/null
+/* { dg-do compile } */
+/* { dg-require-effective-target vect_simd_clones } */
+/* { dg-additional-options "-fopenmp-simd" } */
+
+/* A trivial two-operand add used as the SIMD-clone candidate; the
+   "declare simd" pragma requests inbranch clones of simdlen 4, and
+   noinline keeps calls to foo from being inlined away so the clones
+   are what the vectorizer dump reflects.  */
+#pragma omp declare simd simdlen(4) inbranch
+__attribute__((noinline)) int
+foo (int a, int b)
+{
+ return a + b;
+}
+
+/* { dg-final { scan-tree-dump-times "vectorized 1 loops" 4 "vect" { target i?86-*-* x86_64-*-* } } } */
+/* if-conversion shouldn't need to resort to masked stores for the result
+ array created by OMP lowering since that's automatic and does not have
+ its address taken. */
+/* { dg-final { scan-tree-dump-not "MASK_STORE" "vect" } } */
 /* an unconditional write won't trap if the base is written
 to unconditionally. */
- if (base_master_dr
- && DR_BASE_W_UNCONDITIONALLY (*base_master_dr))
- return flag_store_data_races;
- /* or the base is known to be not readonly. */
- else if (base_object_writable (DR_REF (a)))
- return flag_store_data_races;
+ if ((base_master_dr
+ && DR_BASE_W_UNCONDITIONALLY (*base_master_dr))
+ /* or the base is known to be not readonly. */
+ || base_object_writable (DR_REF (a)))
+ return !ref_can_have_store_data_races (base);
}
return false;
return stmt_kills_ref_p (stmt, &r);
}
+/* Return whether a store to REF can be subject to store data races,
+   i.e. whether introducing or moving a store to REF could be observed
+   by another thread.  Returns false when -fallow-store-data-races is
+   in effect or when REF's base is a non-aliased automatic variable;
+   returns true (conservatively) otherwise.  */
+
+bool
+ref_can_have_store_data_races (tree ref)
+{
+ /* With -fallow-store-data-races do not care about them. */
+ if (flag_store_data_races)
+ return false;
+
+ /* NOTE(review): base is assumed non-NULL here; confirm
+    get_base_address cannot return NULL for the refs callers pass.  */
+ tree base = get_base_address (ref);
+ if (auto_var_p (base)
+ && ! may_be_aliased (base))
+ /* Automatic variables not aliased are not subject to
+ data races. */
+ return false;
+
+ return true;
+}
+
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
TARGET or a statement clobbering the memory reference REF in which
extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *, bool = true);
extern bool stmt_kills_ref_p (gimple *, tree);
extern bool stmt_kills_ref_p (gimple *, ao_ref *);
+extern bool ref_can_have_store_data_races (tree);
+
enum translate_flags
{ TR_TRANSLATE, TR_VALUEIZE_AND_DISAMBIGUATE, TR_DISAMBIGUATE };
extern tree get_continuation_for_phi (gimple *, ao_ref *, bool,
bool always_stored = ref_always_accessed_p (loop, ref, true);
if (maybe_mt
&& (bb_in_transaction (loop_preheader_edge (loop)->src)
- || (! flag_store_data_races && ! always_stored)))
+ || (ref_can_have_store_data_races (ref->mem.ref) && ! always_stored)))
multi_threaded_model_p = true;
if (multi_threaded_model_p && !use_other_flag_var)
/* If LHS is an access to a local variable without address-taken
(or when we allow data races) and known not to trap, we could
always safely move down the store. */
- tree base = get_base_address (lhs);
- if (!auto_var_p (base)
- || (TREE_ADDRESSABLE (base) && !flag_store_data_races)
+ if (ref_can_have_store_data_races (lhs)
|| tree_could_trap_p (lhs))
return false;
}