--- /dev/null
+/* { dg-do compile { target { aarch64*-*-* } } } */
+/* { dg-options "-march=armv8-a+sve -Ofast" } */
+
+double MADPictureC1;
+extern int PictureRejected[];
+int PictureMAD_0, MADModelEstimator_n_windowSize_i, MADModelEstimator_n_windowSize_oneSampleQ;
+
+void MADModelEstimator_n_windowSize() {
+  int estimateX2 = 0;
+  for (; MADModelEstimator_n_windowSize_i; MADModelEstimator_n_windowSize_i++) {
+    if (MADModelEstimator_n_windowSize_oneSampleQ &&
+        !PictureRejected[MADModelEstimator_n_windowSize_i])
+      estimateX2 = 1;
+    if (!PictureRejected[MADModelEstimator_n_windowSize_i])
+      MADPictureC1 += PictureMAD_0;
+  }
+  if (estimateX2)
+    for (;;)
+      ;
+}
     op0 = ops[1 - reduc_index];
   else
     {
-      op0 = ops[2];
+      op0 = ops[2 + (1 - reduc_index)];
       opmask = ops[0];
       gcc_assert (!slp_node);
     }
       gcc_assert (code == IFN_COND_ADD || code == IFN_COND_SUB
                   || code == IFN_COND_MUL || code == IFN_COND_AND
                   || code == IFN_COND_IOR || code == IFN_COND_XOR);
-      gcc_assert (op.num_ops == 4 && (op.ops[1] == op.ops[3]));
+      gcc_assert (op.num_ops == 4
+                  && (op.ops[reduc_index]
+                      == op.ops[internal_fn_else_index ((internal_fn) code)]));
     }
   bool masked_loop_p = LOOP_VINFO_FULLY_MASKED_P (loop_vinfo);
     {
       /* For a conditional operation pass the truth type as mask
          vectype.  */
-      gcc_assert (single_defuse_cycle && reduc_index == 1);
+      gcc_assert (single_defuse_cycle
+                  && (reduc_index == 1 || reduc_index == 2));
       vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, ncopies,
-                         op.ops[0], &vec_oprnds0,
-                         truth_type_for (vectype_in),
-                         NULL_TREE, &vec_oprnds1, NULL_TREE,
-                         op.ops[2], &vec_oprnds2, NULL_TREE);
+                         op.ops[0], &vec_oprnds0, truth_type_for (vectype_in),
+                         reduc_index == 1 ? NULL_TREE : op.ops[1],
+                         &vec_oprnds1, NULL_TREE,
+                         reduc_index == 2 ? NULL_TREE : op.ops[2],
+                         &vec_oprnds2, NULL_TREE);
     }
   /* For single def-use cycles get one copy of the vectorized reduction