--- /dev/null
+! { dg-do compile }
+! { dg-options "-O3 -ffast-math" }
+
+! This tests only for the absence of a compile-time failure, which
+! formerly occurred when statements were emitted out of order, causing
+! verify_ssa to fail.
+
+MODULE xc_cs1
+ INTEGER, PARAMETER :: dp=KIND(0.0D0)
+ REAL(KIND=dp), PARAMETER :: a = 0.04918_dp, &
+ c = 0.2533_dp, &
+ d = 0.349_dp
+CONTAINS
+ SUBROUTINE cs1_u_2 ( rho, grho, r13, e_rho_rho, e_rho_ndrho, e_ndrho_ndrho,&
+ npoints, error)
+ REAL(KIND=dp), DIMENSION(*), &
+ INTENT(INOUT) :: e_rho_rho, e_rho_ndrho, &
+ e_ndrho_ndrho
+ DO ip = 1, npoints
+ IF ( rho(ip) > eps_rho ) THEN
+ oc = 1.0_dp/(r*r*r3*r3 + c*g*g)
+ d2rF4 = c4p*f13*f23*g**4*r3/r * (193*d*r**5*r3*r3+90*d*d*r**5*r3 &
+ -88*g*g*c*r**3*r3-100*d*d*c*g*g*r*r*r3*r3 &
+ +104*r**6)*od**3*oc**4
+ e_rho_rho(ip) = e_rho_rho(ip) + d2F1 + d2rF2 + d2F3 + d2rF4
+ END IF
+ END DO
+ END SUBROUTINE cs1_u_2
+END MODULE xc_cs1
/* Operand->rank hashtable. */
static struct pointer_map_t *operand_rank;
+/* Map from inserted __builtin_powi calls to multiply chains that
+ feed them. */
+static struct pointer_map_t *bip_map;
+
/* Forward decls. */
static long get_rank (tree);
static void
possibly_move_powi (gimple stmt, tree op)
{
- gimple stmt2;
+ gimple stmt2, *mpy;
tree fndecl;
gimple_stmt_iterator gsi1, gsi2;
return;
}
+ /* Move the __builtin_powi. */
gsi1 = gsi_for_stmt (stmt);
gsi2 = gsi_for_stmt (stmt2);
gsi_move_before (&gsi2, &gsi1);
+
+ /* See if there are multiplies feeding the __builtin_powi base
+ argument that must also be moved. */
+ while ((mpy = (gimple *) pointer_map_contains (bip_map, stmt2)) != NULL)
+ {
+      /* If we've already moved this statement, we're done; a moved
+         statement is marked by mapping it to NULL in bip_map.  */
+ gimple *next = (gimple *) pointer_map_contains (bip_map, *mpy);
+ if (next && !*next)
+ return;
+
+ stmt = stmt2;
+ stmt2 = *mpy;
+ gsi1 = gsi_for_stmt (stmt);
+ gsi2 = gsi_for_stmt (stmt2);
+ gsi_move_before (&gsi2, &gsi1);
+
+ /* The moved multiply may be DAG'd from multiple calls if it
+ was the result of a cached multiply. Only move it once.
+ Rank order ensures we move it to the right place the first
+ time. */
+ if (next)
+ *next = NULL;
+ else
+ {
+ next = (gimple *) pointer_map_insert (bip_map, *mpy);
+ *next = NULL;
+ }
+ }
}
/* This function checks three consecutive operands in
while (true)
{
HOST_WIDE_INT power;
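+      /* Most recently emitted multiply feeding the powi base in this
+         iteration; multiplies are chained through bip_map so they can
+         be moved together with the __builtin_powi call.  */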
+ gimple last_mul = NULL;
/* First look for the largest cached product of factors from
preceding iterations. If found, create a builtin_powi for
}
else
{
+ gimple *value;
+
iter_result = get_reassoc_pow_ssa_name (target, type);
pow_stmt = gimple_build_call (powi_fndecl, 2, rf1->repr,
build_int_cst (integer_type_node,
power));
gimple_call_set_lhs (pow_stmt, iter_result);
gimple_set_location (pow_stmt, gimple_location (stmt));
- /* Temporarily place the call; we will move it to the
- correct place during rewrite_expr. */
+ /* Temporarily place the call; we will move it and its
+ feeding multiplies to the correct place during
+ rewrite_expr. */
gsi_insert_before (&gsi, pow_stmt, GSI_SAME_STMT);
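+      /* If the representative of this factor is a cached product
+         rather than the factor itself, it is defined by a multiply we
+         inserted earlier; record that multiply so it is moved along
+         with the __builtin_powi call.  */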
+ if (!operand_equal_p (rf1->repr, rf1->factor, 0))
+ {
+ value = (gimple *) pointer_map_insert (bip_map, pow_stmt);
+ *value = SSA_NAME_DEF_STMT (rf1->repr);
+ }
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
unsigned elt;
gsi_insert_before (&gsi, mul_stmt, GSI_SAME_STMT);
rf1->repr = target_ssa;
+ /* Chain multiplies together for later movement. */
+ if (last_mul)
+ {
+ gimple *value
+ = (gimple *) pointer_map_insert (bip_map, mul_stmt);
+ *value = last_mul;
+ }
+ last_mul = mul_stmt;
+
/* Don't reprocess the multiply we just introduced. */
gimple_set_visited (mul_stmt, true);
}
gimple_call_set_lhs (pow_stmt, iter_result);
gimple_set_location (pow_stmt, gimple_location (stmt));
gsi_insert_before (&gsi, pow_stmt, GSI_SAME_STMT);
+
+ /* If we inserted a chain of multiplies before the pow_stmt,
+ record that fact so we can move it later when we move the
+ pow_stmt. */
+ if (last_mul)
+ {
+ gimple *value = (gimple *) pointer_map_insert (bip_map, pow_stmt);
+ *value = last_mul;
+ }
}
/* Append the result of this iteration to the ops vector. */
if (associative_tree_code (rhs_code))
{
VEC(operand_entry_t, heap) *ops = NULL;
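+      /* Create the map that records which inserted multiplies feed
+         each __builtin_powi call, so possibly_move_powi can relocate
+         them together.  */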
+ bip_map = pointer_map_create ();
/* There may be no immediate uses left by the time we
get here because we may have eliminated them all. */
}
VEC_free (operand_entry_t, heap, ops);
+ pointer_map_destroy (bip_map);
}
}
}