/* Lower complex number operations to scalar operations.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
This file is part of GCC.
non-SSA_NAME/non-invariant args that need to be replaced by SSA_NAMEs. */
static vec<gphi *> phis_to_revisit;
+/* BBs that need EH cleanup. */
+static bitmap need_eh_cleanup;
+
/* Lookup UID in the complex_variable_components hashtable and return the
associated tree. */
static tree
lhs = gimple_get_lhs (stmt);
/* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs. */
- if (!lhs)
+ if (!lhs || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
return SSA_PROP_VARYING;
/* These conditions should be satisfied due to the initial filter
set up in init_dont_simulate_again. */
gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ return SSA_PROP_VARYING;
+
/* We've set up the lattice values such that IOR neatly models PHI meet. */
new_l = UNINITIALIZED;
for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
{
/* Replace an anonymous base value with the variable from cvc_lookup.
This should result in better debug info. */
- if (SSA_NAME_VAR (ssa_name)
+ if (!SSA_NAME_IS_DEFAULT_DEF (value)
+ && SSA_NAME_VAR (ssa_name)
&& (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
&& !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
{
/* Rewrite the assignment at *GSI into a COMPLEX_EXPR of the decomposed
   components R and I.  If maybe_clean_or_replace_eh_stmt reports that the
   rewritten statement's EH information changed, queue the block in
   NEED_EH_CLEANUP; the dead EH edges are purged in one pass at the end of
   tree_lower_complex rather than immediately.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple *old_stmt = gsi_stmt (*gsi);
  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  gimple *stmt = gsi_stmt (*gsi);
  update_stmt (stmt);
  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
  update_complex_components (gsi, gsi_stmt (*gsi), r, i);
}
else
gcc_unreachable ();
fn = builtin_decl_explicit (bcode);
-
stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
-
if (inplace_p)
{
gimple *old_stmt = gsi_stmt (*gsi);
+ gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt));
lhs = gimple_assign_lhs (old_stmt);
gimple_call_set_lhs (stmt, lhs);
- update_stmt (stmt);
- gsi_replace (gsi, stmt, false);
-
- if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
- gimple_purge_dead_eh_edges (gsi_bb (*gsi));
+ gsi_replace (gsi, stmt, true);
type = TREE_TYPE (type);
- update_complex_components (gsi, stmt,
- build1 (REALPART_EXPR, type, lhs),
- build1 (IMAGPART_EXPR, type, lhs));
+ if (stmt_can_throw_internal (cfun, stmt))
+ {
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
+ if (!(e->flags & EDGE_EH))
+ break;
+ basic_block bb = split_edge (e);
+ gimple_stmt_iterator gsi2 = gsi_start_bb (bb);
+ update_complex_components (&gsi2, stmt,
+ build1 (REALPART_EXPR, type, lhs),
+ build1 (IMAGPART_EXPR, type, lhs));
+ return NULL_TREE;
+ }
+ else
+ update_complex_components (gsi, stmt,
+ build1 (REALPART_EXPR, type, lhs),
+ build1 (IMAGPART_EXPR, type, lhs));
SSA_NAME_DEF_STMT (lhs) = stmt;
return NULL_TREE;
}
- lhs = create_tmp_var (type);
+ gimple_call_set_nothrow (stmt, true);
+ lhs = make_ssa_name (type);
gimple_call_set_lhs (stmt, lhs);
-
- lhs = make_ssa_name (lhs, stmt);
- gimple_call_set_lhs (stmt, lhs);
-
- update_stmt (stmt);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+
return lhs;
}
/* If optimizing for size or not at all just do a libcall.
Same if there are exception-handling edges or signaling NaNs. */
if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi))
- || stmt_can_throw_internal (gsi_stmt (*gsi))
+ || stmt_can_throw_internal (cfun, gsi_stmt (*gsi))
|| flag_signaling_nans)
{
expand_complex_libcall (gsi, type, ar, ai, br, bi,
return;
}
+ if (!HONOR_NANS (inner_type))
+ {
+ /* If we are not worrying about NaNs expand to
+ (ar*br - ai*bi) + i(ar*bi + br*ai) directly. */
+ expand_complex_multiplication_components (gsi, inner_type,
+ ar, ai, br, bi,
+ &rr, &ri);
+ break;
+ }
+
/* Else, expand x = a * b into
x = (ar*br - ai*bi) + i(ar*bi + br*ai);
if (isunordered (__real__ x, __imag__ x))
tree tmpr, tmpi;
expand_complex_multiplication_components (gsi, inner_type, ar, ai,
- br, bi, &tmpr, &tmpi);
+ br, bi, &tmpr, &tmpi);
gimple *check
= gimple_build_cond (UNORDERED_EXPR, tmpr, tmpi,
= insert_cond_bb (gsi_bb (*gsi), gsi_stmt (*gsi), check,
profile_probability::very_unlikely ());
-
gimple_stmt_iterator cond_bb_gsi = gsi_last_bb (cond_bb);
gsi_insert_after (&cond_bb_gsi, gimple_build_nop (), GSI_NEW_STMT);
tree libcall_res
= expand_complex_libcall (&cond_bb_gsi, type, ar, ai, br,
- bi, MULT_EXPR, false);
+ bi, MULT_EXPR, false);
tree cond_real = gimplify_build1 (&cond_bb_gsi, REALPART_EXPR,
inner_type, libcall_res);
tree cond_imag = gimplify_build1 (&cond_bb_gsi, IMAGPART_EXPR,
edge orig_to_join = find_edge (orig_bb, join_bb);
gphi *real_phi = create_phi_node (rr, gsi_bb (*gsi));
- add_phi_arg (real_phi, cond_real, cond_to_join,
- UNKNOWN_LOCATION);
+ add_phi_arg (real_phi, cond_real, cond_to_join, UNKNOWN_LOCATION);
add_phi_arg (real_phi, tmpr, orig_to_join, UNKNOWN_LOCATION);
gphi *imag_phi = create_phi_node (ri, gsi_bb (*gsi));
- add_phi_arg (imag_phi, cond_imag, cond_to_join,
- UNKNOWN_LOCATION);
+ add_phi_arg (imag_phi, cond_imag, cond_to_join, UNKNOWN_LOCATION);
add_phi_arg (imag_phi, tmpi, orig_to_join, UNKNOWN_LOCATION);
}
else
/* If we are not worrying about NaNs expand to
(ar*br - ai*bi) + i(ar*bi + br*ai) directly. */
expand_complex_multiplication_components (gsi, inner_type, ar, ai,
- br, bi, &rr, &ri);
+ br, bi, &rr, &ri);
break;
default:
gimple *stmt;
tree cond, tmp;
- tmp = create_tmp_var (boolean_type_node);
+ tmp = make_ssa_name (boolean_type_node);
stmt = gimple_build_assign (tmp, compare);
- if (gimple_in_ssa_p (cfun))
- {
- tmp = make_ssa_name (tmp, stmt);
- gimple_assign_set_lhs (stmt, tmp);
- }
-
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
cond = fold_build2_loc (gimple_location (stmt),
}
update_stmt (stmt);
+ if (maybe_clean_eh_stmt (stmt))
+ bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
}
/* Expand inline asm that sets some complex SSA_NAMEs. */
ac = gimple_assign_rhs1 (stmt);
bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
}
- /* GIMPLE_CALL can not get here. */
+ /* GIMPLE_CALL cannot get here. */
else
{
ac = gimple_cond_lhs (stmt);
else
br = bi = NULL_TREE;
- if (gimple_in_ssa_p (cfun))
+ al = find_lattice_value (ac);
+ if (al == UNINITIALIZED)
+ al = VARYING;
+
+ if (TREE_CODE_CLASS (code) == tcc_unary)
+ bl = UNINITIALIZED;
+ else if (ac == bc)
+ bl = al;
+ else
{
- al = find_lattice_value (ac);
- if (al == UNINITIALIZED)
- al = VARYING;
-
- if (TREE_CODE_CLASS (code) == tcc_unary)
- bl = UNINITIALIZED;
- else if (ac == bc)
- bl = al;
- else
- {
- bl = find_lattice_value (bc);
- if (bl == UNINITIALIZED)
- bl = VARYING;
- }
+ bl = find_lattice_value (bc);
+ if (bl == UNINITIALIZED)
+ bl = VARYING;
}
- else
- al = bl = VARYING;
switch (code)
{
return 0;
complex_lattice_values.create (num_ssa_names);
- complex_lattice_values.safe_grow_cleared (num_ssa_names);
+ complex_lattice_values.safe_grow_cleared (num_ssa_names, true);
init_parameter_lattice_values ();
class complex_propagate complex_propagate;
complex_propagate.ssa_propagate ();
+ need_eh_cleanup = BITMAP_ALLOC (NULL);
+
complex_variable_components = new int_tree_htab_type (10);
complex_ssa_name_components.create (2 * num_ssa_names);
- complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);
+ complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names, true);
update_parameter_components ();
for (i = 0; i < n_bbs; i++)
{
bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
+ if (!bb)
+ continue;
update_phi_components (bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
expand_complex_operations_1 (&gsi);
gsi_commit_edge_inserts ();
+ unsigned todo
+ = gimple_purge_all_dead_eh_edges (need_eh_cleanup) ? TODO_cleanup_cfg : 0;
+ BITMAP_FREE (need_eh_cleanup);
+
delete complex_variable_components;
complex_variable_components = NULL;
complex_ssa_name_components.release ();
complex_lattice_values.release ();
- return 0;
+ return todo;
}
namespace {