/* Preamble and helpers for the autogenerated gimple-match.c file.
- Copyright (C) 2014-2016 Free Software Foundation, Inc.
+ Copyright (C) 2014-2021 Free Software Foundation, Inc.
This file is part of GCC.
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
+#include "vec-perm-indices.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimplify.h"
-
+#include "optabs-tree.h"
+#include "tree-eh.h"
+#include "dbgcnt.h"
+#include "tm.h"
+#include "gimple-range.h"
/* Forward declarations of the private auto-generated matchers.
They expect valueized operands in canonical order and do not
perform simplification of all-constant operands. */
-static bool gimple_simplify (code_helper *, tree *,
- gimple_seq *, tree (*)(tree),
+static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
code_helper, tree, tree);
-static bool gimple_simplify (code_helper *, tree *,
- gimple_seq *, tree (*)(tree),
+static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
code_helper, tree, tree, tree);
-static bool gimple_simplify (code_helper *, tree *,
- gimple_seq *, tree (*)(tree),
+static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
code_helper, tree, tree, tree, tree);
-
+static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
+ code_helper, tree, tree, tree, tree, tree);
+static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
+ code_helper, tree, tree, tree, tree, tree, tree);
+static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
+ tree (*)(tree));
+static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
+ tree (*)(tree));
+static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
+ tree (*)(tree));
+static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
+ tree (*)(tree));
+static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
+ tree (*)(tree));
+
+const unsigned int gimple_match_op::MAX_NUM_OPS;
/* Return whether T is a constant that we'll dispatch to fold to
evaluate fully constant expressions. */
&& TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
+/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
+ operation. Return true on success, storing the new operation in NEW_OP. */
+
+static bool
+convert_conditional_op (gimple_match_op *orig_op,
+ gimple_match_op *new_op)
+{
+ internal_fn ifn;
+ if (orig_op->code.is_tree_code ())
+ ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
+ else
+ {
+ combined_fn cfn = orig_op->code;
+ if (!internal_fn_p (cfn))
+ return false;
+ ifn = get_conditional_internal_fn (as_internal_fn (cfn));
+ }
+ /* IFN_LAST means no conditional variant of this operation exists. */
+ if (ifn == IFN_LAST)
+ return false;
+ unsigned int num_ops = orig_op->num_ops;
+ /* The IFN_COND_* call takes the condition first, then the original
+ operands, then the "else" value. */
+ new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
+ new_op->ops[0] = orig_op->cond.cond;
+ for (unsigned int i = 0; i < num_ops; ++i)
+ new_op->ops[i + 1] = orig_op->ops[i];
+ tree else_value = orig_op->cond.else_value;
+ /* If no else value was recorded, ask the target for its preferred one. */
+ if (!else_value)
+ else_value = targetm.preferred_else_value (ifn, orig_op->type,
+ num_ops, orig_op->ops);
+ new_op->ops[num_ops + 1] = else_value;
+ return true;
+}
+
+/* RES_OP is the result of a simplification. If it is conditional,
+ try to replace it with the equivalent UNCOND form, such as an
+ IFN_COND_* call or a VEC_COND_EXPR. Also try to resimplify the
+ result of the replacement if appropriate, adding any new statements to
+ SEQ and using VALUEIZE as the valueization function. Return true if
+ this resimplification occurred and resulted in at least one change. */
+
+static bool
+maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
+ tree (*valueize) (tree))
+{
+ /* Unconditional operations need no rewriting. */
+ if (!res_op->cond.cond)
+ return false;
+
+ if (!res_op->cond.else_value
+ && res_op->code.is_tree_code ())
+ {
+ /* The "else" value doesn't matter. If the "then" value is a
+ gimple value, just use it unconditionally. This isn't a
+ simplification in itself, since there was no operation to
+ build in the first place. */
+ if (gimple_simplified_result_is_gimple_val (res_op))
+ {
+ res_op->cond.cond = NULL_TREE;
+ return false;
+ }
+
+ /* Likewise if the operation would not trap. */
+ bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
+ && TYPE_OVERFLOW_TRAPS (res_op->type))
+ tree_code op_code = (tree_code) res_op->code;
+ bool op_could_trap;
+
+ /* COND_EXPR will trap if, and only if, the condition
+ traps and hence we have to check this. For all other operations, we
+ don't need to consider the operands. */
+ if (op_code == COND_EXPR)
+ op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
+ else
+ op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
+ FLOAT_TYPE_P (res_op->type),
+ honor_trapv,
+ res_op->op_or_null (1));
+
+ if (!op_could_trap)
+ {
+ res_op->cond.cond = NULL_TREE;
+ return false;
+ }
+ }
+
+ /* If the "then" value is a gimple value and the "else" value matters,
+ create a VEC_COND_EXPR between them, then see if it can be further
+ simplified. */
+ gimple_match_op new_op;
+ if (res_op->cond.else_value
+ && VECTOR_TYPE_P (res_op->type)
+ && gimple_simplified_result_is_gimple_val (res_op))
+ {
+ new_op.set_op (VEC_COND_EXPR, res_op->type,
+ res_op->cond.cond, res_op->ops[0],
+ res_op->cond.else_value);
+ *res_op = new_op;
+ return gimple_resimplify3 (seq, res_op, valueize);
+ }
+
+ /* Otherwise try rewriting the operation as an IFN_COND_* call.
+ Again, this isn't a simplification in itself, since it's what
+ RES_OP already described. */
+ if (convert_conditional_op (res_op, &new_op))
+ *res_op = new_op;
+
+ return false;
+}
/* Helper that matches and simplifies the toplevel result from
a gimple_simplify run (where we don't want to build
a stmt in case it's used in in-place folding). Replaces
- *RES_CODE and *RES_OPS with a simplified and/or canonicalized
- result and returns whether any change was made. */
+ RES_OP with a simplified and/or canonicalized result and
+ returns whether any change was made. */
-bool
-gimple_resimplify1 (gimple_seq *seq,
- code_helper *res_code, tree type, tree *res_ops,
+static bool
+gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
tree (*valueize)(tree))
{
- if (constant_for_folding (res_ops[0]))
+ if (constant_for_folding (res_op->ops[0]))
{
tree tem = NULL_TREE;
- if (res_code->is_tree_code ())
- tem = const_unop (*res_code, type, res_ops[0]);
+ if (res_op->code.is_tree_code ())
+ {
+ /* Only genuine unary expression codes can go to const_unop. */
+ tree_code code = res_op->code;
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ && TREE_CODE_LENGTH (code) == 1)
+ tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
+ }
else
- tem = fold_const_call (combined_fn (*res_code), type, res_ops[0]);
+ tem = fold_const_call (combined_fn (res_op->code), res_op->type,
+ res_op->ops[0]);
if (tem != NULL_TREE
&& CONSTANT_CLASS_P (tem))
{
- res_ops[0] = tem;
- res_ops[1] = NULL_TREE;
- res_ops[2] = NULL_TREE;
- *res_code = TREE_CODE (res_ops[0]);
+ if (TREE_OVERFLOW_P (tem))
+ tem = drop_tree_overflow (tem);
+ res_op->set_value (tem);
+ maybe_resimplify_conditional_op (seq, res_op, valueize);
return true;
}
}
- code_helper res_code2;
- tree res_ops2[3] = {};
- if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
- *res_code, type, res_ops[0]))
+ /* Limit recursion, there are cases like PR80887 and others, for
+ example when value-numbering presents us with unfolded expressions
+ that we are really not prepared to handle without eventual
+ oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
+ itself as available expression. */
+ static unsigned depth;
+ if (depth > 10)
{
- *res_code = res_code2;
- res_ops[0] = res_ops2[0];
- res_ops[1] = res_ops2[1];
- res_ops[2] = res_ops2[2];
+ if (dump_file && (dump_flags & TDF_FOLDING))
+ fprintf (dump_file, "Aborting expression simplification due to "
+ "deep recursion\n");
+ return false;
+ }
+
+ ++depth;
+ gimple_match_op res_op2 (*res_op);
+ if (gimple_simplify (&res_op2, seq, valueize,
+ res_op->code, res_op->type, res_op->ops[0]))
+ {
+ --depth;
+ *res_op = res_op2;
return true;
}
+ --depth;
+
+ /* A conditional result may still be rewritable to IFN_COND_* form. */
+ if (maybe_resimplify_conditional_op (seq, res_op, valueize))
+ return true;
return false;
}
/* Helper that matches and simplifies the toplevel result from
a gimple_simplify run (where we don't want to build
a stmt in case it's used in in-place folding). Replaces
- *RES_CODE and *RES_OPS with a simplified and/or canonicalized
- result and returns whether any change was made. */
+ RES_OP with a simplified and/or canonicalized result and
+ returns whether any change was made. */
-bool
-gimple_resimplify2 (gimple_seq *seq,
- code_helper *res_code, tree type, tree *res_ops,
+static bool
+gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
tree (*valueize)(tree))
{
- if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1]))
+ if (constant_for_folding (res_op->ops[0])
+ && constant_for_folding (res_op->ops[1]))
{
tree tem = NULL_TREE;
- if (res_code->is_tree_code ())
- tem = const_binop (*res_code, type, res_ops[0], res_ops[1]);
+ if (res_op->code.is_tree_code ())
+ {
+ /* Only genuine binary expression codes can go to const_binop. */
+ tree_code code = res_op->code;
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ && TREE_CODE_LENGTH (code) == 2)
+ tem = const_binop (res_op->code, res_op->type,
+ res_op->ops[0], res_op->ops[1]);
+ }
else
- tem = fold_const_call (combined_fn (*res_code), type,
- res_ops[0], res_ops[1]);
+ tem = fold_const_call (combined_fn (res_op->code), res_op->type,
+ res_op->ops[0], res_op->ops[1]);
if (tem != NULL_TREE
&& CONSTANT_CLASS_P (tem))
{
- res_ops[0] = tem;
- res_ops[1] = NULL_TREE;
- res_ops[2] = NULL_TREE;
- *res_code = TREE_CODE (res_ops[0]);
+ if (TREE_OVERFLOW_P (tem))
+ tem = drop_tree_overflow (tem);
+ res_op->set_value (tem);
+ maybe_resimplify_conditional_op (seq, res_op, valueize);
return true;
}
}
/* Canonicalize operand order. */
bool canonicalized = false;
- if (res_code->is_tree_code ()
- && (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison
- || commutative_tree_code (*res_code))
- && tree_swap_operands_p (res_ops[0], res_ops[1], false))
- {
- std::swap (res_ops[0], res_ops[1]);
- if (TREE_CODE_CLASS ((enum tree_code) *res_code) == tcc_comparison)
- *res_code = swap_tree_comparison (*res_code);
+ if (res_op->code.is_tree_code ()
+ && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
+ || commutative_tree_code (res_op->code))
+ && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
+ {
+ std::swap (res_op->ops[0], res_op->ops[1]);
+ if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
+ res_op->code = swap_tree_comparison (res_op->code);
canonicalized = true;
}
- code_helper res_code2;
- tree res_ops2[3] = {};
- if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
- *res_code, type, res_ops[0], res_ops[1]))
+ /* Limit recursion, see gimple_resimplify1. */
+ static unsigned depth;
+ if (depth > 10)
+ {
+ if (dump_file && (dump_flags & TDF_FOLDING))
+ fprintf (dump_file, "Aborting expression simplification due to "
+ "deep recursion\n");
+ return false;
+ }
+
+ ++depth;
+ gimple_match_op res_op2 (*res_op);
+ if (gimple_simplify (&res_op2, seq, valueize,
+ res_op->code, res_op->type,
+ res_op->ops[0], res_op->ops[1]))
{
- *res_code = res_code2;
- res_ops[0] = res_ops2[0];
- res_ops[1] = res_ops2[1];
- res_ops[2] = res_ops2[2];
+ --depth;
+ *res_op = res_op2;
return true;
}
+ --depth;
+
+ /* A conditional result may still be rewritable to IFN_COND_* form. */
+ if (maybe_resimplify_conditional_op (seq, res_op, valueize))
+ return true;
return canonicalized;
}
/* Helper that matches and simplifies the toplevel result from
a gimple_simplify run (where we don't want to build
a stmt in case it's used in in-place folding). Replaces
- *RES_CODE and *RES_OPS with a simplified and/or canonicalized
- result and returns whether any change was made. */
+ RES_OP with a simplified and/or canonicalized result and
+ returns whether any change was made. */
-bool
-gimple_resimplify3 (gimple_seq *seq,
- code_helper *res_code, tree type, tree *res_ops,
+static bool
+gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
tree (*valueize)(tree))
{
- if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
- && constant_for_folding (res_ops[2]))
+ if (constant_for_folding (res_op->ops[0])
+ && constant_for_folding (res_op->ops[1])
+ && constant_for_folding (res_op->ops[2]))
{
tree tem = NULL_TREE;
- if (res_code->is_tree_code ())
- tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
- res_ops[1], res_ops[2]);
+ if (res_op->code.is_tree_code ())
+ {
+ /* Only genuine ternary expression codes can go to fold_ternary. */
+ tree_code code = res_op->code;
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ && TREE_CODE_LENGTH (code) == 3)
+ tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
+ res_op->ops[0], res_op->ops[1],
+ res_op->ops[2]);
+ }
else
- tem = fold_const_call (combined_fn (*res_code), type,
- res_ops[0], res_ops[1], res_ops[2]);
+ tem = fold_const_call (combined_fn (res_op->code), res_op->type,
+ res_op->ops[0], res_op->ops[1], res_op->ops[2]);
if (tem != NULL_TREE
&& CONSTANT_CLASS_P (tem))
{
- res_ops[0] = tem;
- res_ops[1] = NULL_TREE;
- res_ops[2] = NULL_TREE;
- *res_code = TREE_CODE (res_ops[0]);
+ if (TREE_OVERFLOW_P (tem))
+ tem = drop_tree_overflow (tem);
+ res_op->set_value (tem);
+ maybe_resimplify_conditional_op (seq, res_op, valueize);
return true;
}
}
/* Canonicalize operand order. */
bool canonicalized = false;
- if (res_code->is_tree_code ()
- && commutative_ternary_tree_code (*res_code)
- && tree_swap_operands_p (res_ops[0], res_ops[1], false))
+ if (res_op->code.is_tree_code ()
+ && commutative_ternary_tree_code (res_op->code)
+ && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
{
- std::swap (res_ops[0], res_ops[1]);
+ std::swap (res_op->ops[0], res_op->ops[1]);
canonicalized = true;
}
- code_helper res_code2;
- tree res_ops2[3] = {};
- if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
- *res_code, type,
- res_ops[0], res_ops[1], res_ops[2]))
+ /* Limit recursion, see gimple_resimplify1. */
+ static unsigned depth;
+ if (depth > 10)
{
- *res_code = res_code2;
- res_ops[0] = res_ops2[0];
- res_ops[1] = res_ops2[1];
- res_ops[2] = res_ops2[2];
+ if (dump_file && (dump_flags & TDF_FOLDING))
+ fprintf (dump_file, "Aborting expression simplification due to "
+ "deep recursion\n");
+ return false;
+ }
+
+ ++depth;
+ gimple_match_op res_op2 (*res_op);
+ if (gimple_simplify (&res_op2, seq, valueize,
+ res_op->code, res_op->type,
+ res_op->ops[0], res_op->ops[1], res_op->ops[2]))
+ {
+ --depth;
+ *res_op = res_op2;
return true;
}
+ --depth;
+
+ /* A conditional result may still be rewritable to IFN_COND_* form. */
+ if (maybe_resimplify_conditional_op (seq, res_op, valueize))
+ return true;
return canonicalized;
}
+/* Helper that matches and simplifies the toplevel result from
+ a gimple_simplify run (where we don't want to build
+ a stmt in case it's used in in-place folding). Replaces
+ RES_OP with a simplified and/or canonicalized result and
+ returns whether any change was made. */
+
+static bool
+gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
+ tree (*valueize)(tree))
+{
+ /* No constant folding is defined for four-operand functions. */
+
+ /* Limit recursion, see gimple_resimplify1. */
+ static unsigned depth;
+ if (depth > 10)
+ {
+ if (dump_file && (dump_flags & TDF_FOLDING))
+ fprintf (dump_file, "Aborting expression simplification due to "
+ "deep recursion\n");
+ return false;
+ }
+
+ ++depth;
+ gimple_match_op res_op2 (*res_op);
+ if (gimple_simplify (&res_op2, seq, valueize,
+ res_op->code, res_op->type,
+ res_op->ops[0], res_op->ops[1], res_op->ops[2],
+ res_op->ops[3]))
+ {
+ --depth;
+ *res_op = res_op2;
+ return true;
+ }
+ --depth;
+
+ /* A conditional result may still be rewritable to IFN_COND_* form. */
+ if (maybe_resimplify_conditional_op (seq, res_op, valueize))
+ return true;
+
+ return false;
+}
+
+/* Helper that matches and simplifies the toplevel result from
+ a gimple_simplify run (where we don't want to build
+ a stmt in case it's used in in-place folding). Replaces
+ RES_OP with a simplified and/or canonicalized result and
+ returns whether any change was made. */
+
+static bool
+gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
+ tree (*valueize)(tree))
+{
+ /* No constant folding is defined for five-operand functions. */
+
+ /* NOTE(review): unlike gimple_resimplify1-4 there is no static
+ recursion-depth guard here -- confirm five-operand patterns
+ cannot recurse deeply. */
+ gimple_match_op res_op2 (*res_op);
+ if (gimple_simplify (&res_op2, seq, valueize,
+ res_op->code, res_op->type,
+ res_op->ops[0], res_op->ops[1], res_op->ops[2],
+ res_op->ops[3], res_op->ops[4]))
+ {
+ *res_op = res_op2;
+ return true;
+ }
+
+ /* A conditional result may still be rewritable to IFN_COND_* form. */
+ if (maybe_resimplify_conditional_op (seq, res_op, valueize))
+ return true;
+
+ return false;
+}
+
+/* Match and simplify the toplevel valueized operation THIS.
+ Replaces THIS with a simplified and/or canonicalized result and
+ returns whether any change was made. */
-/* If in GIMPLE expressions with CODE go as single-rhs build
- a GENERIC tree for that expression into *OP0. */
+bool
+gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
+{
+ /* Dispatch to the helper matching this operation's operand count. */
+ switch (num_ops)
+ {
+ case 1:
+ return gimple_resimplify1 (seq, this, valueize);
+ case 2:
+ return gimple_resimplify2 (seq, this, valueize);
+ case 3:
+ return gimple_resimplify3 (seq, this, valueize);
+ case 4:
+ return gimple_resimplify4 (seq, this, valueize);
+ case 5:
+ return gimple_resimplify5 (seq, this, valueize);
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* If in GIMPLE the operation described by RES_OP should be single-rhs,
+ build a GENERIC tree for that expression and update RES_OP accordingly. */
void
-maybe_build_generic_op (enum tree_code code, tree type, tree *ops)
+maybe_build_generic_op (gimple_match_op *res_op)
{
+ tree_code code = (tree_code) res_op->code;
+ tree val;
switch (code)
{
case REALPART_EXPR:
case IMAGPART_EXPR:
case VIEW_CONVERT_EXPR:
- ops[0] = build1 (code, type, ops[0]);
+ val = build1 (code, res_op->type, res_op->ops[0]);
+ res_op->set_value (val);
break;
case BIT_FIELD_REF:
- ops[0] = build3 (code, type, ops[0], ops[1], ops[2]);
- ops[1] = ops[2] = NULL_TREE;
+ val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
+ res_op->ops[2]);
+ REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
+ res_op->set_value (val);
break;
+ /* All other codes leave RES_OP untouched. */
default:;
}
}
-tree (*mprts_hook) (code_helper, tree, tree *);
+tree (*mprts_hook) (gimple_match_op *);
-/* Try to build a call to FN with return type TYPE and the NARGS
- arguments given in OPS. Return null if the target doesn't support
- the function. */
+/* Try to build RES_OP, which is known to be a call to FN. Return null
+ if the target doesn't support the function. */
static gcall *
-build_call_internal (internal_fn fn, tree type, unsigned int nargs, tree *ops)
+build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
+ /* Check target support for the function at these types before
+ building the call. */
if (direct_internal_fn_p (fn))
{
- tree_pair types = direct_internal_fn_types (fn, type, ops);
+ tree_pair types = direct_internal_fn_types (fn, res_op->type,
+ res_op->ops);
if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
return NULL;
}
- return gimple_build_call_internal (fn, nargs, ops[0], ops[1], ops[2]);
+ /* Slots at or beyond num_ops are presumably padded with null by
+ op_or_null -- confirm against gimple_match_op. */
+ return gimple_build_call_internal (fn, res_op->num_ops,
+ res_op->op_or_null (0),
+ res_op->op_or_null (1),
+ res_op->op_or_null (2),
+ res_op->op_or_null (3),
+ res_op->op_or_null (4));
}
-/* Push the exploded expression described by RCODE, TYPE and OPS
- as a statement to SEQ if necessary and return a gimple value
- denoting the value of the expression. If RES is not NULL
- then the result will be always RES and even gimple values are
- pushed to SEQ. */
+/* Push the exploded expression described by RES_OP as a statement to
+ SEQ if necessary and return a gimple value denoting the value of the
+ expression. If RES is not NULL then the result will be always RES
+ and even gimple values are pushed to SEQ. */
tree
-maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
- gimple_seq *seq, tree res)
+maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
{
- if (rcode.is_tree_code ())
+ tree *ops = res_op->ops;
+ unsigned num_ops = res_op->num_ops;
+
+ /* The caller should have converted conditional operations into an UNCOND
+ form and resimplified as appropriate. The conditional form only
+ survives this far if that conversion failed. */
+ if (res_op->cond.cond)
+ return NULL_TREE;
+
+ if (res_op->code.is_tree_code ())
{
if (!res
- && gimple_simplified_result_is_gimple_val (rcode, ops))
+ && gimple_simplified_result_is_gimple_val (res_op))
return ops[0];
if (mprts_hook)
{
- tree tem = mprts_hook (rcode, type, ops);
+ tree tem = mprts_hook (res_op);
if (tem)
return tem;
}
- if (!seq)
- return NULL_TREE;
- /* Play safe and do not allow abnormals to be mentioned in
- newly created statements. */
- if ((TREE_CODE (ops[0]) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
- || (ops[1]
- && TREE_CODE (ops[1]) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
- || (ops[2]
- && TREE_CODE (ops[2]) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]))
- || (COMPARISON_CLASS_P (ops[0])
- && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
- 0)))
- || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
- 1))))))
+ }
+
+ if (!seq)
+ return NULL_TREE;
+
+ /* Play safe and do not allow abnormals to be mentioned in
+ newly created statements. */
+ for (unsigned int i = 0; i < num_ops; ++i)
+ if (TREE_CODE (ops[i]) == SSA_NAME
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
+ return NULL_TREE;
+
+ if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
+ for (unsigned int i = 0; i < 2; ++i)
+ if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
return NULL_TREE;
+
+ /* For tree codes, build a gimple assignment of the (possibly
+ GENERIC-wrapped) expression to RES. */
+ if (res_op->code.is_tree_code ())
+ {
if (!res)
{
if (gimple_in_ssa_p (cfun))
- res = make_ssa_name (type);
+ res = make_ssa_name (res_op->type);
else
- res = create_tmp_reg (type);
+ res = create_tmp_reg (res_op->type);
}
- maybe_build_generic_op (rcode, type, ops);
- gimple *new_stmt = gimple_build_assign (res, rcode,
- ops[0], ops[1], ops[2]);
+ maybe_build_generic_op (res_op);
+ gimple *new_stmt = gimple_build_assign (res, res_op->code,
+ res_op->op_or_null (0),
+ res_op->op_or_null (1),
+ res_op->op_or_null (2));
gimple_seq_add_stmt_without_update (seq, new_stmt);
return res;
}
else
{
- if (!seq)
- return NULL_TREE;
- combined_fn fn = rcode;
- /* Play safe and do not allow abnormals to be mentioned in
- newly created statements. */
- unsigned nargs;
- for (nargs = 0; nargs < 3; ++nargs)
- {
- if (!ops[nargs])
- break;
- if (TREE_CODE (ops[nargs]) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
- return NULL_TREE;
- }
- gcc_assert (nargs != 0);
+ gcc_assert (num_ops != 0);
+ combined_fn fn = res_op->code;
gcall *new_stmt = NULL;
if (internal_fn_p (fn))
{
/* Generate the given function if we can. */
internal_fn ifn = as_internal_fn (fn);
- new_stmt = build_call_internal (ifn, type, nargs, ops);
+ new_stmt = build_call_internal (ifn, res_op);
if (!new_stmt)
return NULL_TREE;
}
if (!(flags_from_decl_or_type (decl) & ECF_CONST))
return NULL;
- new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
+ new_stmt = gimple_build_call (decl, num_ops,
+ res_op->op_or_null (0),
+ res_op->op_or_null (1),
+ res_op->op_or_null (2),
+ res_op->op_or_null (3),
+ res_op->op_or_null (4));
}
if (!res)
{
if (gimple_in_ssa_p (cfun))
- res = make_ssa_name (type);
+ res = make_ssa_name (res_op->type);
else
- res = create_tmp_reg (type);
+ res = create_tmp_reg (res_op->type);
}
gimple_call_set_lhs (new_stmt, res);
gimple_seq_add_stmt_without_update (seq, new_stmt);
return res;
}
- code_helper rcode;
- tree ops[3] = {};
- if (!gimple_simplify (&rcode, ops, seq, valueize,
- code, type, op0))
+ gimple_match_op res_op;
+ if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
return NULL_TREE;
- return maybe_push_res_to_seq (rcode, type, ops, seq);
+ return maybe_push_res_to_seq (&res_op, seq);
}
/* Binary ops. */
generation. */
if ((commutative_tree_code (code)
|| TREE_CODE_CLASS (code) == tcc_comparison)
- && tree_swap_operands_p (op0, op1, false))
+ && tree_swap_operands_p (op0, op1))
{
std::swap (op0, op1);
if (TREE_CODE_CLASS (code) == tcc_comparison)
code = swap_tree_comparison (code);
}
- code_helper rcode;
- tree ops[3] = {};
- if (!gimple_simplify (&rcode, ops, seq, valueize,
- code, type, op0, op1))
+ gimple_match_op res_op;
+ if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
return NULL_TREE;
- return maybe_push_res_to_seq (rcode, type, ops, seq);
+ return maybe_push_res_to_seq (&res_op, seq);
}
/* Ternary ops. */
/* Canonicalize operand order both for matching and fallback stmt
generation. */
if (commutative_ternary_tree_code (code)
- && tree_swap_operands_p (op0, op1, false))
+ && tree_swap_operands_p (op0, op1))
std::swap (op0, op1);
- code_helper rcode;
- tree ops[3] = {};
- if (!gimple_simplify (&rcode, ops, seq, valueize,
- code, type, op0, op1, op2))
+ gimple_match_op res_op;
+ if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
return NULL_TREE;
- return maybe_push_res_to_seq (rcode, type, ops, seq);
+ return maybe_push_res_to_seq (&res_op, seq);
}
-/* Builtin function with one argument. */
+/* Builtin or internal function with one argument. */
tree
-gimple_simplify (enum built_in_function fn, tree type,
+gimple_simplify (combined_fn fn, tree type,
tree arg0,
gimple_seq *seq, tree (*valueize)(tree))
{
+ /* With an all-constant argument try constant folding the call first. */
if (constant_for_folding (arg0))
{
- tree res = fold_const_call (as_combined_fn (fn), type, arg0);
+ tree res = fold_const_call (fn, type, arg0);
if (res && CONSTANT_CLASS_P (res))
return res;
}
- code_helper rcode;
- tree ops[3] = {};
- if (!gimple_simplify (&rcode, ops, seq, valueize,
- as_combined_fn (fn), type, arg0))
+ gimple_match_op res_op;
+ if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
return NULL_TREE;
- return maybe_push_res_to_seq (rcode, type, ops, seq);
+ return maybe_push_res_to_seq (&res_op, seq);
}
-/* Builtin function with two arguments. */
+/* Builtin or internal function with two arguments. */
tree
-gimple_simplify (enum built_in_function fn, tree type,
+gimple_simplify (combined_fn fn, tree type,
tree arg0, tree arg1,
gimple_seq *seq, tree (*valueize)(tree))
{
+ /* With all-constant arguments try constant folding the call first. */
if (constant_for_folding (arg0)
&& constant_for_folding (arg1))
{
- tree res = fold_const_call (as_combined_fn (fn), type, arg0, arg1);
+ tree res = fold_const_call (fn, type, arg0, arg1);
if (res && CONSTANT_CLASS_P (res))
return res;
}
- code_helper rcode;
- tree ops[3] = {};
- if (!gimple_simplify (&rcode, ops, seq, valueize,
- as_combined_fn (fn), type, arg0, arg1))
+ gimple_match_op res_op;
+ if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
return NULL_TREE;
- return maybe_push_res_to_seq (rcode, type, ops, seq);
+ return maybe_push_res_to_seq (&res_op, seq);
}
-/* Builtin function with three arguments. */
+/* Builtin or internal function with three arguments. */
tree
-gimple_simplify (enum built_in_function fn, tree type,
+gimple_simplify (combined_fn fn, tree type,
tree arg0, tree arg1, tree arg2,
gimple_seq *seq, tree (*valueize)(tree))
{
&& constant_for_folding (arg1)
&& constant_for_folding (arg2))
{
- tree res = fold_const_call (as_combined_fn (fn), type, arg0, arg1, arg2);
+ tree res = fold_const_call (fn, type, arg0, arg1, arg2);
if (res && CONSTANT_CLASS_P (res))
return res;
}
- code_helper rcode;
- tree ops[3] = {};
- if (!gimple_simplify (&rcode, ops, seq, valueize,
- as_combined_fn (fn), type, arg0, arg1, arg2))
+ gimple_match_op res_op;
+ if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
return NULL_TREE;
- return maybe_push_res_to_seq (rcode, type, ops, seq);
+ return maybe_push_res_to_seq (&res_op, seq);
}
/* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
return op;
}
+/* If RES_OP is a call to a conditional internal function, try simplifying
+ the associated unconditional operation and using the result to build
+ a new conditional operation. For example, if RES_OP is:
+
+ IFN_COND_ADD (COND, A, B, ELSE)
+
+ try simplifying (plus A B) and using the result to build a replacement
+ for the whole IFN_COND_ADD.
+
+ Return true if this approach led to a simplification, otherwise leave
+ RES_OP unchanged (and so suitable for other simplifications). When
+ returning true, add any new statements to SEQ and use VALUEIZE as the
+ valueization function.
+
+ RES_OP is known to be a call to IFN. */
+
+static bool
+try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
+ gimple_seq *seq, tree (*valueize) (tree))
+{
+ /* Recover the unconditional operation: either a tree code or an
+ unconditional internal function. */
+ code_helper op;
+ tree_code code = conditional_internal_fn_code (ifn);
+ if (code != ERROR_MARK)
+ op = code;
+ else
+ {
+ ifn = get_unconditional_internal_fn (ifn);
+ if (ifn == IFN_LAST)
+ return false;
+ op = as_combined_fn (ifn);
+ }
+
+ /* ops[0] is the condition and ops[num_ops - 1] the else value;
+ the operands in between form the unconditional operation. */
+ unsigned int num_ops = res_op->num_ops;
+ gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
+ res_op->ops[num_ops - 1]),
+ op, res_op->type, num_ops - 2);
+
+ /* NOTE(review): num_ops - 1 entries are copied although COND_OP has
+ only num_ops - 2 operands -- confirm ops[] has room for the extra
+ trailing slot. */
+ memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
+ switch (num_ops - 2)
+ {
+ case 2:
+ if (!gimple_resimplify2 (seq, &cond_op, valueize))
+ return false;
+ break;
+ case 3:
+ if (!gimple_resimplify3 (seq, &cond_op, valueize))
+ return false;
+ break;
+ default:
+ gcc_unreachable ();
+ }
+ *res_op = cond_op;
+ maybe_resimplify_conditional_op (seq, res_op, valueize);
+ return true;
+}
+
/* The main STMT based simplification entry. It is used by the fold_stmt
and the fold_stmt_to_constant APIs. */
bool
-gimple_simplify (gimple *stmt,
- code_helper *rcode, tree *ops,
- gimple_seq *seq,
+gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
tree (*valueize)(tree), tree (*top_valueize)(tree))
{
switch (gimple_code (stmt))
tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
bool valueized = false;
op0 = do_valueize (op0, top_valueize, valueized);
- *rcode = code;
- ops[0] = op0;
- return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
+ res_op->set_op (code, type, op0);
+ return (gimple_resimplify1 (seq, res_op, valueize)
|| valueized);
}
else if (code == BIT_FIELD_REF)
tree op0 = TREE_OPERAND (rhs1, 0);
bool valueized = false;
op0 = do_valueize (op0, top_valueize, valueized);
- *rcode = code;
- ops[0] = op0;
- ops[1] = TREE_OPERAND (rhs1, 1);
- ops[2] = TREE_OPERAND (rhs1, 2);
- return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
+ res_op->set_op (code, type, op0,
+ TREE_OPERAND (rhs1, 1),
+ TREE_OPERAND (rhs1, 2),
+ REF_REVERSE_STORAGE_ORDER (rhs1));
+ if (res_op->reverse)
+ return valueized;
+ return (gimple_resimplify3 (seq, res_op, valueize)
|| valueized);
}
else if (code == SSA_NAME
tree valueized = top_valueize (op0);
if (!valueized || op0 == valueized)
return false;
- ops[0] = valueized;
- *rcode = TREE_CODE (op0);
+ res_op->set_op (TREE_CODE (op0), type, valueized);
return true;
}
break;
tree rhs1 = gimple_assign_rhs1 (stmt);
bool valueized = false;
rhs1 = do_valueize (rhs1, top_valueize, valueized);
- *rcode = code;
- ops[0] = rhs1;
- return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
+ res_op->set_op (code, type, rhs1);
+ return (gimple_resimplify1 (seq, res_op, valueize)
|| valueized);
}
case GIMPLE_BINARY_RHS:
bool valueized = false;
rhs1 = do_valueize (rhs1, top_valueize, valueized);
rhs2 = do_valueize (rhs2, top_valueize, valueized);
- *rcode = code;
- ops[0] = rhs1;
- ops[1] = rhs2;
- return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
+ res_op->set_op (code, type, rhs1, rhs2);
+ return (gimple_resimplify2 (seq, res_op, valueize)
|| valueized);
}
case GIMPLE_TERNARY_RHS:
{
bool valueized = false;
tree rhs1 = gimple_assign_rhs1 (stmt);
- /* If this is a [VEC_]COND_EXPR first try to simplify an
+ /* If this is a COND_EXPR first try to simplify an
embedded GENERIC condition. */
- if (code == COND_EXPR
- || code == VEC_COND_EXPR)
+ if (code == COND_EXPR)
{
if (COMPARISON_CLASS_P (rhs1))
{
tree rhs = TREE_OPERAND (rhs1, 1);
lhs = do_valueize (lhs, top_valueize, valueized);
rhs = do_valueize (rhs, top_valueize, valueized);
- code_helper rcode2 = TREE_CODE (rhs1);
- tree ops2[3] = {};
- ops2[0] = lhs;
- ops2[1] = rhs;
- if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
- ops2, valueize)
+ gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
+ TREE_TYPE (rhs1), lhs, rhs);
+ if ((gimple_resimplify2 (seq, &res_op2, valueize)
|| valueized)
- && rcode2.is_tree_code ())
+ && res_op2.code.is_tree_code ())
{
valueized = true;
- if (TREE_CODE_CLASS ((enum tree_code)rcode2)
+ if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
== tcc_comparison)
- rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
- ops2[0], ops2[1]);
- else if (rcode2 == SSA_NAME
- || rcode2 == INTEGER_CST
- || rcode2 == VECTOR_CST)
- rhs1 = ops2[0];
+ rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
+ res_op2.ops[0], res_op2.ops[1]);
+ else if (res_op2.code == SSA_NAME
+ || res_op2.code == INTEGER_CST
+ || res_op2.code == VECTOR_CST)
+ rhs1 = res_op2.ops[0];
else
valueized = false;
}
rhs1 = do_valueize (rhs1, top_valueize, valueized);
rhs2 = do_valueize (rhs2, top_valueize, valueized);
rhs3 = do_valueize (rhs3, top_valueize, valueized);
- *rcode = code;
- ops[0] = rhs1;
- ops[1] = rhs2;
- ops[2] = rhs3;
- return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
+ res_op->set_op (code, type, rhs1, rhs2, rhs3);
+ return (gimple_resimplify3 (seq, res_op, valueize)
|| valueized);
}
default:
/* ??? This way we can't simplify calls with side-effects. */
if (gimple_call_lhs (stmt) != NULL_TREE
&& gimple_call_num_args (stmt) >= 1
- && gimple_call_num_args (stmt) <= 3)
+ && gimple_call_num_args (stmt) <= 5)
{
bool valueized = false;
+ combined_fn cfn;
if (gimple_call_internal_p (stmt))
- *rcode = as_combined_fn (gimple_call_internal_fn (stmt));
+ cfn = as_combined_fn (gimple_call_internal_fn (stmt));
else
{
tree fn = gimple_call_fn (stmt);
|| !gimple_builtin_call_types_compatible_p (stmt, decl))
return false;
- *rcode = as_combined_fn (DECL_FUNCTION_CODE (decl));
+ cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
}
- tree type = TREE_TYPE (gimple_call_lhs (stmt));
- for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
+ unsigned int num_args = gimple_call_num_args (stmt);
+ res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
+ for (unsigned i = 0; i < num_args; ++i)
{
tree arg = gimple_call_arg (stmt, i);
- ops[i] = do_valueize (arg, top_valueize, valueized);
+ res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
}
- switch (gimple_call_num_args (stmt))
+ if (internal_fn_p (cfn)
+ && try_conditional_simplification (as_internal_fn (cfn),
+ res_op, seq, valueize))
+ return true;
+ switch (num_args)
{
case 1:
- return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
+ return (gimple_resimplify1 (seq, res_op, valueize)
|| valueized);
case 2:
- return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
+ return (gimple_resimplify2 (seq, res_op, valueize)
|| valueized);
case 3:
- return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
+ return (gimple_resimplify3 (seq, res_op, valueize)
+ || valueized);
+ case 4:
+ return (gimple_resimplify4 (seq, res_op, valueize)
+ || valueized);
+ case 5:
+ return (gimple_resimplify5 (seq, res_op, valueize)
|| valueized);
default:
gcc_unreachable ();
bool valueized = false;
lhs = do_valueize (lhs, top_valueize, valueized);
rhs = do_valueize (rhs, top_valueize, valueized);
- *rcode = gimple_cond_code (stmt);
- ops[0] = lhs;
- ops[1] = rhs;
- return (gimple_resimplify2 (seq, rcode,
- boolean_type_node, ops, valueize)
+ res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
+ return (gimple_resimplify2 (seq, res_op, valueize)
|| valueized);
}
do_valueize (tree (*valueize)(tree), tree op)
{
if (valueize && TREE_CODE (op) == SSA_NAME)
- return valueize (op);
+ {
+ tree tem = valueize (op);
+ if (tem)
+ return tem;
+ }
return op;
}
+/* Helper for the autogenerated code, get at the definition of NAME when
+   VALUEIZE allows that.  */
+
+inline gimple *
+get_def (tree (*valueize)(tree), tree name)
+{
+  /* A NULL result from VALUEIZE means we may not look through NAME,
+     so report no definition in that case.  */
+  if (valueize && ! valueize (name))
+    return NULL;
+  return SSA_NAME_DEF_STMT (name);
+}
+
/* Routine to determine if the types T1 and T2 are effectively
the same for GIMPLE. If T1 or T2 is not a type, the test
applies to their TREE_TYPE. */
{
return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
}
+
+/* Return true if math operations that are beneficial only after
+   vectorization should be canonicalized.  */
+
+static inline bool
+canonicalize_math_after_vectorization_p ()
+{
+  /* PROP_gimple_lvec marks that the vector-lowering pass has run;
+     with no current function we conservatively answer true.  */
+  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
+}
+
+/* Return true if we can still perform transformations that may introduce
+   vector operations that are not supported by the target.  Vector lowering
+   normally handles those, but after that pass, it becomes unsafe.  */
+
+static inline bool
+optimize_vectors_before_lowering_p ()
+{
+  /* The complement of canonicalize_math_after_vectorization_p: such
+     transforms are allowed until PROP_gimple_lvec is set.  */
+  return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
+}
+
+/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
+   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
+   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
+   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
+   will likely be exact, while exp (log (arg0) * arg1) might be not.
+   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */
+
+static bool
+optimize_pow_to_exp (tree arg0, tree arg1)
+{
+  gcc_assert (TREE_CODE (arg0) == REAL_CST);
+  /* A base that is not an exact integer cannot hit the exact-pow case
+     the workaround protects, so always allow the transform.  */
+  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
+    return true;
+
+  if (TREE_CODE (arg1) != SSA_NAME)
+    return true;
+
+  /* Match either arg1 = phi_res +/- cst1 (recording CODE and CST1),
+     or arg1 being the PHI result itself.  */
+  gimple *def = SSA_NAME_DEF_STMT (arg1);
+  gphi *phi = dyn_cast <gphi *> (def);
+  tree cst1 = NULL_TREE;
+  enum tree_code code = ERROR_MARK;
+  if (!phi)
+    {
+      if (!is_gimple_assign (def))
+	return true;
+      code = gimple_assign_rhs_code (def);
+      switch (code)
+	{
+	case PLUS_EXPR:
+	case MINUS_EXPR:
+	  break;
+	default:
+	  return true;
+	}
+      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
+	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
+	return true;
+
+      cst1 = gimple_assign_rhs2 (def);
+
+      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
+      if (!phi)
+	return true;
+    }
+
+  /* Collect the unique REAL_CST argument CST2 of the PHI; two distinct
+     constants mean the pattern does not apply, so allow the transform.  */
+  tree cst2 = NULL_TREE;
+  int n = gimple_phi_num_args (phi);
+  for (int i = 0; i < n; i++)
+    {
+      tree arg = PHI_ARG_DEF (phi, i);
+      if (TREE_CODE (arg) != REAL_CST)
+	continue;
+      else if (cst2 == NULL_TREE)
+	cst2 = arg;
+      else if (!operand_equal_p (cst2, arg, 0))
+	return true;
+    }
+
+  /* Refuse the transform when cst2 (combined with cst1 via CODE) is an
+     exact integer: pow (arg0, arg1) is then likely exact while the
+     exp (log (arg0) * arg1) form might not be.  */
+  if (cst1 && cst2)
+    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
+  if (cst2
+      && TREE_CODE (cst2) == REAL_CST
+      && real_isinteger (TREE_REAL_CST_PTR (cst2),
+			 TYPE_MODE (TREE_TYPE (cst2))))
+    return false;
+  return true;
+}
+
+/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
+   is another division can be optimized.  Don't optimize if INNER_DIV
+   is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */
+
+static bool
+optimize_successive_divisions_p (tree divisor, tree inner_div)
+{
+  /* Immediate-use information is only available in SSA form.  */
+  if (!gimple_in_ssa_p (cfun))
+    return false;
+
+  /* Scan all uses of INNER_DIV; any use of the form
+     INNER_DIV % DIVISOR blocks the optimization.  */
+  imm_use_iterator imm_iter;
+  use_operand_p use_p;
+  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
+    {
+      gimple *use_stmt = USE_STMT (use_p);
+      if (!is_gimple_assign (use_stmt)
+	  || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
+	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
+	continue;
+      return false;
+    }
+  return true;
+}