definitely not in a manifestly constant-evaluated
context. */
ff_mce_false = 1 << 1,
+ /* Whether we're only folding non-ODR usages of constants.
+ This happens before saving the constexpr fundef, so
+ we should do as little other folding as possible.
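+ For example, reading 'n' as an rvalue where 'constexpr int n = 42;'
+ is not an ODR use and can be folded to 42.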
+ Mutually exclusive with ff_mce_false. */
+ ff_only_non_odr = 1 << 2,
};
using fold_flags_t = int;
{
hash_set<tree> pset;
fold_flags_t flags;
- cp_fold_data (fold_flags_t flags): flags (flags) {}
+ cp_fold_data (fold_flags_t flags): flags (flags)
+ {
+ gcc_checking_assert (!(flags & ff_mce_false)
+ || !(flags & ff_only_non_odr));
+ }
};
/* Forward declarations. */
return NULL_TREE;
}
+/* A walk_tree helper to replace constant-initialized references in an
+ OMP_CLAUSE with the declaration that they refer to. Such refs
+ will have been folded out in the body by cp_fold_non_odr_use_1 and
+ so we need to follow suit to prevent confusion. */
+
+static tree
+cp_fold_omp_clause_refs_r (tree *expr_p, int *walk_subtrees, void */*data*/)
+{
+ tree expr = *expr_p;
+
+ if (TYPE_P (expr))
+ {
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
+
+ if (DECL_P (expr))
+ {
+ *walk_subtrees = 0;
+
+ if (decl_constant_var_p (expr)
+ && TYPE_REF_P (TREE_TYPE (expr)))
+ {
+ tree init = maybe_constant_value (expr);
+ if (TREE_CONSTANT (init))
+ *expr_p = tree_strip_nop_conversions (init);
+ }
+ }
+
+ return NULL_TREE;
+}
+
/* Perform any pre-gimplification folding of C++ front end trees to
GENERIC.
Note: The folding of non-omp cases is something to move into
*walk_subtrees = 0;
return NULL_TREE;
+ case OMP_CLAUSE:
+ if ((data->flags & ff_only_non_odr)
+ && omp_clause_num_ops[OMP_CLAUSE_CODE (stmt)] >= 1
+ && OMP_CLAUSE_CODE (stmt) >= OMP_CLAUSE_PRIVATE
+ && OMP_CLAUSE_CODE (stmt) <= OMP_CLAUSE__SCANTEMP_
+ && OMP_CLAUSE_DECL (stmt))
+ {
+ tree *decl = &OMP_CLAUSE_DECL (stmt);
+ cp_walk_tree (decl, cp_fold_omp_clause_refs_r, NULL, NULL);
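+ /* Folding the reference above may have left '&decl'; strip the
+ ADDR_EXPR so the clause names the underlying decl directly. */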
+ if (TREE_CODE (*decl) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (*decl, 0)))
+ *decl = TREE_OPERAND (*decl, 0);
+ data->pset.add (*decl);
+ }
+ break;
+
case IF_STMT:
if (IF_STMT_CONSTEVAL_P (stmt))
{
DECL_ESCALATION_CHECKED_P (fndecl) = true;
}
+/* Fold any non-ODR usages of constant variables in FNDECL. This occurs
+ before saving the constexpr fundef, so do as little other folding
+ as possible. */
+
+void
+cp_fold_function_non_odr_use (tree fndecl)
+{
+ cp_fold_data data (ff_only_non_odr);
+ cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
+}
+
/* We've stashed immediate-escalating functions. Now see if they indeed
ought to be promoted to consteval. */
return false;
}
+/* Fold any non-ODR usages of a constant variable in expression X.  X may
+ be the variable itself or a component, array or reference access to it. */
+
+static tree
+cp_fold_non_odr_use_1 (tree x)
+{
+ tree var = x;
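+ /* Strip component and array references, conversions and reference
+ indirections to find the underlying variable. */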
+ while (!VAR_P (var))
+ switch (TREE_CODE (var))
+ {
+ case ARRAY_REF:
+ case BIT_FIELD_REF:
+ case COMPONENT_REF:
+ case VIEW_CONVERT_EXPR:
+ CASE_CONVERT:
+ var = TREE_OPERAND (var, 0);
+ break;
+
+ case INDIRECT_REF:
+ if (REFERENCE_REF_P (var))
+ var = TREE_OPERAND (var, 0);
+ else
+ return x;
+ break;
+
+ default:
+ return x;
+ }
+
+ if (TREE_THIS_VOLATILE (var)
+ || !decl_constant_var_p (var))
+ return x;
+
+ /* We mustn't fold std::hardware_destructive_interference_size here
+ so that maybe_warn_about_constant_value can complain if it's used
+ in a manifestly constant-evaluated context. */
+ if (decl_in_std_namespace_p (var)
+ && DECL_NAME (var)
+ && id_equal (DECL_NAME (var), "hardware_destructive_interference_size"))
+ return x;
+
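+ /* Try to evaluate the whole expression; only use the result if it
+ actually folded to a constant. */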
+ tree t = maybe_constant_value (x);
+ return TREE_CONSTANT (t) ? t : x;
+}
+
/* Fold expression X which is used as an rvalue if RVAL is true. */
static tree
x = cp_fold (x, flags);
if (rval)
x = mark_rvalue_use (x);
- if (rval && DECL_P (x)
- && !TYPE_REF_P (TREE_TYPE (x)))
+ if (rval && (flags & ff_only_non_odr))
+ {
+ tree v = cp_fold_non_odr_use_1 (x);
+ if (v != x)
+ {
+ x = v;
+ continue;
+ }
+ }
+ else if (rval && DECL_P (x)
+ && !TYPE_REF_P (TREE_TYPE (x)))
{
tree v = decl_constant_value (x);
if (v != x && v != error_mark_node)
return cp_fold_rvalue (x, ff_none);
}
+/* Fold any non-ODR usages of constant variables in expression X,
+ which is used as an rvalue if RVAL is true. */
+
+tree
+cp_fold_non_odr_use (tree x, bool rval)
+{
+ return cp_fold_maybe_rvalue (x, rval, ff_only_non_odr);
+}
+
/* Perform folding on expression X. */
static tree
return cp_fold_maybe_rvalue (x, !lval);
}
-static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
+static GTY((deletable)) hash_map<tree, tree> *fold_caches[3];
/* Subroutine of cp_fold. Returns which fold cache to use according
to the given flags. We need multiple caches since the result of
get_fold_cache (fold_flags_t flags)
{
if (flags & ff_mce_false)
+ return fold_caches[2];
+ else if (flags & ff_only_non_odr)
return fold_caches[1];
else
return fold_caches[0];
/* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
effects. */
r = cp_fold (TREE_OPERAND (x, 0), flags);
- if (!TREE_SIDE_EFFECTS (r))
+ if (!TREE_SIDE_EFFECTS (r) && !(flags & ff_only_non_odr))
x = r;
break;
if (op0 == error_mark_node)
x = error_mark_node;
+ else if (flags & ff_only_non_odr)
+ {
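+ /* Just substitute the folded operand and keep the conversion as
+ written; further simplification is left to the later full fold. */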
+ if (op0 != TREE_OPERAND (x, 0))
+ x = build1_loc (loc, code, TREE_TYPE (x), op0);
+ if (code == NOP_EXPR)
+ REINTERPRET_CAST_P (x) = REINTERPRET_CAST_P (org_x);
+ }
else if (code == CONVERT_EXPR
&& SCALAR_TYPE_P (TREE_TYPE (x))
&& op0 != void_node)
case EXCESS_PRECISION_EXPR:
op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
- x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
+ if (op0 == error_mark_node)
+ x = error_mark_node;
+ else if (flags & ff_only_non_odr)
+ {
+ if (op0 != TREE_OPERAND (x, 0))
+ x = build1_loc (EXPR_LOCATION (x), code, TREE_TYPE (x), op0);
+ }
+ else
+ x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
break;
case INDIRECT_REF:
if (p != x)
return cp_fold (p, flags);
}
+ /* When folding non-ODR usages of constants, we also want to
+ replace any constant-initialized references with what they
+ bind to, even when used as lvalues. */
+ if ((flags & ff_only_non_odr) && REFERENCE_REF_P (x))
+ {
+ tree r = cp_fold_non_odr_use_1 (x);
+ if (r != x)
+ return convert_from_reference (cp_fold (r, flags));
+ }
goto unary;
case ADDR_EXPR:
/* Cope with user tricks that amount to offsetof. */
if (op0 != error_mark_node
- && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
+ && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0))
+ && !(flags & ff_only_non_odr))
{
tree val = get_base_address (op0);
if (val
x = error_mark_node;
else if (op0 != TREE_OPERAND (x, 0))
{
- x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
+ if (flags & ff_only_non_odr)
+ x = build1_loc (loc, code, TREE_TYPE (x), op0);
+ else
+ x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
if (code == INDIRECT_REF
&& (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
{
TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
}
}
- else
+ else if (!(flags & ff_only_non_odr))
x = fold (x);
gcc_assert (TREE_CODE (x) != COND_EXPR
op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
if (op0 == error_mark_node)
x = error_mark_node;
+ else if (flags & ff_only_non_odr)
+ {
+ if (op0 != TREE_OPERAND (x, 0))
+ x = build1_loc (EXPR_LOCATION (x), code, TREE_TYPE (x), op0);
+ }
else
x = fold_convert (TREE_TYPE (x), op0);
break;
if (clear_decl_read)
DECL_READ_P (op0) = 0;
+ if (flags & ff_only_non_odr)
+ {
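+ /* Likewise, just rebuild with the folded operands and leave any
+ simplification of the operation to the later full fold. */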
+ if (op0 == error_mark_node || op1 == error_mark_node)
+ x = error_mark_node;
+ else if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
+ x = build2_loc (loc, code, TREE_TYPE (x), op0, op1);
+ break;
+ }
+
/* decltype(nullptr) has only one value, so optimize away all comparisons
with that type right away, keeping them in the IL causes troubles for
various optimizations. */
op1 = cp_fold (TREE_OPERAND (x, 1), flags);
op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+ if (flags & ff_only_non_odr)
+ {
+ if (op0 == error_mark_node
+ || op1 == error_mark_node
+ || op2 == error_mark_node)
+ x = error_mark_node;
+ else if (op0 != TREE_OPERAND (x, 0)
+ || op1 != TREE_OPERAND (x, 1)
+ || op2 != TREE_OPERAND (x, 2))
+ x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
+ break;
+ }
+
if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
{
warning_sentinel s (warn_int_in_bool_context);
&& DECL_DECLARED_CONSTEXPR_P (current_function_decl))
nw = 1;
- if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
+ if (callee && !(flags & ff_only_non_odr)
+ && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
{
iloc_sentinel ils (EXPR_LOCATION (x));
switch (DECL_FE_FUNCTION_CODE (callee))
break;
}
- if (callee
- && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
- BUILT_IN_FRONTEND))
- {
- x = fold_builtin_source_location (x);
- break;
- }
-
bool changed = false;
int m = call_expr_nargs (x);
for (int i = 0; i < m; i++)
changed = true;
}
}
- if (x == error_mark_node)
+ /* Don't try to fold the call itself if we're only folding
+ non-ODR-used variables. */
+ if (x == error_mark_node || (flags & ff_only_non_odr))
break;
optimize = nw;
TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
}
- x = fold (x);
+ if (!(flags & ff_only_non_odr))
+ x = fold (x);
break;
case SAVE_EXPR: