constexpr functions. These routines are used both during actual parsing
and during the instantiation of template functions.
- Copyright (C) 1998-2019 Free Software Foundation, Inc.
+ Copyright (C) 1998-2020 Free Software Foundation, Inc.
This file is part of GCC.
static bool
cx_check_missing_mem_inits (tree ctype, tree body, bool complain)
{
+ /* We allow uninitialized bases/fields in C++20. */
+ if (cxx_dialect >= cxx2a)
+ return false;
+
unsigned nelts = 0;
if (body)
continue;
if (ANON_AGGR_TYPE_P (TREE_TYPE (field)))
{
- /* Recurse to check the anonummous aggregate member. */
+ /* Recurse to check the anonymous aggregate member. */
bad |= cx_check_missing_mem_inits
(TREE_TYPE (field), NULL_TREE, complain);
if (bad && !complain)
return NULL;
}
- if (!potential_rvalue_constant_expression (massaged))
- {
- if (!DECL_GENERATED_P (fun))
- require_potential_rvalue_constant_expression (massaged);
- return NULL;
- }
+ bool potential = potential_rvalue_constant_expression (massaged);
+ if (!potential && !DECL_GENERATED_P (fun))
+ require_potential_rvalue_constant_expression (massaged);
if (DECL_CONSTRUCTOR_P (fun)
&& cx_check_missing_mem_inits (DECL_CONTEXT (fun),
massaged, !DECL_GENERATED_P (fun)))
+ potential = false;
+
+ if (!potential && !DECL_GENERATED_P (fun))
return NULL;
/* Create the constexpr function table if necessary. */
if (clear_ctx)
DECL_CONTEXT (DECL_RESULT (fun)) = NULL_TREE;
+ if (!potential)
+ /* For a template instantiation, we want to remember the pre-generic body
+ for explain_invalid_constexpr_fn, but do tell cxx_eval_call_expression
+ that it doesn't need to bother trying to expand the function. */
+ entry.result = error_mark_node;
+
gcc_assert (*slot == NULL);
*slot = ggc_alloc<constexpr_fundef> ();
**slot = entry;
if (!DECL_DEFAULTED_FN (fun)
&& !LAMBDA_TYPE_P (CP_DECL_CONTEXT (fun))
&& !is_instantiation_of_constexpr (fun))
- return;
+ {
+ inform (DECL_SOURCE_LOCATION (fun), "%qD declared here", fun);
+ return;
+ }
if (diagnosed == NULL)
diagnosed = new hash_set<tree>;
if (diagnosed->add (fun))
{
/* Then if it's OK, the body. */
if (!DECL_DECLARED_CONSTEXPR_P (fun)
- && !LAMBDA_TYPE_P (CP_DECL_CONTEXT (fun)))
+ && DECL_DEFAULTED_FN (fun))
explain_implicit_non_constexpr (fun);
else
{
- body = massage_constexpr_body (fun, DECL_SAVED_TREE (fun));
+ if (constexpr_fundef *fd = retrieve_constexpr_fundef (fun))
+ body = fd->body;
+ else
+ body = DECL_SAVED_TREE (fun);
+ body = massage_constexpr_body (fun, body);
require_potential_rvalue_constant_expression (body);
if (DECL_CONSTRUCTOR_P (fun))
cx_check_missing_mem_inits (DECL_CONTEXT (fun), body, true);
/* Heap VAR_DECLs created during the evaluation of the outermost constant
expression. */
auto_vec<tree, 16> heap_vars;
+ /* Cleanups that need to be evaluated at the end of CLEANUP_POINT_EXPR. */
+ vec<tree> *cleanups;
/* Constructor. */
- constexpr_global_ctx () : constexpr_ops_count (0) {}
+ constexpr_global_ctx () : constexpr_ops_count (0), cleanups (NULL) {}
};
/* The constexpr expansion context. CALL is the current function
constexpr_global_ctx *global;
/* The innermost call we're evaluating. */
constexpr_call *call;
- /* SAVE_EXPRs that we've seen within the current LOOP_EXPR. NULL if we
- aren't inside a loop. */
+ /* SAVE_EXPRs and TARGET_EXPR_SLOT vars of TARGET_EXPRs that we've seen
+ within the current LOOP_EXPR. NULL if we aren't inside a loop. */
vec<tree> *save_exprs;
/* The CONSTRUCTOR we're currently building up for an aggregate
initializer. */
return boolean_true_node;
}
+ if (fndecl_built_in_p (fun, CP_BUILT_IN_SOURCE_LOCATION, BUILT_IN_FRONTEND))
+ return fold_builtin_source_location (EXPR_LOCATION (t));
+
/* Be permissive for arguments to built-ins; __builtin_constant_p should
return constant false for a non-constant argument. */
constexpr_ctx new_ctx = *ctx;
}
bool save_ffbcp = force_folding_builtin_constant_p;
- force_folding_builtin_constant_p = true;
+ force_folding_builtin_constant_p |= ctx->manifestly_const_eval;
tree save_cur_fn = current_function_decl;
/* Return name of ctx->call->fundef->decl for __builtin_FUNCTION (). */
if (fndecl_built_in_p (fun, BUILT_IN_FUNCTION)
if (!*non_constant_p)
{
- /* Unsharing here isn't necessary for correctness, but it
- significantly improves memory performance for some reason. */
- arg = unshare_constructor (arg);
/* Make sure the binding has the same type as the parm. But
only for constant args. */
if (!TYPE_REF_P (type))
arg = adjust_temp_type (type, arg);
if (!TREE_CONSTANT (arg))
*non_constant_args = true;
+ /* For virtual calls, adjust the this argument, so that it is
+ the object on which the method is called, rather than
+ one of its bases. */
+ if (i == 0 && DECL_VIRTUAL_P (fun))
+ {
+ tree addr = arg;
+ STRIP_NOPS (addr);
+ if (TREE_CODE (addr) == ADDR_EXPR)
+ {
+ tree obj = TREE_OPERAND (addr, 0);
+ while (TREE_CODE (obj) == COMPONENT_REF
+ && DECL_FIELD_IS_BASE (TREE_OPERAND (obj, 1))
+ && !same_type_ignoring_top_level_qualifiers_p
+ (TREE_TYPE (obj), DECL_CONTEXT (fun)))
+ obj = TREE_OPERAND (obj, 0);
+ if (obj != TREE_OPERAND (addr, 0))
+ arg = build_fold_addr_expr_with_type (obj,
+ TREE_TYPE (arg));
+ }
+ }
TREE_VEC_ELT (binds, i) = arg;
}
parms = TREE_CHAIN (parms);
{
return (cxx_dialect >= cxx2a
&& IDENTIFIER_NEWDEL_OP_P (DECL_NAME (fndecl))
+ && CP_DECL_CONTEXT (fndecl) == global_namespace
+ && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fndecl)
+ || DECL_IS_OPERATOR_DELETE_P (fndecl)));
+}
+
+/* Return true if FNDECL is a placement new function that should be
+   usable during constant expression evaluation of std::construct_at.  */
+
+static inline bool
+cxx_placement_new_fn (tree fndecl)
+{
+  if (cxx_dialect >= cxx2a
+      && IDENTIFIER_NEW_OP_P (DECL_NAME (fndecl))
+      && CP_DECL_CONTEXT (fndecl) == global_namespace
+      && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fndecl)
+      && TREE_CODE (TREE_TYPE (fndecl)) == FUNCTION_TYPE)
+    {
+      /* Require the exact signature "operator new (size_t, void *)":
+	 skip the leading size parameter, then the remaining parameter
+	 list must be exactly one void* followed by the end-of-list.  */
+      tree first_arg = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
+      if (TREE_VALUE (first_arg) == ptr_type_node
+	  && TREE_CHAIN (first_arg) == void_list_node)
+	return true;
+    }
+  return false;
+}
+
+/* Return true if FNDECL is std::construct_at. */
+
+static inline bool
+is_std_construct_at (tree fndecl)
+{
+  /* The function must live in namespace std and be named
+     "construct_at".  */
+  if (decl_in_std_namespace_p (fndecl))
+    {
+      tree id = DECL_NAME (fndecl);
+      if (id != NULL_TREE && id_equal (id, "construct_at"))
+	return true;
+    }
+  return false;
+}
+
+/* Return true if FNDECL is std::allocator<T>::{,de}allocate. */
+
+static inline bool
+is_std_allocator_allocate (tree fndecl)
+{
+  /* The member function must be named allocate or deallocate.  */
+  tree name = DECL_NAME (fndecl);
+  if (name == NULL_TREE
+      || !(id_equal (name, "allocate") || id_equal (name, "deallocate")))
+    return false;
+
+  /* It must be a member of a class type.  */
+  tree ctx = DECL_CONTEXT (fndecl);
+  if (ctx == NULL_TREE || !CLASS_TYPE_P (ctx) || !TYPE_MAIN_DECL (ctx))
+    return false;
+
+  /* The enclosing class must be named allocator ...  */
+  tree decl = TYPE_MAIN_DECL (ctx);
+  name = DECL_NAME (decl);
+  if (name == NULL_TREE || !id_equal (name, "allocator"))
+    return false;
+
+  /* ... and be declared in namespace std.  */
+  return decl_in_std_namespace_p (decl);
+}
+
+/* Return true if FNDECL is __dynamic_cast. */
+
+static inline bool
+cxx_dynamic_cast_fn_p (tree fndecl)
+{
+  /* Only recognized in C++20, where P1327R1 permits dynamic_cast during
+     constant evaluation; the runtime helper lives at global scope.  */
+  return (cxx_dialect >= cxx2a
+	  && id_equal (DECL_NAME (fndecl), "__dynamic_cast")
 	  && CP_DECL_CONTEXT (fndecl) == global_namespace);
 }
+/* Often, we have an expression in the form of address + offset, e.g.
+ "&_ZTV1A + 16". Extract the object from it, i.e. "_ZTV1A". */
+
+static tree
+extract_obj_from_addr_offset (tree expr)
+{
+  tree t = expr;
+  /* Strip a possible "+ offset" wrapper.  */
+  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
+    t = TREE_OPERAND (t, 0);
+  /* Look through conversions, then through the address-of to get the
+     underlying object.  */
+  STRIP_NOPS (t);
+  if (TREE_CODE (t) == ADDR_EXPR)
+    t = TREE_OPERAND (t, 0);
+  return t;
+}
+
+/* Given a PATH like
+
+ g.D.2181.D.2154.D.2102.D.2093
+
+ find a component with type TYPE. Return NULL_TREE if not found, and
+ error_mark_node if the component is not accessible. If STOP is non-null,
+ this function will return NULL_TREE if STOP is found before TYPE. */
+
+static tree
+get_component_with_type (tree path, tree type, tree stop)
+{
+  while (true)
+    {
+      if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (path), type))
+	/* Found it.  */
+	return path;
+      else if (stop
+	       && (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (path),
+							      stop)))
+	/* Hit STOP before finding TYPE: give up.  */
+	return NULL_TREE;
+      else if (TREE_CODE (path) == COMPONENT_REF
+	       && DECL_FIELD_IS_BASE (TREE_OPERAND (path, 1)))
+	{
+	  /* We need to check that the component we're accessing is in fact
+	     accessible.  */
+	  if (TREE_PRIVATE (TREE_OPERAND (path, 1))
+	      || TREE_PROTECTED (TREE_OPERAND (path, 1)))
+	    return error_mark_node;
+	  /* Step outward from a base subobject to its containing object
+	     and keep looking.  */
+	  path = TREE_OPERAND (path, 0);
+	}
+      else
+	/* Not a base-field reference: TYPE cannot appear further up.  */
+	return NULL_TREE;
+    }
+}
+
+/* Evaluate a call to __dynamic_cast (permitted by P1327R1).
+
+ The declaration of __dynamic_cast is:
+
+ void* __dynamic_cast (const void* __src_ptr,
+ const __class_type_info* __src_type,
+ const __class_type_info* __dst_type,
+ ptrdiff_t __src2dst);
+
+ where src2dst has the following possible values
+
+ >-1: src_type is a unique public non-virtual base of dst_type
+ dst_ptr + src2dst == src_ptr
+ -1: unspecified relationship
+ -2: src_type is not a public base of dst_type
+ -3: src_type is a multiple public non-virtual base of dst_type
+
+ Since literal types can't have virtual bases, we only expect hint >=0,
+ -2, or -3. */
+
+static tree
+cxx_eval_dynamic_cast_fn (const constexpr_ctx *ctx, tree call,
+			  bool *non_constant_p, bool *overflow_p)
+{
+  /* T will be something like
+      __dynamic_cast ((B*) b, &_ZTI1B, &_ZTI1D, 8)
+     dismantle it.  */
+  gcc_assert (call_expr_nargs (call) == 4);
+  tsubst_flags_t complain = ctx->quiet ? tf_none : tf_warning_or_error;
+  tree obj = CALL_EXPR_ARG (call, 0);
+  tree type = CALL_EXPR_ARG (call, 2);
+  HOST_WIDE_INT hint = int_cst_value (CALL_EXPR_ARG (call, 3));
+  location_t loc = cp_expr_loc_or_input_loc (call);
+
+  /* Get the target type of the dynamic_cast.  The argument is the address
+     of a typeinfo VAR_DECL; NOTE(review): this relies on the DECL_NAME of
+     a tinfo decl carrying the described type in its TREE_TYPE.  */
+  gcc_assert (TREE_CODE (type) == ADDR_EXPR);
+  type = TREE_OPERAND (type, 0);
+  type = TREE_TYPE (DECL_NAME (type));
+
+  /* TYPE can only be either T* or T&.  We can't know which of these it
+     is by looking at TYPE, but OBJ will be "(T*) x" in the first case,
+     and something like "(T*)(T&)(T*) x" in the second case.  */
+  bool reference_p = false;
+  while (CONVERT_EXPR_P (obj) || TREE_CODE (obj) == SAVE_EXPR)
+    {
+      reference_p |= TYPE_REF_P (TREE_TYPE (obj));
+      obj = TREE_OPERAND (obj, 0);
+    }
+
+  /* Evaluate the object so that we know its dynamic type.  */
+  obj = cxx_eval_constant_expression (ctx, obj, /*lval*/false, non_constant_p,
+				      overflow_p);
+  if (*non_constant_p)
+    return call;
+
+  /* We expect OBJ to be in form of &d.D.2102 when HINT == 0,
+     but when HINT is > 0, it can also be something like
+     &d.D.2102 + 18446744073709551608, which includes the BINFO_OFFSET.  */
+  obj = extract_obj_from_addr_offset (obj);
+  const tree objtype = TREE_TYPE (obj);
+  /* If OBJ doesn't refer to a base field, we're done.
+     NOTE(review): integer_zero_node here stands for a null pointer
+     result.  */
+  if (tree t = (TREE_CODE (obj) == COMPONENT_REF
+		? TREE_OPERAND (obj, 1) : obj))
+    if (TREE_CODE (t) != FIELD_DECL || !DECL_FIELD_IS_BASE (t))
+      return integer_zero_node;
+
+  /* [class.cdtor] When a dynamic_cast is used in a constructor ...
+     or in a destructor ... if the operand of the dynamic_cast refers
+     to the object under construction or destruction, this object is
+     considered to be a most derived object that has the type of the
+     constructor or destructor's class.  */
+  tree vtable = build_vfield_ref (obj, TREE_TYPE (obj));
+  vtable = cxx_eval_constant_expression (ctx, vtable, /*lval*/false,
+					 non_constant_p, overflow_p);
+  if (*non_constant_p)
+    return call;
+  /* VTABLE will be &_ZTV1A + 16 or similar, get _ZTV1A.  */
+  vtable = extract_obj_from_addr_offset (vtable);
+  /* The vtable VAR_DECL's context identifies the most derived type.  */
+  const tree mdtype = DECL_CONTEXT (vtable);
+
+  /* Given dynamic_cast<T>(v),
+
+     [expr.dynamic.cast] If C is the class type to which T points or refers,
+     the runtime check logically executes as follows:
+
+     If, in the most derived object pointed (referred) to by v, v points
+     (refers) to a public base class subobject of a C object, and if only
+     one object of type C is derived from the subobject pointed (referred)
+     to by v the result points (refers) to that C object.
+
+     In this case, HINT >= 0 or -3.  */
+  if (hint >= 0 || hint == -3)
+    {
+      /* Look for a component with type TYPE.  */
+      tree t = get_component_with_type (obj, type, mdtype);
+      /* If not accessible, give an error.  */
+      if (t == error_mark_node)
+	{
+	  /* A failed reference dynamic_cast would throw at runtime, so it
+	     is non-constant; a failed pointer cast just yields null.  */
+	  if (reference_p)
+	    {
+	      if (!ctx->quiet)
+		{
+		  error_at (loc, "reference %<dynamic_cast%> failed");
+		  inform (loc, "static type %qT of its operand is a "
+			  "non-public base class of dynamic type %qT",
+			  objtype, type);
+
+		}
+	      *non_constant_p = true;
+	    }
+	  return integer_zero_node;
+	}
+      else if (t)
+	/* The result points to the TYPE object.  */
+	return cp_build_addr_expr (t, complain);
+      /* Else, TYPE was not found, because the HINT turned out to be wrong.
+	 Fall through to the normal processing.  */
+    }
+
+  /* Otherwise, if v points (refers) to a public base class subobject of the
+     most derived object, and the type of the most derived object has a base
+     class, of type C, that is unambiguous and public, the result points
+     (refers) to the C subobject of the most derived object.
+
+     But it can also be an invalid case.  */
+
+  /* Get the most derived object.  */
+  obj = get_component_with_type (obj, mdtype, NULL_TREE);
+  if (obj == error_mark_node)
+    {
+      if (reference_p)
+	{
+	  if (!ctx->quiet)
+	    {
+	      error_at (loc, "reference %<dynamic_cast%> failed");
+	      inform (loc, "static type %qT of its operand is a non-public"
+		      " base class of dynamic type %qT", objtype, mdtype);
+	    }
+	  *non_constant_p = true;
+	}
+      return integer_zero_node;
+    }
+  else
+    gcc_assert (obj);
+
+  /* Check that the type of the most derived object has a base class
+     of type TYPE that is unambiguous and public.  */
+  base_kind b_kind;
+  tree binfo = lookup_base (mdtype, type, ba_check, &b_kind, tf_none);
+  if (!binfo || binfo == error_mark_node)
+    {
+      if (reference_p)
+	{
+	  if (!ctx->quiet)
+	    {
+	      error_at (loc, "reference %<dynamic_cast%> failed");
+	      if (b_kind == bk_ambig)
+		inform (loc, "%qT is an ambiguous base class of dynamic "
+			"type %qT of its operand", type, mdtype);
+	      else
+		inform (loc, "dynamic type %qT of its operand does not "
+			"have an unambiguous public base class %qT",
+			mdtype, type);
+	    }
+	  *non_constant_p = true;
+	}
+      return integer_zero_node;
+    }
+  /* If so, return the TYPE subobject of the most derived object.  */
+  obj = convert_to_base_statically (obj, binfo);
+  return cp_build_addr_expr (obj, complain);
+}
+
/* Subroutine of cxx_eval_constant_expression.
Evaluate the call expression tree T in the context of OLD_CALL expression
evaluation. */
bool lval,
bool *non_constant_p, bool *overflow_p)
{
+ /* Handle concept checks separately. */
+ if (concept_check_p (t))
+ return evaluate_concept_check (t, tf_warning_or_error);
+
location_t loc = cp_expr_loc_or_input_loc (t);
tree fun = get_function_named_in_call (t);
constexpr_call new_call
lval, non_constant_p, overflow_p);
if (!DECL_DECLARED_CONSTEXPR_P (fun))
{
- if (cxx_replaceable_global_alloc_fn (fun))
+ if (TREE_CODE (t) == CALL_EXPR
+ && cxx_replaceable_global_alloc_fn (fun)
+ && (CALL_FROM_NEW_OR_DELETE_P (t)
+ || (ctx->call
+ && ctx->call->fundef
+ && is_std_allocator_allocate (ctx->call->fundef->decl))))
{
const int nargs = call_expr_nargs (t);
tree arg0 = NULL_TREE;
return t;
}
}
+ /* Allow placement new in std::construct_at, just return the second
+ argument. */
+ if (TREE_CODE (t) == CALL_EXPR
+ && cxx_placement_new_fn (fun)
+ && ctx->call
+ && ctx->call->fundef
+ && is_std_construct_at (ctx->call->fundef->decl))
+ {
+ const int nargs = call_expr_nargs (t);
+ tree arg1 = NULL_TREE;
+ for (int i = 0; i < nargs; ++i)
+ {
+ tree arg = CALL_EXPR_ARG (t, i);
+ arg = cxx_eval_constant_expression (ctx, arg, false,
+ non_constant_p, overflow_p);
+ VERIFY_CONSTANT (arg);
+ if (i == 1)
+ arg1 = arg;
+ }
+ gcc_assert (arg1);
+ return arg1;
+ }
+ else if (cxx_dynamic_cast_fn_p (fun))
+ return cxx_eval_dynamic_cast_fn (ctx, t, non_constant_p, overflow_p);
+
if (!ctx->quiet)
{
if (!lambda_static_thunk_p (fun))
{
new_call.fundef = retrieve_constexpr_fundef (fun);
if (new_call.fundef == NULL || new_call.fundef->body == NULL
+ || new_call.fundef->result == error_mark_node
|| fun == current_function_decl)
{
if (!ctx->quiet)
this function exits. */
 class free_bindings
 {
+  /* Pointer to the caller's binding TREE_VEC; NULL once preserve () has
+     been called, disarming the destructor.  */
+  tree *bindings;
 public:
-  tree &bindings;
-  bool do_free;
-  free_bindings (tree &b): bindings (b), do_free(true) { }
-  void preserve () { do_free = false; }
-  ~free_bindings () {
-    if (do_free)
-      {
-	for (int i = 0; i < TREE_VEC_LENGTH (bindings); ++i)
-	  free_constructor (TREE_VEC_ELT (bindings, i));
-	ggc_free (bindings);
-      }
-  }
+  /* RAII helper: frees the binding vector on scope exit unless preserve ()
+     was called first (e.g. because the bindings were kept elsewhere).  */
+  free_bindings (tree &b): bindings (&b) { }
+  ~free_bindings () { if (bindings) ggc_free (*bindings); }
+  void preserve () { bindings = NULL; }
 } fb (new_call.bindings);
if (*non_constant_p)
for (int i = 0; i < TREE_VEC_LENGTH (bound); ++i)
{
tree arg = TREE_VEC_ELT (bound, i);
- /* Don't share a CONSTRUCTOR that might be changed. */
+ if (entry)
+ {
+ /* Unshare args going into the hash table to separate them
+ from the caller's context, for better GC and to avoid
+ problems with verify_gimple. */
+ arg = unshare_expr_without_location (arg);
+ TREE_VEC_ELT (bound, i) = arg;
+ }
+ /* Don't share a CONSTRUCTOR that might be changed. This is not
+ redundant with the unshare just above; we also don't want to
+ change the argument values in the hash table. XXX Could we
+ unshare lazily in cxx_eval_store_expression? */
arg = unshare_constructor (arg);
if (TREE_CODE (arg) == CONSTRUCTOR)
vec_safe_push (ctors, arg);
else
ctx->global->values.put (res, NULL_TREE);
- /* Track the callee's evaluated SAVE_EXPRs so that we can forget
- their values after the call. */
+ /* Track the callee's evaluated SAVE_EXPRs and TARGET_EXPRs so that
+ we can forget their values after the call. */
constexpr_ctx ctx_with_save_exprs = *ctx;
auto_vec<tree, 10> save_exprs;
ctx_with_save_exprs.save_exprs = &save_exprs;
TREE_READONLY (e) = true;
}
- /* Forget the saved values of the callee's SAVE_EXPRs. */
+ /* Forget the saved values of the callee's SAVE_EXPRs and
+ TARGET_EXPRs. */
unsigned int i;
tree save_expr;
FOR_EACH_VEC_ELT (save_exprs, i, save_expr)
entry->result = result;
}
- /* The result of a constexpr function must be completely initialized. */
- if (TREE_CODE (result) == CONSTRUCTOR)
+ /* The result of a constexpr function must be completely initialized.
+
+ However, in C++20, a constexpr constructor doesn't necessarily have
+ to initialize all the fields, so we don't clear CONSTRUCTOR_NO_CLEARING
+     in order to detect reading an uninitialized object in constexpr instead
+ of value-initializing it. (reduced_constant_expression_p is expected to
+ take care of clearing the flag.) */
+ if (TREE_CODE (result) == CONSTRUCTOR
+ && (cxx_dialect < cxx2a
+ || !DECL_CONSTRUCTOR_P (fun)))
clear_no_implicit_zero (result);
pop_cx_call_context ();
return result;
}
-/* FIXME speed this up, it's taking 16% of compile time on sieve testcase. */
+/* Return true if T is a valid constant initializer. If a CONSTRUCTOR
+ initializes all the members, the CONSTRUCTOR_NO_CLEARING flag will be
+ cleared.
+ FIXME speed this up, it's taking 16% of compile time on sieve testcase. */
bool
reduced_constant_expression_p (tree t)
if (TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
/* An initialized vector would have a VECTOR_CST. */
return false;
+ else if (cxx_dialect >= cxx2a
+ /* An ARRAY_TYPE doesn't have any TYPE_FIELDS. */
+ && (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
+ /* A union only initializes one member. */
+ || TREE_CODE (TREE_TYPE (t)) == UNION_TYPE))
+ field = NULL_TREE;
else
field = next_initializable_field (TYPE_FIELDS (TREE_TYPE (t)));
}
return false;
if (field)
{
- if (idx != field)
- return false;
+ /* Empty class field may or may not have an initializer. */
+ for (; idx != field;
+ field = next_initializable_field (DECL_CHAIN (field)))
+ if (!is_really_empty_class (TREE_TYPE (field),
+ /*ignore_vptr*/false))
+ return false;
field = next_initializable_field (DECL_CHAIN (field));
}
}
- if (field)
- return false;
- else if (CONSTRUCTOR_NO_CLEARING (t))
+ /* There could be a non-empty field at the end. */
+ for (; field; field = next_initializable_field (DECL_CHAIN (field)))
+ if (!is_really_empty_class (TREE_TYPE (field), /*ignore_vptr*/false))
+ return false;
+ if (CONSTRUCTOR_NO_CLEARING (t))
/* All the fields are initialized. */
CONSTRUCTOR_NO_CLEARING (t) = false;
return true;
else if (code == POINTER_PLUS_EXPR)
r = cxx_fold_pointer_plus_expression (ctx, t, lhs, rhs, non_constant_p,
overflow_p);
+ else if (code == SPACESHIP_EXPR)
+ {
+ r = genericize_spaceship (type, lhs, rhs);
+ r = cxx_eval_constant_expression (ctx, r, false, non_constant_p,
+ overflow_p);
+ }
if (r == NULL_TREE)
r = fold_binary_loc (loc, code, type, lhs, rhs);
cxx_eval_constant_expression. */
static void
-non_const_var_error (tree r)
+non_const_var_error (location_t loc, tree r)
{
auto_diagnostic_group d;
tree type = TREE_TYPE (r);
if (DECL_NAME (r) == heap_uninit_identifier
|| DECL_NAME (r) == heap_identifier)
{
- error ("the content of uninitialized storage is not usable "
- "in a constant expression");
+ error_at (loc, "the content of uninitialized storage is not usable "
+ "in a constant expression");
inform (DECL_SOURCE_LOCATION (r), "allocated here");
return;
}
if (DECL_NAME (r) == heap_deleted_identifier)
{
- error ("use of allocated storage after deallocation in a "
- "constant expression");
+ error_at (loc, "use of allocated storage after deallocation in a "
+ "constant expression");
inform (DECL_SOURCE_LOCATION (r), "allocated here");
return;
}
- error ("the value of %qD is not usable in a constant "
- "expression", r);
+ error_at (loc, "the value of %qD is not usable in a constant "
+ "expression", r);
/* Avoid error cascade. */
if (DECL_INITIAL (r) == error_mark_node)
return;
tree elt = TREE_OPERAND (probe, 1);
if (TREE_CODE (elt) == FIELD_DECL && DECL_MUTABLE_P (elt))
mutable_p = true;
- if (evaluated
- && modifying_const_object_p (TREE_CODE (t), probe, mutable_p)
- && const_object_being_modified == NULL_TREE)
- const_object_being_modified = probe;
if (TREE_CODE (probe) == ARRAY_REF)
{
elt = eval_and_check_array_index (ctx, probe, false,
if (*non_constant_p)
return t;
}
+ /* We don't check modifying_const_object_p for ARRAY_REFs. Given
+ "int a[10]", an ARRAY_REF "a[2]" can be "const int", even though
+ the array isn't const. Instead, check "a" in the next iteration;
+ that will detect modifying "const int a[10]". */
+ else if (evaluated
+ && modifying_const_object_p (TREE_CODE (t), probe,
+ mutable_p)
+ && const_object_being_modified == NULL_TREE)
+ const_object_being_modified = probe;
vec_safe_push (refs, elt);
vec_safe_push (refs, TREE_TYPE (probe));
probe = ob;
gcc_assert (*jump_target);
}
- /* Forget saved values of SAVE_EXPRs. */
+ /* Forget saved values of SAVE_EXPRs and TARGET_EXPRs. */
unsigned int i;
tree save_expr;
FOR_EACH_VEC_ELT (save_exprs, i, save_expr)
&& (!switches (jump_target) || count == 0)
&& !*non_constant_p);
- /* Forget saved values of SAVE_EXPRs. */
+ /* Forget saved values of SAVE_EXPRs and TARGET_EXPRs. */
unsigned int i;
tree save_expr;
FOR_EACH_VEC_ELT (save_exprs, i, save_expr)
return t;
}
+ location_t loc = cp_expr_loc_or_input_loc (t);
+
STRIP_ANY_LOCATION_WRAPPER (t);
if (CONSTANT_CLASS_P (t))
if (++ctx->global->constexpr_ops_count >= constexpr_ops_limit)
{
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
+ error_at (loc,
"%<constexpr%> evaluation operation count exceeds limit of "
"%wd (use %<-fconstexpr-ops-limit=%> to increase the limit)",
constexpr_ops_limit);
if (DECL_P (r))
{
if (!ctx->quiet)
- non_const_var_error (r);
+ non_const_var_error (loc, r);
*non_constant_p = true;
}
break;
*non_constant_p = true;
break;
}
+ /* Avoid evaluating a TARGET_EXPR more than once. */
+ if (tree *p = ctx->global->values.get (TARGET_EXPR_SLOT (t)))
+ {
+ if (lval)
+ return TARGET_EXPR_SLOT (t);
+ r = *p;
+ break;
+ }
if ((AGGREGATE_TYPE_P (TREE_TYPE (t)) || VECTOR_TYPE_P (TREE_TYPE (t))))
{
/* We're being expanded without an explicit target, so start
if (!*non_constant_p)
/* Adjust the type of the result to the type of the temporary. */
r = adjust_temp_type (TREE_TYPE (t), r);
+ if (TARGET_EXPR_CLEANUP (t) && !CLEANUP_EH_ONLY (t))
+ ctx->global->cleanups->safe_push (TARGET_EXPR_CLEANUP (t));
+ r = unshare_constructor (r);
+ ctx->global->values.put (TARGET_EXPR_SLOT (t), r);
+ if (ctx->save_exprs)
+ ctx->save_exprs->safe_push (TARGET_EXPR_SLOT (t));
if (lval)
- {
- tree slot = TARGET_EXPR_SLOT (t);
- r = unshare_constructor (r);
- ctx->global->values.put (slot, r);
- return slot;
- }
+ return TARGET_EXPR_SLOT (t);
break;
case INIT_EXPR:
}
break;
- case NON_LVALUE_EXPR:
case TRY_CATCH_EXPR:
+ if (TREE_OPERAND (t, 0) == NULL_TREE)
+ {
+ r = void_node;
+ break;
+ }
+ /* FALLTHRU */
+ case NON_LVALUE_EXPR:
case TRY_BLOCK:
- case CLEANUP_POINT_EXPR:
case MUST_NOT_THROW_EXPR:
case EXPR_STMT:
case EH_SPEC_BLOCK:
jump_target);
break;
+ case CLEANUP_POINT_EXPR:
+ {
+ auto_vec<tree, 2> cleanups;
+ vec<tree> *prev_cleanups = ctx->global->cleanups;
+ ctx->global->cleanups = &cleanups;
+ r = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 0),
+ lval,
+ non_constant_p, overflow_p,
+ jump_target);
+ ctx->global->cleanups = prev_cleanups;
+ unsigned int i;
+ tree cleanup;
+ /* Evaluate the cleanups. */
+ FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup)
+ cxx_eval_constant_expression (ctx, cleanup, false,
+ non_constant_p, overflow_p,
+ jump_target);
+ }
+ break;
+
case TRY_FINALLY_EXPR:
r = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval,
non_constant_p, overflow_p,
non_constant_p, overflow_p,
jump_target);
if (!CLEANUP_EH_ONLY (t) && !*non_constant_p)
- /* Also evaluate the cleanup. If we weren't skipping at the
- start of the CLEANUP_BODY, change jump_target temporarily
- to &initial_jump_target, so that even a return or break or
- continue in the body doesn't skip the cleanup. */
- cxx_eval_constant_expression (ctx, CLEANUP_EXPR (t), true,
- non_constant_p, overflow_p,
- jump_target ? &initial_jump_target
- : NULL);
+ {
+ iloc_sentinel ils (loc);
+ /* Also evaluate the cleanup. If we weren't skipping at the
+ start of the CLEANUP_BODY, change jump_target temporarily
+ to &initial_jump_target, so that even a return or break or
+ continue in the body doesn't skip the cleanup. */
+ cxx_eval_constant_expression (ctx, CLEANUP_EXPR (t), true,
+ non_constant_p, overflow_p,
+ jump_target ? &initial_jump_target
+ : NULL);
+ }
}
break;
case GE_EXPR:
case EQ_EXPR:
case NE_EXPR:
+ case SPACESHIP_EXPR:
case UNORDERED_EXPR:
case ORDERED_EXPR:
case UNLT_EXPR:
if (REINTERPRET_CAST_P (t))
{
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
+ error_at (loc,
"%<reinterpret_cast%> is not a constant expression");
*non_constant_p = true;
return t;
if (TYPE_REF_P (type))
{
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
+ error_at (loc,
"dereferencing a null pointer");
*non_constant_p = true;
return t;
if (!can_convert (type, from, tf_none))
{
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
+ error_at (loc,
"conversion of %qT null pointer to %qT "
"is not a constant expression",
from, type);
reinterpret_cast<void*>(sizeof 0)
*/
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
- "%<reinterpret_cast<%T>(%E)%> is not "
+ error_at (loc, "%<reinterpret_cast<%T>(%E)%> is not "
"a constant expression",
type, op);
*non_constant_p = true;
case BASELINK:
case OFFSET_REF:
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
- "expression %qE is not a constant expression", t);
+ error_at (loc, "expression %qE is not a constant expression", t);
*non_constant_p = true;
break;
tree obj = OBJ_TYPE_REF_OBJECT (t);
obj = cxx_eval_constant_expression (ctx, obj, lval, non_constant_p,
overflow_p);
+ STRIP_NOPS (obj);
/* We expect something in the form of &x.D.2103.D.2094; get x. */
if (TREE_CODE (obj) != ADDR_EXPR
|| !DECL_P (get_base_address (TREE_OPERAND (obj, 0))))
{
if (!ctx->quiet)
- error_at (cp_expr_loc_or_input_loc (t),
- "expression %qE is not a constant expression", t);
+ error_at (loc, "expression %qE is not a constant expression", t);
*non_constant_p = true;
return t;
}
{
/* We can evaluate template-id that refers to a concept only if
the template arguments are non-dependent. */
- if (!concept_definition_p (TREE_OPERAND (t, 0)))
+ tree id = unpack_concept_check (t);
+ tree tmpl = TREE_OPERAND (id, 0);
+ if (!concept_definition_p (tmpl))
internal_error ("unexpected template-id %qE", t);
+ if (function_concept_p (tmpl))
+ {
+ if (!ctx->quiet)
+ error_at (cp_expr_loc_or_input_loc (t),
+ "function concept must be called");
+ r = error_mark_node;
+ break;
+ }
+
if (!processing_template_decl)
- return satisfy_constraint_expression (t);
+ r = evaluate_concept_check (t, tf_warning_or_error);
else
*non_constant_p = true;
- return t;
+
+ break;
}
case ASM_EXPR:
if (!ctx->quiet)
- inline_asm_in_constexpr_error (cp_expr_loc_or_input_loc (t));
+ inline_asm_in_constexpr_error (loc);
*non_constant_p = true;
return t;
return NULL_TREE;
}
+/* Find immediate function decls in *TP if any. */
+
+static tree
+find_immediate_fndecl (tree *tp, int */*walk_subtrees*/, void */*data*/)
+{
+  /* walk_tree callback: returning the decl (non-NULL) stops the walk and
+     propagates it to the caller.  */
+  if (TREE_CODE (*tp) == FUNCTION_DECL && DECL_IMMEDIATE_FUNCTION_P (*tp))
+    return *tp;
+  return NULL_TREE;
+}
+
/* ALLOW_NON_CONSTANT is false if T is required to be a constant expression.
STRICT has the same sense as for constant_value_1: true if we only allow
conforming C++ constant expressions, or false if we want a constant value
tree type = initialized_type (t);
tree r = t;
+ bool is_consteval = false;
if (VOID_TYPE_P (type))
{
if (constexpr_dtor)
/* Used for destructors of array elements. */
type = TREE_TYPE (object);
else
- return t;
+ {
+ if (cxx_dialect < cxx2a)
+ return t;
+ if (TREE_CODE (t) != CALL_EXPR && TREE_CODE (t) != AGGR_INIT_EXPR)
+ return t;
+ /* Calls to immediate functions returning void need to be
+ evaluated. */
+ tree fndecl = cp_get_callee_fndecl_nofold (t);
+ if (fndecl == NULL_TREE || !DECL_IMMEDIATE_FUNCTION_P (fndecl))
+ return t;
+ else
+ is_consteval = true;
+ }
+ }
+ else if (cxx_dialect >= cxx2a
+ && (TREE_CODE (t) == CALL_EXPR
+ || TREE_CODE (t) == AGGR_INIT_EXPR
+ || TREE_CODE (t) == TARGET_EXPR))
+ {
+ /* For non-concept checks, determine if it is consteval. */
+ if (!concept_check_p (t))
+ {
+ tree x = t;
+ if (TREE_CODE (x) == TARGET_EXPR)
+ x = TARGET_EXPR_INITIAL (x);
+ tree fndecl = cp_get_callee_fndecl_nofold (x);
+ if (fndecl && DECL_IMMEDIATE_FUNCTION_P (fndecl))
+ is_consteval = true;
+ }
}
if (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
{
gcc_assert (object && VAR_P (object));
gcc_assert (DECL_DECLARED_CONSTEXPR_P (object));
gcc_assert (DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (object));
+ if (error_operand_p (DECL_INITIAL (object)))
+ return t;
ctx.ctor = unshare_expr (DECL_INITIAL (object));
TREE_READONLY (ctx.ctor) = false;
/* Temporarily force decl_really_constant_value to return false
r = TARGET_EXPR_INITIAL (r);
}
+ auto_vec<tree, 16> cleanups;
+ global_ctx.cleanups = &cleanups;
+
instantiate_constexpr_fns (r);
r = cxx_eval_constant_expression (&ctx, r,
false, &non_constant_p, &overflow_p);
else
DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (object) = true;
+ unsigned int i;
+ tree cleanup;
+ /* Evaluate the cleanups. */
+ FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup)
+ cxx_eval_constant_expression (&ctx, cleanup, false,
+ &non_constant_p, &overflow_p);
+
/* Mutable logic is a bit tricky: we want to allow initialization of
constexpr variables with mutable members, but we can't copy those
members to another constexpr variable. */
}
}
+ /* Check that immediate invocation does not return an expression referencing
+ any immediate function decls. They need to be allowed while parsing
+ immediate functions, but can't leak outside of them. */
+ if (is_consteval
+ && t != r
+ && (current_function_decl == NULL_TREE
+ || !DECL_IMMEDIATE_FUNCTION_P (current_function_decl)))
+ if (tree immediate_fndecl
+ = cp_walk_tree_without_duplicates (&r, find_immediate_fndecl,
+ NULL))
+ {
+ if (!allow_non_constant && !non_constant_p)
+ error_at (cp_expr_loc_or_input_loc (t),
+ "immediate evaluation returns address of immediate "
+ "function %qD", immediate_fndecl);
+ r = t;
+ non_constant_p = true;
+ }
+
/* Technically we should check this for all subexpressions, but that
runs into problems with our internal representation of pointer
subtraction and the 5.19 rules are still in flux. */
static tree
fold_non_dependent_expr_template (tree t, tsubst_flags_t complain,
- bool manifestly_const_eval)
+ bool manifestly_const_eval,
+ tree object)
{
gcc_assert (processing_template_decl);
tree r = cxx_eval_outermost_constant_expr (t, true, true,
manifestly_const_eval,
- false, NULL_TREE);
+ false, object);
/* cp_tree_equal looks through NOPs, so allow them. */
gcc_checking_assert (r == t
|| CONVERT_EXPR_P (t)
tree
fold_non_dependent_expr (tree t,
tsubst_flags_t complain /* = tf_warning_or_error */,
- bool manifestly_const_eval /* = false */)
+ bool manifestly_const_eval /* = false */,
+ tree object /* = NULL_TREE */)
{
if (t == NULL_TREE)
return NULL_TREE;
if (processing_template_decl)
return fold_non_dependent_expr_template (t, complain,
- manifestly_const_eval);
+ manifestly_const_eval, object);
- return maybe_constant_value (t, NULL_TREE, manifestly_const_eval);
+ return maybe_constant_value (t, object, manifestly_const_eval);
}
if (processing_template_decl)
{
t = fold_non_dependent_expr_template (t, complain,
- manifestly_const_eval);
+ manifestly_const_eval, NULL_TREE);
/* maybe_constant_init does this stripping, so do it here too. */
if (TREE_CODE (t) == TARGET_EXPR)
{
case LABEL_DECL:
case LABEL_EXPR:
case CASE_LABEL_EXPR:
+ case PREDICT_EXPR:
case CONST_DECL:
case SIZEOF_EXPR:
case ALIGNOF_EXPR:
&& !fndecl_built_in_p (fun)
/* In C++2a, replaceable global allocation functions
are constant expressions. */
- && !cxx_replaceable_global_alloc_fn (fun))
+ && (!cxx_replaceable_global_alloc_fn (fun)
+ || TREE_CODE (t) != CALL_EXPR
+ || (!CALL_FROM_NEW_OR_DELETE_P (t)
+ && (current_function_decl == NULL_TREE
+ || !is_std_allocator_allocate
+ (current_function_decl))))
+ /* Allow placement new in std::construct_at. */
+ && (!cxx_placement_new_fn (fun)
+ || TREE_CODE (t) != CALL_EXPR
+ || current_function_decl == NULL_TREE
+ || !is_std_construct_at (current_function_decl))
+ && !cxx_dynamic_cast_fn_p (fun))
{
if (flags & tf_error)
{
&& !is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/false))
{
if (flags & tf_error)
- non_const_var_error (t);
+ non_const_var_error (loc, t);
return false;
}
return true;
case OMP_DEPOBJ:
case OACC_PARALLEL:
case OACC_KERNELS:
+ case OACC_SERIAL:
case OACC_DATA:
case OACC_HOST_DATA:
case OACC_LOOP:
return false;
case TYPEID_EXPR:
- /* -- a typeid expression whose operand is of polymorphic
- class type; */
+ /* In C++20, a typeid expression whose operand is of polymorphic
+ class type can be constexpr. */
{
tree e = TREE_OPERAND (t, 0);
- if (!TYPE_P (e) && !type_dependent_expression_p (e)
+ if (cxx_dialect < cxx2a
+ && strict
+ && !TYPE_P (e)
+ && !type_dependent_expression_p (e)
&& TYPE_POLYMORPHIC_P (TREE_TYPE (e)))
{
if (flags & tf_error)
case GE_EXPR:
case EQ_EXPR:
case NE_EXPR:
+ case SPACESHIP_EXPR:
want_rval = true;
goto binary;
return true;
case EMPTY_CLASS_EXPR:
- case PREDICT_EXPR:
return false;
case GOTO_EXPR: