/* Build expressions with type checking for C++ compiler.
- Copyright (C) 1987-2022 Free Software Foundation, Inc.
+ Copyright (C) 1987-2024 Free Software Foundation, Inc.
Hacked by Michael Tiemann (tiemann@cygnus.com)
This file is part of GCC.
if (mv2 == FLOATN_NX_TYPE_NODE (i))
extended2 = i + 1;
}
+ if (mv1 == bfloat16_type_node)
+ extended1 = true;
+ if (mv2 == bfloat16_type_node)
+ extended2 = true;
if (extended2 && !extended1)
{
int ret = cp_compare_floating_point_conversion_ranks (t2, t1);
if (cnt > 1 && mv2 == long_double_type_node)
return -2;
/* Otherwise, they have equal rank, but extended types
- (other than std::bfloat16_t) have higher subrank. */
+ (other than std::bfloat16_t) have higher subrank.
+ std::bfloat16_t shouldn't have equal rank to any standard
+ floating point type. */
return 1;
}
tree subtype
= type_after_usual_arithmetic_conversions (subtype1, subtype2);
+ if (subtype == error_mark_node)
+ return subtype;
if (code1 == COMPLEX_TYPE && TREE_TYPE (t1) == subtype)
return build_type_attribute_variant (t1, attributes);
else if (code2 == COMPLEX_TYPE && TREE_TYPE (t2) == subtype)
return false;
break;
- case UNDERLYING_TYPE:
- if (!same_type_p (UNDERLYING_TYPE_TYPE (t1), UNDERLYING_TYPE_TYPE (t2)))
+ case TRAIT_TYPE:
+ if (TRAIT_TYPE_KIND (t1) != TRAIT_TYPE_KIND (t2))
+ return false;
+ if (!cp_tree_equal (TRAIT_TYPE_TYPE1 (t1), TRAIT_TYPE_TYPE1 (t2))
+ || !cp_tree_equal (TRAIT_TYPE_TYPE2 (t1), TRAIT_TYPE_TYPE2 (t2)))
return false;
break;
the common initial sequence. */
bool
-next_common_initial_seqence (tree &memb1, tree &memb2)
+next_common_initial_sequence (tree &memb1, tree &memb2)
{
while (memb1)
{
if ((!lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (memb1)))
!= !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (memb2)))
return false;
+ if (DECL_ALIGN (memb1) != DECL_ALIGN (memb2))
+ return false;
if (!tree_int_cst_equal (bit_position (memb1), bit_position (memb2)))
return false;
return true;
type2 = cp_build_qualified_type (type2, TYPE_UNQUALIFIED);
if (TREE_CODE (type1) == ENUMERAL_TYPE)
- return (TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
- && tree_int_cst_equal (TYPE_SIZE (type1), TYPE_SIZE (type2))
+ return (tree_int_cst_equal (TYPE_SIZE (type1), TYPE_SIZE (type2))
&& same_type_p (finish_underlying_type (type1),
finish_underlying_type (type2)));
if (CLASS_TYPE_P (type1)
&& std_layout_type_p (type1)
&& std_layout_type_p (type2)
- && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
&& tree_int_cst_equal (TYPE_SIZE (type1), TYPE_SIZE (type2)))
{
tree field1 = TYPE_FIELDS (type1);
{
while (1)
{
- if (!next_common_initial_seqence (field1, field2))
+ if (!next_common_initial_sequence (field1, field2))
return false;
if (field1 == NULL_TREE)
return true;
return error_mark_node;
}
- /* Don't let an array compound literal decay to a pointer. It can
- still be used to initialize an array or bind to a reference. */
- if (TREE_CODE (exp) == TARGET_EXPR)
- {
- if (complain & tf_error)
- error_at (loc, "taking address of temporary array");
- return error_mark_node;
- }
-
ptrtype = build_pointer_type (TREE_TYPE (type));
if (VAR_P (exp))
return build_min_nt_loc (UNKNOWN_LOCATION, COMPONENT_REF,
orig_object, orig_name, NULL_TREE);
}
- object = build_non_dependent_expr (object);
}
else if (c_dialect_objc ()
&& identifier_p (name)
name, scope);
return error_mark_node;
}
-
+
if (TREE_SIDE_EFFECTS (object))
val = build2 (COMPOUND_EXPR, TREE_TYPE (val), object, val);
return val;
return error_mark_node;
}
+ /* NAME may refer to a static data member, in which case there is
+ one copy of the data member that is shared by all the objects of
+ the class. So NAME can be unambiguously referred to even if
+ there are multiple indirect base classes containing NAME. */
+ const base_access ba = [scope, name] ()
+ {
+ if (identifier_p (name))
+ {
+ tree m = lookup_member (scope, name, /*protect=*/0,
+ /*want_type=*/false, tf_none);
+ if (!m || shared_member_p (m))
+ return ba_any;
+ }
+ return ba_check;
+ } ();
+
/* Find the base of OBJECT_TYPE corresponding to SCOPE. */
- access_path = lookup_base (object_type, scope, ba_check,
- NULL, complain);
+ access_path = lookup_base (object_type, scope, ba, NULL, complain);
if (access_path == error_mark_node)
return error_mark_node;
if (!access_path)
= build_dependent_operator_type (lookups, INDIRECT_REF, false);
return expr;
}
- expr = build_non_dependent_expr (expr);
}
rval = build_new_op (loc, INDIRECT_REF, LOOKUP_NORMAL, expr,
If INDEX is of some user-defined type, it must be converted to
integer type. Otherwise, to make a compatible PLUS_EXPR, it
will inherit the type of the array, which will be some pointer type.
-
+
LOC is the location to use in building the array reference. */
tree
cp_build_array_ref (location_t loc, tree array, tree idx,
tsubst_flags_t complain)
{
+ tree first = NULL_TREE;
tree ret;
if (idx == 0)
bool non_lvalue = convert_vector_to_array_for_subscript (loc, &array, idx);
+ /* 0[array] */
+ if (TREE_CODE (TREE_TYPE (idx)) == ARRAY_TYPE)
+ {
+ std::swap (array, idx);
+ if (flag_strong_eval_order == 2 && TREE_SIDE_EFFECTS (array))
+ idx = first = save_expr (idx);
+ }
+
if (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE)
{
tree rval, type;
protected_set_expr_location (ret, loc);
if (non_lvalue)
ret = non_lvalue_loc (loc, ret);
+ if (first)
+ ret = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (ret), first, ret);
return ret;
}
{
tree ar = cp_default_conversion (array, complain);
tree ind = cp_default_conversion (idx, complain);
- tree first = NULL_TREE;
- if (flag_strong_eval_order == 2 && TREE_SIDE_EFFECTS (ind))
+ if (!first && flag_strong_eval_order == 2 && TREE_SIDE_EFFECTS (ind))
ar = first = save_expr (ar);
/* Put the integer in IND to simplify error checking. */
}
else
{
- if (fndecl && magic_varargs_p (fndecl))
- /* Don't do ellipsis conversion for __built_in_constant_p
- as this will result in spurious errors for non-trivial
- types. */
- val = require_complete_type (val, complain);
+ int magic = fndecl ? magic_varargs_p (fndecl) : 0;
+ if (magic)
+ {
+ /* Don't truncate excess precision to the semantic type. */
+ if (magic == 1 && TREE_CODE (val) == EXCESS_PRECISION_EXPR)
+ val = TREE_OPERAND (val, 0);
+	  /* Don't do ellipsis conversion for __builtin_constant_p
+ as this will result in spurious errors for non-trivial
+ types. */
+ val = require_complete_type (val, complain);
+ }
else
val = convert_arg_to_ellipsis (val, complain);
= build_dependent_operator_type (lookups, code, false);
return expr;
}
- arg1 = build_non_dependent_expr (arg1);
- arg2 = build_non_dependent_expr (arg2);
}
if (code == DOTSTAR_EXPR)
|| type_dependent_expression_p (arg2))
return build_min_nt_loc (loc, ARRAY_REF, arg1, arg2,
NULL_TREE, NULL_TREE);
- arg1 = build_non_dependent_expr (arg1);
- arg2 = build_non_dependent_expr (arg2);
}
expr = build_new_op (loc, ARRAY_REF, LOOKUP_NORMAL, arg1, arg2,
{
tree zero_vec = build_zero_cst (type);
tree minus_one_vec = build_minus_one_cst (type);
- tree cmp_type = truth_type_for (type);
+ tree cmp_type = truth_type_for (TREE_TYPE (arg0));
tree cmp = build2 (code, cmp_type, arg0, arg1);
return build3 (VEC_COND_EXPR, type, cmp, minus_one_vec, zero_vec);
}
|| warning_suppressed_p (op, OPT_Waddress))
return;
- if (TREE_CODE (op) == NON_DEPENDENT_EXPR)
- op = TREE_OPERAND (op, 0);
-
tree cop = fold_for_warn (op);
if (TREE_CODE (cop) == NON_LVALUE_EXPR)
type, this behavior is deprecated ([depr.arith.conv.enum]). CODE is the
code of the binary operation, TYPE0 and TYPE1 are the types of the operands,
and LOC is the location for the whole binary expression.
+ For C++26 this is ill-formed rather than deprecated.
+ Return true for SFINAE errors.
TODO: Consider combining this with -Wenum-compare in build_new_op_1. */
-static void
+static bool
do_warn_enum_conversions (location_t loc, enum tree_code code, tree type0,
- tree type1)
+ tree type1, tsubst_flags_t complain)
{
if (TREE_CODE (type0) == ENUMERAL_TYPE
&& TREE_CODE (type1) == ENUMERAL_TYPE
&& TYPE_MAIN_VARIANT (type0) != TYPE_MAIN_VARIANT (type1))
{
+ if (cxx_dialect >= cxx26)
+ {
+ if ((complain & tf_warning_or_error) == 0)
+ return true;
+ }
+ else if ((complain & tf_warning) == 0)
+ return false;
/* In C++20, -Wdeprecated-enum-enum-conversion is on by default.
Otherwise, warn if -Wenum-conversion is on. */
enum opt_code opt;
else if (warn_enum_conversion)
opt = OPT_Wenum_conversion;
else
- return;
+ return false;
switch (code)
{
case EQ_EXPR:
case NE_EXPR:
/* Comparisons are handled by -Wenum-compare. */
- return;
+ return false;
case SPACESHIP_EXPR:
/* This is invalid, don't warn. */
- return;
+ return false;
case BIT_AND_EXPR:
case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
- warning_at (loc, opt, "bitwise operation between different "
- "enumeration types %qT and %qT is deprecated",
- type0, type1);
- return;
+ if (cxx_dialect >= cxx26)
+ pedwarn (loc, opt, "bitwise operation between different "
+ "enumeration types %qT and %qT", type0, type1);
+ else
+ warning_at (loc, opt, "bitwise operation between different "
+ "enumeration types %qT and %qT is deprecated",
+ type0, type1);
+ return false;
default:
- warning_at (loc, opt, "arithmetic between different enumeration "
- "types %qT and %qT is deprecated", type0, type1);
- return;
+ if (cxx_dialect >= cxx26)
+ pedwarn (loc, opt, "arithmetic between different enumeration "
+ "types %qT and %qT", type0, type1);
+ else
+ warning_at (loc, opt, "arithmetic between different enumeration "
+ "types %qT and %qT is deprecated", type0, type1);
+ return false;
}
}
else if ((TREE_CODE (type0) == ENUMERAL_TYPE
- && TREE_CODE (type1) == REAL_TYPE)
- || (TREE_CODE (type0) == REAL_TYPE
+ && SCALAR_FLOAT_TYPE_P (type1))
+ || (SCALAR_FLOAT_TYPE_P (type0)
&& TREE_CODE (type1) == ENUMERAL_TYPE))
{
+ if (cxx_dialect >= cxx26)
+ {
+ if ((complain & tf_warning_or_error) == 0)
+ return true;
+ }
+ else if ((complain & tf_warning) == 0)
+ return false;
const bool enum_first_p = TREE_CODE (type0) == ENUMERAL_TYPE;
/* In C++20, -Wdeprecated-enum-float-conversion is on by default.
Otherwise, warn if -Wenum-conversion is on. */
else if (warn_enum_conversion)
opt = OPT_Wenum_conversion;
else
- return;
+ return false;
switch (code)
{
case LE_EXPR:
case EQ_EXPR:
case NE_EXPR:
- if (enum_first_p)
+ if (enum_first_p && cxx_dialect >= cxx26)
+ pedwarn (loc, opt, "comparison of enumeration type %qT with "
+ "floating-point type %qT", type0, type1);
+ else if (cxx_dialect >= cxx26)
+ pedwarn (loc, opt, "comparison of floating-point type %qT "
+ "with enumeration type %qT", type0, type1);
+ else if (enum_first_p)
warning_at (loc, opt, "comparison of enumeration type %qT with "
"floating-point type %qT is deprecated",
type0, type1);
warning_at (loc, opt, "comparison of floating-point type %qT "
"with enumeration type %qT is deprecated",
type0, type1);
- return;
+ return false;
case SPACESHIP_EXPR:
/* This is invalid, don't warn. */
- return;
+ return false;
default:
- if (enum_first_p)
+ if (enum_first_p && cxx_dialect >= cxx26)
+ pedwarn (loc, opt, "arithmetic between enumeration type %qT "
+ "and floating-point type %qT", type0, type1);
+ else if (cxx_dialect >= cxx26)
+ pedwarn (loc, opt, "arithmetic between floating-point type %qT "
+ "and enumeration type %qT", type0, type1);
+ else if (enum_first_p)
warning_at (loc, opt, "arithmetic between enumeration type %qT "
"and floating-point type %qT is deprecated",
type0, type1);
warning_at (loc, opt, "arithmetic between floating-point type %qT "
"and enumeration type %qT is deprecated",
type0, type1);
- return;
+ return false;
}
}
+ return false;
}
/* Build a binary-operation expression without default conversions.
{
tree op0, op1;
enum tree_code code0, code1;
- tree type0, type1;
+ tree type0, type1, orig_type0, orig_type1;
const char *invalid_op_diag;
/* Expression code to give to the expression when it is built.
In the simplest cases this is the common type of the arguments. */
tree result_type = NULL_TREE;
+ /* When the computation is in excess precision, the type of the
+ final EXCESS_PRECISION_EXPR. */
+ tree semantic_result_type = NULL;
+
/* Nonzero means operands have already been type-converted
in whatever way is necessary.
Zero means they need to be converted to RESULT_TYPE. */
/* Tree holding instrumentation expression. */
tree instrument_expr = NULL_TREE;
+ /* True means this is an arithmetic operation that may need excess
+ precision. */
+ bool may_need_excess_precision;
+
/* Apply default conversions. */
op0 = resolve_nondeduced_context (orig_op0, complain);
op1 = resolve_nondeduced_context (orig_op1, complain);
}
}
- type0 = TREE_TYPE (op0);
- type1 = TREE_TYPE (op1);
+ orig_type0 = type0 = TREE_TYPE (op0);
+ orig_type1 = type1 = TREE_TYPE (op1);
+ tree non_ep_op0 = op0;
+ tree non_ep_op1 = op1;
/* The expression codes of the data types of the arguments tell us
whether the arguments are integers, floating, pointers, etc. */
return error_mark_node;
}
+ switch (code)
+ {
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ may_need_excess_precision = true;
+ break;
+ case EQ_EXPR:
+ case NE_EXPR:
+ case LE_EXPR:
+ case GE_EXPR:
+ case LT_EXPR:
+ case GT_EXPR:
+ case SPACESHIP_EXPR:
+ /* Excess precision for implicit conversions of integers to
+ floating point. */
+ may_need_excess_precision = (ANY_INTEGRAL_TYPE_P (type0)
+ || ANY_INTEGRAL_TYPE_P (type1));
+ break;
+ default:
+ may_need_excess_precision = false;
+ break;
+ }
+ if (TREE_CODE (op0) == EXCESS_PRECISION_EXPR)
+ {
+ op0 = TREE_OPERAND (op0, 0);
+ type0 = TREE_TYPE (op0);
+ }
+ else if (may_need_excess_precision
+ && (code0 == REAL_TYPE || code0 == COMPLEX_TYPE))
+ if (tree eptype = excess_precision_type (type0))
+ {
+ type0 = eptype;
+ op0 = convert (eptype, op0);
+ }
+ if (TREE_CODE (op1) == EXCESS_PRECISION_EXPR)
+ {
+ op1 = TREE_OPERAND (op1, 0);
+ type1 = TREE_TYPE (op1);
+ }
+ else if (may_need_excess_precision
+ && (code1 == REAL_TYPE || code1 == COMPLEX_TYPE))
+ if (tree eptype = excess_precision_type (type1))
+ {
+ type1 = eptype;
+ op1 = convert (eptype, op1);
+ }
+
/* Issue warnings about peculiar, but valid, uses of NULL. */
if ((null_node_p (orig_op0) || null_node_p (orig_op1))
/* It's reasonable to use pointer values as operands of &&
if ((gnu_vector_type_p (type0) && code1 != VECTOR_TYPE)
|| (gnu_vector_type_p (type1) && code0 != VECTOR_TYPE))
{
- enum stv_conv convert_flag = scalar_to_vector (location, code, op0, op1,
- complain & tf_error);
+ enum stv_conv convert_flag
+ = scalar_to_vector (location, code, non_ep_op0, non_ep_op1,
+ complain & tf_error);
switch (convert_flag)
{
op0 = convert (TREE_TYPE (type1), op0);
op0 = save_expr (op0);
op0 = build_vector_from_val (type1, op0);
- type0 = TREE_TYPE (op0);
+ orig_type0 = type0 = TREE_TYPE (op0);
code0 = TREE_CODE (type0);
converted = 1;
break;
op1 = convert (TREE_TYPE (type0), op1);
op1 = save_expr (op1);
op1 = build_vector_from_val (type0, op1);
- type1 = TREE_TYPE (op1);
+ orig_type1 = type1 = TREE_TYPE (op1);
code1 = TREE_CODE (type1);
converted = 1;
break;
type0 = TREE_TYPE (type0);
if (!TYPE_P (type1))
type1 = TREE_TYPE (type1);
- if (INDIRECT_TYPE_P (type0) && same_type_p (TREE_TYPE (type0), type1))
+ if (type0
+ && INDIRECT_TYPE_P (type0)
+ && same_type_p (TREE_TYPE (type0), type1))
{
if (!(TREE_CODE (first_arg) == PARM_DECL
&& DECL_ARRAY_PARAMETER_P (first_arg)
"first %<sizeof%> operand was declared here");
}
}
- else if (TREE_CODE (type0) == ARRAY_TYPE
+ else if (!dependent_type_p (type0)
+ && !dependent_type_p (type1)
+ && TREE_CODE (type0) == ARRAY_TYPE
&& !char_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (type0)))
/* Set by finish_parenthesized_expr. */
&& !warning_suppressed_p (op1, OPT_Wsizeof_array_div)
point, so we have to dig out the original type to find out if
it was unsigned. */
tree stripped_op1 = tree_strip_any_location_wrapper (op1);
- shorten = ((TREE_CODE (op0) == NOP_EXPR
- && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op0, 0))))
- || (TREE_CODE (stripped_op1) == INTEGER_CST
- && ! integer_all_onesp (stripped_op1)));
+ shorten = may_shorten_divmod (op0, stripped_op1);
}
common = 1;
quotient can't be represented in the computation mode. We shorten
only if unsigned or if dividing by something we know != -1. */
tree stripped_op1 = tree_strip_any_location_wrapper (op1);
- shorten = ((TREE_CODE (op0) == NOP_EXPR
- && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op0, 0))))
- || (TREE_CODE (stripped_op1) == INTEGER_CST
- && ! integer_all_onesp (stripped_op1)));
+ shorten = may_shorten_divmod (op0, stripped_op1);
common = 1;
}
break;
pfn0 = pfn_from_ptrmemfunc (op0);
delta0 = delta_from_ptrmemfunc (op0);
- e1 = cp_build_binary_op (location,
- EQ_EXPR,
- pfn0,
- build_zero_cst (TREE_TYPE (pfn0)),
- complain);
+ {
+ /* If we will warn below about a null-address compare
+ involving the orig_op0 ptrmemfunc, we'd likely also
+ warn about the pfn0's null-address compare, and
+ that would be redundant, so suppress it. */
+ warning_sentinel ws (warn_address);
+ e1 = cp_build_binary_op (location,
+ EQ_EXPR,
+ pfn0,
+ build_zero_cst (TREE_TYPE (pfn0)),
+ complain);
+ }
e2 = cp_build_binary_op (location,
BIT_AND_EXPR,
delta0,
&& (shorten || common || short_compare))
{
result_type = cp_common_type (type0, type1);
- if (result_type == error_mark_node
- && code0 == REAL_TYPE
- && code1 == REAL_TYPE
- && (extended_float_type_p (type0) || extended_float_type_p (type1))
- && cp_compare_floating_point_conversion_ranks (type0, type1) == 3)
+ if (result_type == error_mark_node)
{
+ tree t1 = type0;
+ tree t2 = type1;
+ if (TREE_CODE (t1) == COMPLEX_TYPE)
+ t1 = TREE_TYPE (t1);
+ if (TREE_CODE (t2) == COMPLEX_TYPE)
+ t2 = TREE_TYPE (t2);
+ gcc_checking_assert (TREE_CODE (t1) == REAL_TYPE
+ && TREE_CODE (t2) == REAL_TYPE
+ && (extended_float_type_p (t1)
+ || extended_float_type_p (t2))
+ && cp_compare_floating_point_conversion_ranks
+ (t1, t2) == 3);
if (complain & tf_error)
{
rich_location richloc (line_table, location);
return error_mark_node;
}
if (complain & tf_warning)
- {
- do_warn_double_promotion (result_type, type0, type1,
- "implicit conversion from %qH to %qI "
- "to match other operand of binary "
- "expression",
- location);
- do_warn_enum_conversions (location, code, TREE_TYPE (orig_op0),
- TREE_TYPE (orig_op1));
+ do_warn_double_promotion (result_type, type0, type1,
+ "implicit conversion from %qH to %qI "
+ "to match other operand of binary "
+ "expression", location);
+ if (do_warn_enum_conversions (location, code, TREE_TYPE (orig_op0),
+ TREE_TYPE (orig_op1), complain))
+ return error_mark_node;
+ }
+ if (may_need_excess_precision
+ && (orig_type0 != type0 || orig_type1 != type1)
+ && build_type == NULL_TREE
+ && result_type)
+ {
+ gcc_assert (common);
+ semantic_result_type = cp_common_type (orig_type0, orig_type1);
+ if (semantic_result_type == error_mark_node)
+ {
+ tree t1 = orig_type0;
+ tree t2 = orig_type1;
+ if (TREE_CODE (t1) == COMPLEX_TYPE)
+ t1 = TREE_TYPE (t1);
+ if (TREE_CODE (t2) == COMPLEX_TYPE)
+ t2 = TREE_TYPE (t2);
+ gcc_checking_assert (TREE_CODE (t1) == REAL_TYPE
+ && TREE_CODE (t2) == REAL_TYPE
+ && (extended_float_type_p (t1)
+ || extended_float_type_p (t2))
+ && cp_compare_floating_point_conversion_ranks
+ (t1, t2) == 3);
+ if (complain & tf_error)
+ {
+ rich_location richloc (line_table, location);
+ binary_op_error (&richloc, code, type0, type1);
+ }
+ return error_mark_node;
}
}
tree_code orig_code0 = TREE_CODE (orig_type0);
tree orig_type1 = TREE_TYPE (orig_op1);
tree_code orig_code1 = TREE_CODE (orig_type1);
- if (!result_type)
- /* Nope. */;
+ if (!result_type || result_type == error_mark_node)
+ /* Nope. */
+ result_type = NULL_TREE;
else if ((orig_code0 == BOOLEAN_TYPE) != (orig_code1 == BOOLEAN_TYPE))
/* "If one of the operands is of type bool and the other is not, the
program is ill-formed." */
build_type ? build_type : result_type,
NULL_TREE, op1);
TREE_OPERAND (tmp, 0) = op0;
+ if (semantic_result_type)
+ tmp = build1 (EXCESS_PRECISION_EXPR, semantic_result_type, tmp);
return tmp;
}
}
}
result = build2 (COMPLEX_EXPR, result_type, real, imag);
+ if (semantic_result_type)
+ result = build1 (EXCESS_PRECISION_EXPR, semantic_result_type,
+ result);
return result;
}
if (resultcode == SPACESHIP_EXPR && !processing_template_decl)
result = get_target_expr (result, complain);
+ if (semantic_result_type)
+ result = build1 (EXCESS_PRECISION_EXPR, semantic_result_type, result);
+
if (!c_inhibit_evaluation_warnings)
{
if (!processing_template_decl)
|| type_dependent_expression_p (arg1)
|| type_dependent_expression_p (arg2))
return build_min_nt_loc (loc, VEC_PERM_EXPR, arg0, arg1, arg2);
- arg0 = build_non_dependent_expr (arg0);
- if (arg1)
- arg1 = build_non_dependent_expr (arg1);
- arg2 = build_non_dependent_expr (arg2);
}
tree exp = c_build_vec_perm_expr (loc, arg0, arg1, arg2, complain & tf_error);
if (processing_template_decl && exp != error_mark_node)
CALL_EXPR_IFN (exp) = IFN_SHUFFLEVECTOR;
return exp;
}
- arg0 = build_non_dependent_expr (arg0);
- arg1 = build_non_dependent_expr (arg1);
- /* ??? Nothing needed for the index arguments? */
}
auto_vec<tree, 16> mask;
for (unsigned i = 2; i < args->length (); ++i)
TREE_TYPE (e) = build_dependent_operator_type (lookups, code, false);
return e;
}
-
- xarg = build_non_dependent_expr (xarg);
}
exp = NULL_TREE;
{
if (type_dependent_expression_p (arg))
return build_min_nt_loc (loc, ADDRESSOF_EXPR, arg, NULL_TREE);
-
- arg = build_non_dependent_expr (arg);
}
tree exp = cp_build_addr_expr_strict (arg, complain);
complain);
}
- /* For addresses of immediate functions ensure we have EXPR_LOCATION
- set for possible later diagnostics. */
+ /* Ensure we have EXPR_LOCATION set for possible later diagnostics. */
if (TREE_CODE (val) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (val, 0)) == FUNCTION_DECL
- && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (val, 0)))
+ && TREE_CODE (TREE_OPERAND (val, 0)) == FUNCTION_DECL)
SET_EXPR_LOCATION (val, input_location);
return val;
tree arg = xarg;
location_t location = cp_expr_loc_or_input_loc (arg);
tree argtype = 0;
+ tree eptype = NULL_TREE;
const char *errstring = NULL;
tree val;
const char *invalid_op_diag;
return error_mark_node;
}
+ if (TREE_CODE (arg) == EXCESS_PRECISION_EXPR)
+ {
+ eptype = TREE_TYPE (arg);
+ arg = TREE_OPERAND (arg, 0);
+ }
+
switch (code)
{
case UNARY_PLUS_EXPR:
build_zero_cst (TREE_TYPE (arg)), complain);
arg = perform_implicit_conversion (boolean_type_node, arg,
complain);
- val = invert_truthvalue_loc (location, arg);
if (arg != error_mark_node)
- return val;
+ {
+ if (processing_template_decl)
+ return build1_loc (location, TRUTH_NOT_EXPR, boolean_type_node, arg);
+ val = invert_truthvalue_loc (location, arg);
+ if (obvalue_p (val))
+ val = non_lvalue_loc (location, val);
+ return val;
+ }
errstring = _("in argument to unary !");
break;
case REALPART_EXPR:
case IMAGPART_EXPR:
- arg = build_real_imag_expr (input_location, code, arg);
- return arg;
+ val = build_real_imag_expr (input_location, code, arg);
+	  if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE)
+ val = build1_loc (input_location, EXCESS_PRECISION_EXPR,
+ TREE_TYPE (eptype), val);
+ return val;
case PREINCREMENT_EXPR:
case POSTINCREMENT_EXPR:
val = unary_complex_lvalue (code, arg);
if (val != 0)
- return val;
+ goto return_build_unary_op;
arg = mark_lvalue_use (arg);
real = cp_build_unary_op (code, real, true, complain);
if (real == error_mark_node || imag == error_mark_node)
return error_mark_node;
- return build2 (COMPLEX_EXPR, TREE_TYPE (arg),
- real, imag);
+ val = build2 (COMPLEX_EXPR, TREE_TYPE (arg), real, imag);
+ goto return_build_unary_op;
}
/* Report invalid types. */
/* [depr.volatile.type] "Postfix ++ and -- expressions and
prefix ++ and -- expressions of volatile-qualified arithmetic
and pointer types are deprecated." */
- if (TREE_THIS_VOLATILE (arg) || CP_TYPE_VOLATILE_P (TREE_TYPE (arg)))
+ if ((TREE_THIS_VOLATILE (arg) || CP_TYPE_VOLATILE_P (TREE_TYPE (arg)))
+ && (complain & tf_warning))
warning_at (location, OPT_Wvolatile,
"%qs expression of %<volatile%>-qualified type is "
"deprecated",
return error_mark_node;
}
/* Otherwise, [depr.incr.bool] says this is deprecated. */
- else
+ else if (complain & tf_warning)
warning_at (location, OPT_Wdeprecated,
"use of an operand of type %qT "
"in %<operator++%> is deprecated",
val = build2 (code, TREE_TYPE (arg), arg, inc);
TREE_SIDE_EFFECTS (val) = 1;
- return val;
+ goto return_build_unary_op;
}
case ADDR_EXPR:
{
if (argtype == 0)
argtype = TREE_TYPE (arg);
- return build1 (code, argtype, arg);
+ val = build1 (code, argtype, arg);
+ return_build_unary_op:
+ if (eptype)
+ val = build1 (EXCESS_PRECISION_EXPR, eptype, val);
+ return val;
}
if (complain & tf_error)
|| (op1 && type_dependent_expression_p (op1))
|| type_dependent_expression_p (op2))
return build_min_nt_loc (loc, COND_EXPR, ifexp, op1, op2);
- ifexp = build_non_dependent_expr (ifexp);
- if (op1)
- op1 = build_non_dependent_expr (op1);
- op2 = build_non_dependent_expr (op2);
}
expr = build_conditional_expr (loc, ifexp, op1, op2, complain);
= build_dependent_operator_type (lookups, COMPOUND_EXPR, false);
return result;
}
- op1 = build_non_dependent_expr (op1);
- op2 = build_non_dependent_expr (op2);
}
result = build_new_op (loc, COMPOUND_EXPR, LOOKUP_NORMAL, op1, op2,
if (lhs == error_mark_node || rhs == error_mark_node)
return error_mark_node;
+ if (TREE_CODE (lhs) == EXCESS_PRECISION_EXPR)
+ lhs = TREE_OPERAND (lhs, 0);
+ tree eptype = NULL_TREE;
+ if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
+ {
+ eptype = TREE_TYPE (rhs);
+ rhs = TREE_OPERAND (rhs, 0);
+ }
+
if (TREE_CODE (rhs) == TARGET_EXPR)
{
/* If the rhs is a TARGET_EXPR, then build the compound
init = build2 (COMPOUND_EXPR, TREE_TYPE (init), lhs, init);
TREE_OPERAND (rhs, 1) = init;
+ if (eptype)
+ rhs = build1 (EXCESS_PRECISION_EXPR, eptype, rhs);
return rhs;
}
return error_mark_node;
}
- return build2 (COMPOUND_EXPR, TREE_TYPE (rhs), lhs, rhs);
+ tree ret = build2 (COMPOUND_EXPR, TREE_TYPE (rhs), lhs, rhs);
+ if (eptype)
+ ret = build1 (EXCESS_PRECISION_EXPR, eptype, ret);
+ return ret;
}
/* Issue a diagnostic message if casting from SRC_TYPE to DEST_TYPE
if (warn_useless_cast
&& complain & tf_warning)
{
- if ((TYPE_REF_P (type)
- && (TYPE_REF_IS_RVALUE (type)
- ? xvalue_p (expr) : lvalue_p (expr))
- && same_type_p (TREE_TYPE (expr), TREE_TYPE (type)))
- || same_type_p (TREE_TYPE (expr), type))
+ if (TYPE_REF_P (type)
+ ? ((TYPE_REF_IS_RVALUE (type)
+ ? xvalue_p (expr) : lvalue_p (expr))
+ && same_type_p (TREE_TYPE (expr), TREE_TYPE (type)))
+ /* Don't warn when converting a class object to a non-reference type,
+ because that's a common way to create a temporary. */
+ : (!glvalue_p (expr) && same_type_p (TREE_TYPE (expr), type)))
warning_at (loc, OPT_Wuseless_cast,
"useless cast to type %q#T", type);
}
Any expression can be explicitly converted to type cv void. */
if (VOID_TYPE_P (type))
- return convert_to_void (expr, ICV_CAST, complain);
+ {
+ if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
+ expr = TREE_OPERAND (expr, 0);
+ return convert_to_void (expr, ICV_CAST, complain);
+ }
/* [class.abstract]
An abstract class shall not be used ... as the type of an explicit
{
if (processing_template_decl)
return expr;
+ if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
+ expr = TREE_OPERAND (expr, 0);
+ /* [expr.static.cast]: "If the value is not a bit-field, the result
+ refers to the object or the specified base class subobject thereof;
+ otherwise, the lvalue-to-rvalue conversion is applied to the
+ bit-field and the resulting prvalue is used as the operand of the
+ static_cast." There are no prvalue bit-fields; the l-to-r conversion
+ will give us an object of the underlying type of the bit-field. */
+ expr = decay_conversion (expr, complain);
return ocp_convert (type, expr, CONV_C_CAST, LOOKUP_NORMAL, complain);
}
protected_set_expr_location (result, loc);
return result;
}
- else if (processing_template_decl)
- expr = build_non_dependent_expr (expr);
/* build_c_cast puts on a NOP_EXPR to make the result not an lvalue.
Strip such NOP_EXPRs if VALUE is being used in non-lvalue context. */
maybe_warn_about_useless_cast (loc, type, value, complain);
maybe_warn_about_cast_ignoring_quals (loc, type, complain);
}
+ else if (complain & tf_error)
+ build_const_cast_1 (loc, type, value, tf_error, &valid_p);
return result;
}
to succeed. */
if (!same_type_p (non_reference (type), non_reference (result_type)))
{
- result = build_const_cast_1 (loc, type, result, false, &valid_p);
+ result = build_const_cast_1 (loc, type, result, tf_none, &valid_p);
gcc_assert (valid_p);
}
return result;
if (! same_type_p (TREE_TYPE (rhs), lhstype))
/* Call convert to generate an error; see PR 11063. */
rhs = convert (lhstype, rhs);
- result = build2 (INIT_EXPR, lhstype, lhs, rhs);
+ result = cp_build_init_expr (lhs, rhs);
TREE_SIDE_EFFECTS (result) = 1;
goto ret;
}
&& MAYBE_CLASS_TYPE_P (TREE_TYPE (lhstype)))
|| MAYBE_CLASS_TYPE_P (lhstype)));
- /* An expression of the form E1 op= E2. [expr.ass] says:
- "Such expressions are deprecated if E1 has volatile-qualified
- type and op is not one of the bitwise operators |, &, ^."
- We warn here rather than in cp_genericize_r because
- for compound assignments we are supposed to warn even if the
- assignment is a discarded-value expression. */
- if (modifycode != BIT_AND_EXPR
- && modifycode != BIT_IOR_EXPR
- && modifycode != BIT_XOR_EXPR
- && (TREE_THIS_VOLATILE (lhs) || CP_TYPE_VOLATILE_P (lhstype)))
- warning_at (loc, OPT_Wvolatile,
- "compound assignment with %<volatile%>-qualified left "
- "operand is deprecated");
/* Preevaluate the RHS to make sure its evaluation is complete
before the lvalue-to-rvalue conversion of the LHS:
}
/* Allow array assignment in compiler-generated code. */
+ else if (DECL_P (lhs) && DECL_ARTIFICIAL (lhs))
+ /* OK, used by coroutines (co-await-initlist1.C). */;
else if (!current_function_decl
|| !DECL_DEFAULTED_FN (current_function_decl))
{
result = build2_loc (loc, modifycode == NOP_EXPR ? MODIFY_EXPR : INIT_EXPR,
lhstype, lhs, newrhs);
+ if (modifycode == INIT_EXPR)
+ set_target_expr_eliding (newrhs);
TREE_SIDE_EFFECTS (result) = 1;
if (!plain_assign)
if (lhs == error_mark_node || rhs == error_mark_node)
return cp_expr (error_mark_node, loc);
+ tree op = build_min (modifycode, void_type_node, NULL_TREE, NULL_TREE);
+
if (processing_template_decl)
{
- if (modifycode == NOP_EXPR
- || type_dependent_expression_p (lhs)
+ if (type_dependent_expression_p (lhs)
|| type_dependent_expression_p (rhs))
{
- tree op = build_min_nt_loc (loc, modifycode, NULL_TREE, NULL_TREE);
tree rval = build_min_nt_loc (loc, MODOP_EXPR, lhs, op, rhs);
if (modifycode != NOP_EXPR)
TREE_TYPE (rval)
= build_dependent_operator_type (lookups, modifycode, true);
return rval;
}
-
- lhs = build_non_dependent_expr (lhs);
- rhs = build_non_dependent_expr (rhs);
}
- if (modifycode != NOP_EXPR)
+ tree rval;
+ if (modifycode == NOP_EXPR)
+ rval = cp_build_modify_expr (loc, lhs, modifycode, rhs, complain);
+ else
+ rval = build_new_op (loc, MODIFY_EXPR, LOOKUP_NORMAL,
+ lhs, rhs, op, lookups, &overload, complain);
+ if (rval == error_mark_node)
+ return error_mark_node;
+ if (processing_template_decl)
{
- tree op = build_nt (modifycode, NULL_TREE, NULL_TREE);
- tree rval = build_new_op (loc, MODIFY_EXPR, LOOKUP_NORMAL,
- lhs, rhs, op, lookups, &overload, complain);
- if (rval)
- {
- if (rval == error_mark_node)
- return rval;
- suppress_warning (rval /* What warning? */);
- if (processing_template_decl)
- {
- if (overload != NULL_TREE)
- return (build_min_non_dep_op_overload
- (MODIFY_EXPR, rval, overload, orig_lhs, orig_rhs));
+ if (overload != NULL_TREE)
+ return (build_min_non_dep_op_overload
+ (MODIFY_EXPR, rval, overload, orig_lhs, orig_rhs));
- return (build_min_non_dep
- (MODOP_EXPR, rval, orig_lhs, op, orig_rhs));
- }
- return rval;
- }
+ return (build_min_non_dep
+ (MODOP_EXPR, rval, orig_lhs, op, orig_rhs));
}
- return cp_build_modify_expr (loc, lhs, modifycode, rhs, complain);
+ return rval;
}
/* Helper function for get_delta_difference which assumes FROM is a base
if (n == error_mark_node)
return error_mark_node;
+ STRIP_ANY_LOCATION_WRAPPER (pfn);
+
/* We don't have to do any conversion to convert a
pointer-to-member to its own type. But, we don't want to
just return a PTRMEM_CST if there's an explicit cast; that
cast should make the expression an invalid template argument. */
- if (TREE_CODE (pfn) != PTRMEM_CST)
- {
- if (same_type_p (to_type, pfn_type))
- return pfn;
- else if (integer_zerop (n) && TREE_CODE (pfn) != CONSTRUCTOR)
- return build_reinterpret_cast (input_location, to_type, pfn,
- complain);
- }
+ if (TREE_CODE (pfn) != PTRMEM_CST
+ && same_type_p (to_type, pfn_type))
+ return pfn;
if (TREE_SIDE_EFFECTS (pfn))
pfn = save_expr (pfn);
{
range_label_for_type_mismatch label (rhstype, type);
gcc_rich_location richloc (rhs_loc, has_loc ? &label : NULL);
+ auto_diagnostic_group d;
+
switch (errtype)
{
case ICR_DEFAULT_ARGUMENT:
gcc_unreachable();
}
}
+
+ /* See if we can be more helpful. */
+ maybe_show_nonconverting_candidate (type, rhstype, rhs, flags);
+
if (TYPE_PTR_P (rhstype)
&& TYPE_PTR_P (type)
&& CLASS_TYPE_P (TREE_TYPE (rhstype))
}
}
- /* If -Wparentheses, warn about a = b = c when a has type bool and b
- does not. */
- if (warn_parentheses
- && TREE_CODE (type) == BOOLEAN_TYPE
- && TREE_CODE (rhs) == MODIFY_EXPR
- && !warning_suppressed_p (rhs, OPT_Wparentheses)
- && TREE_CODE (TREE_TYPE (rhs)) != BOOLEAN_TYPE
- && (complain & tf_warning)
- && warning_at (rhs_loc, OPT_Wparentheses,
- "suggest parentheses around assignment used as "
- "truth value"))
- suppress_warning (rhs, OPT_Wparentheses);
+ if (TREE_CODE (type) == BOOLEAN_TYPE)
+ maybe_warn_unparenthesized_assignment (rhs, /*nested_p=*/true, complain);
if (complain & tf_warning)
- warn_for_address_or_pointer_of_packed_member (type, rhs);
+ warn_for_address_of_packed_member (type, rhs);
return perform_implicit_conversion_flags (strip_top_quals (type), rhs,
complain, flags);
if (TYPE_REF_P (valtype))
warning_at (loc, OPT_Wreturn_local_addr,
"returning reference to temporary");
+ else if (TYPE_PTR_P (valtype))
+ warning_at (loc, OPT_Wreturn_local_addr,
+ "returning pointer to temporary");
else if (is_std_init_list (valtype))
warning_at (loc, OPT_Winit_list_lifetime,
"returning temporary %<initializer_list%> does not extend "
&& !TYPE_VOLATILE (TREE_TYPE (retval)));
}
+/* True if we would like to perform NRVO, i.e. can_do_nrvo_p is true and we
+ would otherwise return in memory. */
+
+static bool
+want_nrvo_p (tree retval, tree functype)
+{
+ return (can_do_nrvo_p (retval, functype)
+ && aggregate_value_p (functype, current_function_decl));
+}
+
/* Like can_do_nrvo_p, but we check if we're trying to move a class
prvalue. */
if (DECL_CONTEXT (retval) != current_function_decl)
return NULL_TREE;
if (return_p)
- return set_implicit_rvalue_p (move (expr));
+ {
+ expr = move (expr);
+ if (expr == error_mark_node)
+ return NULL_TREE;
+ return set_implicit_rvalue_p (expr);
+ }
/* if the operand of a throw-expression is a (possibly parenthesized)
id-expression that names an implicitly movable entity whose scope does not
tree t = convert_for_initialization (NULL_TREE, type,
moved,
(LOOKUP_NORMAL
- | LOOKUP_ONLYCONVERTING
- | LOOKUP_PREFER_RVALUE),
+ | LOOKUP_ONLYCONVERTING),
ICR_RETURN, NULL_TREE, 0,
tf_none);
/* If this worked, implicit rvalue would work, so the call to
std::move is redundant. */
- if (t != error_mark_node
- /* Trying to move something const will never succeed unless
- there's T(const T&&), which it almost never is, and if
- so, T wouldn't be error_mark_node now: the above convert_
- call with LOOKUP_PREFER_RVALUE returns an error if a const T&
- overload is selected. */
- || (CP_TYPE_CONST_P (TREE_TYPE (arg))
- && same_type_ignoring_top_level_qualifiers_p
- (TREE_TYPE (arg), type)))
+ if (t != error_mark_node)
{
auto_diagnostic_group d;
if (warning_at (loc, OPT_Wredundant_move,
and where the std::move does nothing if T does not have a T(const T&&)
constructor, because the argument is const. It will not use T(T&&)
because that would mean losing the const. */
- else if (TYPE_REF_P (TREE_TYPE (arg))
+ else if (warn_redundant_move
+ && !warning_suppressed_p (expr, OPT_Wredundant_move)
+ && TYPE_REF_P (TREE_TYPE (arg))
&& CP_TYPE_CONST_P (TREE_TYPE (TREE_TYPE (arg))))
{
tree rtype = TREE_TYPE (TREE_TYPE (arg));
return;
}
auto_diagnostic_group d;
- if (warning_at (loc, OPT_Wredundant_move,
- "redundant move in return statement"))
+ if (return_p
+ ? warning_at (loc, OPT_Wredundant_move,
+ "redundant move in return statement")
+ : warning_at (loc, OPT_Wredundant_move,
+ "redundant move in initialization"))
inform (loc, "remove %<std::move%> call");
}
}
change RETVAL into the function return type, and to assign it to
the DECL_RESULT for the function. Set *NO_WARNING to true if
code reaches end of non-void function warning shouldn't be issued
- on this RETURN_EXPR. */
+ on this RETURN_EXPR. Set *DANGLING to true if code returns the
+ address of a local variable. */
tree
-check_return_expr (tree retval, bool *no_warning)
+check_return_expr (tree retval, bool *no_warning, bool *dangling)
{
tree result;
/* The type actually returned by the function. */
location_t loc = cp_expr_loc_or_input_loc (retval);
*no_warning = false;
+ *dangling = false;
/* A `volatile' function is one that isn't supposed to return, ever.
(This is a G++ extension, used to get better code for functions
/* We don't know if this is an lvalue or rvalue use, but
either way we can mark it as read. */
mark_exp_read (retval);
- /* Disable our std::move warnings when we're returning
- a dependent expression (c++/89780). */
- if (retval && TREE_CODE (retval) == CALL_EXPR)
- /* This also suppresses -Wredundant-move. */
- suppress_warning (retval, OPT_Wpessimizing_move);
return retval;
}
So, if this is a value-returning function that always returns the same
local variable, remember it.
- It might be nice to be more flexible, and choose the first suitable
- variable even if the function sometimes returns something else, but
- then we run the risk of clobbering the variable we chose if the other
- returned expression uses the chosen variable somehow. And people expect
- this restriction, anyway. (jason 2000-11-19)
+ We choose the first suitable variable even if the function sometimes
+ returns something else, but only if the variable is out of scope at the
+ other return sites, or else we run the risk of clobbering the variable we
+ chose if the other returned expression uses the chosen variable somehow.
+
+ We don't currently do this if the first return is a non-variable, as it
+ would be complicated to determine whether an NRV selected later was in
+ scope at the point of the earlier return. We also don't currently support
+ multiple variables with non-overlapping scopes (53637).
See finish_function and finalize_nrv for the rest of this optimization. */
tree bare_retval = NULL_TREE;
bare_retval = tree_strip_any_location_wrapper (retval);
}
- bool named_return_value_okay_p = can_do_nrvo_p (bare_retval, functype);
- if (fn_returns_value_p && flag_elide_constructors)
+ bool named_return_value_okay_p = want_nrvo_p (bare_retval, functype);
+ if (fn_returns_value_p && flag_elide_constructors
+ && current_function_return_value != bare_retval)
{
if (named_return_value_okay_p
- && (current_function_return_value == NULL_TREE
- || current_function_return_value == bare_retval))
+ && current_function_return_value == NULL_TREE)
current_function_return_value = bare_retval;
+ else if (current_function_return_value
+ && VAR_P (current_function_return_value)
+ && DECL_NAME (current_function_return_value)
+ && !decl_in_scope_p (current_function_return_value))
+ {
+ /* The earlier NRV is out of scope at this point, so it's safe to
+ leave it alone; the current return can't refer to it. */;
+ if (named_return_value_okay_p
+ && !warning_suppressed_p (current_function_decl, OPT_Wnrvo))
+ {
+ warning (OPT_Wnrvo, "not eliding copy on return from %qD",
+ bare_retval);
+ suppress_warning (current_function_decl, OPT_Wnrvo);
+ }
+ }
else
- current_function_return_value = error_mark_node;
+ {
+ if ((named_return_value_okay_p
+ || (current_function_return_value
+ && current_function_return_value != error_mark_node))
+ && !warning_suppressed_p (current_function_decl, OPT_Wnrvo))
+ {
+ warning (OPT_Wnrvo, "not eliding copy on return in %qD",
+ current_function_decl);
+ suppress_warning (current_function_decl, OPT_Wnrvo);
+ }
+ current_function_return_value = error_mark_node;
+ }
}
/* We don't need to do any conversions when there's nothing being
if (VOID_TYPE_P (functype))
return error_mark_node;
- if (processing_template_decl)
- retval = build_non_dependent_expr (retval);
-
/* Under C++11 [12.8/32 class.copy], a returned lvalue is sometimes
treated as an rvalue for the purposes of overload resolution to
favor move constructors over copy constructors.
the conditions for the named return value optimization. */
bool converted = false;
tree moved;
- /* This is only interesting for class type. */
- if (CLASS_TYPE_P (functype)
- && (moved = treat_lvalue_as_rvalue_p (retval, /*return*/true)))
- {
- if (cxx_dialect < cxx20)
- {
- moved = convert_for_initialization
- (NULL_TREE, functype, moved, flags|LOOKUP_PREFER_RVALUE,
- ICR_RETURN, NULL_TREE, 0, tf_none);
- if (moved != error_mark_node)
- {
- retval = moved;
- converted = true;
- }
- }
- else
- /* In C++20 we just treat the return value as an rvalue that
- can bind to lvalue refs. */
- retval = moved;
- }
+ /* Until C++23, this was only interesting for class type, but in C++23,
+ we should do the below when we're converting from/to a class/reference
+ (a non-scalar type). */
+ if ((cxx_dialect < cxx23
+ ? CLASS_TYPE_P (functype)
+ : !SCALAR_TYPE_P (functype) || !SCALAR_TYPE_P (TREE_TYPE (retval)))
+ && (moved = treat_lvalue_as_rvalue_p (retval, /*return*/true)))
+ /* In C++20 and earlier we treat the return value as an rvalue
+ that can bind to lvalue refs. In C++23, such an expression is just
+ an xvalue (see reference_binding). */
+ retval = moved;
/* The call in a (lambda) thunk needs no conversions. */
if (TREE_CODE (retval) == CALL_EXPR
else if (!processing_template_decl
&& maybe_warn_about_returning_address_of_local (retval, loc)
&& INDIRECT_TYPE_P (valtype))
- retval = build2 (COMPOUND_EXPR, TREE_TYPE (retval), retval,
- build_zero_cst (TREE_TYPE (retval)));
+ *dangling = true;
}
+ /* A naive attempt to reduce the number of -Wdangling-reference false
+ positives: if we know that this function can return a variable with
+ static storage duration rather than one of its parameters, suppress
+ the warning. */
+ if (warn_dangling_reference
+ && TYPE_REF_P (functype)
+ && bare_retval
+ && VAR_P (bare_retval)
+ && TREE_STATIC (bare_retval))
+ suppress_warning (current_function_decl, OPT_Wdangling_reference);
+
if (processing_template_decl)
return saved_retval;
/* Actually copy the value returned into the appropriate location. */
if (retval && retval != result)
- retval = build2 (INIT_EXPR, TREE_TYPE (result), result, retval);
+ {
+ /* If there's a postcondition for a scalar return value, wrap
+ retval in a call to the postcondition function. */
+ if (tree post = apply_postcondition_to_return (retval))
+ retval = post;
+ retval = cp_build_init_expr (result, retval);
+ }
+
+ if (current_function_return_value == bare_retval)
+ INIT_EXPR_NRV_P (retval) = true;
if (tree set = maybe_set_retval_sentinel ())
retval = build2 (COMPOUND_EXPR, void_type_node, retval, set);
+ /* If there's a postcondition for an aggregate return value, call the
+ postcondition function after the return object is initialized. */
+ if (tree post = apply_postcondition_to_return (result))
+ retval = build2 (COMPOUND_EXPR, void_type_node, retval, post);
+
return retval;
}