building RTL. These routines are used both during actual parsing
and during the instantiation of template functions.
- Copyright (C) 1998-2017 Free Software Foundation, Inc.
+ Copyright (C) 1998-2018 Free Software Foundation, Inc.
Written by Mark Mitchell (mmitchell@usa.net) based on code found
formerly in parse.y and pt.c.
#include "tree-iterator.h"
#include "omp-general.h"
#include "convert.h"
+#include "stringpool.h"
+#include "attribs.h"
#include "gomp-constants.h"
+#include "predict.h"
/* These routines provide a modular interface to perform many parsing
operations. They may therefore be used during actual parsing, or
TREE_USED (destination) = 1;
else
{
- if (check_no_cilk (destination,
- "Cilk array notation cannot be used as a computed goto expression",
- "%<_Cilk_spawn%> statement cannot be used as a computed goto expression"))
- destination = error_mark_node;
destination = mark_rvalue_use (destination);
if (!processing_template_decl)
{
check_goto (destination);
+ add_stmt (build_predict_expr (PRED_GOTO, NOT_TAKEN));
return add_stmt (build_stmt (input_location, GOTO_EXPR, destination));
}
&& !TREE_NO_WARNING (cond)
&& warn_parentheses)
{
- warning_at (EXPR_LOC_OR_LOC (cond, input_location), OPT_Wparentheses,
+ warning_at (cp_expr_loc_or_loc (cond, input_location), OPT_Wparentheses,
"suggest parentheses around assignment used as truth value");
TREE_NO_WARNING (cond) = 1;
}
{
cond = maybe_convert_cond (cond);
if (IF_STMT_CONSTEXPR_P (if_stmt)
- && require_potential_rvalue_constant_expression (cond)
- && !value_dependent_expression_p (cond))
+ && !type_dependent_expression_p (cond)
+ && require_constant_expression (cond)
+ && !instantiation_dependent_expression_p (cond)
+ /* Wait until instantiation time, since only then COND has been
+ converted to bool. */
+ && TYPE_MAIN_VARIANT (TREE_TYPE (cond)) == boolean_type_node)
{
cond = instantiate_non_dependent_expr (cond);
cond = cxx_constant_value (cond, NULL_TREE);
WHILE_STMT. */
void
-finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep)
+finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep,
+ unsigned short unroll)
{
- if (check_no_cilk (cond,
- "Cilk array notation cannot be used as a condition for while statement",
- "%<_Cilk_spawn%> statement cannot be used as a condition for while statement"))
- cond = error_mark_node;
cond = maybe_convert_cond (cond);
finish_cond (&WHILE_COND (while_stmt), cond);
begin_maybe_infinite_loop (cond);
if (ivdep && cond != error_mark_node)
- WHILE_COND (while_stmt) = build2 (ANNOTATE_EXPR,
+ WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
+ TREE_TYPE (WHILE_COND (while_stmt)),
+ WHILE_COND (while_stmt),
+ build_int_cst (integer_type_node,
+ annot_expr_ivdep_kind),
+ integer_zero_node);
+ if (unroll && cond != error_mark_node)
+ WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
TREE_TYPE (WHILE_COND (while_stmt)),
WHILE_COND (while_stmt),
build_int_cst (integer_type_node,
- annot_expr_ivdep_kind));
+ annot_expr_unroll_kind),
+ build_int_cst (integer_type_node,
+ unroll));
simplify_loop_decl_cond (&WHILE_COND (while_stmt), WHILE_BODY (while_stmt));
}
COND is as indicated. */
void
-finish_do_stmt (tree cond, tree do_stmt, bool ivdep)
+finish_do_stmt (tree cond, tree do_stmt, bool ivdep, unsigned short unroll)
{
- if (check_no_cilk (cond,
- "Cilk array notation cannot be used as a condition for a do-while statement",
- "%<_Cilk_spawn%> statement cannot be used as a condition for a do-while statement"))
- cond = error_mark_node;
cond = maybe_convert_cond (cond);
end_maybe_infinite_loop (cond);
if (ivdep && cond != error_mark_node)
- cond = build2 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
- build_int_cst (integer_type_node, annot_expr_ivdep_kind));
+ cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
+ build_int_cst (integer_type_node, annot_expr_ivdep_kind),
+ integer_zero_node);
+ if (unroll && cond != error_mark_node)
+ cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
+ build_int_cst (integer_type_node, annot_expr_unroll_kind),
+ build_int_cst (integer_type_node, unroll));
DO_COND (do_stmt) = cond;
}
tree
begin_for_scope (tree *init)
{
- tree scope = NULL_TREE;
- if (flag_new_for_scope > 0)
- scope = do_pushlevel (sk_for);
+ tree scope = do_pushlevel (sk_for);
if (processing_template_decl)
*init = push_stmt_list ();
if (scope == NULL_TREE)
{
- gcc_assert (!init || !(flag_new_for_scope > 0));
- if (!init)
- scope = begin_for_scope (&init);
+ gcc_assert (!init);
+ scope = begin_for_scope (&init);
}
+
FOR_INIT_STMT (r) = init;
FOR_SCOPE (r) = scope;
FOR_STMT. */
void
-finish_for_cond (tree cond, tree for_stmt, bool ivdep)
+finish_for_cond (tree cond, tree for_stmt, bool ivdep, unsigned short unroll)
{
- if (check_no_cilk (cond,
- "Cilk array notation cannot be used in a condition for a for-loop",
- "%<_Cilk_spawn%> statement cannot be used in a condition for a for-loop"))
- cond = error_mark_node;
cond = maybe_convert_cond (cond);
finish_cond (&FOR_COND (for_stmt), cond);
begin_maybe_infinite_loop (cond);
if (ivdep && cond != error_mark_node)
- FOR_COND (for_stmt) = build2 (ANNOTATE_EXPR,
+ FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
+ TREE_TYPE (FOR_COND (for_stmt)),
+ FOR_COND (for_stmt),
+ build_int_cst (integer_type_node,
+ annot_expr_ivdep_kind),
+ integer_zero_node);
+ if (unroll && cond != error_mark_node)
+ FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
TREE_TYPE (FOR_COND (for_stmt)),
FOR_COND (for_stmt),
build_int_cst (integer_type_node,
- annot_expr_ivdep_kind));
+ annot_expr_unroll_kind),
+ build_int_cst (integer_type_node,
+ unroll));
simplify_loop_decl_cond (&FOR_COND (for_stmt), FOR_BODY (for_stmt));
}
FOR_BODY (for_stmt) = do_poplevel (FOR_BODY (for_stmt));
/* Pop the scope for the body of the loop. */
- if (flag_new_for_scope > 0)
- {
- tree scope;
- tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT
- ? &RANGE_FOR_SCOPE (for_stmt)
- : &FOR_SCOPE (for_stmt));
- scope = *scope_ptr;
- *scope_ptr = NULL;
- add_stmt (do_poplevel (scope));
+ tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT
+ ? &RANGE_FOR_SCOPE (for_stmt)
+ : &FOR_SCOPE (for_stmt));
+ tree scope = *scope_ptr;
+ *scope_ptr = NULL;
+
+ /* During parsing of the body, range for uses "__for_{range,begin,end} "
+ decl names to make those inaccessible by code in the body.
+ Change it to ones with underscore instead of space, so that it can
+ be inspected in the debugger. */
+ tree range_for_decl[3] = { NULL_TREE, NULL_TREE, NULL_TREE };
+ gcc_assert (CPTI_FOR_BEGIN__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 1
+ && CPTI_FOR_END__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 2
+ && CPTI_FOR_RANGE_IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 3
+ && CPTI_FOR_BEGIN_IDENTIFIER == CPTI_FOR_BEGIN__IDENTIFIER + 3
+ && CPTI_FOR_END_IDENTIFIER == CPTI_FOR_END__IDENTIFIER + 3);
+ for (int i = 0; i < 3; i++)
+ {
+ tree id = cp_global_trees[CPTI_FOR_RANGE__IDENTIFIER + i];
+ if (IDENTIFIER_BINDING (id)
+ && IDENTIFIER_BINDING (id)->scope == current_binding_level)
+ {
+ range_for_decl[i] = IDENTIFIER_BINDING (id)->value;
+ gcc_assert (VAR_P (range_for_decl[i])
+ && DECL_ARTIFICIAL (range_for_decl[i]));
+ }
}
+
+ add_stmt (do_poplevel (scope));
+
+ for (int i = 0; i < 3; i++)
+ if (range_for_decl[i])
+ DECL_NAME (range_for_decl[i])
+ = cp_global_trees[CPTI_FOR_RANGE_IDENTIFIER + i];
}
/* Begin a range-for-statement. Returns a new RANGE_FOR_STMT.
tree
begin_range_for_stmt (tree scope, tree init)
{
- tree r;
-
begin_maybe_infinite_loop (boolean_false_node);
- r = build_stmt (input_location, RANGE_FOR_STMT,
- NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE);
+ tree r = build_stmt (input_location, RANGE_FOR_STMT, NULL_TREE, NULL_TREE,
+ NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE);
if (scope == NULL_TREE)
{
- gcc_assert (!init || !(flag_new_for_scope > 0));
- if (!init)
- scope = begin_for_scope (&init);
+ gcc_assert (!init);
+ scope = begin_for_scope (&init);
}
- /* RANGE_FOR_STMTs do not use nor save the init tree, so we
- pop it now. */
- if (init)
- pop_stmt_list (init);
+ /* Since C++20, RANGE_FOR_STMTs can use the init tree, so save it. */
+ RANGE_FOR_INIT_STMT (r) = init;
RANGE_FOR_SCOPE (r) = scope;
return r;
}
/* Finish the head of a range-based for statement, which may
- be given by RANGE_FOR_STMT. DECL must be the declaration
+ be given by RANGE_FOR_STMT. DECL must be the declaration
and EXPR must be the loop expression. */
void
finish_range_for_decl (tree range_for_stmt, tree decl, tree expr)
{
+ if (processing_template_decl)
+ RANGE_FOR_INIT_STMT (range_for_stmt)
+ = pop_stmt_list (RANGE_FOR_INIT_STMT (range_for_stmt));
RANGE_FOR_DECL (range_for_stmt) = decl;
RANGE_FOR_EXPR (range_for_stmt) = expr;
add_stmt (range_for_stmt);
understand. */
if (!block_may_fallthru (cur_stmt_list))
return void_node;
+ note_break_stmt ();
return add_stmt (build_stmt (input_location, BREAK_STMT));
}
{
tree orig_type = NULL;
- if (check_no_cilk (cond,
- "Cilk array notation cannot be used as a condition for switch statement",
- "%<_Cilk_spawn%> statement cannot be used as a condition for switch statement"))
- cond = error_mark_node;
-
if (!processing_template_decl)
{
/* Convert the condition to an integer or enumeration type. */
if (warn_catch_value
&& type != NULL_TREE
&& type != error_mark_node
- && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE)
+ && !TYPE_REF_P (TREE_TYPE (decl)))
{
tree orig_type = TREE_TYPE (decl);
if (CLASS_TYPE_P (orig_type))
effectively const. */
|| (CLASS_TYPE_P (TREE_TYPE (operand))
&& C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
- cxx_readonly_error (operand, lv_asm);
+ cxx_readonly_error (input_location, operand, lv_asm);
+
+ tree *op = &operand;
+ while (TREE_CODE (*op) == COMPOUND_EXPR)
+ op = &TREE_OPERAND (*op, 1);
+ switch (TREE_CODE (*op))
+ {
+ case PREINCREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ case MODIFY_EXPR:
+ *op = genericize_compound_lvalue (*op);
+ op = &TREE_OPERAND (*op, 1);
+ break;
+ default:
+ break;
+ }
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
oconstraints[i] = constraint;
{
/* If the operand is going to end up in memory,
mark it addressable. */
- if (!allows_reg && !cxx_mark_addressable (operand))
+ if (!allows_reg && !cxx_mark_addressable (*op))
operand = error_mark_node;
}
else
/* Strip the nops as we allow this case. FIXME, this really
should be rejected or made deprecated. */
STRIP_NOPS (operand);
- if (!cxx_mark_addressable (operand))
+
+ tree *op = &operand;
+ while (TREE_CODE (*op) == COMPOUND_EXPR)
+ op = &TREE_OPERAND (*op, 1);
+ switch (TREE_CODE (*op))
+ {
+ case PREINCREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ case MODIFY_EXPR:
+ *op = genericize_compound_lvalue (*op);
+ op = &TREE_OPERAND (*op, 1);
+ break;
+ default:
+ break;
+ }
+
+ if (!cxx_mark_addressable (*op))
operand = error_mark_node;
}
else if (!allows_reg && !allows_mem)
if (TREE_CODE (expr) == COMPONENT_REF
|| TREE_CODE (expr) == SCOPE_REF)
REF_PARENTHESIZED_P (expr) = true;
- else if (type_dependent_expression_p (expr))
+ else if (processing_template_decl)
expr = build1 (PAREN_EXPR, TREE_TYPE (expr), expr);
- else if (VAR_P (expr) && DECL_HARD_REGISTER (expr))
- /* We can't bind a hard register variable to a reference. */;
else
{
- cp_lvalue_kind kind = lvalue_kind (expr);
- if ((kind & ~clk_class) != clk_none)
- {
- tree type = unlowered_expr_type (expr);
- bool rval = !!(kind & clk_rvalueref);
- type = cp_build_reference_type (type, rval);
- /* This inhibits warnings in, eg, cxx_mark_addressable
- (c++/60955). */
- warning_sentinel s (extra_warnings);
- expr = build_static_cast (type, expr, tf_error);
- if (expr != error_mark_node)
- REF_PARENTHESIZED_P (expr) = true;
- }
+ expr = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), expr);
+ REF_PARENTHESIZED_P (expr) = true;
}
return expr;
tree
maybe_undo_parenthesized_ref (tree t)
{
- if (cxx_dialect >= cxx14
- && INDIRECT_REF_P (t)
- && REF_PARENTHESIZED_P (t))
+ if (cxx_dialect < cxx14)
+ return t;
+
+ if (INDIRECT_REF_P (t) && REF_PARENTHESIZED_P (t))
{
t = TREE_OPERAND (t, 0);
while (TREE_CODE (t) == NON_LVALUE_EXPR
|| TREE_CODE (t) == STATIC_CAST_EXPR);
t = TREE_OPERAND (t, 0);
}
+ else if (TREE_CODE (t) == PAREN_EXPR)
+ t = TREE_OPERAND (t, 0);
+ else if (TREE_CODE (t) == VIEW_CONVERT_EXPR
+ && REF_PARENTHESIZED_P (t))
+ t = TREE_OPERAND (t, 0);
return t;
}
{
tree type = TREE_TYPE (decl);
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
/* Quals on the object don't matter. */;
else if (PACK_EXPANSION_P (type))
/* Don't bother trying to represent this. */
if (template_p)
{
if (TREE_CODE (expr) == UNBOUND_CLASS_TEMPLATE)
- /* cp_parser_lookup_name thought we were looking for a type,
- but we're actually looking for a declaration. */
- expr = build_qualified_name (/*type*/NULL_TREE,
- TYPE_CONTEXT (expr),
- TYPE_IDENTIFIER (expr),
- /*template_p*/true);
+ {
+ /* cp_parser_lookup_name thought we were looking for a type,
+ but we're actually looking for a declaration. */
+ qualifying_class = TYPE_CONTEXT (expr);
+ expr = TYPE_IDENTIFIER (expr);
+ }
else
check_template_keyword (expr);
}
}
/* No need to check access within an enum. */
- if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE)
+ if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE
+ && TREE_CODE (expr) != IDENTIFIER_NODE)
return expr;
/* Within the scope of a class, turn references to non-static
qualifying_class);
pop_deferring_access_checks ();
}
- else if (BASELINK_P (expr) && !processing_template_decl)
+ else if (BASELINK_P (expr))
{
/* See if any of the functions are non-static members. */
/* If so, the expression may be relative to 'this'. */
expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false,
complain);
}
- else if (BASELINK_P (expr))
- ;
else
{
/* In a template, return a SCOPE_REF for most qualified-ids
know we have access and building up the SCOPE_REF confuses
non-type template argument handling. */
if (processing_template_decl
- && !currently_open_class (qualifying_class))
+ && (!currently_open_class (qualifying_class)
+ || TREE_CODE (expr) == IDENTIFIER_NODE
+ || TREE_CODE (expr) == TEMPLATE_ID_EXPR
+ || TREE_CODE (expr) == BIT_NOT_EXPR))
expr = build_qualified_name (TREE_TYPE (expr),
qualifying_class, expr,
template_p);
{
tree type = TREE_TYPE (expr);
- if (processing_template_decl)
+ if (type && type_unknown_p (type))
+ {
+ error ("a statement expression is an insufficient context"
+ " for overload resolution");
+ TREE_TYPE (stmt_expr) = error_mark_node;
+ return error_mark_node;
+ }
+ else if (processing_template_decl)
{
expr = build_stmt (input_location, EXPR_STMT, expr);
expr = add_stmt (expr);
else
fn = identifier;
}
+ else if (TREE_CODE (fn) == OVERLOAD && processing_template_decl)
+ /* FIXME: We shouldn't really need to mark the lookup here, as
+ resolving the (non-dependent) call should save the single
+ function we resolve to. Related to PR c++/83529. */
+ lookup_keep (fn);
}
if (fn && template_id && fn != error_mark_node)
{
tree result;
tree orig_fn;
- vec<tree, va_gc> *orig_args = NULL;
+ vec<tree, va_gc> *orig_args = *args;
if (fn == error_mark_node)
return error_mark_node;
|| any_type_dependent_arguments_p (*args))
{
result = build_min_nt_call_vec (orig_fn, *args);
- SET_EXPR_LOCATION (result, EXPR_LOC_OR_LOC (fn, input_location));
+ SET_EXPR_LOCATION (result, cp_expr_loc_or_loc (fn, input_location));
KOENIG_LOOKUP_P (result) = koenig_p;
if (is_overloaded_fn (fn))
- {
- fn = get_fns (fn);
- lookup_keep (fn, true);
- }
+ fn = get_fns (fn);
if (cfun)
{
sizeof_arg, same_type_ignoring_top_level_qualifiers_p);
}
+ if ((complain & tf_warning)
+ && TREE_CODE (fn) == FUNCTION_DECL
+ && fndecl_built_in_p (fn, BUILT_IN_MEMSET)
+ && vec_safe_length (*args) == 3
+ && !any_type_dependent_arguments_p (*args))
+ {
+ tree arg0 = (*orig_args)[0];
+ tree arg1 = (*orig_args)[1];
+ tree arg2 = (*orig_args)[2];
+ int literal_mask = ((literal_integer_zerop (arg1) << 1)
+ | (literal_integer_zerop (arg2) << 2));
+ arg2 = instantiate_non_dependent_expr (arg2);
+ warn_for_memset (input_location, arg0, arg2, literal_mask);
+ }
+
/* A call to a namespace-scope function. */
result = build_new_function_call (fn, args, complain);
}
result = convert_from_reference (result);
}
- /* Free or retain OVERLOADs from lookup. */
- if (is_overloaded_fn (orig_fn))
- lookup_keep (get_fns (orig_fn), processing_template_decl);
-
return result;
}
/* TODO: build_x_unary_op doesn't always honor the location. */
result.set_location (combined_loc);
- tree result_ovl, expr_ovl;
+ if (result == error_mark_node)
+ return result;
if (!(complain & tf_warning))
return result;
- result_ovl = result;
- expr_ovl = expr;
+ tree result_ovl = result;
+ tree expr_ovl = expr;
if (!processing_template_decl)
expr_ovl = cp_fully_fold (expr_ovl);
if (type == error_mark_node)
return error_mark_node;
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
{
compound_literal
= finish_compound_literal (TREE_TYPE (type), compound_literal,
complain, fcl_context);
- return cp_build_c_cast (type, compound_literal, complain);
+ /* The prvalue is then used to direct-initialize the reference. */
+ tree r = (perform_implicit_conversion_flags
+ (type, compound_literal, complain, LOOKUP_NORMAL));
+ return convert_from_reference (r);
}
if (!TYPE_OBJ_P (type))
if (tree anode = type_uses_auto (type))
if (CLASS_PLACEHOLDER_TEMPLATE (anode))
- type = do_auto_deduction (type, compound_literal, anode, complain,
- adc_variable_type);
+ {
+ type = do_auto_deduction (type, compound_literal, anode, complain,
+ adc_variable_type);
+ if (type == error_mark_node)
+ return error_mark_node;
+ }
if (processing_template_decl)
{
if (TREE_CODE (decl) != CONST_DECL)
DECL_CONTEXT (decl) = current_class_type;
- /* Check for bare parameter packs in the member variable declaration. */
+ if (TREE_CODE (decl) == USING_DECL)
+ /* For now, ignore class-scope USING_DECLS, so that debugging
+ backends do not see them. */
+ DECL_IGNORED_P (decl) = 1;
+
+ /* Check for bare parameter packs in the non-static data member
+ declaration. */
if (TREE_CODE (decl) == FIELD_DECL)
{
if (check_for_bare_parameter_packs (TREE_TYPE (decl)))
A C language linkage is ignored for the names of class members
and the member function type of class member functions. */
- if (DECL_LANG_SPECIFIC (decl) && DECL_LANGUAGE (decl) == lang_c)
+ if (DECL_LANG_SPECIFIC (decl))
SET_DECL_LANGUAGE (decl, lang_cplusplus);
- /* Put functions on the TYPE_METHODS list and everything else on the
- TYPE_FIELDS list. Note that these are built up in reverse order.
- We reverse them (to obtain declaration order) in finish_struct. */
- if (DECL_DECLARES_FUNCTION_P (decl))
- {
- /* We also need to add this function to the
- CLASSTYPE_METHOD_VEC. */
- if (add_method (current_class_type, decl, false))
- {
- gcc_assert (TYPE_MAIN_VARIANT (current_class_type) == current_class_type);
- DECL_CHAIN (decl) = TYPE_METHODS (current_class_type);
- TYPE_METHODS (current_class_type) = decl;
+ bool add = false;
- maybe_add_class_template_decl_list (current_class_type, decl,
- /*friend_p=*/0);
- }
- }
+ /* Functions and non-functions are added differently. */
+ if (DECL_DECLARES_FUNCTION_P (decl))
+ add = add_method (current_class_type, decl, false);
/* Enter the DECL into the scope of the class, if the class
isn't a closure (whose fields are supposed to be unnamed). */
else if (CLASSTYPE_LAMBDA_EXPR (current_class_type)
|| pushdecl_class_level (decl))
- {
- if (TREE_CODE (decl) == USING_DECL)
- {
- /* For now, ignore class-scope USING_DECLS, so that
- debugging backends do not see them. */
- DECL_IGNORED_P (decl) = 1;
- }
+ add = true;
+ if (add)
+ {
/* All TYPE_DECLs go at the end of TYPE_FIELDS. Ordinary fields
- go at the beginning. The reason is that lookup_field_1
- searches the list in order, and we want a field name to
- override a type name so that the "struct stat hack" will
- work. In particular:
-
- struct S { enum E { }; int E } s;
- s.E = 3;
-
- is valid. In addition, the FIELD_DECLs must be maintained in
- declaration order so that class layout works as expected.
- However, we don't need that order until class layout, so we
- save a little time by putting FIELD_DECLs on in reverse order
- here, and then reversing them in finish_struct_1. (We could
- also keep a pointer to the correct insertion points in the
- list.) */
+ go at the beginning. The reason is that
+ legacy_nonfn_member_lookup searches the list in order, and we
+ want a field name to override a type name so that the "struct
+ stat hack" will work. In particular:
+
+ struct S { enum E { }; static const int E = 5; int ary[S::E]; } s;
+
+ is valid. */
if (TREE_CODE (decl) == TYPE_DECL)
TYPE_FIELDS (current_class_type)
{
return ((VAR_P (decl) || TREE_CODE (decl) == PARM_DECL)
&& DECL_FUNCTION_SCOPE_P (decl)
+ /* Don't get confused by temporaries. */
+ && DECL_NAME (decl)
&& (DECL_CONTEXT (decl) != current_function_decl
|| parsing_nsdmi ()));
}
}
/* DECL satisfies outer_automatic_var_p. Possibly complain about it or
- rewrite it for lambda capture. */
+ rewrite it for lambda capture.
+
+ If ODR_USE is true, we're being called from mark_use, and we complain about
+ use of constant variables. If ODR_USE is false, we're being called for the
+ id-expression, and we do lambda capture. */
tree
-process_outer_var_ref (tree decl, tsubst_flags_t complain)
+process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use)
{
if (cp_unevaluated_operand)
/* It's not a use (3.2) if we're in an unevaluated context. */
if (!mark_used (decl, complain))
return error_mark_node;
- bool saw_generic_lambda = false;
if (parsing_nsdmi ())
containing_function = NULL_TREE;
- else
- /* If we are in a lambda function, we can move out until we hit
- 1. the context,
- 2. a non-lambda function, or
- 3. a non-default capturing lambda function. */
- while (context != containing_function
- /* containing_function can be null with invalid generic lambdas. */
- && containing_function
- && LAMBDA_FUNCTION_P (containing_function))
- {
- tree closure = DECL_CONTEXT (containing_function);
- lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure);
- if (generic_lambda_fn_p (containing_function))
- saw_generic_lambda = true;
+ if (containing_function && LAMBDA_FUNCTION_P (containing_function))
+ {
+ /* Check whether we've already built a proxy. */
+ tree var = decl;
+ while (is_normal_capture_proxy (var))
+ var = DECL_CAPTURED_VARIABLE (var);
+ tree d = retrieve_local_specialization (var);
- if (TYPE_CLASS_SCOPE_P (closure))
- /* A lambda in an NSDMI (c++/64496). */
- break;
+ if (d && d != decl && is_capture_proxy (d))
+ {
+ if (DECL_CONTEXT (d) == containing_function)
+ /* We already have an inner proxy. */
+ return d;
+ else
+ /* We need to capture an outer proxy. */
+ return process_outer_var_ref (d, complain, odr_use);
+ }
+ }
- if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
- == CPLD_NONE)
- break;
+ /* If we are in a lambda function, we can move out until we hit
+ 1. the context,
+ 2. a non-lambda function, or
+ 3. a non-default capturing lambda function. */
+ while (context != containing_function
+ /* containing_function can be null with invalid generic lambdas. */
+ && containing_function
+ && LAMBDA_FUNCTION_P (containing_function))
+ {
+ tree closure = DECL_CONTEXT (containing_function);
+ lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure);
- lambda_stack = tree_cons (NULL_TREE,
- lambda_expr,
- lambda_stack);
+ if (TYPE_CLASS_SCOPE_P (closure))
+ /* A lambda in an NSDMI (c++/64496). */
+ break;
- containing_function
- = decl_function_context (containing_function);
- }
+ if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
+ == CPLD_NONE)
+ break;
- /* Core issue 696: "[At the July 2009 meeting] the CWG expressed
- support for an approach in which a reference to a local
- [constant] automatic variable in a nested class or lambda body
- would enter the expression as an rvalue, which would reduce
- the complexity of the problem"
-
- FIXME update for final resolution of core issue 696. */
- if (decl_maybe_constant_var_p (decl))
- {
- if (processing_template_decl && !saw_generic_lambda)
- /* In a non-generic lambda within a template, wait until instantiation
- time to decide whether to capture. For a generic lambda, we can't
- wait until we instantiate the op() because the closure class is
- already defined at that point. FIXME to get the semantics exactly
- right we need to partially-instantiate the lambda body so the only
- dependencies left are on the generic parameters themselves. This
- probably means moving away from our current model of lambdas in
- templates (instantiating the closure type) to one based on creating
- the closure type when instantiating the lambda context. That is
- probably also the way to handle lambdas within pack expansions. */
- return decl;
- else if (decl_constant_var_p (decl))
- {
- tree t = maybe_constant_value (convert_from_reference (decl));
- if (TREE_CONSTANT (t))
- return t;
- }
+ lambda_stack = tree_cons (NULL_TREE,
+ lambda_expr,
+ lambda_stack);
+
+ containing_function
+ = decl_function_context (containing_function);
}
+ /* In a lambda within a template, wait until instantiation
+ time to implicitly capture. */
+ if (context == containing_function
+ && DECL_TEMPLATE_INFO (containing_function)
+ && uses_template_parms (DECL_TI_ARGS (containing_function)))
+ return decl;
+
if (lambda_expr && VAR_P (decl)
&& DECL_ANON_UNION_VAR_P (decl))
{
error ("cannot capture member %qD of anonymous union", decl);
return error_mark_node;
}
- if (context == containing_function)
+ /* Do lambda capture when processing the id-expression, not when
+ odr-using a variable. */
+ if (!odr_use && context == containing_function)
{
decl = add_default_capture (lambda_stack,
/*id=*/DECL_NAME (decl),
initializer);
}
+ /* Only an odr-use of an outer automatic variable causes an
+ error, and a constant variable can decay to a prvalue
+ constant without odr-use. So don't complain yet. */
+ else if (!odr_use && decl_constant_var_p (decl))
+ return decl;
else if (lambda_expr)
{
if (complain & tf_error)
inform (location_of (closure),
"the lambda has no capture-default");
else if (TYPE_CLASS_SCOPE_P (closure))
- inform (0, "lambda in local class %q+T cannot "
+ inform (UNKNOWN_LOCATION, "lambda in local class %q+T cannot "
"capture variables from the enclosing context",
TYPE_CONTEXT (closure));
inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
else
decl = id_expression;
}
- /* If DECL is a variable that would be out of scope under
- ANSI/ISO rules, but in scope in the ARM, name lookup
- will succeed. Issue a diagnostic here. */
- else
- decl = check_for_out_of_scope_variable (decl);
/* Remember that the name was used in the definition of
the current class so that we can check later to see if
*idk = CP_ID_KIND_NONE;
if (TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
decl = TEMPLATE_PARM_DECL (decl);
- r = convert_from_reference (DECL_INITIAL (decl));
-
+ r = DECL_INITIAL (decl);
+ if (CLASS_TYPE_P (TREE_TYPE (r)) && !CP_TYPE_CONST_P (TREE_TYPE (r)))
+ {
+ /* If the entity is a template parameter object for a template
+ parameter of type T, the type of the expression is const T. */
+ tree ctype = TREE_TYPE (r);
+ ctype = cp_build_qualified_type (ctype, (cp_type_quals (ctype)
+ | TYPE_QUAL_CONST));
+ r = build1 (VIEW_CONVERT_EXPR, ctype, r);
+ }
+ r = convert_from_reference (r);
if (integral_constant_expression_p
&& !dependent_type_p (TREE_TYPE (decl))
&& !(INTEGRAL_OR_ENUMERATION_TYPE_P (TREE_TYPE (r))))
? CP_ID_KIND_UNQUALIFIED_DEPENDENT
: CP_ID_KIND_UNQUALIFIED)));
- /* If the name was dependent on a template parameter and we're not in a
- default capturing generic lambda within a template, we will resolve the
- name at instantiation time. FIXME: For lambdas, we should defer
- building the closure type until instantiation time then we won't need
- the extra test here. */
if (dependent_p
- && !parsing_default_capturing_generic_lambda_in_template ())
- {
- if (DECL_P (decl)
- && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl)))
- /* Dependent type attributes on the decl mean that the TREE_TYPE is
- wrong, so just return the identifier. */
- return id_expression;
-
- /* If we found a variable, then name lookup during the
- instantiation will always resolve to the same VAR_DECL
- (or an instantiation thereof). */
- if (VAR_P (decl)
- || TREE_CODE (decl) == CONST_DECL
- || TREE_CODE (decl) == PARM_DECL)
- {
- mark_used (decl);
- return convert_from_reference (decl);
- }
-
- /* Create a SCOPE_REF for qualified names, if the scope is
- dependent. */
- if (scope)
- {
- if (TYPE_P (scope))
- {
- if (address_p && done)
- decl = finish_qualified_id_expr (scope, decl,
- done, address_p,
- template_p,
- template_arg_p,
- tf_warning_or_error);
- else
- {
- tree type = NULL_TREE;
- if (DECL_P (decl) && !dependent_scope_p (scope))
- type = TREE_TYPE (decl);
- decl = build_qualified_name (type,
- scope,
- id_expression,
- template_p);
- }
- }
- if (TREE_TYPE (decl))
- decl = convert_from_reference (decl);
- return decl;
- }
- /* A TEMPLATE_ID already contains all the information we
- need. */
- if (TREE_CODE (id_expression) == TEMPLATE_ID_EXPR)
- return id_expression;
- /* The same is true for FIELD_DECL, but we also need to
- make sure that the syntax is correct. */
- else if (TREE_CODE (decl) == FIELD_DECL)
- {
- /* Since SCOPE is NULL here, this is an unqualified name.
- Access checking has been performed during name lookup
- already. Turn off checking to avoid duplicate errors. */
- push_deferring_access_checks (dk_no_check);
- decl = finish_non_static_data_member
- (decl, NULL_TREE,
- /*qualifying_scope=*/NULL_TREE);
- pop_deferring_access_checks ();
- return decl;
- }
- return id_expression;
- }
+ && DECL_P (decl)
+ && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl)))
+ /* Dependent type attributes on the decl mean that the TREE_TYPE is
+ wrong, so just return the identifier. */
+ return id_expression;
if (TREE_CODE (decl) == NAMESPACE_DECL)
{
expression. Template parameters have already
been handled above. */
if (! error_operand_p (decl)
+ && !dependent_p
&& integral_constant_expression_p
&& ! decl_constant_var_p (decl)
&& TREE_CODE (decl) != CONST_DECL
decl = build_cxx_call (wrap, 0, NULL, tf_warning_or_error);
}
else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
+ && !dependent_p
&& variable_template_p (TREE_OPERAND (decl, 0)))
{
decl = finish_template_variable (decl);
}
else if (scope)
{
+ if (TREE_CODE (decl) == SCOPE_REF)
+ {
+ gcc_assert (same_type_p (scope, TREE_OPERAND (decl, 0)));
+ decl = TREE_OPERAND (decl, 1);
+ }
+
decl = (adjust_result_of_qualified_name_lookup
(decl, scope, current_nonlambda_class_type()));
}
/* Implement the __direct_bases keyword: Return the direct base classes
- of type */
+ of type. */
tree
-calculate_direct_bases (tree type)
+calculate_direct_bases (tree type, tsubst_flags_t complain)
{
- vec<tree, va_gc> *vector = make_tree_vector();
- tree bases_vec = NULL_TREE;
- vec<tree, va_gc> *base_binfos;
- tree binfo;
- unsigned i;
-
- complete_type (type);
-
- if (!NON_UNION_CLASS_TYPE_P (type))
+ if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
+ || !NON_UNION_CLASS_TYPE_P (type))
return make_tree_vec (0);
- base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
+ vec<tree, va_gc> *vector = make_tree_vector ();
+ vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
+ tree binfo;
+ unsigned i;
/* Virtual bases are initialized first */
for (i = 0; base_binfos->iterate (i, &binfo); i++)
- {
- if (BINFO_VIRTUAL_P (binfo))
- {
- vec_safe_push (vector, binfo);
- }
- }
+ if (BINFO_VIRTUAL_P (binfo))
+ vec_safe_push (vector, binfo);
/* Now non-virtuals */
for (i = 0; base_binfos->iterate (i, &binfo); i++)
- {
- if (!BINFO_VIRTUAL_P (binfo))
- {
- vec_safe_push (vector, binfo);
- }
- }
-
+ if (!BINFO_VIRTUAL_P (binfo))
+ vec_safe_push (vector, binfo);
- bases_vec = make_tree_vec (vector->length ());
+ tree bases_vec = make_tree_vec (vector->length ());
for (i = 0; i < vector->length (); ++i)
- {
- TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);
- }
+ TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);
+
+ release_tree_vector (vector);
return bases_vec;
}
{
vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_);
if (!BINFO_VIRTUAL_P (binfo))
- {
- vec_safe_push (*data, BINFO_TYPE (binfo));
- }
+ vec_safe_push (*data, BINFO_TYPE (binfo));
return NULL_TREE;
}
static vec<tree, va_gc> *
calculate_bases_helper (tree type)
{
- vec<tree, va_gc> *vector = make_tree_vector();
+ vec<tree, va_gc> *vector = make_tree_vector ();
/* Now add non-virtual base classes in order of construction */
if (TYPE_BINFO (type))
}
tree
-calculate_bases (tree type)
+calculate_bases (tree type, tsubst_flags_t complain)
{
- vec<tree, va_gc> *vector = make_tree_vector();
+ if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
+ || !NON_UNION_CLASS_TYPE_P (type))
+ return make_tree_vec (0);
+
+ vec<tree, va_gc> *vector = make_tree_vector ();
tree bases_vec = NULL_TREE;
unsigned i;
vec<tree, va_gc> *vbases;
vec<tree, va_gc> *nonvbases;
tree binfo;
- complete_type (type);
-
- if (!NON_UNION_CLASS_TYPE_P (type))
- return make_tree_vec (0);
-
/* First go through virtual base classes */
for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
vec_safe_iterate (vbases, i, &binfo); i++)
{
- vec<tree, va_gc> *vbase_bases;
- vbase_bases = calculate_bases_helper (BINFO_TYPE (binfo));
+ vec<tree, va_gc> *vbase_bases
+ = calculate_bases_helper (BINFO_TYPE (binfo));
vec_safe_splice (vector, vbase_bases);
release_tree_vector (vbase_bases);
}
if (vector->length () > 1)
{
/* Last element is entire class, so don't copy */
- bases_vec = make_tree_vec (vector->length() - 1);
+ bases_vec = make_tree_vec (vector->length () - 1);
for (i = 0; i < vector->length () - 1; ++i)
TREE_VEC_ELT (bases_vec, i) = (*vector)[i];
|| TREE_CODE (TREE_TYPE (expr)) == METHOD_TYPE
|| TREE_TYPE (expr) == unknown_type_node)
{
- if (INDIRECT_REF_P (expr))
- error ("second operand of %<offsetof%> is neither a single "
- "identifier nor a sequence of member accesses and "
- "array references");
- else
+ while (TREE_CODE (expr) == COMPONENT_REF
+ || TREE_CODE (expr) == COMPOUND_EXPR)
+ expr = TREE_OPERAND (expr, 1);
+
+ if (DECL_P (expr))
{
- if (TREE_CODE (expr) == COMPONENT_REF
- || TREE_CODE (expr) == COMPOUND_EXPR)
- expr = TREE_OPERAND (expr, 1);
error ("cannot apply %<offsetof%> to member function %qD", expr);
+ inform (DECL_SOURCE_LOCATION (expr), "declared here");
}
+ else
+ error ("cannot apply %<offsetof%> to member function");
+ return error_mark_node;
+ }
+ if (TREE_CODE (expr) == CONST_DECL)
+ {
+ error ("cannot apply %<offsetof%> to an enumerator %qD", expr);
return error_mark_node;
}
if (REFERENCE_REF_P (expr))
&& CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr)))
&& CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr)))
&& cp_unevaluated_operand == 0)
- pedwarn (loc, OPT_Winvalid_offsetof,
- "offsetof within non-standard-layout type %qT is undefined",
- TREE_TYPE (TREE_TYPE (object_ptr)));
+ warning_at (loc, OPT_Winvalid_offsetof, "offsetof within "
+ "non-standard-layout type %qT is conditionally-supported",
+ TREE_TYPE (TREE_TYPE (object_ptr)));
return fold_offsetof (expr);
}
= CALL_EXPR_OPERATOR_SYNTAX (aggr_init_expr);
CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (aggr_init_expr);
CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (aggr_init_expr);
- /* Preserve CILK_SPAWN flag. */
- EXPR_CILK_SPAWN (call_expr) = EXPR_CILK_SPAWN (aggr_init_expr);
if (style == ctor)
{
&& DECL_OMP_PRIVATIZED_MEMBER (decl))
{
tree f = DECL_VALUE_EXPR (decl);
- if (TREE_CODE (f) == INDIRECT_REF)
+ if (INDIRECT_REF_P (f))
f = TREE_OPERAND (f, 0);
if (TREE_CODE (f) == COMPONENT_REF)
{
return error_mark_node;
if (!omp_private_member_map && !shared)
omp_private_member_map = new hash_map<tree, tree>;
- if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
+ if (TYPE_REF_P (TREE_TYPE (t)))
{
- gcc_assert (TREE_CODE (m) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (m));
m = TREE_OPERAND (m, 0);
}
tree vb = NULL_TREE;
omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
return error_mark_node;
}
- else if (TREE_CODE (t) == PARM_DECL
+ else if (ort == C_ORT_OMP
+ && TREE_CODE (t) == PARM_DECL
&& DECL_ARTIFICIAL (t)
&& DECL_NAME (t) == this_identifier)
{
}
}
}
- else if (TREE_CODE (type) == POINTER_TYPE)
+ else if (TYPE_PTR_P (type))
{
if (length == NULL_TREE)
{
eltype = TREE_TYPE (eltype);
tree type = build_array_type (eltype, index_type);
tree ptype = build_pointer_type (eltype);
- if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
- && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
+ if (TYPE_REF_P (TREE_TYPE (t))
+ && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
t = convert_from_reference (t);
else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
t = build_fold_addr_expr (t);
ptrdiff_type_node, t);
tree ptr = OMP_CLAUSE_DECL (c2);
ptr = convert_from_reference (ptr);
- if (!POINTER_TYPE_P (TREE_TYPE (ptr)))
+ if (!INDIRECT_TYPE_P (TREE_TYPE (ptr)))
ptr = build_fold_addr_expr (ptr);
t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
ptrdiff_type_node, t,
OMP_CLAUSE_CHAIN (c) = c2;
ptr = OMP_CLAUSE_DECL (c2);
if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
- && TREE_CODE (TREE_TYPE (ptr)) == REFERENCE_TYPE
- && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (ptr))))
+ && TYPE_REF_P (TREE_TYPE (ptr))
+ && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (ptr))))
{
tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
OMP_CLAUSE_MAP);
case BIT_IOR_EXPR:
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- reduction_id = cp_operator_id (reduction_code);
+ reduction_id = ovl_op_identifier (false, reduction_code);
break;
case MIN_EXPR:
p = "min";
cp_check_omp_declare_reduction (tree udr)
{
tree type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (udr)));
- gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ gcc_assert (TYPE_REF_P (type));
type = TREE_TYPE (type);
int i;
location_t loc = DECL_SOURCE_LOCATION (udr);
"%<#pragma omp declare reduction%>", type);
return;
}
- else if (TREE_CODE (type) == REFERENCE_TYPE)
+ else if (TYPE_REF_P (type))
{
error_at (loc, "reference type %qT in %<#pragma omp declare reduction%>",
type);
tree type = TREE_TYPE (t);
if (TREE_CODE (t) == MEM_REF)
type = TREE_TYPE (type);
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
type = TREE_TYPE (type);
if (TREE_CODE (type) == ARRAY_TYPE)
{
return false;
}
else if (processing_template_decl)
- return false;
+ {
+ if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node)
+ return true;
+ return false;
+ }
tree id = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[0])))
cxx_mark_addressable (placeholder);
if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[1]))
- && TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
- != REFERENCE_TYPE)
+ && !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
cxx_mark_addressable (decl_placeholder ? decl_placeholder
: OMP_CLAUSE_DECL (c));
tree omp_out = placeholder;
if (processing_template_decl)
return;
- for (tree x = TYPE_METHODS (t); x; x = DECL_CHAIN (x))
+ for (tree x = TYPE_FIELDS (t); x; x = DECL_CHAIN (x))
{
if (TREE_CODE (TREE_TYPE (x)) != METHOD_TYPE)
continue;
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
&& integer_zerop (OMP_CLAUSE_DECL (c))
&& OMP_CLAUSE_LINEAR_STEP (c)
- && TREE_CODE (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c)))
- == POINTER_TYPE)
+ && TYPE_PTR_P (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c))))
{
tree s = OMP_CLAUSE_LINEAR_STEP (c);
s = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, s);
for (; t; t = TREE_CHAIN (t))
{
tree decl = TREE_VALUE (t);
- if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
+ if (TYPE_PTR_P (TREE_TYPE (decl)))
{
tree offset = TREE_PURPOSE (t);
- bool neg = wi::neg_p ((wide_int) offset);
+ bool neg = wi::neg_p (wi::to_wide (offset));
offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset);
decl = mark_rvalue_use (decl);
decl = convert_from_reference (decl);
if (TREE_CODE (t) == POINTER_PLUS_EXPR)
t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == ADDR_EXPR
- || TREE_CODE (t) == INDIRECT_REF)
+ || INDIRECT_REF_P (t))
t = TREE_OPERAND (t, 0);
}
tree n = omp_clause_decl_field (t);
tree type = TREE_TYPE (t);
if ((OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF
|| OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_UVAL)
- && TREE_CODE (type) != REFERENCE_TYPE)
+ && !TYPE_REF_P (type))
{
error ("linear clause with %qs modifier applied to "
"non-reference variable with %qT type",
remove = true;
break;
}
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
type = TREE_TYPE (type);
- if (ort == C_ORT_CILK)
- {
- if (!INTEGRAL_TYPE_P (type)
- && !SCALAR_FLOAT_TYPE_P (type)
- && TREE_CODE (type) != POINTER_TYPE)
- {
- error ("linear clause applied to non-integral, "
- "non-floating, non-pointer variable with %qT type",
- TREE_TYPE (t));
- remove = true;
- break;
- }
- }
- else if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF)
+ if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF)
{
if (!INTEGRAL_TYPE_P (type)
- && TREE_CODE (type) != POINTER_TYPE)
+ && !TYPE_PTR_P (type))
{
error ("linear clause applied to non-integral non-pointer"
" variable with %qT type", TREE_TYPE (t));
&& !INTEGRAL_TYPE_P (TREE_TYPE (t))
&& (ort != C_ORT_OMP_DECLARE_SIMD
|| TREE_CODE (t) != PARM_DECL
- || TREE_CODE (TREE_TYPE (t)) != REFERENCE_TYPE
+ || !TYPE_REF_P (TREE_TYPE (t))
|| !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (t)))))
{
error ("linear step expression must be integral");
}
t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
tree type = TREE_TYPE (OMP_CLAUSE_DECL (c));
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
type = TREE_TYPE (type);
if (OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF)
{
break;
}
}
- else if (TREE_CODE (type) == POINTER_TYPE
+ else if (TYPE_PTR_P (type)
/* Can't multiply the step yet if *this
is still incomplete type. */
&& (ort != C_ORT_OMP_DECLARE_SIMD
"positive");
t = integer_one_node;
}
+ t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
}
- t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
}
OMP_CLAUSE_OPERAND (c, 1) = t;
}
else if (t == error_mark_node)
remove = true;
else if (!type_dependent_expression_p (t)
- && (OMP_CLAUSE_SCHEDULE_KIND (c)
- != OMP_CLAUSE_SCHEDULE_CILKFOR)
&& !INTEGRAL_TYPE_P (TREE_TYPE (t)))
{
error ("schedule chunk size expression must be integral");
t = mark_rvalue_use (t);
if (!processing_template_decl)
{
- if (OMP_CLAUSE_SCHEDULE_KIND (c)
- == OMP_CLAUSE_SCHEDULE_CILKFOR)
- {
- t = convert_to_integer (long_integer_type_node, t);
- if (t == error_mark_node)
- {
- remove = true;
- break;
- }
- }
- else
- {
- t = maybe_constant_value (t);
- if (TREE_CODE (t) == INTEGER_CST
- && tree_int_cst_sgn (t) != 1)
- {
- warning_at (OMP_CLAUSE_LOCATION (c), 0,
- "chunk size value must be positive");
- t = integer_one_node;
- }
- }
+ t = maybe_constant_value (t);
+ if (TREE_CODE (t) == INTEGER_CST
+ && tree_int_cst_sgn (t) != 1)
+ {
+ warning_at (OMP_CLAUSE_LOCATION (c), 0,
+ "chunk size value must be positive");
+ t = integer_one_node;
+ }
t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
}
OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t;
remove = true;
}
else if (!type_dependent_expression_p (t)
- && TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE
+ && !TYPE_PTR_P (TREE_TYPE (t))
&& TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE
- && (TREE_CODE (TREE_TYPE (t)) != REFERENCE_TYPE
- || (!POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (t)))
+ && (!TYPE_REF_P (TREE_TYPE (t))
+ || (!INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t)))
&& (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
!= ARRAY_TYPE))))
{
break;
}
else if (!processing_template_decl
- && TREE_CODE (TREE_TYPE (t)) != REFERENCE_TYPE
+ && !TYPE_REF_P (TREE_TYPE (t))
&& (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
|| (OMP_CLAUSE_MAP_KIND (c)
!= GOMP_MAP_FIRSTPRIVATE_POINTER))
== GOMP_MAP_FIRSTPRIVATE_POINTER)))
&& t == OMP_CLAUSE_DECL (c)
&& !type_dependent_expression_p (t)
- && !cp_omp_mappable_type ((TREE_CODE (TREE_TYPE (t))
- == REFERENCE_TYPE)
+ && !cp_omp_mappable_type (TYPE_REF_P (TREE_TYPE (t))
? TREE_TYPE (TREE_TYPE (t))
: TREE_TYPE (t)))
{
else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR
&& !type_dependent_expression_p (t)
- && !POINTER_TYPE_P (TREE_TYPE (t)))
+ && !INDIRECT_TYPE_P (TREE_TYPE (t)))
{
error ("%qD is not a pointer variable", t);
remove = true;
handle_map_references:
if (!remove
&& !processing_template_decl
- && (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
- && TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c))) == REFERENCE_TYPE)
+ && ort != C_ORT_DECLARE_SIMD
+ && TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
{
t = OMP_CLAUSE_DECL (c);
if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
if (!type_dependent_expression_p (t))
{
tree type = TREE_TYPE (t);
- if (TREE_CODE (type) != POINTER_TYPE
+ if (!TYPE_PTR_P (type)
&& TREE_CODE (type) != ARRAY_TYPE
- && (TREE_CODE (type) != REFERENCE_TYPE
- || (TREE_CODE (TREE_TYPE (type)) != POINTER_TYPE
+ && (!TYPE_REF_P (type)
+ || (!TYPE_PTR_P (TREE_TYPE (type))
&& TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)))
{
error_at (OMP_CLAUSE_LOCATION (c),
case OMP_CLAUSE_THREADS:
case OMP_CLAUSE_SIMD:
case OMP_CLAUSE_DEFAULTMAP:
- case OMP_CLAUSE__CILK_FOR_COUNT_:
case OMP_CLAUSE_AUTO:
case OMP_CLAUSE_INDEPENDENT:
case OMP_CLAUSE_SEQ:
+ case OMP_CLAUSE_IF_PRESENT:
+ case OMP_CLAUSE_FINALIZE:
break;
case OMP_CLAUSE_TILE:
"integral constant");
remove = true;
}
+ t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
}
- t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
}
/* Update list item. */
t = require_complete_type (t);
if (t == error_mark_node)
remove = true;
- else if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
+ else if (TYPE_REF_P (TREE_TYPE (t))
&& !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t))
remove = true;
}
if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
share_name = "threadprivate";
- else switch (cxx_omp_predetermined_sharing (t))
+ else switch (cxx_omp_predetermined_sharing_1 (t))
{
case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
break;
if ((need_complete_type
|| need_copy_assignment
|| OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
- && TREE_CODE (inner_type) == REFERENCE_TYPE)
+ && TYPE_REF_P (inner_type))
inner_type = TREE_TYPE (inner_type);
while (TREE_CODE (inner_type) == ARRAY_TYPE)
inner_type = TREE_TYPE (inner_type);
{
/* Allocate a LANG_SPECIFIC structure for V, if needed. */
if (DECL_LANG_SPECIFIC (v) == NULL)
- {
- retrofit_lang_decl (v);
-
- /* Make sure that DECL_DISCRIMINATOR_P continues to be true
- after the allocation of the lang_decl structure. */
- if (DECL_DISCRIMINATOR_P (v))
- DECL_LANG_SPECIFIC (v)->u.base.u2sel = 1;
- }
+ retrofit_lang_decl (v);
if (! CP_DECL_THREAD_LOCAL_P (v))
{
handle_omp_for_class_iterator (int i, location_t locus, enum tree_code code,
tree declv, tree orig_declv, tree initv,
tree condv, tree incrv, tree *body,
- tree *pre_body, tree &clauses, tree *lastp,
+ tree *pre_body, tree &clauses,
int collapse, int ordered)
{
tree diff, iter_init, iter_incr = NULL, last;
TREE_VEC_ELT (initv, i) = init;
TREE_VEC_ELT (condv, i) = cond;
TREE_VEC_ELT (incrv, i) = incr;
- *lastp = last;
+ TREE_VEC_ELT (orig_declv, i)
+ = tree_cons (TREE_VEC_ELT (orig_declv, i), last, NULL_TREE);
return false;
}
tree body, tree pre_body, vec<tree> *orig_inits, tree clauses)
{
tree omp_for = NULL, orig_incr = NULL;
- tree decl = NULL, init, cond, incr, orig_decl = NULL_TREE, block = NULL_TREE;
- tree last = NULL_TREE;
+ tree decl = NULL, init, cond, incr;
location_t elocus;
int i;
int collapse = 1;
"iteration variable %qE", decl);
return NULL;
}
- if (code == CILK_FOR && i == 0)
- orig_decl = decl;
if (handle_omp_for_class_iterator (i, locus, code, declv, orig_declv,
initv, condv, incrv, &body,
- &pre_body, clauses, &last,
+ &pre_body, clauses,
collapse, ordered))
return NULL;
continue;
if (IS_EMPTY_STMT (pre_body))
pre_body = NULL;
- if (code == CILK_FOR && !processing_template_decl)
- block = push_stmt_list ();
-
omp_for = c_finish_omp_for (locus, code, declv, orig_declv, initv, condv,
incrv, body, pre_body);
if (omp_for == NULL)
{
- if (block)
- pop_stmt_list (block);
return NULL;
}
case POSTINCREMENT_EXPR:
/* c_omp_for_incr_canonicalize_ptr() should have been
called to massage things appropriately. */
- gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
+ gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
OMP_CLAUSE_LINEAR_STEP (c) = build_int_cst (TREE_TYPE (decl), 1);
break;
case PREDECREMENT_EXPR:
case POSTDECREMENT_EXPR:
/* c_omp_for_incr_canonicalize_ptr() should have been
called to massage things appropriately. */
- gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
+ gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
OMP_CLAUSE_LINEAR_STEP (c)
= build_int_cst (TREE_TYPE (decl), -1);
break;
gcc_unreachable ();
}
stept = TREE_TYPE (decl);
- if (POINTER_TYPE_P (stept))
+ if (INDIRECT_TYPE_P (stept))
stept = sizetype;
step = fold_convert (stept, step);
if (TREE_CODE (incr) == MINUS_EXPR)
}
}
- if (block)
- {
- tree omp_par = make_node (OMP_PARALLEL);
- TREE_TYPE (omp_par) = void_type_node;
- OMP_PARALLEL_CLAUSES (omp_par) = NULL_TREE;
- tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
- TREE_SIDE_EFFECTS (bind) = 1;
- BIND_EXPR_BODY (bind) = pop_stmt_list (block);
- OMP_PARALLEL_BODY (omp_par) = bind;
- if (OMP_FOR_PRE_BODY (omp_for))
- {
- add_stmt (OMP_FOR_PRE_BODY (omp_for));
- OMP_FOR_PRE_BODY (omp_for) = NULL_TREE;
- }
- init = TREE_VEC_ELT (OMP_FOR_INIT (omp_for), 0);
- decl = TREE_OPERAND (init, 0);
- cond = TREE_VEC_ELT (OMP_FOR_COND (omp_for), 0);
- incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), 0);
- tree t = TREE_OPERAND (cond, 1), c, clauses, *pc;
- clauses = OMP_FOR_CLAUSES (omp_for);
- OMP_FOR_CLAUSES (omp_for) = NULL_TREE;
- for (pc = &clauses; *pc; )
- if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_SCHEDULE)
- {
- gcc_assert (OMP_FOR_CLAUSES (omp_for) == NULL_TREE);
- OMP_FOR_CLAUSES (omp_for) = *pc;
- *pc = OMP_CLAUSE_CHAIN (*pc);
- OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (omp_for)) = NULL_TREE;
- }
- else
- {
- gcc_assert (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_FIRSTPRIVATE);
- pc = &OMP_CLAUSE_CHAIN (*pc);
- }
- if (TREE_CODE (t) != INTEGER_CST)
- {
- TREE_OPERAND (cond, 1) = get_temp_regvar (TREE_TYPE (t), t);
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = TREE_OPERAND (cond, 1);
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- if (TREE_CODE (incr) == MODIFY_EXPR)
- {
- t = TREE_OPERAND (TREE_OPERAND (incr, 1), 1);
- if (TREE_CODE (t) != INTEGER_CST)
- {
- TREE_OPERAND (TREE_OPERAND (incr, 1), 1)
- = get_temp_regvar (TREE_TYPE (t), t);
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = TREE_OPERAND (TREE_OPERAND (incr, 1), 1);
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- }
- t = TREE_OPERAND (init, 1);
- if (TREE_CODE (t) != INTEGER_CST)
- {
- TREE_OPERAND (init, 1) = get_temp_regvar (TREE_TYPE (t), t);
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = TREE_OPERAND (init, 1);
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- if (orig_decl && orig_decl != decl)
- {
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = orig_decl;
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- if (last)
- {
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = last;
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- c = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
- OMP_CLAUSE_DECL (c) = decl;
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- c = build_omp_clause (input_location, OMP_CLAUSE__CILK_FOR_COUNT_);
- OMP_CLAUSE_OPERAND (c, 0)
- = cilk_for_number_of_iterations (omp_for);
- OMP_CLAUSE_CHAIN (c) = clauses;
- OMP_PARALLEL_CLAUSES (omp_par) = finish_omp_clauses (c, C_ORT_CILK);
- add_stmt (omp_par);
- return omp_par;
- }
- else if (code == CILK_FOR && processing_template_decl)
- {
- tree c, clauses = OMP_FOR_CLAUSES (omp_for);
- if (orig_decl && orig_decl != decl)
- {
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = orig_decl;
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- if (last)
- {
- c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
- OMP_CLAUSE_DECL (c) = last;
- OMP_CLAUSE_CHAIN (c) = clauses;
- clauses = c;
- }
- OMP_FOR_CLAUSES (omp_for) = clauses;
- }
return omp_for;
}
finish_static_assert (tree condition, tree message, location_t location,
bool member_p)
{
+ tsubst_flags_t complain = tf_warning_or_error;
+
if (message == NULL_TREE
|| message == error_mark_node
|| condition == NULL_TREE
if (check_for_bare_parameter_packs (condition))
condition = error_mark_node;
- if (type_dependent_expression_p (condition)
- || value_dependent_expression_p (condition))
+ if (instantiation_dependent_expression_p (condition))
{
/* We're in a template; build a STATIC_ASSERT and put it in
the right place. */
}
/* Fold the expression and convert it to a boolean value. */
- condition = instantiate_non_dependent_expr (condition);
- condition = cp_convert (boolean_type_node, condition, tf_warning_or_error);
- condition = maybe_constant_value (condition);
+ condition = perform_implicit_conversion_flags (boolean_type_node, condition,
+ complain, LOOKUP_NORMAL);
+ condition = fold_non_dependent_expr (condition, complain);
if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition))
/* Do nothing; the condition is satisfied. */
else if (condition && condition != error_mark_node)
{
error ("non-constant condition for static assertion");
- if (require_potential_rvalue_constant_expression (condition))
+ if (require_rvalue_constant_expression (condition))
cxx_constant_value (condition);
}
input_location = saved_loc;
if (identifier_p (expr))
expr = lookup_name (expr);
- if (INDIRECT_REF_P (expr))
+ if (INDIRECT_REF_P (expr)
+ || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
/* This can happen when the expression is, e.g., "a.b". Just
look at the underlying operand. */
expr = TREE_OPERAND (expr, 0);
decltype(e) is defined as T&; if an xvalue, T&&; otherwise, T. */
cp_lvalue_kind clk = lvalue_kind (expr);
type = unlowered_expr_type (expr);
- gcc_assert (TREE_CODE (type) != REFERENCE_TYPE);
+ gcc_assert (!TYPE_REF_P (type));
/* For vector types, pick a non-opaque variant. */
if (VECTOR_TYPE_P (type))
}
/* Called from trait_expr_value to evaluate either __has_nothrow_assign or
- __has_nothrow_copy, depending on assign_p. */
+ __has_nothrow_copy, depending on assign_p. Returns true iff all
+ the copy {ctor,assign} fns are nothrow. */
static bool
classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p)
{
- tree fns;
+ tree fns = NULL_TREE;
- if (assign_p)
- {
- int ix;
- ix = lookup_fnfields_1 (type, cp_assignment_operator_id (NOP_EXPR));
- if (ix < 0)
- return false;
- fns = (*CLASSTYPE_METHOD_VEC (type))[ix];
- }
- else if (TYPE_HAS_COPY_CTOR (type))
- {
- /* If construction of the copy constructor was postponed, create
- it now. */
- if (CLASSTYPE_LAZY_COPY_CTOR (type))
- lazily_declare_fn (sfk_copy_constructor, type);
- if (CLASSTYPE_LAZY_MOVE_CTOR (type))
- lazily_declare_fn (sfk_move_constructor, type);
- fns = CLASSTYPE_CONSTRUCTORS (type);
- }
- else
- return false;
+ if (assign_p || TYPE_HAS_COPY_CTOR (type))
+ fns = get_class_binding (type, assign_p ? assign_op_identifier
+ : ctor_identifier);
+ bool saw_copy = false;
for (ovl_iterator iter (fns); iter; ++iter)
{
tree fn = *iter;
-
- if (assign_p)
+
+ if (copy_fn_p (fn) > 0)
{
- if (copy_fn_p (fn) == 0)
- continue;
+ saw_copy = true;
+ maybe_instantiate_noexcept (fn);
+ if (!TYPE_NOTHROW_P (TREE_TYPE (fn)))
+ return false;
}
- else if (copy_fn_p (fn) <= 0)
- continue;
-
- maybe_instantiate_noexcept (fn);
- if (!TYPE_NOTHROW_P (TREE_TYPE (fn)))
- return false;
}
- return true;
+ return saw_copy;
}
/* Actually evaluates the trait. */
if (return_type == error_mark_node)
return;
- if (LAMBDA_FUNCTION_P (fco))
- {
- tree lambda = CLASSTYPE_LAMBDA_EXPR (current_class_type);
- LAMBDA_EXPR_RETURN_TYPE (lambda) = return_type;
- }
-
if (DECL_CONV_FN_P (fco))
- DECL_NAME (fco) = mangle_conv_op_name_for_type (return_type);
+ DECL_NAME (fco) = make_conv_op_name (return_type);
TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco));
case CPLD_COPY:
type = TREE_TYPE (decl);
- if (TREE_CODE (type) == REFERENCE_TYPE
+ if (TYPE_REF_P (type)
&& TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
type = TREE_TYPE (type);
break;
case CPLD_REFERENCE:
type = TREE_TYPE (decl);
- if (TREE_CODE (type) != REFERENCE_TYPE)
+ if (!TYPE_REF_P (type))
type = build_reference_type (TREE_TYPE (decl));
break;
gcc_unreachable ();
}
- if (TREE_CODE (type) != REFERENCE_TYPE)
+ if (!TYPE_REF_P (type))
{
if (!LAMBDA_EXPR_MUTABLE_P (lam))
type = cp_build_qualified_type (type, (cp_type_quals (type)
if (error_operand_p (arg))
return error_mark_node;
if (!type_dependent_expression_p (arg)
- && TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
+ && !TYPE_PTR_P (TREE_TYPE (arg)))
{
error_at (loc, "non-pointer argument to %<__builtin_launder%>");
return error_mark_node;