building RTL. These routines are used both during actual parsing
and during the instantiation of template functions.
- Copyright (C) 1998-2019 Free Software Foundation, Inc.
+ Copyright (C) 1998-2020 Free Software Foundation, Inc.
Written by Mark Mitchell (mmitchell@usa.net) based on code found
formerly in parse.y and pt.c.
return NULL_TREE;
/* Wait until we instantiate templates before doing conversion. */
- if (processing_template_decl)
+ if (type_dependent_expression_p (cond))
return cond;
- if (warn_sequence_point)
+ if (warn_sequence_point && !processing_template_decl)
verify_sequence_points (cond);
/* Do the conversion. */
if (TREE_CODE (cond) == MODIFY_EXPR
&& !TREE_NO_WARNING (cond)
- && warn_parentheses)
- {
- warning_at (cp_expr_loc_or_loc (cond, input_location), OPT_Wparentheses,
- "suggest parentheses around assignment used as truth value");
- TREE_NO_WARNING (cond) = 1;
- }
+ && warn_parentheses
+ && warning_at (cp_expr_loc_or_input_loc (cond),
+ OPT_Wparentheses, "suggest parentheses around "
+ "assignment used as truth value"))
+ TREE_NO_WARNING (cond) = 1;
return condition_conversion (cond);
}
expr = convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error);
}
else if (!type_dependent_expression_p (expr))
- convert_to_void (build_non_dependent_expr (expr), ICV_STATEMENT,
+ convert_to_void (build_non_dependent_expr (expr), ICV_STATEMENT,
tf_warning_or_error);
if (check_for_bare_parameter_packs (expr))
return r;
}
+/* Returns true if FN, a CALL_EXPR, is a call to
+   std::is_constant_evaluated or __builtin_is_constant_evaluated.  */
+
+static bool
+is_std_constant_evaluated_p (tree fn)
+{
+  /* std::is_constant_evaluated takes no arguments.  */
+  if (call_expr_nargs (fn) != 0)
+    return false;
+
+  tree fndecl = cp_get_callee_fndecl_nofold (fn);
+  /* The callee may not be resolvable to a FUNCTION_DECL (e.g. a call
+     through a function pointer); bail out instead of crashing in
+     fndecl_built_in_p below.  */
+  if (fndecl == NULL_TREE)
+    return false;
+
+  if (fndecl_built_in_p (fndecl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
+			 BUILT_IN_FRONTEND))
+    return true;
+
+  /* Not the builtin; accept a function named is_constant_evaluated
+     declared directly in namespace std.  */
+  if (!decl_in_std_namespace_p (fndecl))
+    return false;
+
+  tree name = DECL_NAME (fndecl);
+  return name && id_equal (name, "is_constant_evaluated");
+}
+
/* Process the COND of an if-statement, which may be given by
IF_STMT. */
converted to bool. */
&& TYPE_MAIN_VARIANT (TREE_TYPE (cond)) == boolean_type_node)
{
+ /* if constexpr (std::is_constant_evaluated()) is always true,
+ so give the user a clue. */
+ if (warn_tautological_compare)
+ {
+ tree t = cond;
+ if (TREE_CODE (t) == CLEANUP_POINT_EXPR)
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) == CALL_EXPR
+ && is_std_constant_evaluated_p (t))
+ warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
+ "%qs always evaluates to true in %<if constexpr%>",
+ "std::is_constant_evaluated");
+ }
+
cond = instantiate_non_dependent_expr (cond);
cond = cxx_constant_value (cond, NULL_TREE);
}
ELSE_CLAUSE (if_stmt) = pop_stmt_list (ELSE_CLAUSE (if_stmt));
}
+/* Callback for cp_walk_tree to mark all {VAR,PARM}_DECLs in a tree as
+   read.  Used on the branches of an if-constexpr so that variables
+   referenced in a discarded branch don't trigger -Wunused warnings.  */
+
+static tree
+maybe_mark_exp_read_r (tree *tp, int *, void *)
+{
+  tree t = *tp;
+  if (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
+    mark_exp_read (t);
+  /* Returning NULL_TREE keeps the walk going over the whole tree.  */
+  return NULL_TREE;
+}
+
/* Finish an if-statement. */
void
{
tree scope = IF_SCOPE (if_stmt);
IF_SCOPE (if_stmt) = NULL;
+ if (IF_STMT_CONSTEXPR_P (if_stmt))
+ {
+ /* Prevent various -Wunused warnings. We might not instantiate
+ either of these branches, so we would not mark the variables
+ used in that branch as read. */
+ cp_walk_tree_without_duplicates (&THEN_CLAUSE (if_stmt),
+ maybe_mark_exp_read_r, NULL);
+ cp_walk_tree_without_duplicates (&ELSE_CLAUSE (if_stmt),
+ maybe_mark_exp_read_r, NULL);
+ }
add_stmt (do_poplevel (scope));
}
{
if (warn_sequence_point)
verify_sequence_points (expr);
-
+
if (DECL_DESTRUCTOR_P (current_function_decl)
|| (DECL_CONSTRUCTOR_P (current_function_decl)
&& targetm.cxx.cdtor_returns_this ()))
if (!processing_template_decl)
{
/* Convert the condition to an integer or enumeration type. */
+ tree orig_cond = cond;
cond = build_expr_type_conversion (WANT_INT | WANT_ENUM, cond, true);
if (cond == NULL_TREE)
{
- error ("switch quantity not an integer");
+ error_at (cp_expr_loc_or_input_loc (orig_cond),
+ "switch quantity not an integer");
cond = error_mark_node;
}
/* We want unlowered type here to handle enum bit-fields. */
if (CLASS_TYPE_P (orig_type))
{
if (TYPE_POLYMORPHIC_P (orig_type))
- warning (OPT_Wcatch_value_,
- "catching polymorphic type %q#T by value", orig_type);
+ warning_at (DECL_SOURCE_LOCATION (decl),
+ OPT_Wcatch_value_,
+ "catching polymorphic type %q#T by value",
+ orig_type);
else if (warn_catch_value > 1)
- warning (OPT_Wcatch_value_,
- "catching type %q#T by value", orig_type);
+ warning_at (DECL_SOURCE_LOCATION (decl),
+ OPT_Wcatch_value_,
+ "catching type %q#T by value", orig_type);
}
else if (warn_catch_value > 2)
- warning (OPT_Wcatch_value_,
- "catching non-reference type %q#T", orig_type);
+ warning_at (DECL_SOURCE_LOCATION (decl),
+ OPT_Wcatch_value_,
+ "catching non-reference type %q#T", orig_type);
}
}
HANDLER_TYPE (handler) = type;
considered volatile, and whether it is asm inline. */
tree
-finish_asm_stmt (int volatile_p, tree string, tree output_operands,
- tree input_operands, tree clobbers, tree labels, bool inline_p)
+finish_asm_stmt (location_t loc, int volatile_p, tree string,
+ tree output_operands, tree input_operands, tree clobbers,
+ tree labels, bool inline_p)
{
tree r;
tree t;
|| CP_TYPE_CONST_P (TREE_TYPE (operand))
/* Functions are not modifiable, even though they are
lvalues. */
- || TREE_CODE (TREE_TYPE (operand)) == FUNCTION_TYPE
- || TREE_CODE (TREE_TYPE (operand)) == METHOD_TYPE
+ || FUNC_OR_METHOD_TYPE_P (TREE_TYPE (operand))
/* If it's an aggregate and any field is const, then it is
effectively const. */
|| (CLASS_TYPE_P (TREE_TYPE (operand))
&& C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
- cxx_readonly_error (input_location, operand, lv_asm);
+ cxx_readonly_error (loc, operand, lv_asm);
tree *op = &operand;
while (TREE_CODE (*op) == COMPOUND_EXPR)
{
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
bool constraint_parsed
- = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
+ = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
oconstraints, &allows_mem, &allows_reg);
/* If the operand is going to end up in memory, don't call
decay_conversion. */
resolve the overloading. */
if (TREE_TYPE (operand) == unknown_type_node)
{
- error ("type of asm operand %qE could not be determined",
- TREE_VALUE (t));
+ error_at (loc,
+ "type of %<asm%> operand %qE could not be determined",
+ TREE_VALUE (t));
operand = error_mark_node;
}
}
}
- r = build_stmt (input_location, ASM_EXPR, string,
+ r = build_stmt (loc, ASM_EXPR, string,
output_operands, input_operands,
clobbers, labels);
ASM_VOLATILE_P (r) = volatile_p || noutputs == 0;
{
if (!at_function_scope_p ())
{
- error ("__label__ declarations are only allowed in function scopes");
+ error ("%<__label__%> declarations are only allowed in function scopes");
return;
}
/* Obfuscate EXPR if it looks like an id-expression or member access so
that the call to finish_decltype in do_auto_deduction will give the
- right result. */
+ right result. If EVEN_UNEVAL, do this even in unevaluated context. */
tree
-force_paren_expr (tree expr)
+force_paren_expr (tree expr, bool even_uneval)
{
/* This is only needed for decltype(auto) in C++14. */
if (cxx_dialect < cxx14)
/* If we're in unevaluated context, we can't be deducing a
return/initializer type, so we don't need to mess with this. */
- if (cp_unevaluated_operand)
+ if (cp_unevaluated_operand && !even_uneval)
return expr;
if (!DECL_P (tree_strip_any_location_wrapper (expr))
&& TREE_CODE (expr) != SCOPE_REF)
return expr;
+ location_t loc = cp_expr_location (expr);
+
if (TREE_CODE (expr) == COMPONENT_REF
|| TREE_CODE (expr) == SCOPE_REF)
REF_PARENTHESIZED_P (expr) = true;
else if (processing_template_decl)
- expr = build1 (PAREN_EXPR, TREE_TYPE (expr), expr);
+ expr = build1_loc (loc, PAREN_EXPR, TREE_TYPE (expr), expr);
else
{
- expr = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), expr);
+ expr = build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (expr), expr);
REF_PARENTHESIZED_P (expr) = true;
}
{
tree scope = qualifying_scope;
if (scope == NULL_TREE)
- scope = context_for_name_lookup (decl);
+ {
+ scope = context_for_name_lookup (decl);
+ if (!TYPE_P (scope))
+ {
+ /* Can happen during error recovery (c++/85014). */
+ gcc_assert (seen_error ());
+ return error_mark_node;
+ }
+ }
object = maybe_dummy_object (scope, NULL);
}
if (current_class_ptr)
TREE_USED (current_class_ptr) = 1;
- if (processing_template_decl && !qualifying_scope)
+ if (processing_template_decl)
{
tree type = TREE_TYPE (decl);
type = cp_build_qualified_type (type, quals);
}
- ret = (convert_from_reference
- (build_min (COMPONENT_REF, type, object, decl, NULL_TREE)));
+ if (qualifying_scope)
+ /* Wrap this in a SCOPE_REF for now. */
+ ret = build_qualified_name (type, qualifying_scope, decl,
+ /*template_p=*/false);
+ else
+ ret = (convert_from_reference
+ (build_min (COMPONENT_REF, type, object, decl, NULL_TREE)));
}
/* If PROCESSING_TEMPLATE_DECL is nonzero here, then
- QUALIFYING_SCOPE is also non-null. Wrap this in a SCOPE_REF
- for now. */
- else if (processing_template_decl)
- ret = build_qualified_name (TREE_TYPE (decl),
- qualifying_scope,
- decl,
- /*template_p=*/false);
+ QUALIFYING_SCOPE is also non-null. */
else
{
tree access_type = TREE_TYPE (object);
its bases. */
qualifying_type = currently_open_derived_class (scope);
- if (qualifying_type
+ if (qualifying_type
/* It is possible for qualifying type to be a TEMPLATE_TYPE_PARM
or similar in a default argument value. */
&& CLASS_TYPE_P (qualifying_type)
expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false,
complain);
}
+ else if (!template_p
+ && TREE_CODE (expr) == TEMPLATE_DECL
+ && !DECL_FUNCTION_TEMPLATE_P (expr))
+ {
+ if (complain & tf_error)
+ error ("%qE missing template arguments", expr);
+ return error_mark_node;
+ }
else
{
/* In a template, return a SCOPE_REF for most qualified-ids
expr = build_qualified_name (TREE_TYPE (expr),
qualifying_class, expr,
template_p);
+ else if (tree wrap = maybe_get_tls_wrapper_call (expr))
+ expr = wrap;
expr = convert_from_reference (expr);
}
|| any_type_dependent_arguments_p (*args))
{
result = build_min_nt_call_vec (orig_fn, *args);
- SET_EXPR_LOCATION (result, cp_expr_loc_or_loc (fn, input_location));
+ SET_EXPR_LOCATION (result, cp_expr_loc_or_input_loc (fn));
KOENIG_LOOKUP_P (result) = koenig_p;
if (is_overloaded_fn (fn))
fn = get_fns (fn);
/*fn_p=*/NULL,
complain);
}
+ else if (concept_check_p (fn))
+ {
+ /* FN is actually a template-id referring to a concept definition. */
+ tree id = unpack_concept_check (fn);
+ tree tmpl = TREE_OPERAND (id, 0);
+ tree args = TREE_OPERAND (id, 1);
+
+ if (!function_concept_p (tmpl))
+ {
+ error_at (EXPR_LOC_OR_LOC (fn, input_location),
+ "cannot call a concept as a function");
+ return error_mark_node;
+ }
+
+ /* Ensure the result is wrapped as a call expression. */
+ result = build_concept_check (tmpl, args, tf_warning_or_error);
+ }
else if (is_overloaded_fn (fn))
{
/* If the function is an overloaded builtin, resolve it. */
return error_mark_node;
}
- if (instantiation_dependent_expression_p (compound_literal)
- || dependent_type_p (type))
+ /* Used to hold a copy of the compound literal in a template. */
+ tree orig_cl = NULL_TREE;
+
+ if (processing_template_decl)
{
- TREE_TYPE (compound_literal) = type;
+ const bool dependent_p
+ = (instantiation_dependent_expression_p (compound_literal)
+ || dependent_type_p (type));
+ if (dependent_p)
+ /* We're about to return, no need to copy. */
+ orig_cl = compound_literal;
+ else
+ /* We're going to need a copy. */
+ orig_cl = unshare_constructor (compound_literal);
+ TREE_TYPE (orig_cl) = type;
/* Mark the expression as a compound literal. */
- TREE_HAS_CONSTRUCTOR (compound_literal) = 1;
+ TREE_HAS_CONSTRUCTOR (orig_cl) = 1;
/* And as instantiation-dependent. */
- CONSTRUCTOR_IS_DEPENDENT (compound_literal) = true;
+ CONSTRUCTOR_IS_DEPENDENT (orig_cl) = dependent_p;
if (fcl_context == fcl_c99)
- CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1;
- return compound_literal;
+ CONSTRUCTOR_C99_COMPOUND_LITERAL (orig_cl) = 1;
+ /* If the compound literal is dependent, we're done for now. */
+ if (dependent_p)
+ return orig_cl;
+ /* Otherwise, do go on to e.g. check narrowing. */
}
type = complete_type (type);
that it came from T{} rather than T({}). */
CONSTRUCTOR_IS_DIRECT_INIT (compound_literal) = 1;
compound_literal = build_tree_list (NULL_TREE, compound_literal);
- return build_functional_cast (type, compound_literal, complain);
+ return build_functional_cast (input_location, type,
+ compound_literal, complain);
}
if (TREE_CODE (type) == ARRAY_TYPE
if (type == error_mark_node)
return error_mark_node;
}
- compound_literal = digest_init_flags (type, compound_literal, LOOKUP_NORMAL,
+ compound_literal = digest_init_flags (type, compound_literal,
+ LOOKUP_NORMAL | LOOKUP_NO_NARROWING,
complain);
+ if (compound_literal == error_mark_node)
+ return error_mark_node;
+
+ /* If we're in a template, return the original compound literal. */
+ if (orig_cl)
+ {
+ if (!VECTOR_TYPE_P (type))
+ return get_target_expr_sfinae (orig_cl, complain);
+ else
+ return orig_cl;
+ }
+
if (TREE_CODE (compound_literal) == CONSTRUCTOR)
{
TREE_HAS_CONSTRUCTOR (compound_literal) = true;
/* Do file scope __FUNCTION__ et al. */
finish_fname_decls ();
+
+ if (scope_chain->omp_declare_target_attribute)
+ {
+ if (!errorcount)
+ error ("%<#pragma omp declare target%> without corresponding "
+ "%<#pragma omp end declare target%>");
+ scope_chain->omp_declare_target_attribute = 0;
+ }
}
/* Finish a template type parameter, specified as AGGR IDENTIFIER.
DECL_TEMPLATE_RESULT (tmpl) = decl;
DECL_ARTIFICIAL (decl) = 1;
- // Associate the constraints with the underlying declaration,
- // not the template.
+ /* Associate the constraints with the underlying declaration,
+ not the template. */
tree reqs = TEMPLATE_PARMS_CONSTRAINTS (current_template_parms);
tree constr = build_constraints (reqs, NULL_TREE);
set_constraints (decl, constr);
gcc_assert (DECL_TEMPLATE_PARMS (tmpl));
- check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl),
+ check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl),
/*is_primary=*/true, /*is_partial=*/false,
/*is_friend=*/0);
if (error_operand_p (t) || error_operand_p (TYPE_MAIN_DECL (t)))
return error_mark_node;
+ if (processing_template_parmlist && !LAMBDA_TYPE_P (t))
+ {
+ error ("definition of %q#T inside template parameter list", t);
+ return error_mark_node;
+ }
+
/* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733
are passed the same as decimal scalar types. */
if (TREE_CODE (t) == RECORD_TYPE
tree scope;
tree cl;
- if (BASELINK_P (fns)
+ if (BASELINK_P (fns)
|| error_operand_p (fns))
return fns;
/* A lambda in an NSDMI (c++/64496). */
break;
- if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
- == CPLD_NONE)
+ if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE)
break;
- lambda_stack = tree_cons (NULL_TREE,
- lambda_expr,
- lambda_stack);
+ lambda_stack = tree_cons (NULL_TREE, lambda_expr, lambda_stack);
- containing_function
- = decl_function_context (containing_function);
+ containing_function = decl_function_context (containing_function);
}
- /* In a lambda within a template, wait until instantiation
- time to implicitly capture a dependent type. */
+ /* In a lambda within a template, wait until instantiation time to implicitly
+ capture a parameter pack. We want to wait because we don't know if we're
+ capturing the whole pack or a single element, and it's OK to wait because
+ find_parameter_packs_r walks into the lambda body. */
if (context == containing_function
- && dependent_type_p (TREE_TYPE (decl)))
+ && DECL_PACK_P (decl))
return decl;
- if (lambda_expr && VAR_P (decl)
- && DECL_ANON_UNION_VAR_P (decl))
+ if (lambda_expr && VAR_P (decl) && DECL_ANON_UNION_VAR_P (decl))
{
if (complain & tf_error)
error ("cannot capture member %qD of anonymous union", decl);
/* Do lambda capture when processing the id-expression, not when
odr-using a variable. */
if (!odr_use && context == containing_function)
- {
- decl = add_default_capture (lambda_stack,
- /*id=*/DECL_NAME (decl),
- initializer);
- }
+ decl = add_default_capture (lambda_stack,
+ /*id=*/DECL_NAME (decl), initializer);
/* Only an odr-use of an outer automatic variable causes an
error, and a constant variable can decay to a prvalue
constant without odr-use. So don't complain yet. */
{
error ("%qD is not captured", decl);
tree closure = LAMBDA_EXPR_CLOSURE (lambda_expr);
- if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
- == CPLD_NONE)
+ if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE)
inform (location_of (closure),
"the lambda has no capture-default");
else if (TYPE_CLASS_SCOPE_P (closure))
wrong, so just return the identifier. */
return id_expression;
- if (TREE_CODE (decl) == NAMESPACE_DECL)
- {
- error ("use of namespace %qD as expression", decl);
- return error_mark_node;
- }
- else if (DECL_CLASS_TEMPLATE_P (decl))
+ if (DECL_CLASS_TEMPLATE_P (decl))
{
error ("use of class template %qT as expression", decl);
return error_mark_node;
}
- else if (TREE_CODE (decl) == TREE_LIST)
+
+ if (TREE_CODE (decl) == TREE_LIST)
{
/* Ambiguous reference to base members. */
error ("request for member %qD is ambiguous in "
if (! error_operand_p (decl)
&& !dependent_p
&& integral_constant_expression_p
- && ! decl_constant_var_p (decl)
+ && !decl_constant_var_p (decl)
&& TREE_CODE (decl) != CONST_DECL
- && ! builtin_valid_in_constant_expr_p (decl))
+ && !builtin_valid_in_constant_expr_p (decl)
+ && !concept_check_p (decl))
{
if (!allow_non_integral_constant_expression_p)
{
*non_integral_constant_expression_p = true;
}
- tree wrap;
- if (VAR_P (decl)
- && !cp_unevaluated_operand
- && !processing_template_decl
- && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
- && CP_DECL_THREAD_LOCAL_P (decl)
- && (wrap = get_tls_wrapper_fn (decl)))
- {
- /* Replace an evaluated use of the thread_local variable with
- a call to its wrapper. */
- decl = build_cxx_call (wrap, 0, NULL, tf_warning_or_error);
- }
+ if (tree wrap = maybe_get_tls_wrapper_call (decl))
+ /* Replace an evaluated use of the thread_local variable with
+ a call to its wrapper. */
+ decl = wrap;
else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
&& !dependent_p
- && variable_template_p (TREE_OPERAND (decl, 0)))
+ && variable_template_p (TREE_OPERAND (decl, 0))
+ && !concept_check_p (decl))
{
decl = finish_template_variable (decl);
mark_used (decl);
decl = convert_from_reference (decl);
}
+ else if (concept_check_p (decl))
+ {
+	  /* Nothing more to do.  All of the analysis for concept checks
+	     is done by build_concept_id, called from the parser.  */
+ }
else if (scope)
{
if (TREE_CODE (decl) == SCOPE_REF)
if (TREE_CODE (decl) == FUNCTION_DECL)
mark_used (decl);
+ cp_warn_deprecated_use_scopes (scope);
+
if (TYPE_P (scope))
decl = finish_qualified_id_expr (scope,
decl,
}
else if (is_overloaded_fn (decl))
{
+ /* We only need to look at the first function,
+ because all the fns share the attribute we're
+ concerned with (all member fns or all non-members). */
tree first_fn = get_first_fn (decl);
-
- if (TREE_CODE (first_fn) == TEMPLATE_DECL)
- first_fn = DECL_TEMPLATE_RESULT (first_fn);
+ first_fn = STRIP_TEMPLATE (first_fn);
/* [basic.def.odr]: "A function whose name appears as a
potentially-evaluated expression is odr-used if it is the unique
if (DECL_P (decl) && DECL_NONLOCAL (decl)
&& DECL_CLASS_SCOPE_P (decl))
{
- tree context = context_for_name_lookup (decl);
+ tree context = context_for_name_lookup (decl);
if (context != current_class_type)
{
tree path = currently_open_derived_class (context);
|| !NON_UNION_CLASS_TYPE_P (type))
return make_tree_vec (0);
- vec<tree, va_gc> *vector = make_tree_vector ();
+ releasing_vec vector;
vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
tree binfo;
unsigned i;
for (i = 0; i < vector->length (); ++i)
TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);
- release_tree_vector (vector);
return bases_vec;
}
|| !NON_UNION_CLASS_TYPE_P (type))
return make_tree_vec (0);
- vec<tree, va_gc> *vector = make_tree_vector ();
+ releasing_vec vector;
tree bases_vec = NULL_TREE;
unsigned i;
vec<tree, va_gc> *vbases;
- vec<tree, va_gc> *nonvbases;
tree binfo;
/* First go through virtual base classes */
for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
vec_safe_iterate (vbases, i, &binfo); i++)
{
- vec<tree, va_gc> *vbase_bases
+ releasing_vec vbase_bases
= calculate_bases_helper (BINFO_TYPE (binfo));
vec_safe_splice (vector, vbase_bases);
- release_tree_vector (vbase_bases);
}
/* Now for the non-virtual bases */
- nonvbases = calculate_bases_helper (type);
+ releasing_vec nonvbases = calculate_bases_helper (type);
vec_safe_splice (vector, nonvbases);
- release_tree_vector (nonvbases);
/* Note that during error recovery vector->length can even be zero. */
if (vector->length () > 1)
else
bases_vec = make_tree_vec (0);
- release_tree_vector (vector);
return bases_vec;
}
if (!processing_template_decl)
{
/* Parameter packs can only be used in templates */
- error ("Parameter pack __bases only valid in template declaration");
+ error ("parameter pack %<__bases%> only valid in template declaration");
return error_mark_node;
}
return expr;
}
+ if (expr == error_mark_node)
+ return error_mark_node;
+
if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
{
error ("cannot apply %<offsetof%> to destructor %<~%T%>",
TREE_OPERAND (expr, 2));
return error_mark_node;
}
- if (TREE_CODE (TREE_TYPE (expr)) == FUNCTION_TYPE
- || TREE_CODE (TREE_TYPE (expr)) == METHOD_TYPE
+ if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (expr))
|| TREE_TYPE (expr) == unknown_type_node)
{
while (TREE_CODE (expr) == COMPONENT_REF
&& CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr)))
&& CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr)))
&& cp_unevaluated_operand == 0)
- warning_at (loc, OPT_Winvalid_offsetof, "offsetof within "
+ warning_at (loc, OPT_Winvalid_offsetof, "%<offsetof%> within "
"non-standard-layout type %qT is conditionally-supported",
TREE_TYPE (TREE_TYPE (object_ptr)));
return fold_offsetof (expr);
if (DECL_INTERFACE_KNOWN (fn))
/* We've already made a decision as to how this function will
be handled. */;
- else if (!at_eof)
+ else if (!at_eof
+ || DECL_IMMEDIATE_FUNCTION_P (fn)
+ || DECL_OMP_DECLARE_REDUCTION_P (fn))
tentative_decl_linkage (fn);
else
import_export_decl (fn);
be emitted; there may be callers in other DLLs. */
if (DECL_DECLARED_INLINE_P (fn)
&& !DECL_REALLY_EXTERN (fn)
+ && !DECL_IMMEDIATE_FUNCTION_P (fn)
+ && !DECL_OMP_DECLARE_REDUCTION_P (fn)
&& (flag_keep_inline_functions
|| (flag_keep_inline_dllexport
&& lookup_attribute ("dllexport", DECL_ATTRIBUTES (fn)))))
/* There's no reason to do any of the work here if we're only doing
semantic analysis; this code just generates RTL. */
if (flag_syntax_only)
+ {
+ /* Pretend that this function has been written out so that we don't try
+ to expand it again. */
+ TREE_ASM_WRITTEN (fn) = 1;
+ return false;
+ }
+
+ if (DECL_OMP_DECLARE_REDUCTION_P (fn))
return false;
return true;
}
}
-struct nrv_data
+class nrv_data
{
+public:
nrv_data () : visited (37) {}
tree var;
static tree
finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
{
- struct nrv_data *dp = (struct nrv_data *)data;
+ class nrv_data *dp = (class nrv_data *)data;
tree_node **slot;
/* No need to walk into types. There wouldn't be any need to walk into
void
finalize_nrv (tree *tp, tree var, tree result)
{
- struct nrv_data data;
+ class nrv_data data;
/* Copy name from VAR to RESULT. */
DECL_NAME (result) = DECL_NAME (var);
t = TREE_OPERAND (t, 0);
ret = t;
if (TREE_CODE (t) == COMPONENT_REF
- && ort == C_ORT_OMP
&& (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
|| OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
|| OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)
return error_mark_node;
}
t = TREE_OPERAND (t, 0);
+ if (ort == C_ORT_ACC && TREE_CODE (t) == INDIRECT_REF)
+ t = TREE_OPERAND (t, 0);
}
if (REFERENCE_REF_P (t))
t = TREE_OPERAND (t, 0);
if (low_bound == NULL_TREE)
low_bound = integer_zero_node;
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
+ && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
+ {
+ if (length != integer_one_node)
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "expected single pointer in %qs clause",
+ c_omp_map_clause_name (c, ort == C_ORT_ACC));
+ return error_mark_node;
+ }
+ }
if (length != NULL_TREE)
{
if (!integer_nonzerop (length))
TREE_PURPOSE (t) = lb;
low_bound = lb;
}
+ /* Temporarily disable -fstrong-eval-order for array reductions.
+ The SAVE_EXPR and COMPOUND_EXPR added if low_bound has side-effects
+ is something the middle-end can't cope with and more importantly,
+ it needs to be the actual base variable that is privatized, not some
+ temporary assigned previous value of it. That, together with OpenMP
+ saying how many times the side-effects are evaluated is unspecified,
+ makes int *a, *b; ... reduction(+:a[a = b, 3:10]) really unspecified. */
+ warning_sentinel s (flag_strong_eval_order,
+ OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
+ || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
+ || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION);
ret = grok_array_decl (OMP_CLAUSE_LOCATION (c), ret, low_bound, false);
return ret;
}
switch (OMP_CLAUSE_MAP_KIND (c))
{
case GOMP_MAP_ALLOC:
+ case GOMP_MAP_IF_PRESENT:
case GOMP_MAP_TO:
case GOMP_MAP_FROM:
case GOMP_MAP_TOFROM:
if ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP && ort != C_ORT_ACC)
OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_POINTER);
else if (TREE_CODE (t) == COMPONENT_REF)
- OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
+ {
+ gomp_map_kind k = (ort == C_ORT_ACC) ? GOMP_MAP_ATTACH_DETACH
+ : GOMP_MAP_ALWAYS_POINTER;
+ OMP_CLAUSE_SET_MAP_KIND (c2, k);
+ }
else if (REFERENCE_REF_P (t)
&& TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
{
t = TREE_OPERAND (t, 0);
- OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
+ gomp_map_kind k = (ort == C_ORT_ACC) ? GOMP_MAP_ATTACH_DETACH
+ : GOMP_MAP_ALWAYS_POINTER;
+ OMP_CLAUSE_SET_MAP_KIND (c2, k);
}
else
OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
return;
}
}
- else if (TREE_CODE (type) == FUNCTION_TYPE
- || TREE_CODE (type) == METHOD_TYPE
+ else if (FUNC_OR_METHOD_TYPE_P (type)
|| TREE_CODE (type) == ARRAY_TYPE)
{
error_at (loc, "function or array type %qT in "
}
else if (TYPE_QUALS_NO_ADDR_SPACE (type))
{
- error_at (loc, "const, volatile or __restrict qualified type %qT in "
- "%<#pragma omp declare reduction%>", type);
+ error_at (loc, "%<const%>, %<volatile%> or %<__restrict%>-qualified "
+ "type %qT in %<#pragma omp declare reduction%>", type);
return;
}
if (need_static_cast)
{
tree rtype = build_reference_type (atype);
- omp_out = build_static_cast (rtype, omp_out,
+ omp_out = build_static_cast (input_location,
+ rtype, omp_out,
tf_warning_or_error);
- omp_in = build_static_cast (rtype, omp_in,
+ omp_in = build_static_cast (input_location,
+ rtype, omp_in,
tf_warning_or_error);
if (omp_out == error_mark_node || omp_in == error_mark_node)
return true;
return true;
}
tree rtype = build_reference_type (atype);
- omp_priv = build_static_cast (rtype, omp_priv,
+ omp_priv = build_static_cast (input_location,
+ rtype, omp_priv,
tf_warning_or_error);
- omp_orig = build_static_cast (rtype, omp_orig,
+ omp_orig = build_static_cast (input_location,
+ rtype, omp_orig,
tf_warning_or_error);
if (omp_priv == error_mark_node
|| omp_orig == error_mark_node)
begin = mark_rvalue_use (begin);
end = mark_rvalue_use (end);
step = mark_rvalue_use (step);
- begin = cp_build_c_cast (type, begin, tf_warning_or_error);
- end = cp_build_c_cast (type, end, tf_warning_or_error);
+ begin = cp_build_c_cast (input_location, type, begin,
+ tf_warning_or_error);
+ end = cp_build_c_cast (input_location, type, end,
+ tf_warning_or_error);
orig_step = step;
if (!processing_template_decl)
step = orig_step = save_expr (step);
tree stype = POINTER_TYPE_P (type) ? sizetype : type;
- step = cp_build_c_cast (stype, step, tf_warning_or_error);
+ step = cp_build_c_cast (input_location, stype, step,
+ tf_warning_or_error);
if (POINTER_TYPE_P (type) && !processing_template_decl)
{
begin = save_expr (begin);
return ret;
}
+/* Ensure that pointers are used in OpenACC attach and detach clauses.
+   C is the clause to check.
+   Return true if an error has been detected.  */
+
+static bool
+cp_oacc_check_attachments (tree c)
+{
+  /* Only map clauses can carry attach/detach kinds.  */
+  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
+    return false;
+
+  /* OpenACC attach / detach clauses must be pointers.  */
+  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
+      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
+    {
+      tree t = OMP_CLAUSE_DECL (c);
+      tree type;
+
+      /* Skip over the TREE_LIST nodes an array section wraps around
+	 the underlying decl.  */
+      while (TREE_CODE (t) == TREE_LIST)
+	t = TREE_CHAIN (t);
+
+      type = TREE_TYPE (t);
+
+      /* A reference to a pointer is acceptable; look through it.  */
+      if (TREE_CODE (type) == REFERENCE_TYPE)
+	type = TREE_TYPE (type);
+
+      if (TREE_CODE (type) != POINTER_TYPE)
+	{
+	  error_at (OMP_CLAUSE_LOCATION (c), "expected pointer in %qs clause",
+		    c_omp_map_clause_name (c, true));
+	  return true;
+	}
+    }
+
+  return false;
+}
+
/* For all elements of CLAUSES, validate them vs OpenMP constraints.
Remove any elements from the list that are invalid. */
bool branch_seen = false;
bool copyprivate_seen = false;
bool ordered_seen = false;
+ bool order_seen = false;
+ bool schedule_seen = false;
bool oacc_async = false;
tree last_iterators = NULL_TREE;
bool last_iterators_remove = false;
- bool reduction_seen = false;
+ /* 1 if normal/task reduction has been seen, -1 if inscan reduction
+ has been seen, -2 if mixed inscan/normal reduction diagnosed. */
+ int reduction_seen = 0;
bitmap_obstack_initialize (NULL);
bitmap_initialize (&generic_head, &bitmap_default_obstack);
/* If ort == C_ORT_OMP_DECLARE_SIMD used as uniform_head instead. */
bitmap_initialize (&map_head, &bitmap_default_obstack);
bitmap_initialize (&map_field_head, &bitmap_default_obstack);
- /* If ort == C_ORT_OMP used as nontemporal_head instead. */
+ /* If ort == C_ORT_OMP used as nontemporal_head or use_device_xxx_head
+ instead. */
bitmap_initialize (&oacc_reduction_head, &bitmap_default_obstack);
if (ort & C_ORT_ACC)
field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
goto check_dup_generic;
case OMP_CLAUSE_REDUCTION:
- reduction_seen = true;
+ if (reduction_seen == 0)
+ reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1;
+ else if (reduction_seen != -2
+ && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c)
+ ? -1 : 1))
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%<inscan%> and non-%<inscan%> %<reduction%> clauses "
+ "on the same construct");
+ reduction_seen = -2;
+ }
/* FALLTHRU */
case OMP_CLAUSE_IN_REDUCTION:
case OMP_CLAUSE_TASK_REDUCTION:
remove = true;
break;
}
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
+ && OMP_CLAUSE_REDUCTION_INSCAN (c))
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%<inscan%> %<reduction%> clause with array "
+ "section");
+ remove = true;
+ break;
+ }
if (TREE_CODE (t) == TREE_LIST)
{
while (TREE_CODE (t) == TREE_LIST)
t = OMP_CLAUSE_DECL (c);
check_dup_generic_t:
if (t == current_class_ptr
- && (ort != C_ORT_OMP_DECLARE_SIMD
+ && ((ort != C_ORT_OMP_DECLARE_SIMD && ort != C_ORT_ACC)
|| (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_UNIFORM)))
{
omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
remove = true;
}
- else if (ort == C_ORT_ACC
- && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
+ else if ((ort == C_ORT_ACC
+ && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
+ || (ort == C_ORT_OMP
+ && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
+ || (OMP_CLAUSE_CODE (c)
+ == OMP_CLAUSE_USE_DEVICE_ADDR))))
{
if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
{
error_at (OMP_CLAUSE_LOCATION (c),
- "%qD appears more than once in reduction clauses",
+ ort == C_ORT_ACC
+ ? "%qD appears more than once in reduction clauses"
+ : "%qD appears more than once in data clauses",
t);
remove = true;
}
handle_field_decl:
if (!remove
&& TREE_CODE (t) == FIELD_DECL
- && t == OMP_CLAUSE_DECL (c)
- && ort != C_ORT_ACC)
+ && t == OMP_CLAUSE_DECL (c))
{
OMP_CLAUSE_DECL (c)
= omp_privatize_field (t, (OMP_CLAUSE_CODE (c)
omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
else
t = OMP_CLAUSE_DECL (c);
- if (t == current_class_ptr)
+ if (ort != C_ORT_ACC && t == current_class_ptr)
{
error_at (OMP_CLAUSE_LOCATION (c),
"%<this%> allowed in OpenMP only in %<declare simd%>"
}
OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t;
}
+ if (!remove)
+ schedule_seen = true;
break;
case OMP_CLAUSE_SIMDLEN:
}
if (t == error_mark_node)
remove = true;
- else if (t == current_class_ptr)
+ else if (ort != C_ORT_ACC && t == current_class_ptr)
{
error_at (OMP_CLAUSE_LOCATION (c),
"%<this%> allowed in OpenMP only in %<declare simd%>"
remove = true;
else
{
- t = cp_build_indirect_ref (addr, RO_UNARY_STAR,
+ t = cp_build_indirect_ref (OMP_CLAUSE_LOCATION (c),
+ addr, RO_UNARY_STAR,
tf_warning_or_error);
if (t == error_mark_node)
remove = true;
"array section does not have mappable type "
"in %qs clause",
omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
remove = true;
}
while (TREE_CODE (t) == ARRAY_REF)
}
}
}
+ if (cp_oacc_check_attachments (c))
+ remove = true;
break;
}
if (t == error_mark_node)
remove = true;
break;
}
+ /* OpenACC attach / detach clauses must be pointers. */
+ if (cp_oacc_check_attachments (c))
+ {
+ remove = true;
+ break;
+ }
if (REFERENCE_REF_P (t)
&& TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
{
t = TREE_OPERAND (t, 0);
OMP_CLAUSE_DECL (c) = t;
}
+ if (ort == C_ORT_ACC
+ && TREE_CODE (t) == COMPONENT_REF
+ && TREE_CODE (TREE_OPERAND (t, 0)) == INDIRECT_REF)
+ t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
if (TREE_CODE (t) == COMPONENT_REF
- && (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
+ && ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
+ || ort == C_ORT_ACC)
&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE__CACHE_)
{
if (type_dependent_expression_p (t))
error_at (OMP_CLAUSE_LOCATION (c),
"%qE does not have a mappable type in %qs clause",
t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
remove = true;
}
while (TREE_CODE (t) == COMPONENT_REF)
break;
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
- || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER))
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH))
break;
if (DECL_P (t))
error_at (OMP_CLAUSE_LOCATION (c),
error_at (OMP_CLAUSE_LOCATION (c),
"%qD does not have a mappable type in %qs clause", t,
omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
remove = true;
}
else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
else
bitmap_set_bit (&generic_head, DECL_UID (t));
}
- else if (bitmap_bit_p (&map_head, DECL_UID (t)))
+ else if (bitmap_bit_p (&map_head, DECL_UID (t))
+ && (ort != C_ORT_ACC
+ || !bitmap_bit_p (&map_field_head, DECL_UID (t))))
{
if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
error_at (OMP_CLAUSE_LOCATION (c),
tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
OMP_CLAUSE_MAP);
if (TREE_CODE (t) == COMPONENT_REF)
- OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
+ {
+ gomp_map_kind k
+ = (ort == C_ORT_ACC) ? GOMP_MAP_ATTACH_DETACH
+ : GOMP_MAP_ALWAYS_POINTER;
+ OMP_CLAUSE_SET_MAP_KIND (c2, k);
+ }
else
OMP_CLAUSE_SET_MAP_KIND (c2,
GOMP_MAP_FIRSTPRIVATE_REFERENCE);
error_at (OMP_CLAUSE_LOCATION (c),
"%qD does not have a mappable type in %qs clause", t,
omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
remove = true;
}
if (remove)
{
tree type = TREE_TYPE (t);
if (!TYPE_PTR_P (type)
- && TREE_CODE (type) != ARRAY_TYPE
- && (!TYPE_REF_P (type)
- || (!TYPE_PTR_P (TREE_TYPE (type))
- && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)))
+ && (!TYPE_REF_P (type) || !TYPE_PTR_P (TREE_TYPE (type))))
{
- error_at (OMP_CLAUSE_LOCATION (c),
- "%qs variable is neither a pointer, nor an array "
- "nor reference to pointer or array",
- omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
- remove = true;
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
+ && ort == C_ORT_OMP)
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%qs variable is neither a pointer "
+ "nor reference to pointer",
+ omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ remove = true;
+ }
+ else if (TREE_CODE (type) != ARRAY_TYPE
+ && (!TYPE_REF_P (type)
+ || TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%qs variable is neither a pointer, nor an "
+ "array nor reference to pointer or array",
+ omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ remove = true;
+ }
}
}
goto check_dup_generic;
+ case OMP_CLAUSE_USE_DEVICE_ADDR:
+ field_ok = true;
+ t = OMP_CLAUSE_DECL (c);
+ if (!processing_template_decl
+ && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
+ && !TYPE_REF_P (TREE_TYPE (t))
+ && !cxx_mark_addressable (t))
+ remove = true;
+ goto check_dup_generic;
+
case OMP_CLAUSE_NOWAIT:
case OMP_CLAUSE_DEFAULT:
case OMP_CLAUSE_UNTIED:
case OMP_CLAUSE_SECTIONS:
case OMP_CLAUSE_TASKGROUP:
case OMP_CLAUSE_PROC_BIND:
+ case OMP_CLAUSE_DEVICE_TYPE:
case OMP_CLAUSE_NOGROUP:
case OMP_CLAUSE_THREADS:
case OMP_CLAUSE_SIMD:
case OMP_CLAUSE_DEFAULTMAP:
+ case OMP_CLAUSE_BIND:
case OMP_CLAUSE_AUTO:
case OMP_CLAUSE_INDEPENDENT:
case OMP_CLAUSE_SEQ:
ordered_seen = true;
break;
+ case OMP_CLAUSE_ORDER:
+ if (order_seen)
+ remove = true;
+ else
+ order_seen = true;
+ break;
+
case OMP_CLAUSE_INBRANCH:
case OMP_CLAUSE_NOTINBRANCH:
if (branch_seen)
branch_seen = true;
break;
+ case OMP_CLAUSE_INCLUSIVE:
+ case OMP_CLAUSE_EXCLUSIVE:
+ t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
+ if (!t)
+ t = OMP_CLAUSE_DECL (c);
+ if (t == current_class_ptr)
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%<this%> allowed in OpenMP only in %<declare simd%>"
+ " clauses");
+ remove = true;
+ break;
+ }
+ if (!VAR_P (t)
+ && TREE_CODE (t) != PARM_DECL
+ && TREE_CODE (t) != FIELD_DECL)
+ {
+ if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
+ break;
+ if (DECL_P (t))
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%qD is not a variable in clause %qs", t,
+ omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ else
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%qE is not a variable in clause %qs", t,
+ omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
+ remove = true;
+ }
+ break;
+
default:
gcc_unreachable ();
}
pc = &OMP_CLAUSE_CHAIN (c);
}
+ if (reduction_seen < 0 && (ordered_seen || schedule_seen))
+ reduction_seen = -2;
+
for (pc = &clauses, c = clauses; c ; c = *pc)
{
enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
need_implicitly_determined = true;
break;
case OMP_CLAUSE_REDUCTION:
+ if (reduction_seen == -2)
+ OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
+ if (OMP_CLAUSE_REDUCTION_INSCAN (c))
+ need_copy_assignment = true;
+ need_implicitly_determined = true;
+ break;
case OMP_CLAUSE_IN_REDUCTION:
case OMP_CLAUSE_TASK_REDUCTION:
+ case OMP_CLAUSE_INCLUSIVE:
+ case OMP_CLAUSE_EXCLUSIVE:
need_implicitly_determined = true;
break;
case OMP_CLAUSE_LINEAR:
(OMP_CLAUSE_SCHEDULE_KIND (c)
& ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC);
}
+ if (reduction_seen == -2)
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%qs clause specified together with %<inscan%> "
+ "%<reduction%> clause", "schedule");
pc = &OMP_CLAUSE_CHAIN (c);
continue;
case OMP_CLAUSE_NOGROUP:
}
pc = &OMP_CLAUSE_CHAIN (c);
continue;
+ case OMP_CLAUSE_ORDERED:
+ if (reduction_seen == -2)
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%qs clause specified together with %<inscan%> "
+ "%<reduction%> clause", "ordered");
+ pc = &OMP_CLAUSE_CHAIN (c);
+ continue;
+ case OMP_CLAUSE_ORDER:
+ if (ordered_seen)
+ {
+ error_at (OMP_CLAUSE_LOCATION (c),
+ "%<order%> clause must not be used together "
+ "with %<ordered%>");
+ *pc = OMP_CLAUSE_CHAIN (c);
+ continue;
+ }
+ pc = &OMP_CLAUSE_CHAIN (c);
+ continue;
case OMP_CLAUSE_NOWAIT:
if (copyprivate_seen)
{
t = require_complete_type (t);
if (t == error_mark_node)
remove = true;
- else if (TYPE_REF_P (TREE_TYPE (t))
+ else if (!processing_template_decl
+ && TYPE_REF_P (TREE_TYPE (t))
&& !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t))
remove = true;
}
case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
break;
case OMP_CLAUSE_DEFAULT_SHARED:
+ if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
+ || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
+ && c_omp_predefined_variable (t))
+ /* The __func__ variable and similar function-local predefined
+ variables may be listed in a shared or firstprivate
+ clause. */
+ break;
if (VAR_P (t)
&& OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
&& TREE_STATIC (t)
save.release ();
return;
}
-
+
omp_private_member_map = new hash_map <tree, tree>;
while (!save.is_empty ())
{
if (init && EXPR_HAS_LOCATION (init))
elocus = EXPR_LOCATION (init);
- cond = cp_fully_fold (cond);
switch (TREE_CODE (cond))
{
case GT_EXPR:
incr = cp_convert (TREE_TYPE (diff), incr, tf_warning_or_error);
incr = cp_fully_fold (incr);
- bool taskloop_iv_seen = false;
+ tree loop_iv_seen = NULL_TREE;
for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
&& OMP_CLAUSE_DECL (c) == iter)
{
- if (code == OMP_TASKLOOP)
+ if (code == OMP_TASKLOOP || code == OMP_LOOP)
{
- taskloop_iv_seen = true;
- OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c) = 1;
+ loop_iv_seen = c;
+ OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) = 1;
}
break;
}
- else if (code == OMP_TASKLOOP
+ else if ((code == OMP_TASKLOOP || code == OMP_LOOP)
&& OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
&& OMP_CLAUSE_DECL (c) == iter)
{
- taskloop_iv_seen = true;
- OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1;
+ loop_iv_seen = c;
+ if (code == OMP_TASKLOOP)
+ OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1;
}
decl = create_temporary_var (TREE_TYPE (diff));
tree diffvar = NULL_TREE;
if (code == OMP_TASKLOOP)
{
- if (!taskloop_iv_seen)
+ if (!loop_iv_seen)
{
tree ivc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
OMP_CLAUSE_DECL (ivc) = iter;
pushdecl (diffvar);
add_decl_expr (diffvar);
}
+ else if (code == OMP_LOOP)
+ {
+ if (!loop_iv_seen)
+ {
+ /* While iterators on the loop construct are predetermined
+ lastprivate, if the decl is not declared inside of the
+ loop, OMP_CLAUSE_LASTPRIVATE should have been added
+ already. */
+ loop_iv_seen = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
+ OMP_CLAUSE_DECL (loop_iv_seen) = iter;
+ OMP_CLAUSE_CHAIN (loop_iv_seen) = clauses;
+ clauses = loop_iv_seen;
+ }
+ else if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_PRIVATE)
+ {
+ OMP_CLAUSE_PRIVATE_DEBUG (loop_iv_seen) = 0;
+ OMP_CLAUSE_PRIVATE_OUTER_REF (loop_iv_seen) = 0;
+ OMP_CLAUSE_CODE (loop_iv_seen) = OMP_CLAUSE_FIRSTPRIVATE;
+ }
+ if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_FIRSTPRIVATE)
+ cxx_omp_finish_clause (loop_iv_seen, NULL);
+ }
orig_pre_body = *pre_body;
*pre_body = push_stmt_list ();
omp_for = NULL_TREE;
if (omp_for == NULL)
- {
- return NULL;
- }
+ return NULL;
add_stmt (omp_for);
gcc_unreachable ();
}
}
+ /* Override saved methods on OMP_LOOP's OMP_CLAUSE_LASTPRIVATE_LOOP_IV
+ clauses, we need copy ctor for those rather than default ctor,
+ plus as for other lastprivates assignment op and dtor. */
+ if (code == OMP_LOOP && !processing_template_decl)
+ for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+ && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
+ && cxx_omp_create_clause_info (c, TREE_TYPE (OMP_CLAUSE_DECL (c)),
+ false, true, true, true))
+ CP_OMP_CLAUSE_INFO (c) = NULL_TREE;
return omp_for;
}
{
if (opcode == NOP_EXPR)
stmt = build2 (MODIFY_EXPR, void_type_node, orig_lhs, orig_rhs);
- else
+ else
stmt = build2 (opcode, void_type_node, orig_lhs, orig_rhs);
if (orig_rhs1)
stmt = build_min_nt_loc (EXPR_LOCATION (orig_rhs1),
OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
SET_EXPR_LOCATION (stmt, loc);
}
+
+ /* Avoid -Wunused-value warnings here; the whole construct has side-effects,
+ and even if it might be wrapped from fold-const.c or c-omp.c in some tree
+ that appears to be unused, the value is not unused. */
+ warning_sentinel w (warn_unused_value);
finish_expr_stmt (stmt);
}
finish_omp_barrier (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
- vec<tree, va_gc> *vec = make_tree_vector ();
+ releasing_vec vec;
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
- release_tree_vector (vec);
finish_expr_stmt (stmt);
}
if (addr == error_mark_node)
depobj = error_mark_node;
else
- depobj = cp_build_indirect_ref (addr, RO_UNARY_STAR,
+ depobj = cp_build_indirect_ref (loc, addr, RO_UNARY_STAR,
tf_warning_or_error);
}
finish_omp_flush (int mo)
{
tree fn = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
- vec<tree, va_gc> *vec = make_tree_vector ();
+ releasing_vec vec;
if (mo != MEMMODEL_LAST)
{
fn = builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE);
vec->quick_push (build_int_cst (integer_type_node, mo));
}
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
- release_tree_vector (vec);
finish_expr_stmt (stmt);
}
finish_omp_taskwait (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
- vec<tree, va_gc> *vec = make_tree_vector ();
+ releasing_vec vec;
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
- release_tree_vector (vec);
finish_expr_stmt (stmt);
}
finish_omp_taskyield (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
- vec<tree, va_gc> *vec = make_tree_vector ();
+ releasing_vec vec;
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
- release_tree_vector (vec);
finish_expr_stmt (stmt);
}
"%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
return;
}
- vec<tree, va_gc> *vec = make_tree_vector ();
+ releasing_vec vec;
tree ifc = omp_find_clause (clauses, OMP_CLAUSE_IF);
if (ifc != NULL_TREE)
{
vec->quick_push (build_int_cst (integer_type_node, mask));
vec->quick_push (ifc);
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
- release_tree_vector (vec);
finish_expr_stmt (stmt);
}
"%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
return;
}
- vec<tree, va_gc> *vec
+ releasing_vec vec
= make_tree_vector_single (build_int_cst (integer_type_node, mask));
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
- release_tree_vector (vec);
finish_expr_stmt (stmt);
}
\f
CONDITION and the message text MESSAGE. LOCATION is the location
of the static assertion in the source code. When MEMBER_P, this
static assertion is a member of a class. */
-void
-finish_static_assert (tree condition, tree message, location_t location,
+void
+finish_static_assert (tree condition, tree message, location_t location,
bool member_p)
{
tsubst_flags_t complain = tf_warning_or_error;
STATIC_ASSERT_SOURCE_LOCATION (assertion) = location;
if (member_p)
- maybe_add_class_template_decl_list (current_class_type,
+ maybe_add_class_template_decl_list (current_class_type,
assertion,
/*friend_p=*/0);
else
return;
}
+ /* Save the condition in case it was a concept check. */
+ tree orig_condition = condition;
+
/* Fold the expression and convert it to a boolean value. */
condition = perform_implicit_conversion_flags (boolean_type_node, condition,
complain, LOOKUP_NORMAL);
if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition))
/* Do nothing; the condition is satisfied. */
;
- else
+ else
{
location_t saved_loc = input_location;
input_location = location;
- if (TREE_CODE (condition) == INTEGER_CST
+ if (TREE_CODE (condition) == INTEGER_CST
&& integer_zerop (condition))
{
int sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT
else
error ("static assertion failed: %s",
TREE_STRING_POINTER (message));
+
+ /* Actually explain the failure if this is a concept check. */
+ if (concept_check_p (orig_condition))
+ diagnose_constraints (location, orig_condition, NULL_TREE);
}
else if (condition && condition != error_mark_node)
{
&& TYPE_P (TREE_OPERAND (expr, 0))))
{
if (complain & tf_error)
- error ("argument to decltype must be an expression");
+ error ("argument to %<decltype%> must be an expression");
return error_mark_node;
}
if (type_unknown_p (expr))
{
if (complain & tf_error)
- error ("decltype cannot resolve address of overloaded function");
+ error ("%<decltype%> cannot resolve address of overloaded function");
return error_mark_node;
}
return type;
}
-/* Called from trait_expr_value to evaluate either __has_nothrow_assign or
+/* Called from trait_expr_value to evaluate either __has_nothrow_assign or
__has_nothrow_copy, depending on assign_p. Returns true iff all
the copy {ctor,assign} fns are nothrow. */
if (copy_fn_p (fn) > 0)
{
saw_copy = true;
- maybe_instantiate_noexcept (fn);
- if (!TYPE_NOTHROW_P (TREE_TYPE (fn)))
+ if (!maybe_instantiate_noexcept (fn)
+ || !TYPE_NOTHROW_P (TREE_TYPE (fn)))
return false;
}
}
case CPTK_HAS_NOTHROW_CONSTRUCTOR:
type1 = strip_array_types (type1);
- return (trait_expr_value (CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
+ return (trait_expr_value (CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
|| (CLASS_TYPE_P (type1)
&& (t = locate_ctor (type1))
- && (maybe_instantiate_noexcept (t),
- TYPE_NOTHROW_P (TREE_TYPE (t)))));
+ && maybe_instantiate_noexcept (t)
+ && TYPE_NOTHROW_P (TREE_TYPE (t))));
case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
type1 = strip_array_types (type1);
/* Process a trait expression. */
tree
-finish_trait_expr (cp_trait_kind kind, tree type1, tree type2)
+finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
{
if (type1 == error_mark_node
|| type2 == error_mark_node)
TRAIT_EXPR_TYPE1 (trait_expr) = type1;
TRAIT_EXPR_TYPE2 (trait_expr) = type2;
TRAIT_EXPR_KIND (trait_expr) = kind;
+ TRAIT_EXPR_LOCATION (trait_expr) = loc;
return trait_expr;
}
gcc_unreachable ();
}
- return (trait_expr_value (kind, type1, type2)
- ? boolean_true_node : boolean_false_node);
+ tree val = (trait_expr_value (kind, type1, type2)
+ ? boolean_true_node : boolean_false_node);
+ return maybe_wrap_with_location (val, loc);
}
/* Do-nothing variants of functions to handle pragma FLOAT_CONST_DECIMAL64,
#endif
cfun->returns_struct = aggr;
}
-
}
/* DECL is a local variable or parameter from the surrounding scope of a
static tree
finish_unary_fold_expr (tree expr, int op, tree_code dir)
{
- // Build a pack expansion (assuming expr has pack type).
+ /* Build a pack expansion (assuming expr has pack type). */
if (!uses_parameter_packs (expr))
{
error_at (location_of (expr), "operand of fold expression has no "
}
tree pack = make_pack_expansion (expr);
- // Build the fold expression.
+ /* Build the fold expression. */
tree code = build_int_cstu (integer_type_node, abs (op));
tree fold = build_min_nt_loc (UNKNOWN_LOCATION, dir, code, pack);
FOLD_EXPR_MODIFY_P (fold) = (op < 0);
tree ret = NULL_TREE;
if (!type_dependent_expression_p (arg) && !dependent_type_p (type))
- ret = c_build_vec_convert (cp_expr_loc_or_loc (arg, input_location), arg,
+ ret = c_build_vec_convert (cp_expr_loc_or_input_loc (arg), arg,
loc, type, (complain & tf_error) != 0);
if (!processing_template_decl)