/* Subroutines shared by all languages that are variants of C.
- Copyright (C) 1992-2022 Free Software Foundation, Inc.
+ Copyright (C) 1992-2024 Free Software Foundation, Inc.
This file is part of GCC.
int flag_isoc11;
-/* Nonzero means use the ISO C2X dialect of C. */
+/* Nonzero means use the ISO C23 dialect of C. */
-int flag_isoc2x;
+int flag_isoc23;
/* Nonzero means that we have builtin functions, and main is an int. */
{NULL, 0, 0},
};
+/* Flags to restrict availability of generic features that
+ are known to __has_{feature,extension}. */
+
+enum
+{
+ HF_FLAG_NONE = 0,
+ HF_FLAG_EXT = 1, /* Available only as an extension. */
+ HF_FLAG_SANITIZE = 2, /* Availability depends on sanitizer flags. */
+};
+
+/* Info for generic features which can be queried through
+ __has_{feature,extension}. */
+
+struct hf_feature_info
+{
+ const char *ident; /* Feature name as spelled in a __has_{feature,extension} query. */
+ unsigned flags; /* HF_FLAG_* value controlling availability. */
+ unsigned mask; /* SANITIZE_* bit(s) tested when flags is HF_FLAG_SANITIZE; 0 otherwise. */
+};
+
+/* Table of generic features which can be queried through
+ __has_{feature,extension}. */
+
+static constexpr hf_feature_info has_feature_table[] =
+{
+ { "address_sanitizer", HF_FLAG_SANITIZE, SANITIZE_ADDRESS },
+ { "thread_sanitizer", HF_FLAG_SANITIZE, SANITIZE_THREAD },
+ { "leak_sanitizer", HF_FLAG_SANITIZE, SANITIZE_LEAK },
+ { "hwaddress_sanitizer", HF_FLAG_SANITIZE, SANITIZE_HWADDRESS },
+ { "undefined_behavior_sanitizer", HF_FLAG_SANITIZE, SANITIZE_UNDEFINED },
+ { "attribute_deprecated_with_message", HF_FLAG_NONE, 0 },
+ { "attribute_unavailable_with_message", HF_FLAG_NONE, 0 },
+ { "enumerator_attributes", HF_FLAG_NONE, 0 },
+ { "tls", HF_FLAG_NONE, 0 },
+ { "gnu_asm_goto_with_outputs", HF_FLAG_EXT, 0 },
+ { "gnu_asm_goto_with_outputs_full", HF_FLAG_EXT, 0 }
+};
+
/* Global visibility options. */
struct visibility_flags visibility_options;
if they match the mask.
Masks for languages:
- C --std=c89: D_C99 | D_C2X | D_CXXONLY | D_OBJC | D_CXX_OBJC
- C --std=c99: D_C2X | D_CXXONLY | D_OBJC
- C --std=c17: D_C2X | D_CXXONLY | D_OBJC
- C --std=c2x: D_CXXONLY | D_OBJC
+ C --std=c89: D_C99 | D_C23 | D_CXXONLY | D_OBJC | D_CXX_OBJC
+ C --std=c99: D_C23 | D_CXXONLY | D_OBJC
+ C --std=c17: D_C23 | D_CXXONLY | D_OBJC
+ C --std=c23: D_CXXONLY | D_OBJC
ObjC is like C except that D_OBJC and D_CXX_OBJC are not set
C++ --std=c++98: D_CONLY | D_CXX11 | D_CXX20 | D_OBJC
C++ --std=c++11: D_CONLY | D_CXX20 | D_OBJC
{ "_Alignas", RID_ALIGNAS, D_CONLY },
{ "_Alignof", RID_ALIGNOF, D_CONLY },
{ "_Atomic", RID_ATOMIC, D_CONLY },
+ { "_BitInt", RID_BITINT, D_CONLY },
{ "_Bool", RID_BOOL, D_CONLY },
{ "_Complex", RID_COMPLEX, 0 },
{ "_Imaginary", RID_IMAGINARY, D_CONLY },
{ "__attribute__", RID_ATTRIBUTE, 0 },
{ "__auto_type", RID_AUTO_TYPE, D_CONLY },
{ "__builtin_addressof", RID_ADDRESSOF, D_CXXONLY },
+ { "__builtin_assoc_barrier", RID_BUILTIN_ASSOC_BARRIER, 0 },
{ "__builtin_bit_cast", RID_BUILTIN_BIT_CAST, D_CXXONLY },
{ "__builtin_call_with_static_chain",
RID_BUILTIN_CALL_WITH_STATIC_CHAIN, D_CONLY },
{ "__builtin_convertvector", RID_BUILTIN_CONVERTVECTOR, 0 },
{ "__builtin_has_attribute", RID_BUILTIN_HAS_ATTRIBUTE, 0 },
{ "__builtin_launder", RID_BUILTIN_LAUNDER, D_CXXONLY },
- { "__builtin_assoc_barrier", RID_BUILTIN_ASSOC_BARRIER, 0 },
{ "__builtin_shuffle", RID_BUILTIN_SHUFFLE, 0 },
{ "__builtin_shufflevector", RID_BUILTIN_SHUFFLEVECTOR, 0 },
+ { "__builtin_stdc_bit_ceil", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_bit_floor", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_bit_width", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_count_ones", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_count_zeros", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_first_leading_one", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_first_leading_zero", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_first_trailing_one", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_first_trailing_zero", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_has_single_bit", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_leading_ones", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_leading_zeros", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_trailing_ones", RID_BUILTIN_STDC, D_CONLY },
+ { "__builtin_stdc_trailing_zeros", RID_BUILTIN_STDC, D_CONLY },
{ "__builtin_tgmath", RID_BUILTIN_TGMATH, D_CONLY },
{ "__builtin_offsetof", RID_OFFSETOF, 0 },
{ "__builtin_types_compatible_p", RID_TYPES_COMPATIBLE_P, D_CONLY },
{ "__transaction_cancel", RID_TRANSACTION_CANCEL, 0 },
{ "__typeof", RID_TYPEOF, 0 },
{ "__typeof__", RID_TYPEOF, 0 },
+ { "__typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY },
+ { "__typeof_unqual__", RID_TYPEOF_UNQUAL, D_CONLY },
{ "__volatile", RID_VOLATILE, 0 },
{ "__volatile__", RID_VOLATILE, 0 },
{ "__GIMPLE", RID_GIMPLE, D_CONLY },
{ "__PHI", RID_PHI, D_CONLY },
{ "__RTL", RID_RTL, D_CONLY },
- { "alignas", RID_ALIGNAS, D_C2X | D_CXX11 | D_CXXWARN },
- { "alignof", RID_ALIGNOF, D_C2X | D_CXX11 | D_CXXWARN },
+ { "alignas", RID_ALIGNAS, D_C23 | D_CXX11 | D_CXXWARN },
+ { "alignof", RID_ALIGNOF, D_C23 | D_CXX11 | D_CXXWARN },
{ "asm", RID_ASM, D_ASM },
{ "auto", RID_AUTO, 0 },
- { "bool", RID_BOOL, D_C2X | D_CXXWARN },
+ { "bool", RID_BOOL, D_C23 | D_CXXWARN },
{ "break", RID_BREAK, 0 },
{ "case", RID_CASE, 0 },
{ "catch", RID_CATCH, D_CXX_OBJC | D_CXXWARN },
{ "class", RID_CLASS, D_CXX_OBJC | D_CXXWARN },
{ "const", RID_CONST, 0 },
{ "consteval", RID_CONSTEVAL, D_CXXONLY | D_CXX20 | D_CXXWARN },
- { "constexpr", RID_CONSTEXPR, D_CXXONLY | D_CXX11 | D_CXXWARN },
+ { "constexpr", RID_CONSTEXPR, D_C23 | D_CXX11 | D_CXXWARN },
{ "constinit", RID_CONSTINIT, D_CXXONLY | D_CXX20 | D_CXXWARN },
{ "const_cast", RID_CONSTCAST, D_CXXONLY | D_CXXWARN },
{ "continue", RID_CONTINUE, 0 },
{ "explicit", RID_EXPLICIT, D_CXXONLY | D_CXXWARN },
{ "export", RID_EXPORT, D_CXXONLY | D_CXXWARN },
{ "extern", RID_EXTERN, 0 },
- { "false", RID_FALSE, D_C2X | D_CXXWARN },
+ { "false", RID_FALSE, D_C23 | D_CXXWARN },
{ "float", RID_FLOAT, 0 },
{ "for", RID_FOR, 0 },
{ "friend", RID_FRIEND, D_CXXONLY | D_CXXWARN },
{ "namespace", RID_NAMESPACE, D_CXXONLY | D_CXXWARN },
{ "new", RID_NEW, D_CXXONLY | D_CXXWARN },
{ "noexcept", RID_NOEXCEPT, D_CXXONLY | D_CXX11 | D_CXXWARN },
- { "nullptr", RID_NULLPTR, D_C2X | D_CXX11 | D_CXXWARN },
+ { "nullptr", RID_NULLPTR, D_C23 | D_CXX11 | D_CXXWARN },
{ "operator", RID_OPERATOR, D_CXXONLY | D_CXXWARN },
{ "private", RID_PRIVATE, D_CXX_OBJC | D_CXXWARN },
{ "protected", RID_PROTECTED, D_CXX_OBJC | D_CXXWARN },
{ "signed", RID_SIGNED, 0 },
{ "sizeof", RID_SIZEOF, 0 },
{ "static", RID_STATIC, 0 },
- { "static_assert", RID_STATIC_ASSERT, D_C2X | D_CXX11 | D_CXXWARN },
+ { "static_assert", RID_STATIC_ASSERT, D_C23 | D_CXX11 | D_CXXWARN },
{ "static_cast", RID_STATCAST, D_CXXONLY | D_CXXWARN },
{ "struct", RID_STRUCT, 0 },
{ "switch", RID_SWITCH, 0 },
{ "template", RID_TEMPLATE, D_CXXONLY | D_CXXWARN },
{ "this", RID_THIS, D_CXXONLY | D_CXXWARN },
- { "thread_local", RID_THREAD, D_C2X | D_CXX11 | D_CXXWARN },
+ { "thread_local", RID_THREAD, D_C23 | D_CXX11 | D_CXXWARN },
{ "throw", RID_THROW, D_CXX_OBJC | D_CXXWARN },
- { "true", RID_TRUE, D_C2X | D_CXXWARN },
+ { "true", RID_TRUE, D_C23 | D_CXXWARN },
{ "try", RID_TRY, D_CXX_OBJC | D_CXXWARN },
{ "typedef", RID_TYPEDEF, 0 },
{ "typename", RID_TYPENAME, D_CXXONLY | D_CXXWARN },
{ "typeid", RID_TYPEID, D_CXXONLY | D_CXXWARN },
{ "typeof", RID_TYPEOF, D_EXT11 },
- { "typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY | D_C2X },
+ { "typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY | D_C23 },
{ "union", RID_UNION, 0 },
{ "unsigned", RID_UNSIGNED, 0 },
{ "using", RID_USING, D_CXXONLY | D_CXXWARN },
{ "wchar_t", RID_WCHAR, D_CXXONLY },
{ "while", RID_WHILE, 0 },
-#define DEFTRAIT(TCC, CODE, NAME, ARITY) \
- { NAME, RID_##CODE, D_CXXONLY },
-#include "cp/cp-trait.def"
-#undef DEFTRAIT
- /* An alias for __is_same. */
- { "__is_same_as", RID_IS_SAME, D_CXXONLY },
-
/* C++ transactional memory. */
{ "synchronized", RID_SYNCHRONIZED, D_CXX_OBJC | D_TRANSMEM },
{ "atomic_noexcept", RID_ATOMIC_NOEXCEPT, D_CXXONLY | D_TRANSMEM },
if (tok_type == CPP_OTHER)
return "unrecognized string type";
- return get_location_within_string (parse_in, g_string_concat_db,
+ return get_location_within_string (parse_in,
+ global_dc->get_file_cache (),
+ g_string_concat_db,
substr_loc.get_fmt_string_loc (),
tok_type,
substr_loc.get_caret_idx (),
int uns;
tree type;
+ /* Do not shorten vector operations. */
+ if (VECTOR_TYPE_P (result_type))
+ return result_type;
+
/* Cast OP0 and OP1 to RESULT_TYPE. Doing so prevents
excessive narrowing when we call get_narrower below. For
example, suppose that OP0 is of unsigned int extended
/* Warn for real constant that is not an exact integer converted
to integer type. */
- if (TREE_CODE (expr_type) == REAL_TYPE
- && TREE_CODE (type) == INTEGER_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (expr_type)
+ && (TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == BITINT_TYPE))
{
if (!real_isinteger (TREE_REAL_CST_PTR (expr), TYPE_MODE (expr_type)))
give_warning = UNSAFE_REAL;
}
/* Warn for an integer constant that does not fit into integer type. */
- else if (TREE_CODE (expr_type) == INTEGER_TYPE
- && TREE_CODE (type) == INTEGER_TYPE
+ else if ((TREE_CODE (expr_type) == INTEGER_TYPE
+ || TREE_CODE (expr_type) == BITINT_TYPE)
+ && (TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == BITINT_TYPE)
&& !int_fits_type_p (expr, type))
{
if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type)
else
give_warning = UNSAFE_OTHER;
}
- else if (TREE_CODE (type) == REAL_TYPE)
+ else if (SCALAR_FLOAT_TYPE_P (type))
{
/* Warn for an integer constant that does not fit into real type. */
- if (TREE_CODE (expr_type) == INTEGER_TYPE)
+ if (TREE_CODE (expr_type) == INTEGER_TYPE
+ || TREE_CODE (expr_type) == BITINT_TYPE)
{
REAL_VALUE_TYPE a = real_value_from_int_cst (0, expr);
if (!exact_real_truncate (TYPE_MODE (type), &a))
}
/* Warn for a real constant that does not fit into a smaller
real type. */
- else if (TREE_CODE (expr_type) == REAL_TYPE
+ else if (SCALAR_FLOAT_TYPE_P (expr_type)
&& TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
{
REAL_VALUE_TYPE a = TREE_REAL_CST (expr);
else
{
/* Warn for real types converted to integer types. */
- if (TREE_CODE (expr_type) == REAL_TYPE
- && TREE_CODE (type) == INTEGER_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (expr_type)
+ && (TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == BITINT_TYPE))
give_warning = UNSAFE_REAL;
- else if (TREE_CODE (expr_type) == INTEGER_TYPE
- && TREE_CODE (type) == INTEGER_TYPE)
+ else if ((TREE_CODE (expr_type) == INTEGER_TYPE
+ || TREE_CODE (expr_type) == BITINT_TYPE)
+ && (TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == BITINT_TYPE))
{
/* Don't warn about unsigned char y = 0xff, x = (int) y; */
expr = get_unwidened (expr, 0);
/* Warn for integer types converted to real types if and only if
all the range of values of the integer type cannot be
represented by the real type. */
- else if (TREE_CODE (expr_type) == INTEGER_TYPE
- && TREE_CODE (type) == REAL_TYPE)
+ else if ((TREE_CODE (expr_type) == INTEGER_TYPE
+ || TREE_CODE (expr_type) == BITINT_TYPE)
+ && SCALAR_FLOAT_TYPE_P (type))
{
/* Don't warn about char y = 0xff; float x = (int) y; */
expr = get_unwidened (expr, 0);
}
/* Warn for real types converted to smaller real types. */
- else if (TREE_CODE (expr_type) == REAL_TYPE
- && TREE_CODE (type) == REAL_TYPE
+ else if (SCALAR_FLOAT_TYPE_P (expr_type)
+ && SCALAR_FLOAT_TYPE_P (type)
&& TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
give_warning = UNSAFE_REAL;
tree to_type = TREE_TYPE (type);
/* Warn for real types converted to integer types. */
- if (TREE_CODE (from_type) == REAL_TYPE
+ if (SCALAR_FLOAT_TYPE_P (from_type)
&& TREE_CODE (to_type) == INTEGER_TYPE)
give_warning = UNSAFE_REAL;
/* Warn for real types converted to smaller real types. */
- else if (TREE_CODE (from_type) == REAL_TYPE
- && TREE_CODE (to_type) == REAL_TYPE
+ else if (SCALAR_FLOAT_TYPE_P (from_type)
+ && SCALAR_FLOAT_TYPE_P (to_type)
&& TYPE_PRECISION (to_type) < TYPE_PRECISION (from_type))
give_warning = UNSAFE_REAL;
give_warning = UNSAFE_SIGN;
}
else if (TREE_CODE (from_type) == INTEGER_TYPE
- && TREE_CODE (to_type) == REAL_TYPE
+ && SCALAR_FLOAT_TYPE_P (to_type)
&& !int_safely_convertible_to_real_p (from_type, to_type))
give_warning = UNSAFE_OTHER;
}
if (c_inhibit_evaluation_warnings == 0
&& !TREE_OVERFLOW_P (expr)
- && result != error_mark_node)
+ && result != error_mark_node
+ && !c_hardbool_type_attr (type))
warnings_for_convert_and_check (loc, type, expr_for_warning, result);
return result;
case LSHIFT_EXPR:
case RSHIFT_EXPR:
- case COMPONENT_REF:
case ARRAY_REF:
if (cxx_dialect >= cxx17)
goto sequenced_binary;
goto do_default;
+ case COMPONENT_REF:
+ /* Treat as unary, the other operands aren't evaluated. */
+ x = TREE_OPERAND (x, 0);
+ writer = 0;
+ goto restart;
+
default:
do_default:
/* For other expressions, simply recurse on their operands.
if (value == NULL_TREE)
return value;
- if (TREE_CODE (value) == INTEGER_CST)
+ if (INTEGRAL_TYPE_P (TREE_TYPE (value))
+ && TREE_CODE (value) == INTEGER_CST)
/* Promote char or short to int. */
value = perform_integral_promotions (value);
else if (value != error_mark_node)
return (unsignedp ? widest_unsigned_literal_type_node
: widest_integer_literal_type_node);
+ for (tree t = registered_builtin_types; t; t = TREE_CHAIN (t))
+ {
+ tree type = TREE_VALUE (t);
+ if (TREE_CODE (type) == INTEGER_TYPE
+ && bits == TYPE_PRECISION (type)
+ && !!unsignedp == !!TYPE_UNSIGNED (type))
+ return type;
+ }
+
if (bits <= TYPE_PRECISION (intQI_type_node))
return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
else if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL
&& valid_vector_subparts_p (GET_MODE_NUNITS (mode)))
{
- unsigned int elem_bits = vector_element_size (GET_MODE_BITSIZE (mode),
+ unsigned int elem_bits = vector_element_size (GET_MODE_PRECISION (mode),
GET_MODE_NUNITS (mode));
tree bool_type = build_nonstandard_boolean_type (elem_bits);
return build_vector_type_for_mode (bool_type, mode);
|| TYPE_UNSIGNED (type) == unsignedp)
return type;
+ if (TREE_CODE (type) == BITINT_TYPE
+ /* signed _BitInt(1) is invalid, avoid creating that. */
+ && (unsignedp || TYPE_PRECISION (type) > 1))
+ return build_bitint_type (TYPE_PRECISION (type), unsignedp);
+
#define TYPE_OK(node) \
(TYPE_MODE (type) == TYPE_MODE (node) \
&& TYPE_PRECISION (type) == TYPE_PRECISION (node))
unsignedp1 = TYPE_UNSIGNED (TREE_TYPE (op1));
/* If one of the operands must be floated, we cannot optimize. */
- real1 = TREE_CODE (TREE_TYPE (primop0)) == REAL_TYPE;
- real2 = TREE_CODE (TREE_TYPE (primop1)) == REAL_TYPE;
+ real1 = SCALAR_FLOAT_TYPE_P (TREE_TYPE (primop0));
+ real2 = SCALAR_FLOAT_TYPE_P (TREE_TYPE (primop1));
/* If first arg is constant, swap the args (changing operation
so value is preserved), for canonicalization. Don't do this if
/* The result is a pointer of the same type that is being added. */
tree result_type = TREE_TYPE (ptrop);
- if (TREE_CODE (TREE_TYPE (result_type)) == VOID_TYPE)
+ if (VOID_TYPE_P (TREE_TYPE (result_type)))
{
if (complain && warn_pointer_arith)
pedwarn (loc, OPT_Wpointer_arith,
was used in anticipation of a possible overflow.
Furthermore, if we see an unsigned type here we know that the
result of the shift is not subject to integer promotion rules. */
- if (TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE
+ if ((TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE
+ || TREE_CODE (TREE_TYPE (expr)) == BITINT_TYPE)
&& !TYPE_UNSIGNED (TREE_TYPE (expr)))
warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context,
"%<<<%> in boolean context, did you mean %<<%>?");
{
tree op0 = TREE_OPERAND (expr, 0);
if ((TREE_CODE (fromtype) == POINTER_TYPE
- && TREE_CODE (totype) == INTEGER_TYPE)
+ && (TREE_CODE (totype) == INTEGER_TYPE
+ || TREE_CODE (totype) == BITINT_TYPE))
|| warning_suppressed_p (expr, OPT_Waddress))
/* Suppress -Waddress for casts to intptr_t, propagating
any suppression from the enclosing expression to its
goto ret;
}
- if (TREE_CODE (TREE_TYPE (expr)) == FIXED_POINT_TYPE)
+ if (FIXED_POINT_TYPE_P (TREE_TYPE (expr)))
{
tree fixed_zero_node = build_fixed (TREE_TYPE (expr),
FCONST0 (TYPE_MODE
if (!TYPE_P (t))
return -1;
- /* Unlike char, char8_t doesn't alias. */
- if (flag_char8_t && t == char8_type_node)
+ /* Unlike char, char8_t doesn't alias in C++. (In C, char8_t is not
+ a distinct type.) */
+ if (flag_char8_t && t == char8_type_node && c_dialect_cxx ())
return -1;
/* The C standard guarantees that any object may be accessed via an
- lvalue that has narrow character type (except char8_t). */
+ lvalue that has narrow character type. */
if (t == char_type_node
|| t == signed_char_type_node
|| t == unsigned_char_type_node)
/* The C standard specifically allows aliasing between signed and
unsigned variants of the same type. We treat the signed
variant as canonical. */
- if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
+ if ((TREE_CODE (t) == INTEGER_TYPE || TREE_CODE (t) == BITINT_TYPE)
+ && TYPE_UNSIGNED (t))
{
tree t1 = c_common_signed_type (t);
/* A helper function for c_common_nodes_and_builtins. Build function type
for DEF with return type RET and N arguments. If VAR is true, then the
- function should be variadic after those N arguments.
+ function should be variadic after those N arguments, or, if N is zero,
+ unprototyped.
Takes special care not to ICE if any of the types involved are
error_mark_node, which indicates that said type is not in fact available
if (t == error_mark_node)
goto egress;
if (var)
- t = build_varargs_function_type_array (t, n, args);
+ if (n == 0)
+ t = build_function_type (t, NULL_TREE);
+ else
+ t = build_varargs_function_type_array (t, n, args);
else
t = build_function_type_array (t, n, args);
char32_array_type_node
= build_array_type (char32_type_node, array_domain_type);
- wint_type_node =
- TREE_TYPE (identifier_global_value (get_identifier (WINT_TYPE)));
+ if (strcmp (WINT_TYPE, "wchar_t") == 0)
+ wint_type_node = wchar_type_node;
+ else
+ wint_type_node =
+ TREE_TYPE (identifier_global_value (get_identifier (WINT_TYPE)));
intmax_type_node =
TREE_TYPE (identifier_global_value (get_identifier (INTMAX_TYPE)));
uintptr_type_node =
TREE_TYPE (identifier_global_value (c_get_ident (UINTPTR_TYPE)));
- default_function_type
- = build_varargs_function_type_list (integer_type_node, NULL_TREE);
+ default_function_type = build_function_type (integer_type_node, NULL_TREE);
unsigned_ptrdiff_type_node = c_common_unsigned_type (ptrdiff_type_node);
lang_hooks.decls.pushdecl
builtin_define_with_value ("__SIZE_TYPE__", SIZE_TYPE, 0);
builtin_define_with_value ("__PTRDIFF_TYPE__", PTRDIFF_TYPE, 0);
builtin_define_with_value ("__WCHAR_TYPE__", MODIFIED_WCHAR_TYPE, 0);
- builtin_define_with_value ("__WINT_TYPE__", WINT_TYPE, 0);
+ /* C++ has wchar_t as a builtin type, C doesn't, so if WINT_TYPE
+ maps to wchar_t, define it to the underlying WCHAR_TYPE in C, and
+ to wchar_t in C++, so the desired type equivalence holds. */
+ if (!c_dialect_cxx ()
+ && strcmp (WINT_TYPE, "wchar_t") == 0)
+ builtin_define_with_value ("__WINT_TYPE__", WCHAR_TYPE, 0);
+ else
+ builtin_define_with_value ("__WINT_TYPE__", WINT_TYPE, 0);
builtin_define_with_value ("__INTMAX_TYPE__", INTMAX_TYPE, 0);
builtin_define_with_value ("__UINTMAX_TYPE__", UINTMAX_TYPE, 0);
if (flag_char8_t)
case BUILT_IN_ISLESSEQUAL:
case BUILT_IN_ISLESSGREATER:
case BUILT_IN_ISUNORDERED:
+ case BUILT_IN_ISEQSIG:
if (builtin_function_validate_nargs (loc, fndecl, nargs, 2))
{
enum tree_code code0, code1;
code0 = TREE_CODE (TREE_TYPE (args[0]));
code1 = TREE_CODE (TREE_TYPE (args[1]));
if (!((code0 == REAL_TYPE && code1 == REAL_TYPE)
- || (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
- || (code0 == INTEGER_TYPE && code1 == REAL_TYPE)))
+ || (code0 == REAL_TYPE
+ && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
+ || ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
+ && code1 == REAL_TYPE)))
{
error_at (loc, "non-floating-point arguments in call to "
"function %qE", fndecl);
case BUILT_IN_ASSUME_ALIGNED:
if (builtin_function_validate_nargs (loc, fndecl, nargs, 2 + (nargs > 2)))
{
- if (nargs >= 3 && TREE_CODE (TREE_TYPE (args[2])) != INTEGER_TYPE)
+ if (nargs >= 3
+ && TREE_CODE (TREE_TYPE (args[2])) != INTEGER_TYPE
+ && TREE_CODE (TREE_TYPE (args[2])) != BITINT_TYPE)
{
error_at (ARG_LOCATION (2), "non-integer argument 3 in call to "
"function %qE", fndecl);
}
if (TREE_CODE (TREE_TYPE (args[2])) == ENUMERAL_TYPE)
{
- error_at (ARG_LOCATION (2), "argument 3 in call to function "
- "%qE has enumerated type", fndecl);
+ error_at (ARG_LOCATION (2), "argument %u in call to function "
+ "%qE has enumerated type", 3, fndecl);
return false;
}
else if (TREE_CODE (TREE_TYPE (args[2])) == BOOLEAN_TYPE)
{
- error_at (ARG_LOCATION (2), "argument 3 in call to function "
- "%qE has boolean type", fndecl);
+ error_at (ARG_LOCATION (2), "argument %u in call to function "
+ "%qE has boolean type", 3, fndecl);
return false;
}
return true;
}
return false;
+ case BUILT_IN_CLZG:
+ case BUILT_IN_CTZG:
+ case BUILT_IN_CLRSBG:
+ case BUILT_IN_FFSG:
+ case BUILT_IN_PARITYG:
+ case BUILT_IN_POPCOUNTG:
+ if (nargs == 2
+ && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CLZG
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CTZG))
+ {
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (args[1])))
+ {
+ error_at (ARG_LOCATION (1), "argument %u in call to function "
+ "%qE does not have integral type", 2, fndecl);
+ return false;
+ }
+ if ((TYPE_PRECISION (TREE_TYPE (args[1]))
+ > TYPE_PRECISION (integer_type_node))
+ || (TYPE_PRECISION (TREE_TYPE (args[1]))
+ == TYPE_PRECISION (integer_type_node)
+ && TYPE_UNSIGNED (TREE_TYPE (args[1]))))
+ {
+ error_at (ARG_LOCATION (1), "argument %u in call to function "
+ "%qE does not have %<int%> type", 2, fndecl);
+ return false;
+ }
+ }
+ else if (!builtin_function_validate_nargs (loc, fndecl, nargs, 1))
+ return false;
+
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (args[0])))
+ {
+ error_at (ARG_LOCATION (0), "argument %u in call to function "
+ "%qE does not have integral type", 1, fndecl);
+ return false;
+ }
+ if (TREE_CODE (TREE_TYPE (args[0])) == ENUMERAL_TYPE)
+ {
+ error_at (ARG_LOCATION (0), "argument %u in call to function "
+ "%qE has enumerated type", 1, fndecl);
+ return false;
+ }
+ if (TREE_CODE (TREE_TYPE (args[0])) == BOOLEAN_TYPE)
+ {
+ error_at (ARG_LOCATION (0), "argument %u in call to function "
+ "%qE has boolean type", 1, fndecl);
+ return false;
+ }
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FFSG
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CLRSBG)
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (args[0])))
+ {
+ error_at (ARG_LOCATION (0), "argument 1 in call to function "
+ "%qE has unsigned type", fndecl);
+ return false;
+ }
+ }
+ else if (!TYPE_UNSIGNED (TREE_TYPE (args[0])))
+ {
+ error_at (ARG_LOCATION (0), "argument 1 in call to function "
+ "%qE has signed type", fndecl);
+ return false;
+ }
+ return true;
+
default:
return true;
}
{
diagnostic_info diagnostic;
diagnostic_t dlevel;
- bool save_warn_system_headers = global_dc->dc_warn_system_headers;
+ bool save_warn_system_headers = global_dc->m_warn_system_headers;
bool ret;
switch (level)
case CPP_DL_WARNING_SYSHDR:
if (flag_no_output)
return false;
- global_dc->dc_warn_system_headers = 1;
+ global_dc->m_warn_system_headers = 1;
/* Fall through. */
case CPP_DL_WARNING:
if (flag_no_output)
c_option_controlling_cpp_diagnostic (reason));
ret = diagnostic_report_diagnostic (global_dc, &diagnostic);
if (level == CPP_DL_WARNING_SYSHDR)
- global_dc->dc_warn_system_headers = save_warn_system_headers;
+ global_dc->m_warn_system_headers = save_warn_system_headers;
return ret;
}
definition thereof. */
if (TREE_CODE (v) == ARRAY_REF
|| TREE_CODE (v) == COMPONENT_REF)
- warning (OPT_Warray_bounds,
+ warning (OPT_Warray_bounds_,
"index %E denotes an offset "
"greater than size of %qT",
t, TREE_TYPE (TREE_OPERAND (expr, 0)));
/* A helper function for resolve_overloaded_builtin in resolving the
overloaded __sync_ builtins. Returns a positive power of 2 if the
first operand of PARAMS is a pointer to a supported data type.
- Returns 0 if an error is encountered.
+ Returns 0 if an error is encountered. Return -1 for _BitInt
+ __atomic*fetch* with unsupported type which should be handled by
+ a cas loop.
FETCH is true when FUNCTION is one of the _FETCH_OP_ or _OP_FETCH_
+ built-ins. ORIG_FORMAT is for __sync_* rather than __atomic_*
built-ins. */
static int
-sync_resolve_size (tree function, vec<tree, va_gc> *params, bool fetch)
+sync_resolve_size (tree function, vec<tree, va_gc> *params, bool fetch,
+ bool orig_format)
{
/* Type of the argument. */
tree argtype;
goto incompatible;
size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ if (size == 16
+ && fetch
+ && !orig_format
+ && TREE_CODE (type) == BITINT_TYPE
+ && !targetm.scalar_mode_supported_p (TImode))
+ return -1;
+
if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
return size;
+ if (fetch && !orig_format && TREE_CODE (type) == BITINT_TYPE)
+ return -1;
+
incompatible:
/* Issue the diagnostic only if the argument is valid, otherwise
it would be redundant at best and could be misleading. */
return 0;
}
else
- pedwarn (loc, OPT_Wincompatible_pointer_types, "argument %d "
+ pedwarn (loc, OPT_Wdiscarded_qualifiers, "argument %d "
"of %qE discards %<const%> qualifier", x + 1,
function);
}
return 0;
}
else
- pedwarn (loc, OPT_Wincompatible_pointer_types, "argument %d "
+ pedwarn (loc, OPT_Wdiscarded_qualifiers, "argument %d "
"of %qE discards %<volatile%> qualifier", x + 1,
function);
}
/* Convert object pointer to required type. */
p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
(*params)[0] = p0;
- /* Convert new value to required type, and dereference it. */
- p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR);
- p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1);
+ /* Convert new value to required type, and dereference it.
+ If *p1 type can have padding or may involve floating point which
+ could e.g. be promoted to wider precision and demoted afterwards,
+ state of padding bits might not be preserved. */
+ build_indirect_ref (loc, p1, RO_UNARY_STAR);
+ p1 = build2_loc (loc, MEM_REF, I_type,
+ build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1),
+ build_zero_cst (TREE_TYPE (p1)));
(*params)[1] = p1;
/* Move memory model to the 3rd position, and end param list. */
p1 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1);
(*params)[1] = p1;
- /* Convert desired value to required type, and dereference it. */
- p2 = build_indirect_ref (loc, p2, RO_UNARY_STAR);
- p2 = build1 (VIEW_CONVERT_EXPR, I_type, p2);
+ /* Convert desired value to required type, and dereference it.
+ If *p2 type can have padding or may involve floating point which
+ could e.g. be promoted to wider precision and demoted afterwards,
+ state of padding bits might not be preserved. */
+ build_indirect_ref (loc, p2, RO_UNARY_STAR);
+ p2 = build2_loc (loc, MEM_REF, I_type,
+ build1 (VIEW_CONVERT_EXPR, I_type_ptr, p2),
+ build_zero_cst (TREE_TYPE (p2)));
(*params)[2] = p2;
/* The rest of the parameters are fine. NULL means no special return value
}
+/* Emit __atomic*fetch* on _BitInt which doesn't have a size of
+ 1, 2, 4, 8 or 16 bytes using __atomic_compare_exchange loop.
+ ORIG_CODE is the DECL_FUNCTION_CODE of ORIG_FUNCTION and
+ ORIG_PARAMS arguments of the call. Returns a TARGET_EXPR whose
+ value is the fetched (or computed) _BitInt result, or
+ error_mark_node on invalid argument count. */
+
+static tree
+atomic_bitint_fetch_using_cas_loop (location_t loc,
+ enum built_in_function orig_code,
+ tree orig_function,
+ vec<tree, va_gc> *orig_params)
+{
+ /* ERROR_MARK is used as a marker for NAND, which has no tree code
+ of its own. */
+ enum tree_code code = ERROR_MARK;
+ bool return_old_p = false;
+ switch (orig_code)
+ {
+ case BUILT_IN_ATOMIC_ADD_FETCH_N:
+ code = PLUS_EXPR;
+ break;
+ case BUILT_IN_ATOMIC_SUB_FETCH_N:
+ code = MINUS_EXPR;
+ break;
+ case BUILT_IN_ATOMIC_AND_FETCH_N:
+ code = BIT_AND_EXPR;
+ break;
+ case BUILT_IN_ATOMIC_NAND_FETCH_N:
+ break;
+ case BUILT_IN_ATOMIC_XOR_FETCH_N:
+ code = BIT_XOR_EXPR;
+ break;
+ case BUILT_IN_ATOMIC_OR_FETCH_N:
+ code = BIT_IOR_EXPR;
+ break;
+ case BUILT_IN_ATOMIC_FETCH_ADD_N:
+ code = PLUS_EXPR;
+ return_old_p = true;
+ break;
+ case BUILT_IN_ATOMIC_FETCH_SUB_N:
+ code = MINUS_EXPR;
+ return_old_p = true;
+ break;
+ case BUILT_IN_ATOMIC_FETCH_AND_N:
+ code = BIT_AND_EXPR;
+ return_old_p = true;
+ break;
+ case BUILT_IN_ATOMIC_FETCH_NAND_N:
+ return_old_p = true;
+ break;
+ case BUILT_IN_ATOMIC_FETCH_XOR_N:
+ code = BIT_XOR_EXPR;
+ return_old_p = true;
+ break;
+ case BUILT_IN_ATOMIC_FETCH_OR_N:
+ code = BIT_IOR_EXPR;
+ return_old_p = true;
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ if (orig_params->length () != 3)
+ {
+ if (orig_params->length () < 3)
+ error_at (loc, "too few arguments to function %qE", orig_function);
+ else
+ error_at (loc, "too many arguments to function %qE", orig_function);
+ return error_mark_node;
+ }
+
+ tree stmts = push_stmt_list ();
+
+ tree nonatomic_lhs_type = TREE_TYPE (TREE_TYPE ((*orig_params)[0]));
+ nonatomic_lhs_type = TYPE_MAIN_VARIANT (nonatomic_lhs_type);
+ gcc_assert (TREE_CODE (nonatomic_lhs_type) == BITINT_TYPE);
+
+ tree lhs_addr = (*orig_params)[0];
+ tree val = convert (nonatomic_lhs_type, (*orig_params)[1]);
+ tree model = convert (integer_type_node, (*orig_params)[2]);
+ if (!c_dialect_cxx ())
+ {
+ lhs_addr = c_fully_fold (lhs_addr, false, NULL);
+ val = c_fully_fold (val, false, NULL);
+ model = c_fully_fold (model, false, NULL);
+ }
+ /* Evaluate side effects of the arguments exactly once, up front,
+ because each argument is used multiple times below. */
+ if (TREE_SIDE_EFFECTS (lhs_addr))
+ {
+ tree var = create_tmp_var_raw (TREE_TYPE (lhs_addr));
+ lhs_addr = build4 (TARGET_EXPR, TREE_TYPE (lhs_addr), var, lhs_addr,
+ NULL_TREE, NULL_TREE);
+ add_stmt (lhs_addr);
+ }
+ if (TREE_SIDE_EFFECTS (val))
+ {
+ tree var = create_tmp_var_raw (nonatomic_lhs_type);
+ val = build4 (TARGET_EXPR, nonatomic_lhs_type, var, val, NULL_TREE,
+ NULL_TREE);
+ add_stmt (val);
+ }
+ if (TREE_SIDE_EFFECTS (model))
+ {
+ tree var = create_tmp_var_raw (integer_type_node);
+ model = build4 (TARGET_EXPR, integer_type_node, var, model, NULL_TREE,
+ NULL_TREE);
+ add_stmt (model);
+ }
+
+ tree old = create_tmp_var_raw (nonatomic_lhs_type);
+ tree old_addr = build_unary_op (loc, ADDR_EXPR, old, false);
+ TREE_ADDRESSABLE (old) = 1;
+ suppress_warning (old);
+
+ tree newval = create_tmp_var_raw (nonatomic_lhs_type);
+ tree newval_addr = build_unary_op (loc, ADDR_EXPR, newval, false);
+ TREE_ADDRESSABLE (newval) = 1;
+ suppress_warning (newval);
+
+ tree loop_decl = create_artificial_label (loc);
+ tree loop_label = build1 (LABEL_EXPR, void_type_node, loop_decl);
+
+ tree done_decl = create_artificial_label (loc);
+ tree done_label = build1 (LABEL_EXPR, void_type_node, done_decl);
+
+ vec<tree, va_gc> *params;
+ vec_alloc (params, 6);
+
+ /* __atomic_load (addr, &old, RELAXED). A relaxed initial load is
+ sufficient because the compare-exchange below rejects any stale
+ value and retries. */
+ tree fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_LOAD);
+ params->quick_push (lhs_addr);
+ params->quick_push (old_addr);
+ params->quick_push (build_int_cst (integer_type_node, MEMMODEL_RELAXED));
+ tree func_call = resolve_overloaded_builtin (loc, fndecl, params);
+ if (func_call == NULL_TREE)
+ func_call = build_function_call_vec (loc, vNULL, fndecl, params, NULL);
+ old = build4 (TARGET_EXPR, nonatomic_lhs_type, old, func_call, NULL_TREE,
+ NULL_TREE);
+ add_stmt (old);
+ params->truncate (0);
+
+ /* loop: */
+ add_stmt (loop_label);
+
+ /* newval = old + val; */
+ tree rhs;
+ switch (code)
+ {
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ if (!TYPE_OVERFLOW_WRAPS (nonatomic_lhs_type))
+ {
+ /* Perform wrapping arithmetic in the unsigned variant to
+ avoid introducing signed-overflow UB into the expansion. */
+ tree utype
+ = build_bitint_type (TYPE_PRECISION (nonatomic_lhs_type), 1);
+ rhs = convert (nonatomic_lhs_type,
+ build2_loc (loc, code, utype,
+ convert (utype, old),
+ convert (utype, val)));
+ }
+ else
+ rhs = build2_loc (loc, code, nonatomic_lhs_type, old, val);
+ break;
+ case BIT_AND_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ rhs = build2_loc (loc, code, nonatomic_lhs_type, old, val);
+ break;
+ case ERROR_MARK:
+ /* NAND: newval = ~(old & val), matching the documented
+ __atomic_nand_fetch semantics. */
+ rhs = build1_loc (loc, BIT_NOT_EXPR, nonatomic_lhs_type,
+ build2_loc (loc, BIT_AND_EXPR,
+ nonatomic_lhs_type, old, val));
+ break;
+ default:
+ gcc_unreachable ();
+ }
+ rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, rhs, NULL_TREE,
+ NULL_TREE);
+ SET_EXPR_LOCATION (rhs, loc);
+ add_stmt (rhs);
+
+ /* if (__atomic_compare_exchange (addr, &old, &new, false, model, model))
+ goto done; */
+ fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_COMPARE_EXCHANGE);
+ params->quick_push (lhs_addr);
+ params->quick_push (old_addr);
+ params->quick_push (newval_addr);
+ params->quick_push (integer_zero_node);
+ params->quick_push (model);
+ /* The failure memory model may not be release or acq_rel; demote
+ those to relaxed for the failure argument. */
+ if (tree_fits_uhwi_p (model)
+ && (tree_to_uhwi (model) == MEMMODEL_RELEASE
+ || tree_to_uhwi (model) == MEMMODEL_ACQ_REL))
+ params->quick_push (build_int_cst (integer_type_node, MEMMODEL_RELAXED));
+ else
+ params->quick_push (model);
+ func_call = resolve_overloaded_builtin (loc, fndecl, params);
+ if (func_call == NULL_TREE)
+ func_call = build_function_call_vec (loc, vNULL, fndecl, params, NULL);
+
+ tree goto_stmt = build1 (GOTO_EXPR, void_type_node, done_decl);
+ SET_EXPR_LOCATION (goto_stmt, loc);
+
+ tree stmt
+ = build3 (COND_EXPR, void_type_node, func_call, goto_stmt, NULL_TREE);
+ SET_EXPR_LOCATION (stmt, loc);
+ add_stmt (stmt);
+
+ /* goto loop; */
+ goto_stmt = build1 (GOTO_EXPR, void_type_node, loop_decl);
+ SET_EXPR_LOCATION (goto_stmt, loc);
+ add_stmt (goto_stmt);
+
+ /* done: */
+ add_stmt (done_label);
+
+ tree ret = create_tmp_var_raw (nonatomic_lhs_type);
+ stmt = build2_loc (loc, MODIFY_EXPR, void_type_node, ret,
+ return_old_p ? old : newval);
+ add_stmt (stmt);
+
+ /* Finish the compound statement. */
+ stmts = pop_stmt_list (stmts);
+
+ return build4 (TARGET_EXPR, nonatomic_lhs_type, ret, stmts, NULL_TREE,
+ NULL_TREE);
+}
+
+
/* Some builtin functions are placeholders for other expressions. This
function should be called immediately after parsing the call expression
before surrounding code has committed to the type of the expression.
/* The following are not _FETCH_OPs and must be accepted with
pointers to _Bool (or C++ bool). */
if (fetch_op)
- fetch_op =
- (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N
- && orig_code != BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
- && orig_code != BUILT_IN_SYNC_LOCK_TEST_AND_SET_N
- && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N);
+ fetch_op = (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N
+ && orig_code != BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
+ && orig_code != BUILT_IN_SYNC_LOCK_TEST_AND_SET_N
+ && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N);
- int n = sync_resolve_size (function, params, fetch_op);
+ int n = sync_resolve_size (function, params, fetch_op, orig_format);
tree new_function, first_param, result;
enum built_in_function fncode;
if (n == 0)
return error_mark_node;
+ if (n == -1)
+ return atomic_bitint_fetch_using_cas_loop (loc, orig_code,
+ function, params);
+
fncode = (enum built_in_function)((int)orig_code + exact_log2 (n) + 1);
new_function = builtin_decl_explicit (fncode);
if (!sync_resolve_params (loc, function, new_function, params,
if (new_return)
{
/* Cast function result from I{1,2,4,8,16} to the required type. */
- result = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (new_return), result);
+ if (TREE_CODE (TREE_TYPE (new_return)) == BITINT_TYPE)
+ {
+ struct bitint_info info;
+ unsigned prec = TYPE_PRECISION (TREE_TYPE (new_return));
+ targetm.c.bitint_type_info (prec, &info);
+ if (!info.extended)
+ /* For _BitInt which has the padding bits undefined
+ convert to the _BitInt type rather than VCE to force
+ zero or sign extension. */
+ result = build1 (NOP_EXPR, TREE_TYPE (new_return), result);
+ }
+ result
+ = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (new_return), result);
result = build2 (MODIFY_EXPR, TREE_TYPE (new_return), new_return,
result);
TREE_SIDE_EFFECTS (result) = 1;
case RID_FRACT:
case RID_ACCUM:
case RID_BOOL:
+ case RID_BITINT:
case RID_WCHAR:
case RID_CHAR8:
case RID_CHAR16:
MARK_TS_EXP (FOR_STMT);
MARK_TS_EXP (SWITCH_STMT);
MARK_TS_EXP (WHILE_STMT);
+
+ MARK_TS_DECL_COMMON (CONCEPT_DECL);
}
/* Build a user-defined numeric literal out of an integer constant type VALUE
if (TREE_CODE (index) == INTEGER_CST)
if (!tree_fits_uhwi_p (index)
|| maybe_ge (tree_to_uhwi (index), TYPE_VECTOR_SUBPARTS (type)))
- warning_at (loc, OPT_Warray_bounds, "index value is out of bound");
+ warning_at (loc, OPT_Warray_bounds_, "index value is out of bound");
/* We are building an ARRAY_REF so mark the vector as addressable
to not run into the gimplifiers premature setting of DECL_GIMPLE_REG_P
}
else if (!integer_only_op
/* Allow integer --> real conversion if safe. */
- && (TREE_CODE (type0) == REAL_TYPE
+ && (SCALAR_FLOAT_TYPE_P (type0)
|| TREE_CODE (type0) == INTEGER_TYPE)
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (type1)))
{
richloc->add_fixit_insert_before (include_insert_loc, text);
free (text);
- if (override_location && global_dc->show_caret)
+ if (override_location && global_dc->m_source_printing.enabled)
{
/* Replace the primary location with that of the insertion point for the
fix-it hint.
(*debug_hooks->early_global_decl) (cnode->decl);
}
+/* Get the LEVEL of the strict_flex_array for the ARRAY_FIELD based on the
+   values of attribute strict_flex_array and the flag_strict_flex_arrays.
+   ARRAY_FIELD must be a FIELD_DECL.  The attribute, when present on the
+   field, takes precedence over the command-line flag.  */
+unsigned int
+c_strict_flex_array_level_of (tree array_field)
+{
+  gcc_assert (TREE_CODE (array_field) == FIELD_DECL);
+  /* Start from the command-line flag; the attribute may override below.  */
+  unsigned int strict_flex_array_level = flag_strict_flex_arrays;
+
+  tree attr_strict_flex_array
+    = lookup_attribute ("strict_flex_array", DECL_ATTRIBUTES (array_field));
+  /* If there is a strict_flex_array attribute attached to the field,
+     override the flag_strict_flex_arrays.  */
+  if (attr_strict_flex_array)
+    {
+      /* Get the value of the level first from the attribute.  The
+	 attribute node is (name . args); unwrap twice to reach the
+	 INTEGER_CST argument: once to the argument list, once to the
+	 first argument.  */
+      unsigned HOST_WIDE_INT attr_strict_flex_array_level = 0;
+      gcc_assert (TREE_VALUE (attr_strict_flex_array) != NULL_TREE);
+      attr_strict_flex_array = TREE_VALUE (attr_strict_flex_array);
+      gcc_assert (TREE_VALUE (attr_strict_flex_array) != NULL_TREE);
+      attr_strict_flex_array = TREE_VALUE (attr_strict_flex_array);
+      gcc_assert (tree_fits_uhwi_p (attr_strict_flex_array));
+      attr_strict_flex_array_level = tree_to_uhwi (attr_strict_flex_array);
+
+      /* The attribute has higher priority than flag_strict_flex_arrays.  */
+      strict_flex_array_level = attr_strict_flex_array_level;
+    }
+  return strict_flex_array_level;
+}
+
+/* Identifier -> bool map backing __has_{feature,extension}.  A true
+   value marks a full feature; false marks an extension-only entry.  */
+
+using feature_map_t = hash_map <tree, bool>;
+static feature_map_t *feature_map;
+
+/* Record NAME in the __has_{feature,extension} table.  FEATURE_P
+   distinguishes a proper feature (true) from a mere extension
+   (false).  */
+
+void
+c_common_register_feature (const char *name, bool feature_p)
+{
+  /* Keep the put outside the assert: gcc_checking_assert may compile
+     to nothing in release builds, but the insertion must still run.  */
+  bool existed = feature_map->put (get_identifier (name), feature_p);
+  gcc_checking_assert (!existed);
+}
+
+/* Lazily initialize hash table for __has_{feature,extension},
+   dispatching to the appropriate front end to register language-specific
+   features.  */
+
+static void
+init_has_feature ()
+{
+  gcc_checking_assert (!feature_map);
+  feature_map = new feature_map_t;
+
+  /* Seed the map with the generic entries, skipping any
+     sanitizer-conditional entry whose sanitizer is not active.  */
+  for (const hf_feature_info &info : has_feature_table)
+    {
+      if ((info.flags & HF_FLAG_SANITIZE) && !(flag_sanitize & info.mask))
+	continue;
+
+      /* Extension-only entries are registered with feature_p false.  */
+      c_common_register_feature (info.ident, !(info.flags & HF_FLAG_EXT));
+    }
+
+  /* Register language-specific features.  */
+  c_family_register_lang_features ();
+}
+
+/* If STRICT_P is true, evaluate __has_feature (IDENT).
+   Otherwise, evaluate __has_extension (IDENT).  */
+
+bool
+has_feature_p (const char *ident, bool strict_p)
+{
+  /* Build the table on first query.  */
+  if (!feature_map)
+    init_has_feature ();
+
+  tree name = canonicalize_attr_name (get_identifier (ident));
+  /* Unknown identifiers satisfy neither query; for known ones,
+     __has_extension accepts everything while __has_feature accepts
+     only entries registered as full features.  */
+  if (bool *feat_p = feature_map->get (name))
+    return !strict_p || *feat_p;
+  return false;
+}
+
+/* This is the slow path of c-common.h's c_hardbool_type_attr.
+   Return the "hardbool" attribute on TYPE, or NULL_TREE if absent.
+   When present, optionally store the type's false and true
+   representation values through FALSE_VALUE and TRUE_VALUE.  */
+
+tree
+c_hardbool_type_attr_1 (tree type, tree *false_value, tree *true_value)
+{
+  tree attr = lookup_attribute ("hardbool", TYPE_ATTRIBUTES (type));
+  if (attr)
+    {
+      /* The two enumerators hold the false and true representations,
+	 in that order.  */
+      tree values = TYPE_VALUES (type);
+      if (false_value)
+	*false_value = TREE_VALUE (values);
+      if (true_value)
+	*true_value = TREE_VALUE (TREE_CHAIN (values));
+    }
+  return attr;
+}
+
#include "gt-c-family-c-common.h"