if (!as || as->affects_type_identity == false)
continue;
- attr = find_same_attribute (a, CONST_CAST_TREE (a2));
+ attr = find_same_attribute (a, const_cast<tree> (a2));
if (!attr || !attribute_value_equal (a, attr))
break;
}
if (!as || as->affects_type_identity == false)
continue;
- if (!find_same_attribute (a, CONST_CAST_TREE (a1)))
+ if (!find_same_attribute (a, const_cast<tree> (a1)))
break;
/* We don't need to compare trees again, as we did this
already in first loop. */
if (!a)
return 1;
}
- if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
+ if (lookup_attribute ("transaction_safe", const_cast<tree> (a)))
return 0;
if ((lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (type1)) != NULL)
^ (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (type2)) != NULL))
return 0;
- int strub_ret = strub_comptypes (CONST_CAST_TREE (type1),
- CONST_CAST_TREE (type2));
+ int strub_ret = strub_comptypes (const_cast<tree> (type1),
+ const_cast<tree> (type2));
if (strub_ret == 0)
return strub_ret;
/* As some type combinations - like default calling-convention - might
modify its argument and the return value is assigned to a
const_tree. */
for (attr = lookup_ident_attribute (get_attribute_name (t2),
- CONST_CAST_TREE (l1));
+ const_cast<tree> (l1));
attr != NULL_TREE && !attribute_value_equal (t2, attr);
attr = lookup_ident_attribute (get_attribute_name (t2),
TREE_CHAIN (attr)))
location_t
expr_loc_or_loc (const_tree expr, location_t or_loc)
{
- tree t = CONST_CAST_TREE (expr);
+ tree t = const_cast<tree> (expr);
location_t loc = UNKNOWN_LOCATION;
if (t)
loc = EXPR_LOCATION (t);
The Windows Arm64 ABI uses just an address of the first variadic
argument. */
if (!TARGET_AARCH64_MS_ABI
- && lang_hooks.types_compatible_p (CONST_CAST_TREE (type), va_list_type))
+ && lang_hooks.types_compatible_p (const_cast<tree> (type), va_list_type))
return "St9__va_list";
/* Half-precision floating point types. */
/* Local functions never leak outside this compilation unit,
so we are free to use whatever conventions are
appropriate. */
- /* FIXME: remove CONST_CAST_TREE when cgraph is constified. */
+ /* FIXME: remove const_cast<tree> when cgraph is constified. */
cgraph_node *local_info_node
- = cgraph_node::local_info_node (CONST_CAST_TREE (decl));
+ = cgraph_node::local_info_node (const_cast<tree> (decl));
if (local_info_node && local_info_node->local)
return ARM_PCS_AAPCS_LOCAL;
}
/* The ARM ABI documents (10th October 2008) say that "__va_list"
has to be managled as if it is in the "std" namespace. */
if (TARGET_AAPCS_BASED
- && lang_hooks.types_compatible_p (CONST_CAST_TREE (type), va_list_type))
+ && lang_hooks.types_compatible_p (const_cast<tree> (type), va_list_type))
return "St9__va_list";
/* Half-precision floating point types. */
if (is_str)
{
bool for_asan = (flag_sanitize & SANITIZE_ADDRESS)
- && asan_protect_global (CONST_CAST_TREE (decl));
+ && asan_protect_global (const_cast<tree> (decl));
/* When we are generating code for sanitized strings, the string
internal symbols are made visible in the object. */
buf = xasprintf ("*%c.str.%s", for_asan ? 'l' : 'L', &name[3]);
if (flag_sanitize & SANITIZE_ADDRESS)
{
if (TREE_CODE (decl) == STRING_CST
- && asan_protect_global (CONST_CAST_TREE (decl)))
+ && asan_protect_global (const_cast<tree> (decl)))
{
return darwin_sections[asan_string_section];
}
case SECCAT_RODATA_MERGE_STR_INIT:
if ((flag_sanitize & SANITIZE_ADDRESS)
- && asan_protect_global (CONST_CAST_TREE (decl)))
+ && asan_protect_global (const_cast<tree> (decl)))
/* or !flag_merge_constants */
return darwin_sections[asan_string_section];
else
if (i == 0 && first_arg != NULL_TREE)
arg = first_arg;
else
- arg = CONST_CAST_TREE (
+ arg = const_cast<tree> (
(*args)[i + skip - (first_arg != NULL_TREE ? 1 : 0)]);
argtype = lvalue_type (arg);
gcc_assert (DECL_P (t));
if (TREE_CODE (t) == TEMPLATE_DECL)
t = DECL_TEMPLATE_RESULT (t);
- tree* found = decl_constraints->get (CONST_CAST_TREE (t));
+ tree* found = decl_constraints->get (const_cast<tree> (t));
if (found)
return *found;
else
static tree
fold_cplus_constants (const_tree c)
{
- tree folded_elems, elems = CONST_CAST_TREE (c);
+ tree folded_elems, elems = const_cast<tree> (c);
int vec_len, i;
if (elems == NULL_TREE || elems == error_mark_node)
the debug info depend on the collection points. */
if (dtype)
{
- tree ktype = CONST_CAST_TREE (type);
+ tree ktype = const_cast<tree> (type);
if (tree *slot = hash_map_safe_get (debug_type_map, ktype))
return *slot;
hash_map_safe_put<hm_ggc> (debug_type_map, ktype, dtype);
DECL_INITIAL (for a PARM_DECL) or as the TREE_TYPE (for a
TYPE_DECL).
- FIXME: CONST_CAST_TREE is a hack that hopefully will go away after
+ FIXME: const_cast<tree> is a hack that hopefully will go away after
tree is converted to C++ class hiearchy. */
#define DECL_TEMPLATE_PARMS(NODE) \
- ((struct tree_template_decl *)CONST_CAST_TREE (TEMPLATE_DECL_CHECK (NODE)))->arguments
+ ((struct tree_template_decl *)const_cast<tree> (TEMPLATE_DECL_CHECK \
+ (NODE)))->arguments
#define DECL_INNERMOST_TEMPLATE_PARMS(NODE) \
INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (NODE))
#define DECL_NTPARMS(NODE) \
TREE_VEC_LENGTH (DECL_INNERMOST_TEMPLATE_PARMS (NODE))
/* For function, method, class-data templates.
- FIXME: CONST_CAST_TREE is a hack that hopefully will go away after
+ FIXME: const_cast<tree> is a hack that hopefully will go away after
tree is converted to C++ class hiearchy. */
#define DECL_TEMPLATE_RESULT(NODE) \
- ((struct tree_template_decl *)CONST_CAST_TREE(TEMPLATE_DECL_CHECK (NODE)))->result
+ ((struct tree_template_decl *)const_cast<tree> (TEMPLATE_DECL_CHECK \
+ (NODE)))->result
/* For a forward-declared function template at namespace scope, or for any
function template in an exporting module, DECL_TEMPLATE_INSTANTIATIONS lists
all instantiations and specializations of the function so that
inline location_t
cp_expr_location (const_tree t_)
{
- tree t = CONST_CAST_TREE (t_);
+ tree t = const_cast<tree> (t_);
if (t == NULL_TREE)
return UNKNOWN_LOCATION;
switch (TREE_CODE (t))
return false;
return (VAR_P (t)
- && decl_storage_duration (CONST_CAST_TREE (t)) == dk_auto
+ && decl_storage_duration (const_cast<tree> (t)) == dk_auto
&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (t)));
}
{
tree dom
= compute_array_index_type (NULL_TREE,
- CONST_CAST_TREE (cst_outer_nelts),
+ const_cast<tree> (cst_outer_nelts),
complain);
ttype = build_cplus_array_type (type, dom);
tree ptype = build_pointer_type (ttype);
{
if (tree tinfo = TYPE_ALIAS_TEMPLATE_INFO (t))
if (PRIMARY_TEMPLATE_P (TI_TEMPLATE (tinfo)))
- return CONST_CAST_TREE (t);
+ return const_cast<tree> (t);
if (transparent_typedefs && !dependent_opaque_alias_p (t))
return alias_template_specialization_p (DECL_ORIGINAL_TYPE
(TYPE_NAME (t)),
return false;
/* A renaming alias isn't complex. */
- if (get_underlying_template (CONST_CAST_TREE (tmpl)) != tmpl)
+ if (get_underlying_template (const_cast<tree> (tmpl)) != tmpl)
return false;
/* Any other constrained alias is complex. */
if (!seen)
{
if (any_dependent_template_arguments_p (args))
- return CONST_CAST_TREE (t);
+ return const_cast<tree> (t);
}
else
{
for (int i = 0, len = TREE_VEC_LENGTH (args); i < len; ++i)
if (TREE_VEC_ELT (seen, i) != boolean_true_node
&& dependent_template_arg_p (TREE_VEC_ELT (args, i)))
- return CONST_CAST_TREE (t);
+ return const_cast<tree> (t);
}
return NULL_TREE;
decl = DECL_TI_TEMPLATE (decl);
}
- return CONST_CAST_TREE (decl);
+ return const_cast<tree> (decl);
}
/* Return the most specialized of the template partial specializations
for (int i = 0, depth = TMPL_ARGS_DEPTH (args); i < depth; ++i)
{
const_tree level = TMPL_ARGS_LEVEL (args, i + 1);
- for (tree arg : tree_vec_range (CONST_CAST_TREE (level)))
+ for (tree arg : tree_vec_range (const_cast<tree> (level)))
if (dependent_template_arg_p (arg))
return true;
}
eval_has_static_storage_duration (const_tree r, reflect_kind kind)
{
if (eval_is_variable (r, kind) == boolean_true_node
- && decl_storage_duration (CONST_CAST_TREE (r)) == dk_static)
+ && decl_storage_duration (const_cast<tree> (r)) == dk_static)
return boolean_true_node;
/* This includes DECL_NTTP_OBJECT_P objects. */
else if (eval_is_object (kind) == boolean_true_node)
eval_has_thread_storage_duration (const_tree r, reflect_kind kind)
{
if (eval_is_variable (r, kind) == boolean_true_node
- && decl_storage_duration (CONST_CAST_TREE (r)) == dk_thread)
+ && decl_storage_duration (const_cast<tree> (r)) == dk_thread)
return boolean_true_node;
else
return boolean_false_node;
eval_has_automatic_storage_duration (const_tree r, reflect_kind kind)
{
if (eval_is_variable (r, kind) == boolean_true_node
- && decl_storage_duration (CONST_CAST_TREE (r)) == dk_auto)
+ && decl_storage_duration (const_cast<tree> (r)) == dk_auto)
return boolean_true_node;
else
return boolean_false_node;
/* FALLTHRU */
case VAR_DECL:
if (VAR_P (ref) && DECL_HAS_VALUE_EXPR_P (ref))
- return lvalue_kind (DECL_VALUE_EXPR (CONST_CAST_TREE (ref)));
+ return lvalue_kind (DECL_VALUE_EXPR (const_cast<tree> (ref)));
if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
&& DECL_LANG_SPECIFIC (ref)
/* A scope ref in a template, left as SCOPE_REF to support later
access checking. */
case SCOPE_REF:
- gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE (ref)));
+ gcc_assert (!type_dependent_expression_p (const_cast<tree> (ref)));
{
tree op = TREE_OPERAND (ref, 1);
if (TREE_CODE (op) == FIELD_DECL)
point, we know we got a plain rvalue. Unless we have a
type-dependent expr, that is, but we shouldn't be testing
lvalueness if we can't even tell the types yet! */
- gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE (ref)));
+ gcc_assert (!type_dependent_expression_p (const_cast<tree> (ref)));
goto default_;
}
{
with a BASELINK. */
/* This CONST_CAST is okay because BASELINK_FUNCTIONS returns
its argument unmodified and we assign it to a const_tree. */
- return lvalue_kind (BASELINK_FUNCTIONS (CONST_CAST_TREE (ref)));
+ return lvalue_kind (BASELINK_FUNCTIONS (const_cast<tree> (ref)));
case PAREN_EXPR:
return lvalue_kind (TREE_OPERAND (ref, 0));
bool
type_has_nontrivial_default_init (const_tree t)
{
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (CLASS_TYPE_P (t))
return TYPE_HAS_COMPLEX_DFLT (t);
{
if (!deleted_copy_types)
deleted_copy_types = hash_set<tree>::create_ggc(37);
- deleted_copy_types->add (CONST_CAST_TREE (t));
+ deleted_copy_types->add (const_cast<tree> (t));
}
void
maybe_warn_parm_abi (tree t, location_t loc)
bool
type_has_nontrivial_copy_init (const_tree type)
{
- tree t = strip_array_types (CONST_CAST_TREE (type));
+ tree t = strip_array_types (const_cast<tree> (type));
if (CLASS_TYPE_P (t))
{
bool
trivially_copyable_p (const_tree t)
{
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (CLASS_TYPE_P (t))
return ((!TYPE_HAS_COPY_CTOR (t)
bool
trivial_type_p (const_tree t)
{
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (CLASS_TYPE_P (t))
/* A trivial class is a class that is trivially copyable and has one or
more eligible default constructors, all of which are trivial. */
- return (type_has_non_deleted_trivial_default_ctor (CONST_CAST_TREE (t))
+ return (type_has_non_deleted_trivial_default_ctor (const_cast<tree> (t))
&& trivially_copyable_p (t));
else
return scalarish_type_p (t);
{
/* This CONST_CAST is okay because strip_array_types returns its
argument unmodified and we assign it to a const_tree. */
- t = strip_array_types (CONST_CAST_TREE(t));
+ t = strip_array_types (const_cast<tree> (t));
if (!CLASS_TYPE_P (t))
return scalarish_type_p (t);
bool
layout_pod_type_p (const_tree t)
{
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (CLASS_TYPE_P (t))
return !CLASSTYPE_NON_LAYOUT_POD_P (t);
bool
std_layout_type_p (const_tree t)
{
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (CLASS_TYPE_P (t))
return !CLASSTYPE_NON_STD_LAYOUT (t);
{
bool ret;
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (t == error_mark_node)
return false;
{
/* This CONST_CAST is okay because strip_array_types returns its
argument unmodified and we assign it to a const_tree. */
- t = strip_array_types (CONST_CAST_TREE(t));
+ t = strip_array_types (const_cast<tree> (t));
if (t == error_mark_node)
return 1;
tree
cxx_copy_lang_qualifiers (const_tree typea, const_tree typeb)
{
- tree type = CONST_CAST_TREE (typea);
+ tree type = const_cast<tree> (typea);
if (FUNC_OR_METHOD_TYPE_P (type))
type = build_cp_fntype_variant (type, type_memfn_rqual (typeb),
TYPE_RAISES_EXCEPTIONS (typeb),
if (exact < ce_exact)
{
if (exact == ce_type
- && (canonical_eh_spec (CONST_CAST_TREE (t1))
- == canonical_eh_spec (CONST_CAST_TREE (t2))))
+ && (canonical_eh_spec (const_cast<tree> (t1))
+ == canonical_eh_spec (const_cast<tree> (t2))))
return true;
/* noexcept(false) is compatible with no exception-specification,
case VAR_DECL:
if (DECL_HAS_VALUE_EXPR_P (exp))
return is_bitfield_expr_with_lowered_type (DECL_VALUE_EXPR
- (CONST_CAST_TREE (exp)));
+ (const_cast<tree> (exp)));
return NULL_TREE;
case VIEW_CONVERT_EXPR:
int quals;
/* This CONST_CAST is okay because strip_array_types returns its
argument unmodified and we assign it to a const_tree. */
- type = strip_array_types (CONST_CAST_TREE (type));
+ type = strip_array_types (const_cast<tree> (type));
if (type == error_mark_node
/* Quals on a FUNCTION_TYPE are memfn quals. */
|| TREE_CODE (type) == FUNCTION_TYPE)
{
/* This CONST_CAST is okay because strip_array_types returns its
argument unmodified and we assign it to a const_tree. */
- type = strip_array_types (CONST_CAST_TREE(type));
+ type = strip_array_types (const_cast<tree> (type));
return CLASS_TYPE_P (type) && CLASSTYPE_HAS_MUTABLE (type);
}
return;
location_t loc = DECL_SOURCE_LOCATION (TYPE_MAIN_DECL (type));
- tree ptype = strip_top_quals (CONST_CAST_TREE (type));
+ tree ptype = strip_top_quals (const_cast<tree> (type));
/* When defining a template, current_class_type will be the pattern on
the template definition, while non-self-reference usages of this
&& (TREE_TYPE (TREE_OPERAND (arg0, 0))
== TREE_TYPE (TREE_OPERAND (arg1, 0))
|| (operand_equal_p (array_ref_low_bound
- (CONST_CAST_TREE (arg0)),
+ (const_cast<tree> (arg0)),
array_ref_low_bound
- (CONST_CAST_TREE (arg1)), flags)
+ (const_cast<tree> (arg1)),
+ flags)
&& operand_equal_p (array_ref_element_size
- (CONST_CAST_TREE (arg0)),
+ (const_cast<tree> (arg0)),
array_ref_element_size
- (CONST_CAST_TREE (arg1)),
+ (const_cast<tree> (arg1)),
flags))));
case COMPONENT_REF:
{
/* Compare the STATEMENT_LISTs. */
tree_stmt_iterator tsi1, tsi2;
- tree body1 = CONST_CAST_TREE (arg0);
- tree body2 = CONST_CAST_TREE (arg1);
+ tree body1 = const_cast<tree> (arg0);
+ tree body2 = const_cast<tree> (arg1);
for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
tsi_next (&tsi1), tsi_next (&tsi2))
{
case STATEMENT_LIST:
{
tree_stmt_iterator i;
- for (i = tsi_start (CONST_CAST_TREE (t));
+ for (i = tsi_start (const_cast<tree> (t));
!tsi_end_p (i); tsi_next (&i))
hash_operand (tsi_stmt (i), hstate, flags);
return;
bool
is_gimple_min_lval (tree t)
{
- if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
+ if (!(t = const_cast<tree> (strip_invariant_refs (t))))
return false;
return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}
{
const_tree t1 = (const_tree) p1;
const_tree t2 = (const_tree) p2;
- return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
- CONST_CAST_TREE (t2));
+ return gimple_canonical_types_compatible_p (const_cast<tree> (t1),
+ const_cast<tree> (t2));
}
/* Main worker for gimple_register_canonical_type. */
print_mem_expr (FILE *outfile, const_tree expr)
{
fputc (' ', outfile);
- print_generic_expr (outfile, CONST_CAST_TREE (expr),
- dump_flags | TDF_SLIM);
+ print_generic_expr (outfile, const_cast<tree> (expr), dump_flags | TDF_SLIM);
}
#endif
location_t
rs_expr_location (const_tree t_)
{
- tree t = CONST_CAST_TREE (t_);
+ tree t = const_cast<tree> (t_);
if (t == NULL_TREE)
return UNKNOWN_LOCATION;
int quals;
/* This CONST_CAST is okay because strip_array_types returns its
argument unmodified and we assign it to a const_tree. */
- type = strip_array_types (CONST_CAST_TREE (type));
+ type = strip_array_types (const_cast<tree> (type));
if (type == error_mark_node
/* Quals on a FUNCTION_TYPE are memfn quals. */
|| TREE_CODE (type) == FUNCTION_TYPE)
if (exact < ce_exact)
{
if (exact == ce_type
- && (canonical_eh_spec (CONST_CAST_TREE (t1))
- == canonical_eh_spec (CONST_CAST_TREE (t2))))
+ && (canonical_eh_spec (const_cast<tree> (t1))
+ == canonical_eh_spec (const_cast<tree> (t2))))
return true;
/* noexcept(false) is compatible with no exception-specification,
case VAR_DECL:
if (DECL_HAS_VALUE_EXPR_P (exp))
return is_bitfield_expr_with_lowered_type (
- DECL_VALUE_EXPR (CONST_CAST_TREE (exp)));
+ DECL_VALUE_EXPR (const_cast<tree> (exp)));
return NULL_TREE;
case VIEW_CONVERT_EXPR:
// return;
// location_t loc = DECL_SOURCE_LOCATION (TYPE_MAIN_DECL (type));
-// tree ptype = strip_top_quals (CONST_CAST_TREE (type));
+// tree ptype = strip_top_quals (const_cast<tree> (type));
// if (current_class_type && TYPE_BEING_DEFINED (current_class_type)
// && same_type_p (ptype, current_class_type))
/* FALLTHRU */
case VAR_DECL:
if (VAR_P (ref) && DECL_HAS_VALUE_EXPR_P (ref))
- return lvalue_kind (DECL_VALUE_EXPR (CONST_CAST_TREE (ref)));
+ return lvalue_kind (DECL_VALUE_EXPR (const_cast<tree> (ref)));
if (TREE_READONLY (ref) && !TREE_STATIC (ref) && DECL_LANG_SPECIFIC (ref)
&& DECL_IN_AGGR_P (ref))
bool
std_layout_type_p (const_tree t)
{
- t = strip_array_types (CONST_CAST_TREE (t));
+ t = strip_array_types (const_cast<tree> (t));
if (CLASS_TYPE_P (t))
return !CLASSTYPE_NON_STD_LAYOUT (t);
t = TREE_OPERAND (expr, 1);
tree low_bound, unit_size;
- low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
- unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
+ low_bound = array_ref_low_bound (const_cast<tree> (expr));
+ unit_size = array_ref_element_size (const_cast<tree> (expr));
if (! integer_zerop (low_bound))
t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
case COMPOUND_EXPR:
return tree_ctz (TREE_OPERAND (expr, 1));
case ADDR_EXPR:
- ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
+ ret1 = get_pointer_alignment (const_cast<tree> (expr));
if (ret1 > BITS_PER_UNIT)
{
ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
{
/* This CONST_CAST is okay because expr_last returns its argument
unmodified and we assign it to a const_tree. */
- const_tree stmt = expr_last (CONST_CAST_TREE (block));
+ const_tree stmt = expr_last (const_cast<tree> (block));
switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
{
we are not modifying the tree itself. */
#define STRIP_NOPS(EXP) \
- (EXP) = tree_strip_nop_conversions (CONST_CAST_TREE (EXP))
+ (EXP) = tree_strip_nop_conversions (const_cast<tree> (EXP))
/* Like STRIP_NOPS, but don't let the signedness change either. */
#define STRIP_SIGN_NOPS(EXP) \
- (EXP) = tree_strip_sign_nop_conversions (CONST_CAST_TREE (EXP))
+ (EXP) = tree_strip_sign_nop_conversions (const_cast<tree> (EXP))
/* Like STRIP_NOPS, but don't alter the TREE_TYPE either. */
in use to provide a location_t. */
#define STRIP_ANY_LOCATION_WRAPPER(EXP) \
- (EXP) = tree_strip_any_location_wrapper (CONST_CAST_TREE (EXP))
+ (EXP) = tree_strip_any_location_wrapper (const_cast<tree> (EXP))
/* Nonzero if TYPE represents a vector type. */
if (__i < 0 || __i >= __t->base.u.int_length.extended)
tree_int_cst_elt_check_failed (__i, __t->base.u.int_length.extended,
__f, __l, __g);
- return &CONST_CAST_TREE (__t)->int_cst.val[__i];
+ return &const_cast<tree> (__t)->int_cst.val[__i];
}
inline HOST_WIDE_INT *
if (__i < 0 || __i >= __t->base.u.int_length.extended)
tree_int_cst_elt_check_failed (__i, __t->base.u.int_length.extended,
__f, __l, __g);
- return &CONST_CAST_TREE (__t)->int_cst.val[__i];
+ return &const_cast<tree> (__t)->int_cst.val[__i];
}
/* Workaround -Wstrict-overflow false positive during profiledbootstrap. */
tree_check_failed (__t, __f, __l, __g, TREE_VEC, 0);
if (__i < 0 || __i >= __t->base.u.length)
tree_vec_elt_check_failed (__i, __t->base.u.length, __f, __l, __g);
- return &CONST_CAST_TREE (__t)->vec.a[__i];
+ return &const_cast<tree> (__t)->vec.a[__i];
}
# if GCC_VERSION >= 4006
const_tree __u = EXPR_CHECK (__t);
if (__i < 0 || __i >= TREE_OPERAND_LENGTH (__u))
tree_operand_check_failed (__i, __u, __f, __l, __g);
- return &CONST_CAST_TREE (__u)->exp.operands[__i];
+ return &const_cast<tree> (__u)->exp.operands[__i];
}
inline tree *
else if (TREE_CODE (decl) == STRING_CST)
{
if ((flag_sanitize & SANITIZE_ADDRESS)
- && asan_protect_global (CONST_CAST_TREE (decl)))
+ && asan_protect_global (const_cast<tree> (decl)))
/* or !flag_merge_constants */
return SECCAT_RODATA;
else
}
else if (VAR_P (decl))
{
- tree d = CONST_CAST_TREE (decl);
+ tree d = const_cast<tree> (decl);
if (bss_initializer_p (decl))
ret = SECCAT_BSS;
else if (! TREE_READONLY (decl)