static bool
ref_all_alias_ptr_type_p (const_tree t)
{
- return (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
+ return (VOID_TYPE_P (TREE_TYPE (t))
|| TYPE_REF_CAN_ALIAS_ALL (t));
}
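
(For reference: the predicates substituted throughout this patch are one-line
wrapper macros from tree.h, so each hunk is a mechanical, behavior-preserving
rewrite.  A sketch of the definitions relied on below, paraphrased -- see
tree.h for the authoritative forms:

  #define VOID_TYPE_P(NODE)      (TREE_CODE (NODE) == VOID_TYPE)
  #define VAR_P(NODE)            (TREE_CODE (NODE) == VAR_DECL)
  #define VECTOR_TYPE_P(NODE)    (TREE_CODE (NODE) == VECTOR_TYPE)
  #define INDIRECT_REF_P(NODE)   (TREE_CODE (NODE) == INDIRECT_REF)
  #define SCALAR_FLOAT_TYPE_P(T) (TREE_CODE (T) == REAL_TYPE)
  #define FIXED_POINT_TYPE_P(T)  (TREE_CODE (T) == FIXED_POINT_TYPE)
  #define FUNC_OR_METHOD_TYPE_P(NODE) \
    (TREE_CODE (NODE) == FUNCTION_TYPE || TREE_CODE (NODE) == METHOD_TYPE)
  #define RECORD_OR_UNION_TYPE_P(NODE) \
    (TREE_CODE (NODE) == RECORD_TYPE \
     || TREE_CODE (NODE) == UNION_TYPE \
     || TREE_CODE (NODE) == QUAL_UNION_TYPE)
)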
continue;
if ((TREE_CODE (node) == FIELD_DECL
- || TREE_CODE (node) == VAR_DECL)
+ || VAR_P (node))
&& !excl->variable)
continue;
&& TREE_CODE (*anode) != METHOD_TYPE)
{
if (TREE_CODE (*anode) == POINTER_TYPE
- && (TREE_CODE (TREE_TYPE (*anode)) == FUNCTION_TYPE
- || TREE_CODE (TREE_TYPE (*anode)) == METHOD_TYPE))
+ && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (*anode)))
{
/* OK, this is a bit convoluted. We can't just make a copy
of the pointer type and modify its TREE_TYPE, because if
if (TYPE_P (*anode)
&& (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
- && TYPE_SIZE (*anode) != NULL_TREE)
+ && COMPLETE_TYPE_P (*anode))
{
warning (OPT_Wattributes, "type attributes ignored after type is already defined");
continue;
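
(COMPLETE_TYPE_P is the same test spelled by intent; roughly, from tree.h:

  #define COMPLETE_TYPE_P(NODE)  (TYPE_SIZE (NODE) != NULL_TREE)

so this hunk does not change behavior.)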
build_duplicate_type is another solution (as used in
handle_transparent_union_attribute), but that doesn't play well
with the stronger C++ type identity model. */
- if (TREE_CODE (ttype) == RECORD_TYPE
- || TREE_CODE (ttype) == UNION_TYPE
- || TREE_CODE (ttype) == QUAL_UNION_TYPE
+ if (RECORD_OR_UNION_TYPE_P (ttype)
|| TREE_CODE (ttype) == ENUMERAL_TYPE)
{
warning (OPT_Wattributes,
fold_builtin_carg (location_t loc, tree arg, tree type)
{
if (validate_arg (arg, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
+ && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
{
tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
We must also strip off INDIRECT_EXPR for C++ reference
parameters. */
while (CONVERT_EXPR_P (arg)
- || TREE_CODE (arg) == INDIRECT_REF)
+ || INDIRECT_REF_P (arg))
arg = TREE_OPERAND (arg, 0);
if (arg != last_parm)
{
/* To proceed, MPFR must exactly represent the target floating point
format, which only happens when the target base equals two. */
if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
+ && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
&& TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
+ && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
&& REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
{
const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
tree retval_rhs;
/* If function wants no value, give it none. */
- if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
+ if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
{
expand_normal (retval);
expand_null_return ();
decl_in_symtab_p (const_tree decl)
{
return (TREE_CODE (decl) == FUNCTION_DECL
- || (TREE_CODE (decl) == VAR_DECL
+ || (VAR_P (decl)
&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
}
inline symtab_node *
symtab_node::get_create (tree node)
{
- if (TREE_CODE (node) == VAR_DECL)
+ if (VAR_P (node))
return varpool_node::get_create (node);
else
return cgraph_node::get_create (node);
/* Dwarf2 doesn't know anything about complex ints, so use
a user defined type for it. */
case COMPLEX_TYPE:
- if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (type)))
encoding = DW_ATE_complex_float;
else
encoding = DW_ATE_lo_user;
if (dwarf_strict && dwarf_version < 5)
return NULL;
- gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
+ gcc_assert (VAR_P (DEBUG_IMPLICIT_PTR_DECL (rtl))
|| TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
|| TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
size = int_size_in_bytes (TREE_TYPE (szdecl));
if (!DECL_P (szdecl))
{
- if (TREE_CODE (szdecl) == INDIRECT_REF
+ if (INDIRECT_REF_P (szdecl)
&& DECL_P (TREE_OPERAND (szdecl, 0)))
{
rszdecl = TREE_OPERAND (szdecl, 0);
add_name_attribute (array_die, type_tag (type));
equate_type_number_to_die (type, array_die);
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
add_AT_flag (array_die, DW_AT_GNU_vector, 1);
/* Arrays default to row-major ordering here; Fortran multidimensional arrays would need DW_ORD_col_major instead. */
add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
#endif
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
{
/* For VECTOR_TYPEs we use an array DIE with appropriate bounds. */
dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
now. (Vectors and arrays are special because the debugging info is in the
cloned type itself. Similarly function/method types can contain extra
ref-qualification). */
- if (TREE_CODE (type) == FUNCTION_TYPE
- || TREE_CODE (type) == METHOD_TYPE)
+ if (FUNC_OR_METHOD_TYPE_P (type))
{
/* For function/method types, can't use type_main_variant here,
because that can have different ref-qualifiers for C++,
return;
if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
&& !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
- || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
+ || (VAR_P (TREE_OPERAND (init, 0))
&& !DECL_EXTERNAL (TREE_OPERAND (init, 0))
&& TREE_OPERAND (init, 0) != decl))
{
/* For vector typed comparisons emit code to generate the desired
all-ones or all-zeros mask. */
- if (TREE_CODE (ops->type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (ops->type))
{
tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
if (VECTOR_BOOLEAN_TYPE_P (ops->type)
case NOP_EXPR:
/* Negate -((double)float) as (double)(-float). */
- if (TREE_CODE (type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (type))
{
tree tem = strip_float_extensions (t);
if (tem != t)
case NOP_EXPR:
/* Convert -((double)float) into (double)(-float). */
- if (TREE_CODE (type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (type))
{
tem = strip_float_extensions (t);
if (tem != t && negate_expr_p (tem))
else if (TREE_CODE (arg1) == FIXED_CST)
return fold_convert_const_int_from_fixed (type, arg1);
}
- else if (TREE_CODE (type) == REAL_TYPE)
+ else if (SCALAR_FLOAT_TYPE_P (type))
{
if (TREE_CODE (arg1) == INTEGER_CST)
{
else if (TREE_CODE (arg1) == FIXED_CST)
return fold_convert_const_real_from_fixed (type, arg1);
}
- else if (TREE_CODE (type) == FIXED_POINT_TYPE)
+ else if (FIXED_POINT_TYPE_P (type))
{
if (TREE_CODE (arg1) == FIXED_CST)
return fold_convert_const_fixed_from_fixed (type, arg1);
else if (TREE_CODE (arg1) == REAL_CST)
return fold_convert_const_fixed_from_real (type, arg1);
}
- else if (TREE_CODE (type) == VECTOR_TYPE)
+ else if (VECTOR_TYPE_P (type))
{
if (TREE_CODE (arg1) == VECTOR_CST
&& known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
return fold_convert_loc (loc, type,
fold_build1_loc (loc, REALPART_EXPR,
TREE_TYPE (orig), arg));
- gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
+ gcc_assert (VECTOR_TYPE_P (orig)
&& tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
return build_zero_vector (type);
gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
- || TREE_CODE (orig) == VECTOR_TYPE);
+ || VECTOR_TYPE_P (orig));
return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
case VOID_TYPE:
return value ? integer_one_node : integer_zero_node;
else if (type == boolean_type_node)
return value ? boolean_true_node : boolean_false_node;
- else if (TREE_CODE (type) == VECTOR_TYPE)
+ else if (VECTOR_TYPE_P (type))
return build_vector_from_val (type,
build_int_cst (TREE_TYPE (type),
value ? -1 : 0));
rhs = false_value;
}
else if (!(TREE_CODE (type) != VECTOR_TYPE
- && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
+ && VECTOR_TYPE_P (TREE_TYPE (cond))))
{
tree testtype = TREE_TYPE (cond);
test = cond;
/* Detect the case of mixing vector and scalar types - bail out. */
return NULL_TREE;
- if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (test)))
cond_code = VEC_COND_EXPR;
/* This transformation is only worthwhile if we don't have to wrap ARG
if (TREE_CODE (t) == WITH_SIZE_EXPR)
t = TREE_OPERAND (t, 0);
- if (TREE_CODE (t) == INDIRECT_REF)
+ if (INDIRECT_REF_P (t))
{
t = TREE_OPERAND (t, 0);
= get_inner_reference (TREE_OPERAND (arg0, 0),
&bitsize, &bitpos0, &offset0, &mode,
&unsignedp, &reversep, &volatilep);
- if (TREE_CODE (base0) == INDIRECT_REF)
+ if (INDIRECT_REF_P (base0))
base0 = TREE_OPERAND (base0, 0);
else
indirect_base0 = true;
= get_inner_reference (TREE_OPERAND (base0, 0),
&bitsize, &bitpos0, &offset0, &mode,
&unsignedp, &reversep, &volatilep);
- if (TREE_CODE (base0) == INDIRECT_REF)
+ if (INDIRECT_REF_P (base0))
base0 = TREE_OPERAND (base0, 0);
else
indirect_base0 = true;
= get_inner_reference (TREE_OPERAND (arg1, 0),
&bitsize, &bitpos1, &offset1, &mode,
&unsignedp, &reversep, &volatilep);
- if (TREE_CODE (base1) == INDIRECT_REF)
+ if (INDIRECT_REF_P (base1))
base1 = TREE_OPERAND (base1, 0);
else
indirect_base1 = true;
= get_inner_reference (TREE_OPERAND (base1, 0),
&bitsize, &bitpos1, &offset1, &mode,
&unsignedp, &reversep, &volatilep);
- if (TREE_CODE (base1) == INDIRECT_REF)
+ if (INDIRECT_REF_P (base1))
base1 = TREE_OPERAND (base1, 0);
else
indirect_base1 = true;
fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
}
fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
- if (TREE_CODE (expr) == RECORD_TYPE
- || TREE_CODE (expr) == UNION_TYPE
- || TREE_CODE (expr) == QUAL_UNION_TYPE)
+ if (RECORD_OR_UNION_TYPE_P (expr))
fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
break;
tree inner_type = TREE_TYPE (op0);
tree outer_type = type;
- if (TREE_CODE (outer_type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (outer_type))
{
- if (TREE_CODE (inner_type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (inner_type))
return RECURSE (op0);
if (INTEGRAL_TYPE_P (inner_type))
{
}
else if (INTEGRAL_TYPE_P (outer_type))
{
- if (TREE_CODE (inner_type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (inner_type))
return RECURSE (op0);
if (INTEGRAL_TYPE_P (inner_type))
return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
tree type = TREE_TYPE (op0);
if (TREE_CODE (type) == INTEGER_TYPE)
return true;
- if (TREE_CODE (type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (type))
return RECURSE (op0);
break;
}
tree
fold_read_from_constant_string (tree exp)
{
- if ((TREE_CODE (exp) == INDIRECT_REF
+ if ((INDIRECT_REF_P (exp)
|| TREE_CODE (exp) == ARRAY_REF)
&& TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
{
tree string;
location_t loc = EXPR_LOCATION (exp);
- if (TREE_CODE (exp) == INDIRECT_REF)
+ if (INDIRECT_REF_P (exp))
string = string_constant (exp1, &index, NULL, NULL);
else
{
tree expr_type = TREE_TYPE (m_expr);
tree other_type = NULL_TREE;
- if (m_other_expr && EXPR_P (m_other_expr))
+ if (CAN_HAVE_LOCATION_P (m_other_expr))
other_type = TREE_TYPE (m_other_expr);
range_label_for_type_mismatch inner (expr_type, other_type);
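
(The explicit null test on m_other_expr can be dropped because
CAN_HAVE_LOCATION_P already performs it; paraphrasing tree.h:

  #define CAN_HAVE_LOCATION_P(NODE)  ((NODE) && EXPR_P (NODE))

so the guard is preserved.)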
!= TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
return false;
/* Do not lose casts to function pointer types. */
- if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
- || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
- && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
- || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
+ if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (outer_type))
+ && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (inner_type)))
return false;
}
TREE_TYPE (inner_type));
/* Recurse for vector types with the same number of subparts. */
- else if (TREE_CODE (inner_type) == VECTOR_TYPE
- && TREE_CODE (outer_type) == VECTOR_TYPE)
+ else if (VECTOR_TYPE_P (inner_type)
+ && VECTOR_TYPE_P (outer_type))
return (known_eq (TYPE_VECTOR_SUBPARTS (inner_type),
TYPE_VECTOR_SUBPARTS (outer_type))
&& useless_type_conversion_p (TREE_TYPE (outer_type),
TREE_TYPE (inner_type)));
}
- else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
- || TREE_CODE (inner_type) == METHOD_TYPE)
+ else if (FUNC_OR_METHOD_TYPE_P (inner_type)
&& TREE_CODE (inner_type) == TREE_CODE (outer_type))
{
tree outer_parm, inner_parm;
it seems safest to not do too much optimization with these at the
tree level at all. We'll have to rely on the rtl optimizers to
clean this up, as there we've got all the appropriate bits exposed. */
- if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
+ if (VAR_P (t) && DECL_HARD_REGISTER (t))
return false;
/* Variables can be marked as having partial definitions, avoid
bool
is_gimple_asm_val (tree t)
{
- if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
+ if (VAR_P (t) && DECL_HARD_REGISTER (t))
return true;
return is_gimple_val (t);
mark_addressable_1 (x);
/* Also mark the artificial SSA_NAME that points to the partition of X. */
- if (TREE_CODE (x) == VAR_DECL
+ if (VAR_P (x)
&& !DECL_EXTERNAL (x)
&& !TREE_STATIC (x)
&& cfun->gimple_df != NULL
inline bool
is_gimple_variable (tree t)
{
- return (TREE_CODE (t) == VAR_DECL
+ return (VAR_P (t)
|| TREE_CODE (t) == PARM_DECL
|| TREE_CODE (t) == RESULT_DECL
|| TREE_CODE (t) == SSA_NAME);
if (TREE_CODE (op) == SSA_NAME)
return SSA_NAME_IS_VIRTUAL_OPERAND (op);
- if (TREE_CODE (op) == VAR_DECL)
+ if (VAR_P (op))
return VAR_DECL_IS_VIRTUAL_OPERAND (op);
return false;
if (TREE_CODE (lhs_ref.ref) == PARM_DECL
&& (lhs_ref.deref - DECL_BY_REFERENCE (lhs_ref.ref)) > 0)
/* Assignment through a (real) pointer/reference parameter. */;
- else if (TREE_CODE (lhs_ref.ref) == VAR_DECL
+ else if (VAR_P (lhs_ref.ref)
&& !auto_var_p (lhs_ref.ref))
/* Assignment to/through a non-local variable. */;
else
if (DECL_HAS_VALUE_EXPR_P (key))
{
key = DECL_VALUE_EXPR (key);
- if (TREE_CODE (key) == INDIRECT_REF)
+ if (INDIRECT_REF_P (key))
key = TREE_OPERAND (key, 0);
}
tree *c = oacc_declare_returns->get (key);
result_decl = TREE_OPERAND (ret_expr, 0);
/* See through a return by reference. */
- if (TREE_CODE (result_decl) == INDIRECT_REF)
+ if (INDIRECT_REF_P (result_decl))
result_decl = TREE_OPERAND (result_decl, 0);
gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
/* The temporary address variable for this vla should be
created in gimplify_vla_decl. */
gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
- gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
}
else
/* For various reasons, the gimplification of the expression
may have made a new INDIRECT_REF. */
- if (TREE_CODE (op0) == INDIRECT_REF
+ if (INDIRECT_REF_P (op0)
|| (TREE_CODE (op0) == MEM_REF
&& integer_zerop (TREE_OPERAND (op0, 1))))
goto do_indirect_ref;
nflags = GOVD_FIRSTPRIVATE;
nflags |= flags & GOVD_SEEN;
t = DECL_VALUE_EXPR (decl);
- gcc_assert (TREE_CODE (t) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (t));
t = TREE_OPERAND (t, 0);
gcc_assert (DECL_P (t));
omp_add_variable (ctx, t, nflags);
{
splay_tree_node n2;
tree t = DECL_VALUE_EXPR (decl);
- gcc_assert (TREE_CODE (t) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (t));
t = TREE_OPERAND (t, 0);
gcc_assert (DECL_P (t));
n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
|| TREE_CODE (expr) == COMPONENT_REF)
expr = TREE_OPERAND (expr, 0);
- if (TREE_CODE (expr) == INDIRECT_REF
+ if (INDIRECT_REF_P (expr)
|| (TREE_CODE (expr) == MEM_REF
&& integer_zerop (TREE_OPERAND (expr, 1))))
{
omp_strip_components_and_deref (tree expr)
{
while (TREE_CODE (expr) == COMPONENT_REF
- || TREE_CODE (expr) == INDIRECT_REF
+ || INDIRECT_REF_P (expr)
|| (TREE_CODE (expr) == MEM_REF
&& integer_zerop (TREE_OPERAND (expr, 1)))
|| TREE_CODE (expr) == POINTER_PLUS_EXPR
static tree
omp_strip_indirections (tree expr)
{
- while (TREE_CODE (expr) == INDIRECT_REF
+ while (INDIRECT_REF_P (expr)
|| (TREE_CODE (expr) == MEM_REF
&& integer_zerop (TREE_OPERAND (expr, 1))))
expr = TREE_OPERAND (expr, 0);
while (TREE_CODE (ocd) == ARRAY_REF)
ocd = TREE_OPERAND (ocd, 0);
- if (TREE_CODE (ocd) == INDIRECT_REF)
+ if (INDIRECT_REF_P (ocd))
ocd = TREE_OPERAND (ocd, 0);
tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
OMP_CLAUSE_MAP);
bool base_ref
- = (TREE_CODE (base) == INDIRECT_REF
+ = (INDIRECT_REF_P (base)
&& ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
== REFERENCE_TYPE)
- || ((TREE_CODE (TREE_OPERAND (base, 0))
- == INDIRECT_REF)
+ || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
&& (TREE_CODE (TREE_TYPE (TREE_OPERAND
(TREE_OPERAND (base, 0), 0)))
== REFERENCE_TYPE))));
|| TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
break;
}
- else if (TREE_CODE (sc_decl) == INDIRECT_REF
+ else if (INDIRECT_REF_P (sc_decl)
&& TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
== REFERENCE_TYPE))
decl = d;
}
if (d == decl
- && TREE_CODE (decl) == INDIRECT_REF
+ && INDIRECT_REF_P (decl)
&& TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
== REFERENCE_TYPE)
&& TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
tree decl2 = DECL_VALUE_EXPR (decl);
- gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (decl2));
decl2 = TREE_OPERAND (decl2, 0);
gcc_assert (DECL_P (decl2));
tree mem = build_simple_mem_ref (decl2);
case OMP_CLAUSE_HAS_DEVICE_ADDR:
decl = OMP_CLAUSE_DECL (c);
- while (TREE_CODE (decl) == INDIRECT_REF
+ while (INDIRECT_REF_P (decl)
|| TREE_CODE (decl) == ARRAY_REF)
decl = TREE_OPERAND (decl, 0);
n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
if ((ctx->region_type & ORT_TARGET) != 0
&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
{
- if (TREE_CODE (decl) == INDIRECT_REF
+ if (INDIRECT_REF_P (decl)
&& TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
== REFERENCE_TYPE))
gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
tree decl2 = DECL_VALUE_EXPR (decl);
- gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (decl2));
decl2 = TREE_OPERAND (decl2, 0);
gcc_assert (DECL_P (decl2));
tree mem = build_simple_mem_ref (decl2);
&& TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
tree decl2 = DECL_VALUE_EXPR (decl);
- gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
+ gcc_assert (INDIRECT_REF_P (decl2));
decl2 = TREE_OPERAND (decl2, 0);
gcc_assert (DECL_P (decl2));
tree mem = build_simple_mem_ref (decl2);
{
has_decl_expr = BITMAP_ALLOC (NULL);
if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
- && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
- == VAR_DECL)
+ && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
{
t = OMP_FOR_PRE_BODY (for_stmt);
bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
{
t = tsi_stmt (si);
if (TREE_CODE (t) == DECL_EXPR
- && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
+ && VAR_P (DECL_EXPR_DECL (t)))
bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
}
}
STRIP_TYPE_NOPS but includes the main variant lookup. */
STRIP_USELESS_TYPE_CONVERSION (expr);
- if (TREE_CODE (expr) == INDIRECT_REF)
+ if (INDIRECT_REF_P (expr))
{
expr = TREE_OPERAND (expr, 0);
while (expr != addr
if this type is from outside the function. It's OK to have that here. */
if (expr == NULL_TREE
|| is_gimple_constant (expr)
- || TREE_CODE (expr) == VAR_DECL
+ || VAR_P (expr)
|| CONTAINS_PLACEHOLDER_P (expr))
return;
&& graphite_can_represent_scev (scop, TREE_OPERAND (scev, 1));
case MULT_EXPR:
- return !CONVERT_EXPR_CODE_P (TREE_CODE (TREE_OPERAND (scev, 0)))
- && !CONVERT_EXPR_CODE_P (TREE_CODE (TREE_OPERAND (scev, 1)))
+ return !CONVERT_EXPR_P (TREE_OPERAND (scev, 0))
+ && !CONVERT_EXPR_P (TREE_OPERAND (scev, 1))
&& !(chrec_contains_symbols (TREE_OPERAND (scev, 0))
&& chrec_contains_symbols (TREE_OPERAND (scev, 1)))
&& graphite_can_represent_init (scev)
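
(CONVERT_EXPR_P (EXP) is shorthand for CONVERT_EXPR_CODE_P (TREE_CODE (EXP)),
which tests for NOP_EXPR or CONVERT_EXPR, so taking TREE_CODE by hand was
redundant.  Roughly, from tree.h:

  #define CONVERT_EXPR_CODE_P(CODE) ((CODE) == NOP_EXPR || (CODE) == CONVERT_EXPR)
  #define CONVERT_EXPR_P(EXP)       CONVERT_EXPR_CODE_P (TREE_CODE (EXP))
)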
if (INTEGRAL_TYPE_P (t1)
|| SCALAR_FLOAT_TYPE_P (t1)
|| FIXED_POINT_TYPE_P (t1)
- || TREE_CODE (t1) == VECTOR_TYPE
+ || VECTOR_TYPE_P (t1)
|| TREE_CODE (t1) == COMPLEX_TYPE
|| TREE_CODE (t1) == OFFSET_TYPE
|| POINTER_TYPE_P (t1))
}
}
- if ((TREE_CODE (t1) == VECTOR_TYPE || TREE_CODE (t1) == COMPLEX_TYPE)
+ if ((VECTOR_TYPE_P (t1) || TREE_CODE (t1) == COMPLEX_TYPE)
&& !odr_subtypes_equivalent_p (TREE_TYPE (t1), TREE_TYPE (t2),
visited, loc1, loc2))
{
if (TREE_CODE (constant) == ADDR_EXPR
&& (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
- || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL
+ || (VAR_P (TREE_OPERAND (constant, 0))
&& TREE_STATIC (TREE_OPERAND (constant, 0)))))
{
struct ipa_cst_ref_desc *rdesc;
if (rdesc->refcount != IPA_UNDESCRIBED_USE
&& ipa_get_param_load_dereferenced (old_root_info, i)
&& TREE_CODE (cst) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (cst, 0)) == VAR_DECL)
+ && VAR_P (TREE_OPERAND (cst, 0)))
{
symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
new_root->create_reference (n, IPA_REF_LOAD, NULL);
gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
&& ((TREE_CODE (TREE_OPERAND (cst, 0))
== FUNCTION_DECL)
- || (TREE_CODE (TREE_OPERAND (cst, 0))
- == VAR_DECL)));
+ || VAR_P (TREE_OPERAND (cst, 0))));
symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
if (n)
if (TREE_CODE (old_type) != COMPLEX_TYPE
&& TREE_CODE (old_type) != VECTOR_TYPE
&& (TREE_CODE (new_type) == COMPLEX_TYPE
- || TREE_CODE (new_type) == VECTOR_TYPE))
+ || VECTOR_TYPE_P (new_type)))
return true;
if ((TREE_CODE (old_type) == COMPLEX_TYPE
- || TREE_CODE (old_type) == VECTOR_TYPE)
+ || VECTOR_TYPE_P (old_type))
&& TREE_CODE (new_type) != COMPLEX_TYPE
&& TREE_CODE (new_type) != VECTOR_TYPE)
return false;
if (ctx == ISRA_CTX_ARG)
return;
tree t = get_base_address (TREE_OPERAND (expr, 0));
- if (TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
+ if (VAR_P (t) && !TREE_STATIC (t))
loaded_decls->add (t);
return;
}
return;
deref = true;
}
- else if (TREE_CODE (base) == VAR_DECL
+ else if (VAR_P (base)
&& !TREE_STATIC (base)
&& (ctx == ISRA_CTX_ARG
|| ctx == ISRA_CTX_LOAD))
expansion has code to adjust TRUNC_MOD_EXPR into the desired other
modes, but for vector modes it does not. The adjustment code
should be instead emitted in tree-vect-patterns.cc. */
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
return unknown_optab;
/* FALLTHRU */
case TRUNC_MOD_EXPR:
into the desired other modes, but for vector modes it does not.
The adjustment code should be instead emitted in
tree-vect-patterns.cc. */
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
return unknown_optab;
/* FALLTHRU */
case RDIV_EXPR:
return TYPE_UNSIGNED (type) ? udiv_optab : sdiv_optab;
case LSHIFT_EXPR:
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
{
if (subtype == optab_vector)
return TYPE_SATURATING (type) ? unknown_optab : vashl_optab;
return ashl_optab;
case RSHIFT_EXPR:
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
{
if (subtype == optab_vector)
return TYPE_UNSIGNED (type) ? vlshr_optab : vashr_optab;
return TYPE_UNSIGNED (type) ? lshr_optab : ashr_optab;
case LROTATE_EXPR:
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
{
if (subtype == optab_vector)
return vrotl_optab;
return rotl_optab;
case RROTATE_EXPR:
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
{
if (subtype == optab_vector)
return vrotr_optab;
{
/* We're not deferring this any longer. Assignment is conditional to
avoid needlessly dirtying PCH pages. */
- if (CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS)
+ if (HAS_DECL_ASSEMBLER_NAME_P (decl)
&& DECL_DEFER_OUTPUT (decl) != 0)
DECL_DEFER_OUTPUT (decl) = 0;
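
(HAS_DECL_ASSEMBLER_NAME_P wraps the same structure check; roughly:

  #define HAS_DECL_ASSEMBLER_NAME_P(NODE) \
    (CODE_CONTAINS_STRUCT (TREE_CODE (NODE), TS_DECL_WITH_VIS))
)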
static bool
transaction_invariant_address_p (const_tree mem, basic_block region_entry_block)
{
- if ((TREE_CODE (mem) == INDIRECT_REF || TREE_CODE (mem) == MEM_REF)
+ if ((INDIRECT_REF_P (mem) || TREE_CODE (mem) == MEM_REF)
&& TREE_CODE (TREE_OPERAND (mem, 0)) == SSA_NAME)
{
basic_block def_bb;
return true;
}
}
- else if (TREE_CODE (expr) == INDIRECT_REF)
+ else if (INDIRECT_REF_P (expr))
{
error ("%qs in gimple IL", code_name);
debug_generic_stmt (expr);
&& (TREE_CODE (type) == BOOLEAN_TYPE
|| TYPE_PRECISION (type) == 1))
{
- if ((TREE_CODE (op0_type) == VECTOR_TYPE
- || TREE_CODE (op1_type) == VECTOR_TYPE)
+ if ((VECTOR_TYPE_P (op0_type)
+ || VECTOR_TYPE_P (op1_type))
&& code != EQ_EXPR && code != NE_EXPR
&& !VECTOR_BOOLEAN_TYPE_P (op0_type)
&& !VECTOR_INTEGER_TYPE_P (op0_type))
}
/* Or a boolean vector type with the same element count
as the comparison operand types. */
- else if (TREE_CODE (type) == VECTOR_TYPE
+ else if (VECTOR_TYPE_P (type)
&& TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
{
if (TREE_CODE (op0_type) != VECTOR_TYPE
types and integer vector types. */
if ((!INTEGRAL_TYPE_P (rhs1_type)
&& !FIXED_POINT_TYPE_P (rhs1_type)
- && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
+ && ! (VECTOR_TYPE_P (rhs1_type)
&& INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
|| (!INTEGRAL_TYPE_P (rhs2_type)
/* Vector shifts of vectors are also ok. */
- && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
+ && ! (VECTOR_TYPE_P (rhs1_type)
&& INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
- && TREE_CODE (rhs2_type) == VECTOR_TYPE
+ && VECTOR_TYPE_P (rhs2_type)
&& INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
|| !useless_type_conversion_p (lhs_type, rhs1_type))
{
tree lhs_etype = lhs_type;
tree rhs1_etype = rhs1_type;
tree rhs2_etype = rhs2_type;
- if (TREE_CODE (lhs_type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (lhs_type))
{
if (TREE_CODE (rhs1_type) != VECTOR_TYPE
|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
return res;
case CONSTRUCTOR:
- if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (rhs1_type))
{
unsigned int i;
tree elt_i, elt_v, elt_t = NULL_TREE;
if (elt_t == NULL_TREE)
{
elt_t = TREE_TYPE (elt_v);
- if (TREE_CODE (elt_t) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (elt_t))
{
tree elt_t = TREE_TYPE (elt_v);
if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
return true;
}
if (elt_i != NULL_TREE
- && (TREE_CODE (elt_t) == VECTOR_TYPE
+ && (VECTOR_TYPE_P (elt_t)
|| TREE_CODE (elt_i) != INTEGER_CST
|| compare_tree_int (elt_i, i) != 0))
{
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
- && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
+ && VAR_P (SSA_NAME_VAR (ssa_name)))
{
SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
lower_emutls_2 (tree *ptr, int *walk_subtrees, void *)
{
tree t = *ptr;
- if (TREE_CODE (t) == VAR_DECL)
+ if (VAR_P (t))
return DECL_THREAD_LOCAL_P (t) ? t : NULL_TREE;
else if (!EXPR_P (t))
*walk_subtrees = 0;
}
}
}
- else if (TREE_CODE (*tp) == INDIRECT_REF)
+ else if (INDIRECT_REF_P (*tp))
{
/* Get rid of *& from inline substitutions that can happen when a
pointer argument is an ADDR_EXPR. */
/* Handle the case where we substituted an INDIRECT_REF
into the operand of the ADDR_EXPR. */
- if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
+ if (INDIRECT_REF_P (TREE_OPERAND (*tp, 0))
&& !id->do_not_fold)
{
tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
gcc_assert (!VOID_TYPE_P (type));
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (type))
{
scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
/* The new variable/label has no RTL, yet. */
- if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
+ if (HAS_RTL_P (copy)
&& !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
SET_DECL_RTL (copy, 0);
/* For vector typed decls make sure to update DECL_MODE according
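
(Likewise HAS_RTL_P above; roughly:

  #define HAS_RTL_P(NODE) (CODE_CONTAINS_STRUCT (TREE_CODE (NODE), TS_DECL_WRTL))
)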
pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
pdecl = &TREE_OPERAND (*pdecl, 0);
- if (TREE_CODE (*pdecl) == INDIRECT_REF
+ if (INDIRECT_REF_P (*pdecl)
|| TREE_CODE (*pdecl) == ADDR_EXPR)
pdecl = &TREE_OPERAND (*pdecl, 0);
}
type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
while (POINTER_TYPE_P (type)
- || TREE_CODE (type) == VECTOR_TYPE
- || TREE_CODE (type) == FUNCTION_TYPE
- || TREE_CODE (type) == METHOD_TYPE)
+ || VECTOR_TYPE_P (type)
+ || FUNC_OR_METHOD_TYPE_P (type))
type = TREE_TYPE (type);
pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
pdecl = &TREE_OPERAND (*pdecl, 0);
- if (TREE_CODE (*pdecl) == INDIRECT_REF
+ if (INDIRECT_REF_P (*pdecl)
|| TREE_CODE (*pdecl) == ADDR_EXPR)
pdecl = &TREE_OPERAND (*pdecl, 0);
}
val = DECL_VALUE_EXPR (var);
type = TREE_TYPE (var);
- if (!(TREE_CODE (val) == INDIRECT_REF
- && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
+ if (! (INDIRECT_REF_P (val)
+ && VAR_P (TREE_OPERAND (val, 0))
&& variably_modified_type_p (type, NULL)))
continue;
val = DECL_VALUE_EXPR (var);
type = TREE_TYPE (var);
- if (!(TREE_CODE (val) == INDIRECT_REF
- && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
+ if (! (INDIRECT_REF_P (val)
+ && VAR_P (TREE_OPERAND (val, 0))
&& variably_modified_type_p (type, NULL)))
continue;
{
tree val = DECL_VALUE_EXPR (var);
- if (!(TREE_CODE (val) == INDIRECT_REF
+ if (! (INDIRECT_REF_P (val)
&& VAR_P (TREE_OPERAND (val, 0))
&& DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
continue;
for (i = 0; i < loop->num_nodes; i++)
for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
- if (TREE_CODE (TREE_TYPE (PHI_RESULT (gsi.phi ()))) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (PHI_RESULT (gsi.phi ()))))
goto end;
res = false;
if (TREE_CODE (t) == TYPE_DECL)
pp_string (pp, "typedef ");
- if (CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_DECL_WRTL) && DECL_REGISTER (t))
+ if (HAS_RTL_P (t) && DECL_REGISTER (t))
pp_string (pp, "register ");
if (TREE_PUBLIC (t) && DECL_EXTERNAL (t))
break;
case ARRAY_REF:
- if (TREE_CODE (TREE_OPERAND (op0, 0)) == VAR_DECL)
+ if (VAR_P (TREE_OPERAND (op0, 0)))
dump_function_name (pp, TREE_OPERAND (op0, 0), flags);
else
dump_generic_node (pp, op0, 0, flags, false);
else if (TREE_CODE (f1->type) != COMPLEX_TYPE
&& TREE_CODE (f1->type) != VECTOR_TYPE
&& (TREE_CODE (f2->type) == COMPLEX_TYPE
- || TREE_CODE (f2->type) == VECTOR_TYPE))
+ || VECTOR_TYPE_P (f2->type)))
return 1;
else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
- || TREE_CODE (f1->type) == VECTOR_TYPE)
+ || VECTOR_TYPE_P (f1->type))
&& TREE_CODE (f2->type) != COMPLEX_TYPE
&& TREE_CODE (f2->type) != VECTOR_TYPE)
return -1;
/* Be conservative for arrays and vectors. We want to support partial
overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
while (TREE_CODE (type1) == ARRAY_TYPE
- || TREE_CODE (type1) == VECTOR_TYPE)
+ || VECTOR_TYPE_P (type1))
type1 = TREE_TYPE (type1);
while (TREE_CODE (type2) == ARRAY_TYPE
- || TREE_CODE (type2) == VECTOR_TYPE)
+ || VECTOR_TYPE_P (type2))
type2 = TREE_TYPE (type2);
return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
consider it VARYING. */
if (!virtual_operand_p (var)
&& SSA_NAME_VAR (var)
- && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
+ && VAR_P (SSA_NAME_VAR (var)))
val.lattice_val = UNDEFINED;
else
{
originally with optimizations and only the link
performed at -O0, so we can't actually require it. */
const int cost
- = (TREE_CODE (SSA_NAME_VAR (a)) == VAR_DECL || in_lto_p)
+ = (VAR_P (SSA_NAME_VAR (a)) || in_lto_p)
? MUST_COALESCE_COST - 1 : MUST_COALESCE_COST;
add_coalesce (cl, SSA_NAME_VERSION (a),
SSA_NAME_VERSION (*slot), cost);
/* We may have a vector comparison where both arms are uniform
vectors. If so, we can simplify the vector comparison down
to a scalar comparison. */
- if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
- && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (lhs))
+ && VECTOR_TYPE_P (TREE_TYPE (rhs)))
{
/* If either operand is an SSA_NAME, then look back to its
defining statement to try and get at a suitable source. */
if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
{
tree rhs = gimple_assign_rhs1 (def_stmt);
- if (POINTER_TYPE_P (TREE_TYPE (rhs))
- && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
+ if (FUNCTION_POINTER_TYPE_P (TREE_TYPE (rhs)))
return false;
}
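
(FUNCTION_POINTER_TYPE_P expands to exactly the removed pattern; roughly:

  #define FUNCTION_POINTER_TYPE_P(I) \
    (POINTER_TYPE_P (I) && TREE_CODE (TREE_TYPE (I)) == FUNCTION_TYPE)
)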
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
&& SSA_NAME_IS_DEFAULT_DEF (orig)
&& (SSA_NAME_VAR (orig) == NULL_TREE
- || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
+ || VAR_P (SSA_NAME_VAR (orig))))
;
/* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
be propagated. */
ctor = base->op0;
else if (base->opcode == MEM_REF
&& base[1].opcode == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
+ && (VAR_P (TREE_OPERAND (base[1].op0, 0))
|| TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
|| TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
{
{
/* Do not sink loads from hard registers. */
if (gimple_assign_single_p (stmt)
- && TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
+ && VAR_P (gimple_assign_rhs1 (stmt))
&& DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
return false;
/* A function or method can have pointers as arguments, so track
those separately. */
- if (TREE_CODE (type) == FUNCTION_TYPE
- || TREE_CODE (type) == METHOD_TYPE)
+ if (FUNC_OR_METHOD_TYPE_P (type))
return true;
return false;
/* Increment counter if this statement sets a local
register variable. */
if (gimple_assign_single_p (stmt)
- && (TREE_CODE (gimple_assign_lhs (stmt)) == VAR_DECL
+ && (VAR_P (gimple_assign_lhs (stmt))
&& DECL_HARD_REGISTER (gimple_assign_lhs (stmt))))
cur_reg_vars_cnt++;
if (is_gimple_assign (context))
{
- if (TREE_CODE (gimple_assign_lhs (context)) == VAR_DECL)
+ if (VAR_P (gimple_assign_lhs (context)))
lhs_var = gimple_assign_lhs (context);
else if (TREE_CODE (gimple_assign_lhs (context)) == SSA_NAME)
lhs_var = SSA_NAME_VAR (gimple_assign_lhs (context));
&& is_gimple_reg_type (TREE_TYPE (*tp))
&& ! VOID_TYPE_P (TREE_TYPE (*tp)))
{
- if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
+ if (VECTOR_TYPE_P (TREE_TYPE (sym))
&& useless_type_conversion_p (TREE_TYPE (*tp),
TREE_TYPE (TREE_TYPE (sym)))
&& multiple_p (mem_ref_offset (*tp),
|| VOID_TYPE_P (TREE_TYPE (base))
|| TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
return decl;
- if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
+ if ((VECTOR_TYPE_P (TREE_TYPE (decl))
|| TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
&& useless_type_conversion_p (TREE_TYPE (base),
TREE_TYPE (TREE_TYPE (decl)))
TYPE_DOMAIN (expr) = stream_read_tree_ref (ib, data_in);
else if (RECORD_OR_UNION_TYPE_P (expr))
TYPE_FIELDS (expr) = streamer_read_chain (ib, data_in);
- else if (TREE_CODE (expr) == FUNCTION_TYPE
- || TREE_CODE (expr) == METHOD_TYPE)
+ else if (FUNC_OR_METHOD_TYPE_P (expr))
TYPE_ARG_TYPES (expr) = stream_read_tree_ref (ib, data_in);
if (!POINTER_TYPE_P (expr))
stream_write_tree_ref (ob, TYPE_DOMAIN (expr));
else if (RECORD_OR_UNION_TYPE_P (expr))
streamer_write_chain (ob, TYPE_FIELDS (expr));
- else if (TREE_CODE (expr) == FUNCTION_TYPE
- || TREE_CODE (expr) == METHOD_TYPE)
+ else if (FUNC_OR_METHOD_TYPE_P (expr))
stream_write_tree_ref (ob, TYPE_ARG_TYPES (expr));
if (!POINTER_TYPE_P (expr))
tree bitpos, tree bitsize, enum tree_code code,
tree type ATTRIBUTE_UNUSED)
{
- if (TREE_CODE (TREE_TYPE (a)) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (a)))
a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
- if (TREE_CODE (TREE_TYPE (b)) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (b)))
b = tree_vec_extract (gsi, inner_type, b, bitsize, bitpos);
return gimplify_build2 (gsi, code, inner_type, a, b);
}
tree bitpos, tree bitsize, enum tree_code code,
tree type ATTRIBUTE_UNUSED)
{
- if (TREE_CODE (TREE_TYPE (a)) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (a)))
a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
- if (TREE_CODE (TREE_TYPE (b)) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (TREE_TYPE (b)))
b = tree_vec_extract (gsi, inner_type, b, bitsize, bitpos);
tree cond = gimple_assign_rhs1 (gsi_stmt (*gsi));
return gimplify_build3 (gsi, code, inner_type, unshare_expr (cond), a, b);
/* We abuse this function to push sth to a SSA name with initial 'val'. */
if (! useless_type_conversion_p (type, TREE_TYPE (val)))
{
- gcc_assert (TREE_CODE (type) == VECTOR_TYPE);
+ gcc_assert (VECTOR_TYPE_P (type));
if (! types_compatible_p (TREE_TYPE (type), TREE_TYPE (val)))
{
/* Scalar boolean value should be transformed into
inline void
tree_vector_builder::note_representative (tree *elt1_ptr, tree elt2)
{
- if (CONSTANT_CLASS_P (elt2) && TREE_OVERFLOW (elt2))
+ if (TREE_OVERFLOW_P (elt2))
{
gcc_assert (operand_equal_p (*elt1_ptr, elt2, 0));
if (!TREE_OVERFLOW (*elt1_ptr))
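
(TREE_OVERFLOW_P bundles the class check; roughly:

  #define TREE_OVERFLOW_P(EXPR) (CONSTANT_CLASS_P (EXPR) && TREE_OVERFLOW (EXPR))

Note the inner test must inspect *elt1_ptr: on this path elt2 is already known
to have TREE_OVERFLOW set, so re-testing elt2 would make the branch dead code.)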
return NULL_TREE;
auto mask = wi::set_bit_in_zero (bits - 1, bits);
- if (TREE_CODE (inttype) == VECTOR_TYPE)
+ if (VECTOR_TYPE_P (inttype))
{
tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
return build_vector_from_val (inttype, elt);
address is constant too. If it's a decl, its address is constant if the
decl is static. Everything else is not constant and, furthermore,
taking the address of a volatile variable is not volatile. */
- if (TREE_CODE (node) == INDIRECT_REF
+ if (INDIRECT_REF_P (node)
|| TREE_CODE (node) == MEM_REF)
UPDATE_FLAGS (TREE_OPERAND (node, 0));
else if (CONSTANT_CLASS_P (node))
if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
|| TREE_CODE (t) == INTEGER_TYPE
|| TREE_CODE (t) == BOOLEAN_TYPE
- || TREE_CODE (t) == REAL_TYPE
- || TREE_CODE (t) == FIXED_POINT_TYPE)
+ || SCALAR_FLOAT_TYPE_P (t)
+ || FIXED_POINT_TYPE_P (t))
{
verify_variant_match (TYPE_MAX_VALUE);
verify_variant_match (TYPE_MIN_VALUE);
return false;
}
}
- else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
+ else if (FUNC_OR_METHOD_TYPE_P (t))
verify_variant_match (TYPE_ARG_TYPES);
/* For C++ the qualified variant of array type is really an array type
of qualified TREE_TYPE.
/* Qualifiers do not matter for canonical type comparison purposes. */
/* Void types and nullptr types are always the same. */
- if (TREE_CODE (t1) == VOID_TYPE
+ if (VOID_TYPE_P (t1)
|| TREE_CODE (t1) == NULLPTR_TYPE)
return true;
if (INTEGRAL_TYPE_P (t1)
|| SCALAR_FLOAT_TYPE_P (t1)
|| FIXED_POINT_TYPE_P (t1)
- || TREE_CODE (t1) == VECTOR_TYPE
+ || VECTOR_TYPE_P (t1)
|| TREE_CODE (t1) == COMPLEX_TYPE
|| TREE_CODE (t1) == OFFSET_TYPE
|| POINTER_TYPE_P (t1))
}
/* Tail-recurse to components. */
- if (TREE_CODE (t1) == VECTOR_TYPE
+ if (VECTOR_TYPE_P (t1)
|| TREE_CODE (t1) == COMPLEX_TYPE)
return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
TREE_TYPE (t2),
error_found = true;
}
}
- else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
- || TREE_CODE (t) == FIXED_POINT_TYPE)
+ else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
+ || FIXED_POINT_TYPE_P (t))
{
/* FIXME: The following check should pass:
useless_type_conversion_p (const_cast <tree> (t),
error_found = true;
}
}
- else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
+ else if (FUNC_OR_METHOD_TYPE_P (t))
{
if (TYPE_METHOD_BASETYPE (t)
&& TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
error_found = true;
}
}
- else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
- || TREE_CODE (t) == FIXED_POINT_TYPE)
+ else if (INTEGRAL_TYPE_P (t) || SCALAR_FLOAT_TYPE_P (t)
+ || FIXED_POINT_TYPE_P (t))
{
/* FIXME: The following check should pass:
useless_type_conversion_p (const_cast <tree> (t),
}
}
}
- else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
+ else if (FUNC_OR_METHOD_TYPE_P (t))
for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
{
/* C++ FE uses TREE_PURPOSE to store initial values. */
static unsigned short
get_ubsan_type_info_for_type (tree type)
{
- if (TREE_CODE (type) == REAL_TYPE)
+ if (SCALAR_FLOAT_TYPE_P (type))
return tree_to_uhwi (TYPE_SIZE (type));
else if (INTEGRAL_TYPE_P (type))
{
type = TREE_TYPE (fndecl);
if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
{
- if (TREE_CODE (fndecl) == INDIRECT_REF
+ if (INDIRECT_REF_P (fndecl)
&& TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
obj_type_ref = TREE_OPERAND (fndecl, 0);
fndecl = NULL_TREE;
{
tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
- if (TREE_CODE (vexpr) == INDIRECT_REF)
+ if (INDIRECT_REF_P (vexpr))
vexpr = TREE_OPERAND (vexpr, 0);
if (TREE_CODE (vexpr) == PARM_DECL
offset += mem_ref_offset (target).force_shwi ();
target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
}
- else if (TREE_CODE (target) == INDIRECT_REF
+ else if (INDIRECT_REF_P (target)
&& TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
== ADDR_EXPR)