1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
64 #include "stringpool.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
73 /* Tree code classes. */
75 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
76 #define END_OF_BASE_TREE_CODES tcc_exceptional,
78 const enum tree_code_class tree_code_type
[] = {
79 #include "all-tree.def"
83 #undef END_OF_BASE_TREE_CODES
85 /* Table indexed by tree code giving number of expression
86 operands beyond the fixed part of the node structure.
87 Not used for types or decls. */
89 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
90 #define END_OF_BASE_TREE_CODES 0,
92 const unsigned char tree_code_length
[] = {
93 #include "all-tree.def"
97 #undef END_OF_BASE_TREE_CODES
99 /* Names of tree components.
100 Used for printing out the tree and error messages. */
101 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
102 #define END_OF_BASE_TREE_CODES "@dummy",
104 static const char *const tree_code_name
[] = {
105 #include "all-tree.def"
109 #undef END_OF_BASE_TREE_CODES
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.
   NOTE(review): the initializer body was lost in extraction; restored
   to match the declaration order of enum tree_code_class in tree.h —
   verify against upstream.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
/* obstack.[ch] explicitly declined to prototype this; declare it here
   so the few users in this file get a checked prototype.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);
132 /* Statistics-gathering stuff. */
134 static uint64_t tree_code_counts
[MAX_TREE_CODES
];
135 uint64_t tree_node_counts
[(int) all_kinds
];
136 uint64_t tree_node_sizes
[(int) all_kinds
];
/* Keep in sync with tree.h:enum tree_node_kind.
   NOTE(review): the string list was lost in extraction; restored from
   the upstream table — confirm entry order against tree.h.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
158 /* Unique id for next decl created. */
159 static GTY(()) int next_decl_uid
;
160 /* Unique id for next type created. */
161 static GTY(()) unsigned next_type_uid
= 1;
162 /* Unique id for next debug decl created. Use negative numbers,
163 to catch erroneous uses. */
164 static GTY(()) int next_debug_decl_uid
;
166 /* Since we cannot rehash a type after it is in the table, we have to
167 keep the hash code. */
169 struct GTY((for_user
)) type_hash
{
174 /* Initial size of the hash table (rounded to next prime). */
175 #define TYPE_HASH_INITIAL_SIZE 1000
177 struct type_cache_hasher
: ggc_cache_ptr_hash
<type_hash
>
179 static hashval_t
hash (type_hash
*t
) { return t
->hash
; }
180 static bool equal (type_hash
*a
, type_hash
*b
);
183 keep_cache_entry (type_hash
*&t
)
185 return ggc_marked_p (t
->type
);
189 /* Now here is the hash table. When recording a type, it is added to
190 the slot whose index is the hash code. Note that the hash table is
191 used for several kinds of types (function types, array types and
192 array index range types, for now). While all these live in the
193 same table, they are completely independent, and the hash code is
194 computed differently for each of these. */
196 static GTY ((cache
)) hash_table
<type_cache_hasher
> *type_hash_table
;
198 /* Hash table and temporary node for larger integer const values. */
199 static GTY (()) tree int_cst_node
;
201 struct int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
203 static hashval_t
hash (tree t
);
204 static bool equal (tree x
, tree y
);
207 static GTY ((cache
)) hash_table
<int_cst_hasher
> *int_cst_hash_table
;
209 /* Class and variable for making sure that there is a single POLY_INT_CST
210 for a given value. */
211 struct poly_int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
213 typedef std::pair
<tree
, const poly_wide_int
*> compare_type
;
214 static hashval_t
hash (tree t
);
215 static bool equal (tree x
, const compare_type
&y
);
218 static GTY ((cache
)) hash_table
<poly_int_cst_hasher
> *poly_int_cst_hash_table
;
220 /* Hash table for optimization flags and target option flags. Use the same
221 hash table for both sets of options. Nodes for building the current
222 optimization and target option nodes. The assumption is most of the time
223 the options created will already be in the hash table, so we avoid
224 allocating and freeing up a node repeatably. */
225 static GTY (()) tree cl_optimization_node
;
226 static GTY (()) tree cl_target_option_node
;
228 struct cl_option_hasher
: ggc_cache_ptr_hash
<tree_node
>
230 static hashval_t
hash (tree t
);
231 static bool equal (tree x
, tree y
);
234 static GTY ((cache
)) hash_table
<cl_option_hasher
> *cl_option_hash_table
;
236 /* General tree->tree mapping structure for use in hash tables. */
240 hash_table
<tree_decl_map_cache_hasher
> *debug_expr_for_decl
;
243 hash_table
<tree_decl_map_cache_hasher
> *value_expr_for_decl
;
245 struct tree_vec_map_cache_hasher
: ggc_cache_ptr_hash
<tree_vec_map
>
247 static hashval_t
hash (tree_vec_map
*m
) { return DECL_UID (m
->base
.from
); }
250 equal (tree_vec_map
*a
, tree_vec_map
*b
)
252 return a
->base
.from
== b
->base
.from
;
256 keep_cache_entry (tree_vec_map
*&m
)
258 return ggc_marked_p (m
->base
.from
);
263 hash_table
<tree_vec_map_cache_hasher
> *debug_args_for_decl
;
265 static void set_type_quals (tree
, int);
266 static void print_type_hash_statistics (void);
267 static void print_debug_expr_statistics (void);
268 static void print_value_expr_statistics (void);
270 tree global_trees
[TI_MAX
];
271 tree integer_types
[itk_none
];
273 bool int_n_enabled_p
[NUM_INT_N_ENTS
];
274 struct int_n_trees_t int_n_trees
[NUM_INT_N_ENTS
];
276 bool tree_contains_struct
[MAX_TREE_CODES
][64];
/* Number of operands for each OpenMP clause.
   Keep in sync with enum omp_clause_code in tree-core.h.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  2, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
367 const char * const omp_clause_code_name
[] =
456 /* Return the tree node structure used by tree code CODE. */
458 static inline enum tree_node_structure_enum
459 tree_node_structure_for_code (enum tree_code code
)
461 switch (TREE_CODE_CLASS (code
))
463 case tcc_declaration
:
466 case CONST_DECL
: return TS_CONST_DECL
;
467 case DEBUG_EXPR_DECL
: return TS_DECL_WRTL
;
468 case FIELD_DECL
: return TS_FIELD_DECL
;
469 case FUNCTION_DECL
: return TS_FUNCTION_DECL
;
470 case LABEL_DECL
: return TS_LABEL_DECL
;
471 case PARM_DECL
: return TS_PARM_DECL
;
472 case RESULT_DECL
: return TS_RESULT_DECL
;
473 case TRANSLATION_UNIT_DECL
: return TS_TRANSLATION_UNIT_DECL
;
474 case TYPE_DECL
: return TS_TYPE_DECL
;
475 case VAR_DECL
: return TS_VAR_DECL
;
476 default: return TS_DECL_NON_COMMON
;
479 case tcc_type
: return TS_TYPE_NON_COMMON
;
487 case tcc_vl_exp
: return TS_EXP
;
489 default: /* tcc_constant and tcc_exceptional */
495 /* tcc_constant cases. */
496 case COMPLEX_CST
: return TS_COMPLEX
;
497 case FIXED_CST
: return TS_FIXED_CST
;
498 case INTEGER_CST
: return TS_INT_CST
;
499 case POLY_INT_CST
: return TS_POLY_INT_CST
;
500 case REAL_CST
: return TS_REAL_CST
;
501 case STRING_CST
: return TS_STRING
;
502 case VECTOR_CST
: return TS_VECTOR
;
503 case VOID_CST
: return TS_TYPED
;
505 /* tcc_exceptional cases. */
506 case BLOCK
: return TS_BLOCK
;
507 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
508 case ERROR_MARK
: return TS_COMMON
;
509 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
510 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
511 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
512 case PLACEHOLDER_EXPR
: return TS_COMMON
;
513 case SSA_NAME
: return TS_SSA_NAME
;
514 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
515 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
516 case TREE_BINFO
: return TS_BINFO
;
517 case TREE_LIST
: return TS_LIST
;
518 case TREE_VEC
: return TS_VEC
;
526 /* Initialize tree_contains_struct to describe the hierarchy of tree
530 initialize_tree_contains_struct (void)
534 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
537 enum tree_node_structure_enum ts_code
;
539 code
= (enum tree_code
) i
;
540 ts_code
= tree_node_structure_for_code (code
);
542 /* Mark the TS structure itself. */
543 tree_contains_struct
[code
][ts_code
] = 1;
545 /* Mark all the structures that TS is derived from. */
550 case TS_OPTIMIZATION
:
551 case TS_TARGET_OPTION
:
557 case TS_POLY_INT_CST
:
566 case TS_STATEMENT_LIST
:
567 MARK_TS_TYPED (code
);
571 case TS_DECL_MINIMAL
:
577 MARK_TS_COMMON (code
);
580 case TS_TYPE_WITH_LANG_SPECIFIC
:
581 MARK_TS_TYPE_COMMON (code
);
584 case TS_TYPE_NON_COMMON
:
585 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
589 MARK_TS_DECL_MINIMAL (code
);
594 MARK_TS_DECL_COMMON (code
);
597 case TS_DECL_NON_COMMON
:
598 MARK_TS_DECL_WITH_VIS (code
);
601 case TS_DECL_WITH_VIS
:
605 MARK_TS_DECL_WRTL (code
);
609 MARK_TS_DECL_COMMON (code
);
613 MARK_TS_DECL_WITH_VIS (code
);
617 case TS_FUNCTION_DECL
:
618 MARK_TS_DECL_NON_COMMON (code
);
621 case TS_TRANSLATION_UNIT_DECL
:
622 MARK_TS_DECL_COMMON (code
);
630 /* Basic consistency checks for attributes used in fold. */
631 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
632 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
633 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
634 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
635 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
636 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
637 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
638 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
639 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
640 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
641 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
642 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
643 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
644 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
645 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
646 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
647 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
648 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
649 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
650 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
651 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
652 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
653 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
654 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
655 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
656 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
657 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
658 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
659 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
660 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
661 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
662 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
663 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
664 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
665 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
666 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
667 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
668 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
669 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
670 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
679 /* Initialize the hash table of types. */
681 = hash_table
<type_cache_hasher
>::create_ggc (TYPE_HASH_INITIAL_SIZE
);
684 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
687 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
689 int_cst_hash_table
= hash_table
<int_cst_hasher
>::create_ggc (1024);
691 poly_int_cst_hash_table
= hash_table
<poly_int_cst_hasher
>::create_ggc (64);
693 int_cst_node
= make_int_cst (1, 1);
695 cl_option_hash_table
= hash_table
<cl_option_hasher
>::create_ggc (64);
697 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
698 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
700 /* Initialize the tree_contains_struct array. */
701 initialize_tree_contains_struct ();
702 lang_hooks
.init_ts ();
706 /* The name of the object as the assembler will see it (but before any
707 translations made by ASM_OUTPUT_LABELREF). Often this is the same
708 as DECL_NAME. It is an IDENTIFIER_NODE. */
710 decl_assembler_name (tree decl
)
712 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
713 lang_hooks
.set_decl_assembler_name (decl
);
714 return DECL_ASSEMBLER_NAME_RAW (decl
);
717 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
718 (either of which may be NULL). Inform the FE, if this changes the
722 overwrite_decl_assembler_name (tree decl
, tree name
)
724 if (DECL_ASSEMBLER_NAME_RAW (decl
) != name
)
725 lang_hooks
.overwrite_decl_assembler_name (decl
, name
);
728 /* Return true if DECL may need an assembler name to be set. */
731 need_assembler_name_p (tree decl
)
733 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
734 Rule merging. This makes type_odr_p to return true on those types during
735 LTO and by comparing the mangled name, we can say what types are intended
736 to be equivalent across compilation unit.
738 We do not store names of type_in_anonymous_namespace_p.
740 Record, union and enumeration type have linkage that allows use
741 to check type_in_anonymous_namespace_p. We do not mangle compound types
742 that always can be compared structurally.
744 Similarly for builtin types, we compare properties of their main variant.
745 A special case are integer types where mangling do make differences
746 between char/signed char/unsigned char etc. Storing name for these makes
747 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
748 See cp/mangle.c:write_builtin_type for details. */
750 if (TREE_CODE (decl
) == TYPE_DECL
)
753 && decl
== TYPE_NAME (TREE_TYPE (decl
))
754 && TYPE_MAIN_VARIANT (TREE_TYPE (decl
)) == TREE_TYPE (decl
)
755 && !TYPE_ARTIFICIAL (TREE_TYPE (decl
))
756 && ((TREE_CODE (TREE_TYPE (decl
)) != RECORD_TYPE
757 && TREE_CODE (TREE_TYPE (decl
)) != UNION_TYPE
)
758 || TYPE_CXX_ODR_P (TREE_TYPE (decl
)))
759 && (type_with_linkage_p (TREE_TYPE (decl
))
760 || TREE_CODE (TREE_TYPE (decl
)) == INTEGER_TYPE
)
761 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
762 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
765 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
766 if (!VAR_OR_FUNCTION_DECL_P (decl
))
769 /* If DECL already has its assembler name set, it does not need a
771 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
772 || DECL_ASSEMBLER_NAME_SET_P (decl
))
775 /* Abstract decls do not need an assembler name. */
776 if (DECL_ABSTRACT_P (decl
))
779 /* For VAR_DECLs, only static, public and external symbols need an
782 && !TREE_STATIC (decl
)
783 && !TREE_PUBLIC (decl
)
784 && !DECL_EXTERNAL (decl
))
787 if (TREE_CODE (decl
) == FUNCTION_DECL
)
789 /* Do not set assembler name on builtins. Allow RTL expansion to
790 decide whether to expand inline or via a regular call. */
791 if (fndecl_built_in_p (decl
)
792 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
795 /* Functions represented in the callgraph need an assembler name. */
796 if (cgraph_node::get (decl
) != NULL
)
799 /* Unused and not public functions don't need an assembler name. */
800 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
807 /* If T needs an assembler name, have one created for it. */
810 assign_assembler_name_if_needed (tree t
)
812 if (need_assembler_name_p (t
))
814 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
815 diagnostics that use input_location to show locus
816 information. The problem here is that, at this point,
817 input_location is generally anchored to the end of the file
818 (since the parser is long gone), so we don't have a good
819 position to pin it to.
821 To alleviate this problem, this uses the location of T's
822 declaration. Examples of this are
823 testsuite/g++.dg/template/cond2.C and
824 testsuite/g++.dg/template/pr35240.C. */
825 location_t saved_location
= input_location
;
826 input_location
= DECL_SOURCE_LOCATION (t
);
828 decl_assembler_name (t
);
830 input_location
= saved_location
;
834 /* When the target supports COMDAT groups, this indicates which group the
835 DECL is associated with. This can be either an IDENTIFIER_NODE or a
836 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
838 decl_comdat_group (const_tree node
)
840 struct symtab_node
*snode
= symtab_node::get (node
);
843 return snode
->get_comdat_group ();
846 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
848 decl_comdat_group_id (const_tree node
)
850 struct symtab_node
*snode
= symtab_node::get (node
);
853 return snode
->get_comdat_group_id ();
856 /* When the target supports named section, return its name as IDENTIFIER_NODE
857 or NULL if it is in no section. */
859 decl_section_name (const_tree node
)
861 struct symtab_node
*snode
= symtab_node::get (node
);
864 return snode
->get_section ();
867 /* Set section name of NODE to VALUE (that is expected to be
870 set_decl_section_name (tree node
, const char *value
)
872 struct symtab_node
*snode
;
876 snode
= symtab_node::get (node
);
880 else if (VAR_P (node
))
881 snode
= varpool_node::get_create (node
);
883 snode
= cgraph_node::get_create (node
);
884 snode
->set_section (value
);
887 /* Set section name of NODE to match the section name of OTHER.
889 set_decl_section_name (decl, other) is equivalent to
890 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 set_decl_section_name (tree decl
, const_tree other
)
895 struct symtab_node
*other_node
= symtab_node::get (other
);
898 struct symtab_node
*decl_node
;
900 decl_node
= varpool_node::get_create (decl
);
902 decl_node
= cgraph_node::get_create (decl
);
903 decl_node
->set_section (*other_node
);
907 struct symtab_node
*decl_node
= symtab_node::get (decl
);
910 decl_node
->set_section (NULL
);
914 /* Return TLS model of a variable NODE. */
916 decl_tls_model (const_tree node
)
918 struct varpool_node
*snode
= varpool_node::get (node
);
920 return TLS_MODEL_NONE
;
921 return snode
->tls_model
;
924 /* Set TLS model of variable NODE to MODEL. */
926 set_decl_tls_model (tree node
, enum tls_model model
)
928 struct varpool_node
*vnode
;
930 if (model
== TLS_MODEL_NONE
)
932 vnode
= varpool_node::get (node
);
937 vnode
= varpool_node::get_create (node
);
938 vnode
->tls_model
= model
;
941 /* Compute the number of bytes occupied by a tree with code CODE.
942 This function cannot be used for nodes that have variable sizes,
943 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
945 tree_code_size (enum tree_code code
)
947 switch (TREE_CODE_CLASS (code
))
949 case tcc_declaration
: /* A decl node */
952 case FIELD_DECL
: return sizeof (tree_field_decl
);
953 case PARM_DECL
: return sizeof (tree_parm_decl
);
954 case VAR_DECL
: return sizeof (tree_var_decl
);
955 case LABEL_DECL
: return sizeof (tree_label_decl
);
956 case RESULT_DECL
: return sizeof (tree_result_decl
);
957 case CONST_DECL
: return sizeof (tree_const_decl
);
958 case TYPE_DECL
: return sizeof (tree_type_decl
);
959 case FUNCTION_DECL
: return sizeof (tree_function_decl
);
960 case DEBUG_EXPR_DECL
: return sizeof (tree_decl_with_rtl
);
961 case TRANSLATION_UNIT_DECL
: return sizeof (tree_translation_unit_decl
);
964 case NAMELIST_DECL
: return sizeof (tree_decl_non_common
);
966 gcc_checking_assert (code
>= NUM_TREE_CODES
);
967 return lang_hooks
.tree_size (code
);
970 case tcc_type
: /* a type node */
982 case FIXED_POINT_TYPE
:
988 case QUAL_UNION_TYPE
:
992 case LANG_TYPE
: return sizeof (tree_type_non_common
);
994 gcc_checking_assert (code
>= NUM_TREE_CODES
);
995 return lang_hooks
.tree_size (code
);
998 case tcc_reference
: /* a reference */
999 case tcc_expression
: /* an expression */
1000 case tcc_statement
: /* an expression with side effects */
1001 case tcc_comparison
: /* a comparison expression */
1002 case tcc_unary
: /* a unary arithmetic expression */
1003 case tcc_binary
: /* a binary arithmetic expression */
1004 return (sizeof (struct tree_exp
)
1005 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
1007 case tcc_constant
: /* a constant */
1010 case VOID_CST
: return sizeof (tree_typed
);
1011 case INTEGER_CST
: gcc_unreachable ();
1012 case POLY_INT_CST
: return sizeof (tree_poly_int_cst
);
1013 case REAL_CST
: return sizeof (tree_real_cst
);
1014 case FIXED_CST
: return sizeof (tree_fixed_cst
);
1015 case COMPLEX_CST
: return sizeof (tree_complex
);
1016 case VECTOR_CST
: gcc_unreachable ();
1017 case STRING_CST
: gcc_unreachable ();
1019 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1020 return lang_hooks
.tree_size (code
);
1023 case tcc_exceptional
: /* something random, like an identifier. */
1026 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
1027 case TREE_LIST
: return sizeof (tree_list
);
1030 case PLACEHOLDER_EXPR
: return sizeof (tree_common
);
1032 case TREE_VEC
: gcc_unreachable ();
1033 case OMP_CLAUSE
: gcc_unreachable ();
1035 case SSA_NAME
: return sizeof (tree_ssa_name
);
1037 case STATEMENT_LIST
: return sizeof (tree_statement_list
);
1038 case BLOCK
: return sizeof (struct tree_block
);
1039 case CONSTRUCTOR
: return sizeof (tree_constructor
);
1040 case OPTIMIZATION_NODE
: return sizeof (tree_optimization_option
);
1041 case TARGET_OPTION_NODE
: return sizeof (tree_target_option
);
1044 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1045 return lang_hooks
.tree_size (code
);
1053 /* Compute the number of bytes occupied by NODE. This routine only
1054 looks at TREE_CODE, except for those nodes that have variable sizes. */
1056 tree_size (const_tree node
)
1058 const enum tree_code code
= TREE_CODE (node
);
1062 return (sizeof (struct tree_int_cst
)
1063 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
1066 return (offsetof (struct tree_binfo
, base_binfos
)
1068 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
1071 return (sizeof (struct tree_vec
)
1072 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
1075 return (sizeof (struct tree_vector
)
1076 + (vector_cst_encoded_nelts (node
) - 1) * sizeof (tree
));
1079 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
1082 return (sizeof (struct tree_omp_clause
)
1083 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
1087 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
1088 return (sizeof (struct tree_exp
)
1089 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
1091 return tree_code_size (code
);
1095 /* Return tree node kind based on tree CODE. */
1097 static tree_node_kind
1098 get_stats_node_kind (enum tree_code code
)
1100 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1104 case tcc_declaration
: /* A decl node */
1106 case tcc_type
: /* a type node */
1108 case tcc_statement
: /* an expression with side effects */
1110 case tcc_reference
: /* a reference */
1112 case tcc_expression
: /* an expression */
1113 case tcc_comparison
: /* a comparison expression */
1114 case tcc_unary
: /* a unary arithmetic expression */
1115 case tcc_binary
: /* a binary arithmetic expression */
1117 case tcc_constant
: /* a constant */
1119 case tcc_exceptional
: /* something random, like an identifier. */
1122 case IDENTIFIER_NODE
:
1129 return ssa_name_kind
;
1135 return omp_clause_kind
;
1147 /* Record interesting allocation statistics for a tree node with CODE
1151 record_node_allocation_statistics (enum tree_code code
, size_t length
)
1153 if (!GATHER_STATISTICS
)
1156 tree_node_kind kind
= get_stats_node_kind (code
);
1158 tree_code_counts
[(int) code
]++;
1159 tree_node_counts
[(int) kind
]++;
1160 tree_node_sizes
[(int) kind
] += length
;
1163 /* Allocate and return a new UID from the DECL_UID namespace. */
1166 allocate_decl_uid (void)
1168 return next_decl_uid
++;
1171 /* Return a newly allocated node of code CODE. For decl and type
1172 nodes, some other fields are initialized. The rest of the node is
1173 initialized to zero. This function cannot be used for TREE_VEC,
1174 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1177 Achoo! I got a code in the node. */
1180 make_node (enum tree_code code MEM_STAT_DECL
)
1183 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1184 size_t length
= tree_code_size (code
);
1186 record_node_allocation_statistics (code
, length
);
1188 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1189 TREE_SET_CODE (t
, code
);
1194 if (code
!= DEBUG_BEGIN_STMT
)
1195 TREE_SIDE_EFFECTS (t
) = 1;
1198 case tcc_declaration
:
1199 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
1201 if (code
== FUNCTION_DECL
)
1203 SET_DECL_ALIGN (t
, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
));
1204 SET_DECL_MODE (t
, FUNCTION_MODE
);
1207 SET_DECL_ALIGN (t
, 1);
1209 DECL_SOURCE_LOCATION (t
) = input_location
;
1210 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
1211 DECL_UID (t
) = --next_debug_decl_uid
;
1214 DECL_UID (t
) = allocate_decl_uid ();
1215 SET_DECL_PT_UID (t
, -1);
1217 if (TREE_CODE (t
) == LABEL_DECL
)
1218 LABEL_DECL_UID (t
) = -1;
1223 TYPE_UID (t
) = next_type_uid
++;
1224 SET_TYPE_ALIGN (t
, BITS_PER_UNIT
);
1225 TYPE_USER_ALIGN (t
) = 0;
1226 TYPE_MAIN_VARIANT (t
) = t
;
1227 TYPE_CANONICAL (t
) = t
;
1229 /* Default to no attributes for type, but let target change that. */
1230 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
1231 targetm
.set_default_type_attributes (t
);
1233 /* We have not yet computed the alias set for this type. */
1234 TYPE_ALIAS_SET (t
) = -1;
1238 TREE_CONSTANT (t
) = 1;
1241 case tcc_expression
:
1247 case PREDECREMENT_EXPR
:
1248 case PREINCREMENT_EXPR
:
1249 case POSTDECREMENT_EXPR
:
1250 case POSTINCREMENT_EXPR
:
1251 /* All of these have side-effects, no matter what their
1253 TREE_SIDE_EFFECTS (t
) = 1;
1261 case tcc_exceptional
:
1264 case TARGET_OPTION_NODE
:
1265 TREE_TARGET_OPTION(t
)
1266 = ggc_cleared_alloc
<struct cl_target_option
> ();
1269 case OPTIMIZATION_NODE
:
1270 TREE_OPTIMIZATION (t
)
1271 = ggc_cleared_alloc
<struct cl_optimization
> ();
1280 /* Other classes need no special treatment. */
1287 /* Free tree node. */
1290 free_node (tree node
)
1292 enum tree_code code
= TREE_CODE (node
);
1293 if (GATHER_STATISTICS
)
1295 enum tree_node_kind kind
= get_stats_node_kind (code
);
1297 gcc_checking_assert (tree_code_counts
[(int) TREE_CODE (node
)] != 0);
1298 gcc_checking_assert (tree_node_counts
[(int) kind
] != 0);
1299 gcc_checking_assert (tree_node_sizes
[(int) kind
] >= tree_size (node
));
1301 tree_code_counts
[(int) TREE_CODE (node
)]--;
1302 tree_node_counts
[(int) kind
]--;
1303 tree_node_sizes
[(int) kind
] -= tree_size (node
);
1305 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
1306 vec_free (CONSTRUCTOR_ELTS (node
));
1307 else if (code
== BLOCK
)
1308 vec_free (BLOCK_NONLOCALIZED_VARS (node
));
1309 else if (code
== TREE_BINFO
)
1310 vec_free (BINFO_BASE_ACCESSES (node
));
1311 else if (code
== OPTIMIZATION_NODE
)
1312 cl_optimization_option_free (TREE_OPTIMIZATION (node
));
1313 else if (code
== TARGET_OPTION_NODE
)
1314 cl_target_option_free (TREE_TARGET_OPTION (node
));
1318 /* Return a new node with the same contents as NODE except that its
1319 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1322 copy_node (tree node MEM_STAT_DECL
)
1325 enum tree_code code
= TREE_CODE (node
);
1328 gcc_assert (code
!= STATEMENT_LIST
);
1330 length
= tree_size (node
);
1331 record_node_allocation_statistics (code
, length
);
1332 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1333 memcpy (t
, node
, length
);
1335 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1337 TREE_ASM_WRITTEN (t
) = 0;
1338 TREE_VISITED (t
) = 0;
1340 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1342 if (code
== DEBUG_EXPR_DECL
)
1343 DECL_UID (t
) = --next_debug_decl_uid
;
1346 DECL_UID (t
) = allocate_decl_uid ();
1347 if (DECL_PT_UID_SET_P (node
))
1348 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1350 if ((TREE_CODE (node
) == PARM_DECL
|| VAR_P (node
))
1351 && DECL_HAS_VALUE_EXPR_P (node
))
1353 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1354 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1356 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1359 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1360 t
->decl_with_vis
.symtab_node
= NULL
;
1362 if (VAR_P (node
) && DECL_HAS_INIT_PRIORITY_P (node
))
1364 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1365 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1367 if (TREE_CODE (node
) == FUNCTION_DECL
)
1369 DECL_STRUCT_FUNCTION (t
) = NULL
;
1370 t
->decl_with_vis
.symtab_node
= NULL
;
1373 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1375 TYPE_UID (t
) = next_type_uid
++;
1376 /* The following is so that the debug code for
1377 the copy is different from the original type.
1378 The two statements usually duplicate each other
1379 (because they clear fields of the same union),
1380 but the optimizer should catch that. */
1381 TYPE_SYMTAB_ADDRESS (t
) = 0;
1382 TYPE_SYMTAB_DIE (t
) = 0;
1384 /* Do not copy the values cache. */
1385 if (TYPE_CACHED_VALUES_P (t
))
1387 TYPE_CACHED_VALUES_P (t
) = 0;
1388 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1391 else if (code
== TARGET_OPTION_NODE
)
1393 TREE_TARGET_OPTION (t
) = ggc_alloc
<struct cl_target_option
>();
1394 memcpy (TREE_TARGET_OPTION (t
), TREE_TARGET_OPTION (node
),
1395 sizeof (struct cl_target_option
));
1397 else if (code
== OPTIMIZATION_NODE
)
1399 TREE_OPTIMIZATION (t
) = ggc_alloc
<struct cl_optimization
>();
1400 memcpy (TREE_OPTIMIZATION (t
), TREE_OPTIMIZATION (node
),
1401 sizeof (struct cl_optimization
));
1407 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1408 For example, this can copy a list made of TREE_LIST nodes. */
1411 copy_list (tree list
)
1419 head
= prev
= copy_node (list
);
1420 next
= TREE_CHAIN (list
);
1423 TREE_CHAIN (prev
) = copy_node (next
);
1424 prev
= TREE_CHAIN (prev
);
1425 next
= TREE_CHAIN (next
);
1431 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1432 INTEGER_CST with value CST and type TYPE. */
1435 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1437 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1438 /* We need extra HWIs if CST is an unsigned integer with its
1440 if (TYPE_UNSIGNED (type
) && wi::neg_p (cst
))
1441 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1442 return cst
.get_len ();
1445 /* Return a new INTEGER_CST with value CST and type TYPE. */
1448 build_new_int_cst (tree type
, const wide_int
&cst
)
1450 unsigned int len
= cst
.get_len ();
1451 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1452 tree nt
= make_int_cst (len
, ext_len
);
1457 TREE_INT_CST_ELT (nt
, ext_len
)
1458 = zext_hwi (-1, cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1459 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1460 TREE_INT_CST_ELT (nt
, i
) = -1;
1462 else if (TYPE_UNSIGNED (type
)
1463 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1466 TREE_INT_CST_ELT (nt
, len
)
1467 = zext_hwi (cst
.elt (len
),
1468 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1471 for (unsigned int i
= 0; i
< len
; i
++)
1472 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1473 TREE_TYPE (nt
) = type
;
1477 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1480 build_new_poly_int_cst (tree type
, tree (&coeffs
)[NUM_POLY_INT_COEFFS
]
1483 size_t length
= sizeof (struct tree_poly_int_cst
);
1484 record_node_allocation_statistics (POLY_INT_CST
, length
);
1486 tree t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1488 TREE_SET_CODE (t
, POLY_INT_CST
);
1489 TREE_CONSTANT (t
) = 1;
1490 TREE_TYPE (t
) = type
;
1491 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1492 POLY_INT_CST_COEFF (t
, i
) = coeffs
[i
];
1496 /* Create a constant tree that contains CST sign-extended to TYPE. */
1499 build_int_cst (tree type
, poly_int64 cst
)
1501 /* Support legacy code. */
1503 type
= integer_type_node
;
1505 return wide_int_to_tree (type
, wi::shwi (cst
, TYPE_PRECISION (type
)));
1508 /* Create a constant tree that contains CST zero-extended to TYPE. */
1511 build_int_cstu (tree type
, poly_uint64 cst
)
1513 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1516 /* Create a constant tree that contains CST sign-extended to TYPE. */
1519 build_int_cst_type (tree type
, poly_int64 cst
)
1522 return wide_int_to_tree (type
, wi::shwi (cst
, TYPE_PRECISION (type
)));
1525 /* Constructs tree in type TYPE from with value given by CST. Signedness
1526 of CST is assumed to be the same as the signedness of TYPE. */
1529 double_int_to_tree (tree type
, double_int cst
)
1531 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1534 /* We force the wide_int CST to the range of the type TYPE by sign or
1535 zero extending it. OVERFLOWABLE indicates if we are interested in
1536 overflow of the value, when >0 we are only interested in signed
1537 overflow, for <0 we are interested in any overflow. OVERFLOWED
1538 indicates whether overflow has already occurred. CONST_OVERFLOWED
1539 indicates whether constant overflow has already occurred. We force
1540 T's value to be within range of T's type (by setting to 0 or 1 all
1541 the bits outside the type's range). We set TREE_OVERFLOWED if,
1542 OVERFLOWED is nonzero,
1543 or OVERFLOWABLE is >0 and signed overflow occurs
1544 or OVERFLOWABLE is <0 and any overflow occurs
1545 We return a new tree node for the extended wide_int. The node
1546 is shared if no overflow flags are set. */
1550 force_fit_type (tree type
, const poly_wide_int_ref
&cst
,
1551 int overflowable
, bool overflowed
)
1553 signop sign
= TYPE_SIGN (type
);
1555 /* If we need to set overflow flags, return a new unshared node. */
1556 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1560 || (overflowable
> 0 && sign
== SIGNED
))
1562 poly_wide_int tmp
= poly_wide_int::from (cst
, TYPE_PRECISION (type
),
1565 if (tmp
.is_constant ())
1566 t
= build_new_int_cst (type
, tmp
.coeffs
[0]);
1569 tree coeffs
[NUM_POLY_INT_COEFFS
];
1570 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1572 coeffs
[i
] = build_new_int_cst (type
, tmp
.coeffs
[i
]);
1573 TREE_OVERFLOW (coeffs
[i
]) = 1;
1575 t
= build_new_poly_int_cst (type
, coeffs
);
1577 TREE_OVERFLOW (t
) = 1;
1582 /* Else build a shared node. */
1583 return wide_int_to_tree (type
, cst
);
1586 /* These are the hash table functions for the hash table of INTEGER_CST
1587 nodes of a sizetype. */
1589 /* Return the hash code X, an INTEGER_CST. */
1592 int_cst_hasher::hash (tree x
)
1594 const_tree
const t
= x
;
1595 hashval_t code
= TYPE_UID (TREE_TYPE (t
));
1598 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1599 code
= iterative_hash_host_wide_int (TREE_INT_CST_ELT(t
, i
), code
);
1604 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1605 is the same as that given by *Y, which is the same. */
1608 int_cst_hasher::equal (tree x
, tree y
)
1610 const_tree
const xt
= x
;
1611 const_tree
const yt
= y
;
1613 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1614 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1615 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1618 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1619 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1625 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1626 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1627 number of slots that can be cached for the type. */
1630 cache_wide_int_in_type_cache (tree type
, const wide_int
&cst
,
1631 int slot
, int max_slots
)
1633 gcc_checking_assert (slot
>= 0);
1634 /* Initialize cache. */
1635 if (!TYPE_CACHED_VALUES_P (type
))
1637 TYPE_CACHED_VALUES_P (type
) = 1;
1638 TYPE_CACHED_VALUES (type
) = make_tree_vec (max_slots
);
1640 tree t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), slot
);
1643 /* Create a new shared int. */
1644 t
= build_new_int_cst (type
, cst
);
1645 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), slot
) = t
;
1650 /* Create an INT_CST node of TYPE and value CST.
1651 The returned node is always shared. For small integers we use a
1652 per-type vector cache, for larger ones we use a single hash table.
1653 The value is extended from its precision according to the sign of
1654 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1655 the upper bits and ensures that hashing and value equality based
1656 upon the underlying HOST_WIDE_INTs works without masking. */
1659 wide_int_to_tree_1 (tree type
, const wide_int_ref
&pcst
)
1666 unsigned int prec
= TYPE_PRECISION (type
);
1667 signop sgn
= TYPE_SIGN (type
);
1669 /* Verify that everything is canonical. */
1670 int l
= pcst
.get_len ();
1673 if (pcst
.elt (l
- 1) == 0)
1674 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1675 if (pcst
.elt (l
- 1) == HOST_WIDE_INT_M1
)
1676 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1679 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1680 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1682 enum tree_code code
= TREE_CODE (type
);
1683 if (code
== POINTER_TYPE
|| code
== REFERENCE_TYPE
)
1685 /* Cache NULL pointer and zero bounds. */
1688 /* Cache upper bounds of pointers. */
1689 else if (cst
== wi::max_value (prec
, sgn
))
1691 /* Cache 1 which is used for a non-zero range. */
1697 t
= cache_wide_int_in_type_cache (type
, cst
, ix
, 3);
1698 /* Make sure no one is clobbering the shared constant. */
1699 gcc_checking_assert (TREE_TYPE (t
) == type
1700 && cst
== wi::to_wide (t
));
1706 /* We just need to store a single HOST_WIDE_INT. */
1708 if (TYPE_UNSIGNED (type
))
1709 hwi
= cst
.to_uhwi ();
1711 hwi
= cst
.to_shwi ();
1716 gcc_assert (hwi
== 0);
1720 case REFERENCE_TYPE
:
1721 /* Ignore pointers, as they were already handled above. */
1725 /* Cache false or true. */
1727 if (IN_RANGE (hwi
, 0, 1))
1733 if (TYPE_SIGN (type
) == UNSIGNED
)
1736 limit
= param_integer_share_limit
;
1737 if (IN_RANGE (hwi
, 0, param_integer_share_limit
- 1))
1742 /* Cache [-1, N). */
1743 limit
= param_integer_share_limit
+ 1;
1744 if (IN_RANGE (hwi
, -1, param_integer_share_limit
- 1))
1758 t
= cache_wide_int_in_type_cache (type
, cst
, ix
, limit
);
1759 /* Make sure no one is clobbering the shared constant. */
1760 gcc_checking_assert (TREE_TYPE (t
) == type
1761 && TREE_INT_CST_NUNITS (t
) == 1
1762 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1763 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1764 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1769 /* Use the cache of larger shared ints, using int_cst_node as
1772 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1773 TREE_TYPE (int_cst_node
) = type
;
1775 tree
*slot
= int_cst_hash_table
->find_slot (int_cst_node
, INSERT
);
1779 /* Insert this one into the hash table. */
1782 /* Make a new node for next time round. */
1783 int_cst_node
= make_int_cst (1, 1);
1789 /* The value either hashes properly or we drop it on the floor
1790 for the gc to take care of. There will not be enough of them
1793 tree nt
= build_new_int_cst (type
, cst
);
1794 tree
*slot
= int_cst_hash_table
->find_slot (nt
, INSERT
);
1798 /* Insert this one into the hash table. */
1810 poly_int_cst_hasher::hash (tree t
)
1812 inchash::hash hstate
;
1814 hstate
.add_int (TYPE_UID (TREE_TYPE (t
)));
1815 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1816 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
1818 return hstate
.end ();
1822 poly_int_cst_hasher::equal (tree x
, const compare_type
&y
)
1824 if (TREE_TYPE (x
) != y
.first
)
1826 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1827 if (wi::to_wide (POLY_INT_CST_COEFF (x
, i
)) != y
.second
->coeffs
[i
])
1832 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1833 The elements must also have type TYPE. */
1836 build_poly_int_cst (tree type
, const poly_wide_int_ref
&values
)
1838 unsigned int prec
= TYPE_PRECISION (type
);
1839 gcc_assert (prec
<= values
.coeffs
[0].get_precision ());
1840 poly_wide_int c
= poly_wide_int::from (values
, prec
, SIGNED
);
1843 h
.add_int (TYPE_UID (type
));
1844 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1845 h
.add_wide_int (c
.coeffs
[i
]);
1846 poly_int_cst_hasher::compare_type
comp (type
, &c
);
1847 tree
*slot
= poly_int_cst_hash_table
->find_slot_with_hash (comp
, h
.end (),
1849 if (*slot
== NULL_TREE
)
1851 tree coeffs
[NUM_POLY_INT_COEFFS
];
1852 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1853 coeffs
[i
] = wide_int_to_tree_1 (type
, c
.coeffs
[i
]);
1854 *slot
= build_new_poly_int_cst (type
, coeffs
);
1859 /* Create a constant tree with value VALUE in type TYPE. */
1862 wide_int_to_tree (tree type
, const poly_wide_int_ref
&value
)
1864 if (value
.is_constant ())
1865 return wide_int_to_tree_1 (type
, value
.coeffs
[0]);
1866 return build_poly_int_cst (type
, value
);
1869 /* Insert INTEGER_CST T into a cache of integer constants. And return
1870 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1871 is false, and T falls into the type's 'smaller values' range, there
1872 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1873 or the value is large, should an existing entry exist, it is
1874 returned (rather than inserting T). */
1877 cache_integer_cst (tree t
, bool might_duplicate ATTRIBUTE_UNUSED
)
1879 tree type
= TREE_TYPE (t
);
1882 int prec
= TYPE_PRECISION (type
);
1884 gcc_assert (!TREE_OVERFLOW (t
));
1886 /* The caching indices here must match those in
1887 wide_int_to_type_1. */
1888 switch (TREE_CODE (type
))
1891 gcc_checking_assert (integer_zerop (t
));
1895 case REFERENCE_TYPE
:
1897 if (integer_zerop (t
))
1899 else if (integer_onep (t
))
1908 /* Cache false or true. */
1910 if (wi::ltu_p (wi::to_wide (t
), 2))
1911 ix
= TREE_INT_CST_ELT (t
, 0);
1916 if (TYPE_UNSIGNED (type
))
1919 limit
= param_integer_share_limit
;
1921 /* This is a little hokie, but if the prec is smaller than
1922 what is necessary to hold param_integer_share_limit, then the
1923 obvious test will not get the correct answer. */
1924 if (prec
< HOST_BITS_PER_WIDE_INT
)
1926 if (tree_to_uhwi (t
)
1927 < (unsigned HOST_WIDE_INT
) param_integer_share_limit
)
1928 ix
= tree_to_uhwi (t
);
1930 else if (wi::ltu_p (wi::to_wide (t
), param_integer_share_limit
))
1931 ix
= tree_to_uhwi (t
);
1936 limit
= param_integer_share_limit
+ 1;
1938 if (integer_minus_onep (t
))
1940 else if (!wi::neg_p (wi::to_wide (t
)))
1942 if (prec
< HOST_BITS_PER_WIDE_INT
)
1944 if (tree_to_shwi (t
) < param_integer_share_limit
)
1945 ix
= tree_to_shwi (t
) + 1;
1947 else if (wi::ltu_p (wi::to_wide (t
), param_integer_share_limit
))
1948 ix
= tree_to_shwi (t
) + 1;
1954 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1964 /* Look for it in the type's vector of small shared ints. */
1965 if (!TYPE_CACHED_VALUES_P (type
))
1967 TYPE_CACHED_VALUES_P (type
) = 1;
1968 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1971 if (tree r
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
))
1973 gcc_checking_assert (might_duplicate
);
1977 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1981 /* Use the cache of larger shared ints. */
1982 tree
*slot
= int_cst_hash_table
->find_slot (t
, INSERT
);
1985 /* If there is already an entry for the number verify it's the
1987 gcc_checking_assert (wi::to_wide (tree (r
)) == wi::to_wide (t
));
1988 /* And return the cached value. */
1992 /* Otherwise insert this one into the hash table. */
2000 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2001 and the rest are zeros. */
2004 build_low_bits_mask (tree type
, unsigned bits
)
2006 gcc_assert (bits
<= TYPE_PRECISION (type
));
2008 return wide_int_to_tree (type
, wi::mask (bits
, false,
2009 TYPE_PRECISION (type
)));
2012 /* Checks that X is integer constant that can be expressed in (unsigned)
2013 HOST_WIDE_INT without loss of precision. */
2016 cst_and_fits_in_hwi (const_tree x
)
2018 return (TREE_CODE (x
) == INTEGER_CST
2019 && (tree_fits_shwi_p (x
) || tree_fits_uhwi_p (x
)));
2022 /* Build a newly constructed VECTOR_CST with the given values of
2023 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2026 make_vector (unsigned log2_npatterns
,
2027 unsigned int nelts_per_pattern MEM_STAT_DECL
)
2029 gcc_assert (IN_RANGE (nelts_per_pattern
, 1, 3));
2031 unsigned npatterns
= 1 << log2_npatterns
;
2032 unsigned encoded_nelts
= npatterns
* nelts_per_pattern
;
2033 unsigned length
= (sizeof (struct tree_vector
)
2034 + (encoded_nelts
- 1) * sizeof (tree
));
2036 record_node_allocation_statistics (VECTOR_CST
, length
);
2038 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2040 TREE_SET_CODE (t
, VECTOR_CST
);
2041 TREE_CONSTANT (t
) = 1;
2042 VECTOR_CST_LOG2_NPATTERNS (t
) = log2_npatterns
;
2043 VECTOR_CST_NELTS_PER_PATTERN (t
) = nelts_per_pattern
;
2048 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2049 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2052 build_vector_from_ctor (tree type
, const vec
<constructor_elt
, va_gc
> *v
)
2054 if (vec_safe_length (v
) == 0)
2055 return build_zero_cst (type
);
2057 unsigned HOST_WIDE_INT idx
, nelts
;
2060 /* We can't construct a VECTOR_CST for a variable number of elements. */
2061 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
2062 tree_vector_builder
vec (type
, nelts
, 1);
2063 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
2065 if (TREE_CODE (value
) == VECTOR_CST
)
2067 /* If NELTS is constant then this must be too. */
2068 unsigned int sub_nelts
= VECTOR_CST_NELTS (value
).to_constant ();
2069 for (unsigned i
= 0; i
< sub_nelts
; ++i
)
2070 vec
.quick_push (VECTOR_CST_ELT (value
, i
));
2073 vec
.quick_push (value
);
2075 while (vec
.length () < nelts
)
2076 vec
.quick_push (build_zero_cst (TREE_TYPE (type
)));
2078 return vec
.build ();
2081 /* Build a vector of type VECTYPE where all the elements are SCs. */
2083 build_vector_from_val (tree vectype
, tree sc
)
2085 unsigned HOST_WIDE_INT i
, nunits
;
2087 if (sc
== error_mark_node
)
2090 /* Verify that the vector type is suitable for SC. Note that there
2091 is some inconsistency in the type-system with respect to restrict
2092 qualifications of pointers. Vector types always have a main-variant
2093 element type and the qualification is applied to the vector-type.
2094 So TREE_TYPE (vector-type) does not return a properly qualified
2095 vector element-type. */
2096 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
2097 TREE_TYPE (vectype
)));
2099 if (CONSTANT_CLASS_P (sc
))
2101 tree_vector_builder
v (vectype
, 1, 1);
2105 else if (!TYPE_VECTOR_SUBPARTS (vectype
).is_constant (&nunits
))
2106 return fold_build1 (VEC_DUPLICATE_EXPR
, vectype
, sc
);
2109 vec
<constructor_elt
, va_gc
> *v
;
2110 vec_alloc (v
, nunits
);
2111 for (i
= 0; i
< nunits
; ++i
)
2112 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
2113 return build_constructor (vectype
, v
);
2117 /* If TYPE is not a vector type, just return SC, otherwise return
2118 build_vector_from_val (TYPE, SC). */
2121 build_uniform_cst (tree type
, tree sc
)
2123 if (!VECTOR_TYPE_P (type
))
2126 return build_vector_from_val (type
, sc
);
2129 /* Build a vector series of type TYPE in which element I has the value
2130 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2131 and a VEC_SERIES_EXPR otherwise. */
2134 build_vec_series (tree type
, tree base
, tree step
)
2136 if (integer_zerop (step
))
2137 return build_vector_from_val (type
, base
);
2138 if (TREE_CODE (base
) == INTEGER_CST
&& TREE_CODE (step
) == INTEGER_CST
)
2140 tree_vector_builder
builder (type
, 1, 3);
2141 tree elt1
= wide_int_to_tree (TREE_TYPE (base
),
2142 wi::to_wide (base
) + wi::to_wide (step
));
2143 tree elt2
= wide_int_to_tree (TREE_TYPE (base
),
2144 wi::to_wide (elt1
) + wi::to_wide (step
));
2145 builder
.quick_push (base
);
2146 builder
.quick_push (elt1
);
2147 builder
.quick_push (elt2
);
2148 return builder
.build ();
2150 return build2 (VEC_SERIES_EXPR
, type
, base
, step
);
2153 /* Return a vector with the same number of units and number of bits
2154 as VEC_TYPE, but in which the elements are a linear series of unsigned
2155 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2158 build_index_vector (tree vec_type
, poly_uint64 base
, poly_uint64 step
)
2160 tree index_vec_type
= vec_type
;
2161 tree index_elt_type
= TREE_TYPE (vec_type
);
2162 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vec_type
);
2163 if (!INTEGRAL_TYPE_P (index_elt_type
) || !TYPE_UNSIGNED (index_elt_type
))
2165 index_elt_type
= build_nonstandard_integer_type
2166 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type
)), true);
2167 index_vec_type
= build_vector_type (index_elt_type
, nunits
);
2170 tree_vector_builder
v (index_vec_type
, 1, 3);
2171 for (unsigned int i
= 0; i
< 3; ++i
)
2172 v
.quick_push (build_int_cstu (index_elt_type
, base
+ i
* step
));
2176 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2177 elements are A and the rest are B. */
2180 build_vector_a_then_b (tree vec_type
, unsigned int num_a
, tree a
, tree b
)
2182 gcc_assert (known_le (num_a
, TYPE_VECTOR_SUBPARTS (vec_type
)));
2183 unsigned int count
= constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type
));
2184 /* Optimize the constant case. */
2185 if ((count
& 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type
).is_constant ())
2187 tree_vector_builder
builder (vec_type
, count
, 2);
2188 for (unsigned int i
= 0; i
< count
* 2; ++i
)
2189 builder
.quick_push (i
< num_a
? a
: b
);
2190 return builder
.build ();
2193 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2194 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2197 recompute_constructor_flags (tree c
)
2201 bool constant_p
= true;
2202 bool side_effects_p
= false;
2203 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2205 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2207 /* Mostly ctors will have elts that don't have side-effects, so
2208 the usual case is to scan all the elements. Hence a single
2209 loop for both const and side effects, rather than one loop
2210 each (with early outs). */
2211 if (!TREE_CONSTANT (val
))
2213 if (TREE_SIDE_EFFECTS (val
))
2214 side_effects_p
= true;
2217 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
2218 TREE_CONSTANT (c
) = constant_p
;
2221 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2225 verify_constructor_flags (tree c
)
2229 bool constant_p
= TREE_CONSTANT (c
);
2230 bool side_effects_p
= TREE_SIDE_EFFECTS (c
);
2231 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2233 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2235 if (constant_p
&& !TREE_CONSTANT (val
))
2236 internal_error ("non-constant element in constant CONSTRUCTOR");
2237 if (!side_effects_p
&& TREE_SIDE_EFFECTS (val
))
2238 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2242 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2243 are in the vec pointed to by VALS. */
2245 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals MEM_STAT_DECL
)
2247 tree c
= make_node (CONSTRUCTOR PASS_MEM_STAT
);
2249 TREE_TYPE (c
) = type
;
2250 CONSTRUCTOR_ELTS (c
) = vals
;
2252 recompute_constructor_flags (c
);
2257 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2260 build_constructor_single (tree type
, tree index
, tree value
)
2262 vec
<constructor_elt
, va_gc
> *v
;
2263 constructor_elt elt
= {index
, value
};
2266 v
->quick_push (elt
);
2268 return build_constructor (type
, v
);
2272 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2273 are in a list pointed to by VALS. */
2275 build_constructor_from_list (tree type
, tree vals
)
2278 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2282 vec_alloc (v
, list_length (vals
));
2283 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
2284 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
2287 return build_constructor (type
, v
);
2290 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2291 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2292 fields in the constructor remain null. */
2295 build_constructor_from_vec (tree type
, const vec
<tree
, va_gc
> *vals
)
2297 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2300 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, t
);
2302 return build_constructor (type
, v
);
2305 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2306 of elements, provided as index/value pairs. */
2309 build_constructor_va (tree type
, int nelts
, ...)
2311 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2314 va_start (p
, nelts
);
2315 vec_alloc (v
, nelts
);
2318 tree index
= va_arg (p
, tree
);
2319 tree value
= va_arg (p
, tree
);
2320 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
2323 return build_constructor (type
, v
);
2326 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2329 build_clobber (tree type
)
2331 tree clobber
= build_constructor (type
, NULL
);
2332 TREE_THIS_VOLATILE (clobber
) = true;
2336 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2339 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
2342 FIXED_VALUE_TYPE
*fp
;
2344 v
= make_node (FIXED_CST
);
2345 fp
= ggc_alloc
<fixed_value
> ();
2346 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
2348 TREE_TYPE (v
) = type
;
2349 TREE_FIXED_CST_PTR (v
) = fp
;
2353 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2356 build_real (tree type
, REAL_VALUE_TYPE d
)
2359 REAL_VALUE_TYPE
*dp
;
2362 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2363 Consider doing it via real_convert now. */
2365 v
= make_node (REAL_CST
);
2366 dp
= ggc_alloc
<real_value
> ();
2367 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
2369 TREE_TYPE (v
) = type
;
2370 TREE_REAL_CST_PTR (v
) = dp
;
2371 TREE_OVERFLOW (v
) = overflow
;
2375 /* Like build_real, but first truncate D to the type. */
2378 build_real_truncate (tree type
, REAL_VALUE_TYPE d
)
2380 return build_real (type
, real_value_truncate (TYPE_MODE (type
), d
));
2383 /* Return a new REAL_CST node whose type is TYPE
2384 and whose value is the integer value of the INTEGER_CST node I. */
2387 real_value_from_int_cst (const_tree type
, const_tree i
)
2391 /* Clear all bits of the real value type so that we can later do
2392 bitwise comparisons to see if two values are the same. */
2393 memset (&d
, 0, sizeof d
);
2395 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, wi::to_wide (i
),
2396 TYPE_SIGN (TREE_TYPE (i
)));
2400 /* Given a tree representing an integer constant I, return a tree
2401 representing the same value as a floating-point constant of type TYPE. */
2404 build_real_from_int_cst (tree type
, const_tree i
)
2407 int overflow
= TREE_OVERFLOW (i
);
2409 v
= build_real (type
, real_value_from_int_cst (type
, i
));
2411 TREE_OVERFLOW (v
) |= overflow
;
2415 /* Return a new REAL_CST node whose type is TYPE
2416 and whose value is the integer value I which has sign SGN. */
2419 build_real_from_wide (tree type
, const wide_int_ref
&i
, signop sgn
)
2423 /* Clear all bits of the real value type so that we can later do
2424 bitwise comparisons to see if two values are the same. */
2425 memset (&d
, 0, sizeof d
);
2427 real_from_integer (&d
, TYPE_MODE (type
), i
, sgn
);
2428 return build_real (type
, d
);
2431 /* Return a newly constructed STRING_CST node whose value is the LEN
2432 characters at STR when STR is nonnull, or all zeros otherwise.
2433 Note that for a C string literal, LEN should include the trailing NUL.
2434 The TREE_TYPE is not initialized. */
2437 build_string (unsigned len
, const char *str
/*= NULL */)
2439 /* Do not waste bytes provided by padding of struct tree_string. */
2440 unsigned size
= len
+ offsetof (struct tree_string
, str
) + 1;
2442 record_node_allocation_statistics (STRING_CST
, size
);
2444 tree s
= (tree
) ggc_internal_alloc (size
);
2446 memset (s
, 0, sizeof (struct tree_typed
));
2447 TREE_SET_CODE (s
, STRING_CST
);
2448 TREE_CONSTANT (s
) = 1;
2449 TREE_STRING_LENGTH (s
) = len
;
2451 memcpy (s
->string
.str
, str
, len
);
2453 memset (s
->string
.str
, 0, len
);
2454 s
->string
.str
[len
] = '\0';
2459 /* Return a newly constructed COMPLEX_CST node whose value is
2460 specified by the real and imaginary parts REAL and IMAG.
2461 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2462 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2465 build_complex (tree type
, tree real
, tree imag
)
2467 gcc_assert (CONSTANT_CLASS_P (real
));
2468 gcc_assert (CONSTANT_CLASS_P (imag
));
2470 tree t
= make_node (COMPLEX_CST
);
2472 TREE_REALPART (t
) = real
;
2473 TREE_IMAGPART (t
) = imag
;
2474 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
2475 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
2479 /* Build a complex (inf +- 0i), such as for the result of cproj.
2480 TYPE is the complex tree type of the result. If NEG is true, the
2481 imaginary zero is negative. */
2484 build_complex_inf (tree type
, bool neg
)
2486 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
2490 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
2491 build_real (TREE_TYPE (type
), rzero
));
2494 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2495 element is set to 1. In particular, this is 1 + i for complex types. */
2498 build_each_one_cst (tree type
)
2500 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2502 tree scalar
= build_one_cst (TREE_TYPE (type
));
2503 return build_complex (type
, scalar
, scalar
);
2506 return build_one_cst (type
);
2509 /* Return a constant of arithmetic type TYPE which is the
2510 multiplicative identity of the set TYPE. */
2513 build_one_cst (tree type
)
2515 switch (TREE_CODE (type
))
2517 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2518 case POINTER_TYPE
: case REFERENCE_TYPE
:
2520 return build_int_cst (type
, 1);
2523 return build_real (type
, dconst1
);
2525 case FIXED_POINT_TYPE
:
2526 /* We can only generate 1 for accum types. */
2527 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2528 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
2532 tree scalar
= build_one_cst (TREE_TYPE (type
));
2534 return build_vector_from_val (type
, scalar
);
2538 return build_complex (type
,
2539 build_one_cst (TREE_TYPE (type
)),
2540 build_zero_cst (TREE_TYPE (type
)));
2547 /* Return an integer of type TYPE containing all 1's in as much precision as
2548 it contains, or a complex or vector whose subparts are such integers. */
2551 build_all_ones_cst (tree type
)
2553 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2555 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
2556 return build_complex (type
, scalar
, scalar
);
2559 return build_minus_one_cst (type
);
2562 /* Return a constant of arithmetic type TYPE which is the
2563 opposite of the multiplicative identity of the set TYPE. */
2566 build_minus_one_cst (tree type
)
2568 switch (TREE_CODE (type
))
2570 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2571 case POINTER_TYPE
: case REFERENCE_TYPE
:
2573 return build_int_cst (type
, -1);
2576 return build_real (type
, dconstm1
);
2578 case FIXED_POINT_TYPE
:
2579 /* We can only generate 1 for accum types. */
2580 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2581 return build_fixed (type
,
2582 fixed_from_double_int (double_int_minus_one
,
2583 SCALAR_TYPE_MODE (type
)));
2587 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
2589 return build_vector_from_val (type
, scalar
);
2593 return build_complex (type
,
2594 build_minus_one_cst (TREE_TYPE (type
)),
2595 build_zero_cst (TREE_TYPE (type
)));
2602 /* Build 0 constant of type TYPE. This is used by constructor folding
2603 and thus the constant should be represented in memory by
2607 build_zero_cst (tree type
)
2609 switch (TREE_CODE (type
))
2611 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2612 case POINTER_TYPE
: case REFERENCE_TYPE
:
2613 case OFFSET_TYPE
: case NULLPTR_TYPE
:
2614 return build_int_cst (type
, 0);
2617 return build_real (type
, dconst0
);
2619 case FIXED_POINT_TYPE
:
2620 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
2624 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2626 return build_vector_from_val (type
, scalar
);
2631 tree zero
= build_zero_cst (TREE_TYPE (type
));
2633 return build_complex (type
, zero
, zero
);
2637 if (!AGGREGATE_TYPE_P (type
))
2638 return fold_convert (type
, integer_zero_node
);
2639 return build_constructor (type
, NULL
);
2644 /* Build a BINFO with LEN language slots. */
2647 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL
)
2650 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2651 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2653 record_node_allocation_statistics (TREE_BINFO
, length
);
2655 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2657 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2659 TREE_SET_CODE (t
, TREE_BINFO
);
2661 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2666 /* Create a CASE_LABEL_EXPR tree node and return it. */
2669 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2671 tree t
= make_node (CASE_LABEL_EXPR
);
2673 TREE_TYPE (t
) = void_type_node
;
2674 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2676 CASE_LOW (t
) = low_value
;
2677 CASE_HIGH (t
) = high_value
;
2678 CASE_LABEL (t
) = label_decl
;
2679 CASE_CHAIN (t
) = NULL_TREE
;
2684 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2685 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2686 The latter determines the length of the HOST_WIDE_INT vector. */
2689 make_int_cst (int len
, int ext_len MEM_STAT_DECL
)
2692 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2693 + sizeof (struct tree_int_cst
));
2696 record_node_allocation_statistics (INTEGER_CST
, length
);
2698 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2700 TREE_SET_CODE (t
, INTEGER_CST
);
2701 TREE_INT_CST_NUNITS (t
) = len
;
2702 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2703 /* to_offset can only be applied to trees that are offset_int-sized
2704 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2705 must be exactly the precision of offset_int and so LEN is correct. */
2706 if (ext_len
<= OFFSET_INT_ELTS
)
2707 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2709 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2711 TREE_CONSTANT (t
) = 1;
2716 /* Build a newly constructed TREE_VEC node of length LEN. */
2719 make_tree_vec (int len MEM_STAT_DECL
)
2722 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2724 record_node_allocation_statistics (TREE_VEC
, length
);
2726 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2728 TREE_SET_CODE (t
, TREE_VEC
);
2729 TREE_VEC_LENGTH (t
) = len
;
2734 /* Grow a TREE_VEC node to new length LEN. */
2737 grow_tree_vec (tree v
, int len MEM_STAT_DECL
)
2739 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2741 int oldlen
= TREE_VEC_LENGTH (v
);
2742 gcc_assert (len
> oldlen
);
2744 size_t oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2745 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2747 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2749 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2751 TREE_VEC_LENGTH (v
) = len
;
2756 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2757 fixed, and scalar, complex or vector. */
2760 zerop (const_tree expr
)
2762 return (integer_zerop (expr
)
2763 || real_zerop (expr
)
2764 || fixed_zerop (expr
));
2767 /* Return 1 if EXPR is the integer constant zero or a complex constant
2768 of zero, or a location wrapper for such a constant. */
2771 integer_zerop (const_tree expr
)
2773 STRIP_ANY_LOCATION_WRAPPER (expr
);
2775 switch (TREE_CODE (expr
))
2778 return wi::to_wide (expr
) == 0;
2780 return (integer_zerop (TREE_REALPART (expr
))
2781 && integer_zerop (TREE_IMAGPART (expr
)));
2783 return (VECTOR_CST_NPATTERNS (expr
) == 1
2784 && VECTOR_CST_DUPLICATE_P (expr
)
2785 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2791 /* Return 1 if EXPR is the integer constant one or the corresponding
2792 complex constant, or a location wrapper for such a constant. */
2795 integer_onep (const_tree expr
)
2797 STRIP_ANY_LOCATION_WRAPPER (expr
);
2799 switch (TREE_CODE (expr
))
2802 return wi::eq_p (wi::to_widest (expr
), 1);
2804 return (integer_onep (TREE_REALPART (expr
))
2805 && integer_zerop (TREE_IMAGPART (expr
)));
2807 return (VECTOR_CST_NPATTERNS (expr
) == 1
2808 && VECTOR_CST_DUPLICATE_P (expr
)
2809 && integer_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2815 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2816 return 1 if every piece is the integer constant one.
2817 Also return 1 for location wrappers for such a constant. */
2820 integer_each_onep (const_tree expr
)
2822 STRIP_ANY_LOCATION_WRAPPER (expr
);
2824 if (TREE_CODE (expr
) == COMPLEX_CST
)
2825 return (integer_onep (TREE_REALPART (expr
))
2826 && integer_onep (TREE_IMAGPART (expr
)));
2828 return integer_onep (expr
);
2831 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2832 it contains, or a complex or vector whose subparts are such integers,
2833 or a location wrapper for such a constant. */
2836 integer_all_onesp (const_tree expr
)
2838 STRIP_ANY_LOCATION_WRAPPER (expr
);
2840 if (TREE_CODE (expr
) == COMPLEX_CST
2841 && integer_all_onesp (TREE_REALPART (expr
))
2842 && integer_all_onesp (TREE_IMAGPART (expr
)))
2845 else if (TREE_CODE (expr
) == VECTOR_CST
)
2846 return (VECTOR_CST_NPATTERNS (expr
) == 1
2847 && VECTOR_CST_DUPLICATE_P (expr
)
2848 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2850 else if (TREE_CODE (expr
) != INTEGER_CST
)
2853 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
)
2854 == wi::to_wide (expr
));
2857 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2858 for such a constant. */
2861 integer_minus_onep (const_tree expr
)
2863 STRIP_ANY_LOCATION_WRAPPER (expr
);
2865 if (TREE_CODE (expr
) == COMPLEX_CST
)
2866 return (integer_all_onesp (TREE_REALPART (expr
))
2867 && integer_zerop (TREE_IMAGPART (expr
)));
2869 return integer_all_onesp (expr
);
2872 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2873 one bit on), or a location wrapper for such a constant. */
2876 integer_pow2p (const_tree expr
)
2878 STRIP_ANY_LOCATION_WRAPPER (expr
);
2880 if (TREE_CODE (expr
) == COMPLEX_CST
2881 && integer_pow2p (TREE_REALPART (expr
))
2882 && integer_zerop (TREE_IMAGPART (expr
)))
2885 if (TREE_CODE (expr
) != INTEGER_CST
)
2888 return wi::popcount (wi::to_wide (expr
)) == 1;
2891 /* Return 1 if EXPR is an integer constant other than zero or a
2892 complex constant other than zero, or a location wrapper for such a
2896 integer_nonzerop (const_tree expr
)
2898 STRIP_ANY_LOCATION_WRAPPER (expr
);
2900 return ((TREE_CODE (expr
) == INTEGER_CST
2901 && wi::to_wide (expr
) != 0)
2902 || (TREE_CODE (expr
) == COMPLEX_CST
2903 && (integer_nonzerop (TREE_REALPART (expr
))
2904 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2907 /* Return 1 if EXPR is the integer constant one. For vector,
2908 return 1 if every piece is the integer constant minus one
2909 (representing the value TRUE).
2910 Also return 1 for location wrappers for such a constant. */
2913 integer_truep (const_tree expr
)
2915 STRIP_ANY_LOCATION_WRAPPER (expr
);
2917 if (TREE_CODE (expr
) == VECTOR_CST
)
2918 return integer_all_onesp (expr
);
2919 return integer_onep (expr
);
2922 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2923 for such a constant. */
2926 fixed_zerop (const_tree expr
)
2928 STRIP_ANY_LOCATION_WRAPPER (expr
);
2930 return (TREE_CODE (expr
) == FIXED_CST
2931 && TREE_FIXED_CST (expr
).data
.is_zero ());
2934 /* Return the power of two represented by a tree node known to be a
2938 tree_log2 (const_tree expr
)
2940 if (TREE_CODE (expr
) == COMPLEX_CST
)
2941 return tree_log2 (TREE_REALPART (expr
));
2943 return wi::exact_log2 (wi::to_wide (expr
));
2946 /* Similar, but return the largest integer Y such that 2 ** Y is less
2947 than or equal to EXPR. */
2950 tree_floor_log2 (const_tree expr
)
2952 if (TREE_CODE (expr
) == COMPLEX_CST
)
2953 return tree_log2 (TREE_REALPART (expr
));
2955 return wi::floor_log2 (wi::to_wide (expr
));
2958 /* Return number of known trailing zero bits in EXPR, or, if the value of
2959 EXPR is known to be zero, the precision of it's type. */
2962 tree_ctz (const_tree expr
)
2964 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2965 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2968 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2969 switch (TREE_CODE (expr
))
2972 ret1
= wi::ctz (wi::to_wide (expr
));
2973 return MIN (ret1
, prec
);
2975 ret1
= wi::ctz (get_nonzero_bits (expr
));
2976 return MIN (ret1
, prec
);
2983 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2986 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2987 return MIN (ret1
, ret2
);
2988 case POINTER_PLUS_EXPR
:
2989 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2990 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2991 /* Second operand is sizetype, which could be in theory
2992 wider than pointer's precision. Make sure we never
2993 return more than prec. */
2994 ret2
= MIN (ret2
, prec
);
2995 return MIN (ret1
, ret2
);
2997 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2998 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2999 return MAX (ret1
, ret2
);
3001 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3002 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3003 return MIN (ret1
+ ret2
, prec
);
3005 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3006 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3007 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3009 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3010 return MIN (ret1
+ ret2
, prec
);
3014 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3015 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3017 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3018 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3023 case TRUNC_DIV_EXPR
:
3025 case FLOOR_DIV_EXPR
:
3026 case ROUND_DIV_EXPR
:
3027 case EXACT_DIV_EXPR
:
3028 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
3029 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
3031 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
3034 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3042 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3043 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
3045 return MIN (ret1
, prec
);
3047 return tree_ctz (TREE_OPERAND (expr
, 0));
3049 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
3052 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
3053 return MIN (ret1
, ret2
);
3055 return tree_ctz (TREE_OPERAND (expr
, 1));
3057 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
3058 if (ret1
> BITS_PER_UNIT
)
3060 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
3061 return MIN (ret1
, prec
);
3069 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3070 decimal float constants, so don't return 1 for them.
3071 Also return 1 for location wrappers around such a constant. */
3074 real_zerop (const_tree expr
)
3076 STRIP_ANY_LOCATION_WRAPPER (expr
);
3078 switch (TREE_CODE (expr
))
3081 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
3082 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3084 return real_zerop (TREE_REALPART (expr
))
3085 && real_zerop (TREE_IMAGPART (expr
));
3088 /* Don't simply check for a duplicate because the predicate
3089 accepts both +0.0 and -0.0. */
3090 unsigned count
= vector_cst_encoded_nelts (expr
);
3091 for (unsigned int i
= 0; i
< count
; ++i
)
3092 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3101 /* Return 1 if EXPR is the real constant one in real or complex form.
3102 Trailing zeroes matter for decimal float constants, so don't return
3104 Also return 1 for location wrappers around such a constant. */
3107 real_onep (const_tree expr
)
3109 STRIP_ANY_LOCATION_WRAPPER (expr
);
3111 switch (TREE_CODE (expr
))
3114 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
3115 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3117 return real_onep (TREE_REALPART (expr
))
3118 && real_zerop (TREE_IMAGPART (expr
));
3120 return (VECTOR_CST_NPATTERNS (expr
) == 1
3121 && VECTOR_CST_DUPLICATE_P (expr
)
3122 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3128 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3129 matter for decimal float constants, so don't return 1 for them.
3130 Also return 1 for location wrappers around such a constant. */
3133 real_minus_onep (const_tree expr
)
3135 STRIP_ANY_LOCATION_WRAPPER (expr
);
3137 switch (TREE_CODE (expr
))
3140 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
3141 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3143 return real_minus_onep (TREE_REALPART (expr
))
3144 && real_zerop (TREE_IMAGPART (expr
));
3146 return (VECTOR_CST_NPATTERNS (expr
) == 1
3147 && VECTOR_CST_DUPLICATE_P (expr
)
3148 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3154 /* Nonzero if EXP is a constant or a cast of a constant. */
3157 really_constant_p (const_tree exp
)
3159 /* This is not quite the same as STRIP_NOPS. It does more. */
3160 while (CONVERT_EXPR_P (exp
)
3161 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3162 exp
= TREE_OPERAND (exp
, 0);
3163 return TREE_CONSTANT (exp
);
3166 /* Return true if T holds a polynomial pointer difference, storing it in
3167 *VALUE if so. A true return means that T's precision is no greater
3168 than 64 bits, which is the largest address space we support, so *VALUE
3169 never loses precision. However, the signedness of the result does
3170 not necessarily match the signedness of T: sometimes an unsigned type
3171 like sizetype is used to encode a value that is actually negative. */
3174 ptrdiff_tree_p (const_tree t
, poly_int64_pod
*value
)
3178 if (TREE_CODE (t
) == INTEGER_CST
)
3180 if (!cst_and_fits_in_hwi (t
))
3182 *value
= int_cst_value (t
);
3185 if (POLY_INT_CST_P (t
))
3187 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3188 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t
, i
)))
3190 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3191 value
->coeffs
[i
] = int_cst_value (POLY_INT_CST_COEFF (t
, i
));
3198 tree_to_poly_int64 (const_tree t
)
3200 gcc_assert (tree_fits_poly_int64_p (t
));
3201 if (POLY_INT_CST_P (t
))
3202 return poly_int_cst_value (t
).force_shwi ();
3203 return TREE_INT_CST_LOW (t
);
3207 tree_to_poly_uint64 (const_tree t
)
3209 gcc_assert (tree_fits_poly_uint64_p (t
));
3210 if (POLY_INT_CST_P (t
))
3211 return poly_int_cst_value (t
).force_uhwi ();
3212 return TREE_INT_CST_LOW (t
);
3215 /* Return first list element whose TREE_VALUE is ELEM.
3216 Return 0 if ELEM is not in LIST. */
3219 value_member (tree elem
, tree list
)
3223 if (elem
== TREE_VALUE (list
))
3225 list
= TREE_CHAIN (list
);
3230 /* Return first list element whose TREE_PURPOSE is ELEM.
3231 Return 0 if ELEM is not in LIST. */
3234 purpose_member (const_tree elem
, tree list
)
3238 if (elem
== TREE_PURPOSE (list
))
3240 list
= TREE_CHAIN (list
);
3245 /* Return true if ELEM is in V. */
3248 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
3252 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
3258 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3262 chain_index (int idx
, tree chain
)
3264 for (; chain
&& idx
> 0; --idx
)
3265 chain
= TREE_CHAIN (chain
);
3269 /* Return nonzero if ELEM is part of the chain CHAIN. */
3272 chain_member (const_tree elem
, const_tree chain
)
3278 chain
= DECL_CHAIN (chain
);
3284 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3285 We expect a null pointer to mark the end of the chain.
3286 This is the Lisp primitive `length'. */
3289 list_length (const_tree t
)
3292 #ifdef ENABLE_TREE_CHECKING
3300 #ifdef ENABLE_TREE_CHECKING
3303 gcc_assert (p
!= q
);
3311 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3312 UNION_TYPE TYPE, or NULL_TREE if none. */
3315 first_field (const_tree type
)
3317 tree t
= TYPE_FIELDS (type
);
3318 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
3323 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3324 UNION_TYPE TYPE, or NULL_TREE if none. */
3327 last_field (const_tree type
)
3329 tree last
= NULL_TREE
;
3331 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= TREE_CHAIN (fld
))
3333 if (TREE_CODE (fld
) != FIELD_DECL
)
3342 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3343 by modifying the last node in chain 1 to point to chain 2.
3344 This is the Lisp primitive `nconc'. */
3347 chainon (tree op1
, tree op2
)
3356 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
3358 TREE_CHAIN (t1
) = op2
;
3360 #ifdef ENABLE_TREE_CHECKING
3363 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
3364 gcc_assert (t2
!= t1
);
3371 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3374 tree_last (tree chain
)
3378 while ((next
= TREE_CHAIN (chain
)))
3383 /* Reverse the order of elements in the chain T,
3384 and return the new head of the chain (old last element). */
3389 tree prev
= 0, decl
, next
;
3390 for (decl
= t
; decl
; decl
= next
)
3392 /* We shouldn't be using this function to reverse BLOCK chains; we
3393 have blocks_nreverse for that. */
3394 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
3395 next
= TREE_CHAIN (decl
);
3396 TREE_CHAIN (decl
) = prev
;
3402 /* Return a newly created TREE_LIST node whose
3403 purpose and value fields are PARM and VALUE. */
3406 build_tree_list (tree parm
, tree value MEM_STAT_DECL
)
3408 tree t
= make_node (TREE_LIST PASS_MEM_STAT
);
3409 TREE_PURPOSE (t
) = parm
;
3410 TREE_VALUE (t
) = value
;
3414 /* Build a chain of TREE_LIST nodes from a vector. */
3417 build_tree_list_vec (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
3419 tree ret
= NULL_TREE
;
3423 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
3425 *pp
= build_tree_list (NULL
, t PASS_MEM_STAT
);
3426 pp
= &TREE_CHAIN (*pp
);
3431 /* Return a newly created TREE_LIST node whose
3432 purpose and value fields are PURPOSE and VALUE
3433 and whose TREE_CHAIN is CHAIN. */
3436 tree_cons (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
3440 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
3441 memset (node
, 0, sizeof (struct tree_common
));
3443 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
3445 TREE_SET_CODE (node
, TREE_LIST
);
3446 TREE_CHAIN (node
) = chain
;
3447 TREE_PURPOSE (node
) = purpose
;
3448 TREE_VALUE (node
) = value
;
3452 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3456 ctor_to_vec (tree ctor
)
3458 vec
<tree
, va_gc
> *vec
;
3459 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
3463 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
3464 vec
->quick_push (val
);
3469 /* Return the size nominally occupied by an object of type TYPE
3470 when it resides in memory. The value is measured in units of bytes,
3471 and its data type is that normally used for type sizes
3472 (which is the first type created by make_signed_type or
3473 make_unsigned_type). */
3476 size_in_bytes_loc (location_t loc
, const_tree type
)
3480 if (type
== error_mark_node
)
3481 return integer_zero_node
;
3483 type
= TYPE_MAIN_VARIANT (type
);
3484 t
= TYPE_SIZE_UNIT (type
);
3488 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
3489 return size_zero_node
;
3495 /* Return the size of TYPE (in bytes) as a wide integer
3496 or return -1 if the size can vary or is larger than an integer. */
3499 int_size_in_bytes (const_tree type
)
3503 if (type
== error_mark_node
)
3506 type
= TYPE_MAIN_VARIANT (type
);
3507 t
= TYPE_SIZE_UNIT (type
);
3509 if (t
&& tree_fits_uhwi_p (t
))
3510 return TREE_INT_CST_LOW (t
);
3515 /* Return the maximum size of TYPE (in bytes) as a wide integer
3516 or return -1 if the size can vary or is larger than an integer. */
3519 max_int_size_in_bytes (const_tree type
)
3521 HOST_WIDE_INT size
= -1;
3524 /* If this is an array type, check for a possible MAX_SIZE attached. */
3526 if (TREE_CODE (type
) == ARRAY_TYPE
)
3528 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3530 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3531 size
= tree_to_uhwi (size_tree
);
3534 /* If we still haven't been able to get a size, see if the language
3535 can compute a maximum size. */
3539 size_tree
= lang_hooks
.types
.max_size (type
);
3541 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3542 size
= tree_to_uhwi (size_tree
);
3548 /* Return the bit position of FIELD, in bits from the start of the record.
3549 This is a tree of type bitsizetype. */
3552 bit_position (const_tree field
)
3554 return bit_from_pos (DECL_FIELD_OFFSET (field
),
3555 DECL_FIELD_BIT_OFFSET (field
));
3558 /* Return the byte position of FIELD, in bytes from the start of the record.
3559 This is a tree of type sizetype. */
3562 byte_position (const_tree field
)
3564 return byte_from_pos (DECL_FIELD_OFFSET (field
),
3565 DECL_FIELD_BIT_OFFSET (field
));
3568 /* Likewise, but return as an integer. It must be representable in
3569 that way (since it could be a signed value, we don't have the
3570 option of returning -1 like int_size_in_byte can. */
3573 int_byte_position (const_tree field
)
3575 return tree_to_shwi (byte_position (field
));
3578 /* Return, as a tree node, the number of elements for TYPE (which is an
3579 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3582 array_type_nelts (const_tree type
)
3584 tree index_type
, min
, max
;
3586 /* If they did it with unspecified bounds, then we should have already
3587 given an error about it before we got here. */
3588 if (! TYPE_DOMAIN (type
))
3589 return error_mark_node
;
3591 index_type
= TYPE_DOMAIN (type
);
3592 min
= TYPE_MIN_VALUE (index_type
);
3593 max
= TYPE_MAX_VALUE (index_type
);
3595 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3598 /* zero sized arrays are represented from C FE as complete types with
3599 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3600 them as min 0, max -1. */
3601 if (COMPLETE_TYPE_P (type
)
3602 && integer_zerop (TYPE_SIZE (type
))
3603 && integer_zerop (min
))
3604 return build_int_cst (TREE_TYPE (min
), -1);
3606 return error_mark_node
;
3609 return (integer_zerop (min
)
3611 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3614 /* If arg is static -- a reference to an object in static storage -- then
3615 return the object. This is not the same as the C meaning of `static'.
3616 If arg isn't static, return NULL. */
3621 switch (TREE_CODE (arg
))
3624 /* Nested functions are static, even though taking their address will
3625 involve a trampoline as we unnest the nested function and create
3626 the trampoline on the tree level. */
3630 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3631 && ! DECL_THREAD_LOCAL_P (arg
)
3632 && ! DECL_DLLIMPORT_P (arg
)
3636 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3640 return TREE_STATIC (arg
) ? arg
: NULL
;
3647 /* If the thing being referenced is not a field, then it is
3648 something language specific. */
3649 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3651 /* If we are referencing a bitfield, we can't evaluate an
3652 ADDR_EXPR at compile time and so it isn't a constant. */
3653 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3656 return staticp (TREE_OPERAND (arg
, 0));
3662 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3665 case ARRAY_RANGE_REF
:
3666 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3667 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3668 return staticp (TREE_OPERAND (arg
, 0));
3672 case COMPOUND_LITERAL_EXPR
:
3673 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3683 /* Return whether OP is a DECL whose address is function-invariant. */
3686 decl_address_invariant_p (const_tree op
)
3688 /* The conditions below are slightly less strict than the one in
3691 switch (TREE_CODE (op
))
3700 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3701 || DECL_THREAD_LOCAL_P (op
)
3702 || DECL_CONTEXT (op
) == current_function_decl
3703 || decl_function_context (op
) == current_function_decl
)
3708 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3709 || decl_function_context (op
) == current_function_decl
)
3720 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3723 decl_address_ip_invariant_p (const_tree op
)
3725 /* The conditions below are slightly less strict than the one in
3728 switch (TREE_CODE (op
))
3736 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3737 && !DECL_DLLIMPORT_P (op
))
3738 || DECL_THREAD_LOCAL_P (op
))
3743 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3755 /* Return true if T is function-invariant (internal function, does
3756 not handle arithmetic; that's handled in skip_simple_arithmetic and
3757 tree_invariant_p). */
3760 tree_invariant_p_1 (tree t
)
3764 if (TREE_CONSTANT (t
)
3765 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3768 switch (TREE_CODE (t
))
3774 op
= TREE_OPERAND (t
, 0);
3775 while (handled_component_p (op
))
3777 switch (TREE_CODE (op
))
3780 case ARRAY_RANGE_REF
:
3781 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3782 || TREE_OPERAND (op
, 2) != NULL_TREE
3783 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3788 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3794 op
= TREE_OPERAND (op
, 0);
3797 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3806 /* Return true if T is function-invariant. */
3809 tree_invariant_p (tree t
)
3811 tree inner
= skip_simple_arithmetic (t
);
3812 return tree_invariant_p_1 (inner
);
3815 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3816 Do this to any expression which may be used in more than one place,
3817 but must be evaluated only once.
3819 Normally, expand_expr would reevaluate the expression each time.
3820 Calling save_expr produces something that is evaluated and recorded
3821 the first time expand_expr is called on it. Subsequent calls to
3822 expand_expr just reuse the recorded value.
3824 The call to expand_expr that generates code that actually computes
3825 the value is the first call *at compile time*. Subsequent calls
3826 *at compile time* generate code to use the saved value.
3827 This produces correct result provided that *at run time* control
3828 always flows through the insns made by the first expand_expr
3829 before reaching the other places where the save_expr was evaluated.
3830 You, the caller of save_expr, must make sure this is so.
3832 Constants, and certain read-only nodes, are returned with no
3833 SAVE_EXPR because that is safe. Expressions containing placeholders
3834 are not touched; see tree.def for an explanation of what these
3838 save_expr (tree expr
)
3842 /* If the tree evaluates to a constant, then we don't want to hide that
3843 fact (i.e. this allows further folding, and direct checks for constants).
3844 However, a read-only object that has side effects cannot be bypassed.
3845 Since it is no problem to reevaluate literals, we just return the
3847 inner
= skip_simple_arithmetic (expr
);
3848 if (TREE_CODE (inner
) == ERROR_MARK
)
3851 if (tree_invariant_p_1 (inner
))
3854 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3855 it means that the size or offset of some field of an object depends on
3856 the value within another field.
3858 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3859 and some variable since it would then need to be both evaluated once and
3860 evaluated more than once. Front-ends must assure this case cannot
3861 happen by surrounding any such subexpressions in their own SAVE_EXPR
3862 and forcing evaluation at the proper time. */
3863 if (contains_placeholder_p (inner
))
3866 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3868 /* This expression might be placed ahead of a jump to ensure that the
3869 value was computed on both sides of the jump. So make sure it isn't
3870 eliminated as dead. */
3871 TREE_SIDE_EFFECTS (expr
) = 1;
3875 /* Look inside EXPR into any simple arithmetic operations. Return the
3876 outermost non-arithmetic or non-invariant node. */
3879 skip_simple_arithmetic (tree expr
)
3881 /* We don't care about whether this can be used as an lvalue in this
3883 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3884 expr
= TREE_OPERAND (expr
, 0);
3886 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3887 a constant, it will be more efficient to not make another SAVE_EXPR since
3888 it will allow better simplification and GCSE will be able to merge the
3889 computations if they actually occur. */
3892 if (UNARY_CLASS_P (expr
))
3893 expr
= TREE_OPERAND (expr
, 0);
3894 else if (BINARY_CLASS_P (expr
))
3896 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3897 expr
= TREE_OPERAND (expr
, 0);
3898 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3899 expr
= TREE_OPERAND (expr
, 1);
3910 /* Look inside EXPR into simple arithmetic operations involving constants.
3911 Return the outermost non-arithmetic or non-constant node. */
3914 skip_simple_constant_arithmetic (tree expr
)
3916 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3917 expr
= TREE_OPERAND (expr
, 0);
3921 if (UNARY_CLASS_P (expr
))
3922 expr
= TREE_OPERAND (expr
, 0);
3923 else if (BINARY_CLASS_P (expr
))
3925 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3926 expr
= TREE_OPERAND (expr
, 0);
3927 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3928 expr
= TREE_OPERAND (expr
, 1);
3939 /* Return which tree structure is used by T. */
3941 enum tree_node_structure_enum
3942 tree_node_structure (const_tree t
)
3944 const enum tree_code code
= TREE_CODE (t
);
3945 return tree_node_structure_for_code (code
);
3948 /* Set various status flags when building a CALL_EXPR object T. */
3951 process_call_operands (tree t
)
3953 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3954 bool read_only
= false;
3955 int i
= call_expr_flags (t
);
3957 /* Calls have side-effects, except those to const or pure functions. */
3958 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3959 side_effects
= true;
3960 /* Propagate TREE_READONLY of arguments for const functions. */
3964 if (!side_effects
|| read_only
)
3965 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3967 tree op
= TREE_OPERAND (t
, i
);
3968 if (op
&& TREE_SIDE_EFFECTS (op
))
3969 side_effects
= true;
3970 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3974 TREE_SIDE_EFFECTS (t
) = side_effects
;
3975 TREE_READONLY (t
) = read_only
;
3978 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3979 size or offset that depends on a field within a record. */
3982 contains_placeholder_p (const_tree exp
)
3984 enum tree_code code
;
3989 code
= TREE_CODE (exp
);
3990 if (code
== PLACEHOLDER_EXPR
)
3993 switch (TREE_CODE_CLASS (code
))
3996 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3997 position computations since they will be converted into a
3998 WITH_RECORD_EXPR involving the reference, which will assume
3999 here will be valid. */
4000 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4002 case tcc_exceptional
:
4003 if (code
== TREE_LIST
)
4004 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
4005 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
4010 case tcc_comparison
:
4011 case tcc_expression
:
4015 /* Ignoring the first operand isn't quite right, but works best. */
4016 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
4019 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4020 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
4021 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
4024 /* The save_expr function never wraps anything containing
4025 a PLACEHOLDER_EXPR. */
4032 switch (TREE_CODE_LENGTH (code
))
4035 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4037 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4038 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
4049 const_call_expr_arg_iterator iter
;
4050 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
4051 if (CONTAINS_PLACEHOLDER_P (arg
))
4065 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4066 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4070 type_contains_placeholder_1 (const_tree type
)
4072 /* If the size contains a placeholder or the parent type (component type in
4073 the case of arrays) type involves a placeholder, this type does. */
4074 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
4075 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
4076 || (!POINTER_TYPE_P (type
)
4078 && type_contains_placeholder_p (TREE_TYPE (type
))))
4081 /* Now do type-specific checks. Note that the last part of the check above
4082 greatly limits what we have to do below. */
4083 switch (TREE_CODE (type
))
4092 case REFERENCE_TYPE
:
4101 case FIXED_POINT_TYPE
:
4102 /* Here we just check the bounds. */
4103 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
4104 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
4107 /* We have already checked the component type above, so just check
4108 the domain type. Flexible array members have a null domain. */
4109 return TYPE_DOMAIN (type
) ?
4110 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
4114 case QUAL_UNION_TYPE
:
4118 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4119 if (TREE_CODE (field
) == FIELD_DECL
4120 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
4121 || (TREE_CODE (type
) == QUAL_UNION_TYPE
4122 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
4123 || type_contains_placeholder_p (TREE_TYPE (field
))))
4134 /* Wrapper around above function used to cache its result. */
4137 type_contains_placeholder_p (tree type
)
4141 /* If the contains_placeholder_bits field has been initialized,
4142 then we know the answer. */
4143 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
4144 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
4146 /* Indicate that we've seen this type node, and the answer is false.
4147 This is what we want to return if we run into recursion via fields. */
4148 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
4150 /* Compute the real value. */
4151 result
= type_contains_placeholder_1 (type
);
4153 /* Store the real value. */
4154 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
4159 /* Push tree EXP onto vector QUEUE if it is not already present. */
4162 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
4167 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
4168 if (simple_cst_equal (iter
, exp
) == 1)
4172 queue
->safe_push (exp
);
4175 /* Given a tree EXP, find all occurrences of references to fields
4176 in a PLACEHOLDER_EXPR and place them in vector REFS without
4177 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4178 we assume here that EXP contains only arithmetic expressions
4179 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4183 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
4185 enum tree_code code
= TREE_CODE (exp
);
4189 /* We handle TREE_LIST and COMPONENT_REF separately. */
4190 if (code
== TREE_LIST
)
4192 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
4193 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
4195 else if (code
== COMPONENT_REF
)
4197 for (inner
= TREE_OPERAND (exp
, 0);
4198 REFERENCE_CLASS_P (inner
);
4199 inner
= TREE_OPERAND (inner
, 0))
4202 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4203 push_without_duplicates (exp
, refs
);
4205 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
4208 switch (TREE_CODE_CLASS (code
))
4213 case tcc_declaration
:
4214 /* Variables allocated to static storage can stay. */
4215 if (!TREE_STATIC (exp
))
4216 push_without_duplicates (exp
, refs
);
4219 case tcc_expression
:
4220 /* This is the pattern built in ada/make_aligning_type. */
4221 if (code
== ADDR_EXPR
4222 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4224 push_without_duplicates (exp
, refs
);
4230 case tcc_exceptional
:
4233 case tcc_comparison
:
4235 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4236 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4240 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4241 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4249 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4250 return a tree with all occurrences of references to F in a
4251 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4252 CONST_DECLs. Note that we assume here that EXP contains only
4253 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4254 occurring only in their argument list. */
4257 substitute_in_expr (tree exp
, tree f
, tree r
)
4259 enum tree_code code
= TREE_CODE (exp
);
4260 tree op0
, op1
, op2
, op3
;
4263 /* We handle TREE_LIST and COMPONENT_REF separately. */
4264 if (code
== TREE_LIST
)
4266 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4267 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4268 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4271 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4273 else if (code
== COMPONENT_REF
)
4277 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4278 and it is the right field, replace it with R. */
4279 for (inner
= TREE_OPERAND (exp
, 0);
4280 REFERENCE_CLASS_P (inner
);
4281 inner
= TREE_OPERAND (inner
, 0))
4285 op1
= TREE_OPERAND (exp
, 1);
4287 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4290 /* If this expression hasn't been completed let, leave it alone. */
4291 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4294 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4295 if (op0
== TREE_OPERAND (exp
, 0))
4299 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4302 switch (TREE_CODE_CLASS (code
))
4307 case tcc_declaration
:
4313 case tcc_expression
:
4319 case tcc_exceptional
:
4322 case tcc_comparison
:
4324 switch (TREE_CODE_LENGTH (code
))
4330 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4331 if (op0
== TREE_OPERAND (exp
, 0))
4334 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4338 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4339 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4341 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4344 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4348 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4349 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4350 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4352 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4353 && op2
== TREE_OPERAND (exp
, 2))
4356 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4360 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4361 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4362 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4363 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4365 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4366 && op2
== TREE_OPERAND (exp
, 2)
4367 && op3
== TREE_OPERAND (exp
, 3))
4371 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4383 new_tree
= NULL_TREE
;
4385 /* If we are trying to replace F with a constant or with another
4386 instance of one of the arguments of the call, inline back
4387 functions which do nothing else than computing a value from
4388 the arguments they are passed. This makes it possible to
4389 fold partially or entirely the replacement expression. */
4390 if (code
== CALL_EXPR
)
4392 bool maybe_inline
= false;
4393 if (CONSTANT_CLASS_P (r
))
4394 maybe_inline
= true;
4396 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4397 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4399 maybe_inline
= true;
4404 tree t
= maybe_inline_call_in_expr (exp
);
4406 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4410 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4412 tree op
= TREE_OPERAND (exp
, i
);
4413 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4417 new_tree
= copy_node (exp
);
4418 TREE_OPERAND (new_tree
, i
) = new_op
;
4424 new_tree
= fold (new_tree
);
4425 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4426 process_call_operands (new_tree
);
4437 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4439 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4440 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4445 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4446 for it within OBJ, a tree that is an object or a chain of references. */
4449 substitute_placeholder_in_expr (tree exp
, tree obj
)
4451 enum tree_code code
= TREE_CODE (exp
);
4452 tree op0
, op1
, op2
, op3
;
4455 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4456 in the chain of OBJ. */
4457 if (code
== PLACEHOLDER_EXPR
)
4459 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4462 for (elt
= obj
; elt
!= 0;
4463 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4464 || TREE_CODE (elt
) == COND_EXPR
)
4465 ? TREE_OPERAND (elt
, 1)
4466 : (REFERENCE_CLASS_P (elt
)
4467 || UNARY_CLASS_P (elt
)
4468 || BINARY_CLASS_P (elt
)
4469 || VL_EXP_CLASS_P (elt
)
4470 || EXPRESSION_CLASS_P (elt
))
4471 ? TREE_OPERAND (elt
, 0) : 0))
4472 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4475 for (elt
= obj
; elt
!= 0;
4476 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4477 || TREE_CODE (elt
) == COND_EXPR
)
4478 ? TREE_OPERAND (elt
, 1)
4479 : (REFERENCE_CLASS_P (elt
)
4480 || UNARY_CLASS_P (elt
)
4481 || BINARY_CLASS_P (elt
)
4482 || VL_EXP_CLASS_P (elt
)
4483 || EXPRESSION_CLASS_P (elt
))
4484 ? TREE_OPERAND (elt
, 0) : 0))
4485 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4486 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4488 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4490 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4491 survives until RTL generation, there will be an error. */
4495 /* TREE_LIST is special because we need to look at TREE_VALUE
4496 and TREE_CHAIN, not TREE_OPERANDS. */
4497 else if (code
== TREE_LIST
)
4499 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4500 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4501 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4504 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4507 switch (TREE_CODE_CLASS (code
))
4510 case tcc_declaration
:
4513 case tcc_exceptional
:
4516 case tcc_comparison
:
4517 case tcc_expression
:
4520 switch (TREE_CODE_LENGTH (code
))
4526 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4527 if (op0
== TREE_OPERAND (exp
, 0))
4530 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4534 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4535 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4537 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4540 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4544 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4545 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4546 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4548 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4549 && op2
== TREE_OPERAND (exp
, 2))
4552 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4556 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4557 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4558 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4559 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4561 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4562 && op2
== TREE_OPERAND (exp
, 2)
4563 && op3
== TREE_OPERAND (exp
, 3))
4567 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4579 new_tree
= NULL_TREE
;
4581 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4583 tree op
= TREE_OPERAND (exp
, i
);
4584 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4588 new_tree
= copy_node (exp
);
4589 TREE_OPERAND (new_tree
, i
) = new_op
;
4595 new_tree
= fold (new_tree
);
4596 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4597 process_call_operands (new_tree
);
4608 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4610 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4611 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4617 /* Subroutine of stabilize_reference; this is called for subtrees of
4618 references. Any expression with side-effects must be put in a SAVE_EXPR
4619 to ensure that it is only evaluated once.
4621 We don't put SAVE_EXPR nodes around everything, because assigning very
4622 simple expressions to temporaries causes us to miss good opportunities
4623 for optimizations. Among other things, the opportunity to fold in the
4624 addition of a constant into an addressing mode often gets lost, e.g.
4625 "y[i+1] += x;". In general, we take the approach that we should not make
4626 an assignment unless we are forced into it - i.e., that any non-side effect
4627 operator should be allowed, and that cse should take care of coalescing
4628 multiple utterances of the same expression should that prove fruitful. */
4631 stabilize_reference_1 (tree e
)
4634 enum tree_code code
= TREE_CODE (e
);
4636 /* We cannot ignore const expressions because it might be a reference
4637 to a const array but whose index contains side-effects. But we can
4638 ignore things that are actual constant or that already have been
4639 handled by this function. */
4641 if (tree_invariant_p (e
))
4644 switch (TREE_CODE_CLASS (code
))
4646 case tcc_exceptional
:
4647 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4648 have side-effects. */
4649 if (code
== STATEMENT_LIST
)
4650 return save_expr (e
);
4653 case tcc_declaration
:
4654 case tcc_comparison
:
4656 case tcc_expression
:
4659 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4660 so that it will only be evaluated once. */
4661 /* The reference (r) and comparison (<) classes could be handled as
4662 below, but it is generally faster to only evaluate them once. */
4663 if (TREE_SIDE_EFFECTS (e
))
4664 return save_expr (e
);
4668 /* Constants need no processing. In fact, we should never reach
4673 /* Division is slow and tends to be compiled with jumps,
4674 especially the division by powers of 2 that is often
4675 found inside of an array reference. So do it just once. */
4676 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4677 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4678 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4679 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4680 return save_expr (e
);
4681 /* Recursively stabilize each operand. */
4682 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4683 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4687 /* Recursively stabilize each operand. */
4688 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4695 TREE_TYPE (result
) = TREE_TYPE (e
);
4696 TREE_READONLY (result
) = TREE_READONLY (e
);
4697 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4698 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4703 /* Stabilize a reference so that we can use it any number of times
4704 without causing its operands to be evaluated more than once.
4705 Returns the stabilized reference. This works by means of save_expr,
4706 so see the caveats in the comments about save_expr.
4708 Also allows conversion expressions whose operands are references.
4709 Any other kind of expression is returned unchanged. */
4712 stabilize_reference (tree ref
)
4715 enum tree_code code
= TREE_CODE (ref
);
4722 /* No action is needed in this case. */
4727 case FIX_TRUNC_EXPR
:
4728 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4732 result
= build_nt (INDIRECT_REF
,
4733 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4737 result
= build_nt (COMPONENT_REF
,
4738 stabilize_reference (TREE_OPERAND (ref
, 0)),
4739 TREE_OPERAND (ref
, 1), NULL_TREE
);
4743 result
= build_nt (BIT_FIELD_REF
,
4744 stabilize_reference (TREE_OPERAND (ref
, 0)),
4745 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4746 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
4750 result
= build_nt (ARRAY_REF
,
4751 stabilize_reference (TREE_OPERAND (ref
, 0)),
4752 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4753 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4756 case ARRAY_RANGE_REF
:
4757 result
= build_nt (ARRAY_RANGE_REF
,
4758 stabilize_reference (TREE_OPERAND (ref
, 0)),
4759 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4760 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4764 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4765 it wouldn't be ignored. This matters when dealing with
4767 return stabilize_reference_1 (ref
);
4769 /* If arg isn't a kind of lvalue we recognize, make no change.
4770 Caller should recognize the error for an invalid lvalue. */
4775 return error_mark_node
;
4778 TREE_TYPE (result
) = TREE_TYPE (ref
);
4779 TREE_READONLY (result
) = TREE_READONLY (ref
);
4780 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4781 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4786 /* Low-level constructors for expressions. */
4788 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4789 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4792 recompute_tree_invariant_for_addr_expr (tree t
)
4795 bool tc
= true, se
= false;
4797 gcc_assert (TREE_CODE (t
) == ADDR_EXPR
);
4799 /* We started out assuming this address is both invariant and constant, but
4800 does not have side effects. Now go down any handled components and see if
4801 any of them involve offsets that are either non-constant or non-invariant.
4802 Also check for side-effects.
4804 ??? Note that this code makes no attempt to deal with the case where
4805 taking the address of something causes a copy due to misalignment. */
4807 #define UPDATE_FLAGS(NODE) \
4808 do { tree _node = (NODE); \
4809 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4810 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4812 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4813 node
= TREE_OPERAND (node
, 0))
4815 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4816 array reference (probably made temporarily by the G++ front end),
4817 so ignore all the operands. */
4818 if ((TREE_CODE (node
) == ARRAY_REF
4819 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4820 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4822 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4823 if (TREE_OPERAND (node
, 2))
4824 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4825 if (TREE_OPERAND (node
, 3))
4826 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4828 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4829 FIELD_DECL, apparently. The G++ front end can put something else
4830 there, at least temporarily. */
4831 else if (TREE_CODE (node
) == COMPONENT_REF
4832 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4834 if (TREE_OPERAND (node
, 2))
4835 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4839 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4841 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4842 the address, since &(*a)->b is a form of addition. If it's a constant, the
4843 address is constant too. If it's a decl, its address is constant if the
4844 decl is static. Everything else is not constant and, furthermore,
4845 taking the address of a volatile variable is not volatile. */
4846 if (TREE_CODE (node
) == INDIRECT_REF
4847 || TREE_CODE (node
) == MEM_REF
)
4848 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4849 else if (CONSTANT_CLASS_P (node
))
4851 else if (DECL_P (node
))
4852 tc
&= (staticp (node
) != NULL_TREE
);
4856 se
|= TREE_SIDE_EFFECTS (node
);
4860 TREE_CONSTANT (t
) = tc
;
4861 TREE_SIDE_EFFECTS (t
) = se
;
4865 /* Build an expression of code CODE, data type TYPE, and operands as
4866 specified. Expressions and reference nodes can be created this way.
4867 Constants, decls, types and misc nodes cannot be.
4869 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4870 enough for all extant tree codes. */
4873 build0 (enum tree_code code
, tree tt MEM_STAT_DECL
)
4877 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4879 t
= make_node (code PASS_MEM_STAT
);
4886 build1 (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4888 int length
= sizeof (struct tree_exp
);
4891 record_node_allocation_statistics (code
, length
);
4893 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4895 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4897 memset (t
, 0, sizeof (struct tree_common
));
4899 TREE_SET_CODE (t
, code
);
4901 TREE_TYPE (t
) = type
;
4902 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4903 TREE_OPERAND (t
, 0) = node
;
4904 if (node
&& !TYPE_P (node
))
4906 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4907 TREE_READONLY (t
) = TREE_READONLY (node
);
4910 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4912 if (code
!= DEBUG_BEGIN_STMT
)
4913 TREE_SIDE_EFFECTS (t
) = 1;
4918 /* All of these have side-effects, no matter what their
4920 TREE_SIDE_EFFECTS (t
) = 1;
4921 TREE_READONLY (t
) = 0;
4925 /* Whether a dereference is readonly has nothing to do with whether
4926 its operand is readonly. */
4927 TREE_READONLY (t
) = 0;
4932 recompute_tree_invariant_for_addr_expr (t
);
4936 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4937 && node
&& !TYPE_P (node
)
4938 && TREE_CONSTANT (node
))
4939 TREE_CONSTANT (t
) = 1;
4940 if (TREE_CODE_CLASS (code
) == tcc_reference
4941 && node
&& TREE_THIS_VOLATILE (node
))
4942 TREE_THIS_VOLATILE (t
) = 1;
/* Record operand N of tree T and fold its flags into the accumulators
   side_effects, read_only and constant declared by the enclosing buildN
   function.  Type operands are skipped.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N && !TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4965 build2 (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4967 bool constant
, read_only
, side_effects
, div_by_zero
;
4970 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4972 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4973 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4974 /* When sizetype precision doesn't match that of pointers
4975 we need to be able to build explicit extensions or truncations
4976 of the offset argument. */
4977 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4978 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4979 && TREE_CODE (arg1
) == INTEGER_CST
);
4981 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4982 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4983 && ptrofftype_p (TREE_TYPE (arg1
)));
4985 t
= make_node (code PASS_MEM_STAT
);
4988 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4989 result based on those same flags for the arguments. But if the
4990 arguments aren't really even `tree' expressions, we shouldn't be trying
4993 /* Expressions without side effects may be constant if their
4994 arguments are as well. */
4995 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4996 || TREE_CODE_CLASS (code
) == tcc_binary
);
4998 side_effects
= TREE_SIDE_EFFECTS (t
);
5002 case TRUNC_DIV_EXPR
:
5004 case FLOOR_DIV_EXPR
:
5005 case ROUND_DIV_EXPR
:
5006 case EXACT_DIV_EXPR
:
5008 case FLOOR_MOD_EXPR
:
5009 case ROUND_MOD_EXPR
:
5010 case TRUNC_MOD_EXPR
:
5011 div_by_zero
= integer_zerop (arg1
);
5014 div_by_zero
= false;
5020 TREE_SIDE_EFFECTS (t
) = side_effects
;
5021 if (code
== MEM_REF
)
5023 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
5025 tree o
= TREE_OPERAND (arg0
, 0);
5026 TREE_READONLY (t
) = TREE_READONLY (o
);
5027 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
5032 TREE_READONLY (t
) = read_only
;
5033 /* Don't mark X / 0 as constant. */
5034 TREE_CONSTANT (t
) = constant
&& !div_by_zero
;
5035 TREE_THIS_VOLATILE (t
)
5036 = (TREE_CODE_CLASS (code
) == tcc_reference
5037 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5045 build3 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5046 tree arg2 MEM_STAT_DECL
)
5048 bool constant
, read_only
, side_effects
;
5051 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
5052 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5054 t
= make_node (code PASS_MEM_STAT
);
5059 /* As a special exception, if COND_EXPR has NULL branches, we
5060 assume that it is a gimple statement and always consider
5061 it to have side effects. */
5062 if (code
== COND_EXPR
5063 && tt
== void_type_node
5064 && arg1
== NULL_TREE
5065 && arg2
== NULL_TREE
)
5066 side_effects
= true;
5068 side_effects
= TREE_SIDE_EFFECTS (t
);
5074 if (code
== COND_EXPR
)
5075 TREE_READONLY (t
) = read_only
;
5077 TREE_SIDE_EFFECTS (t
) = side_effects
;
5078 TREE_THIS_VOLATILE (t
)
5079 = (TREE_CODE_CLASS (code
) == tcc_reference
5080 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5086 build4 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5087 tree arg2
, tree arg3 MEM_STAT_DECL
)
5089 bool constant
, read_only
, side_effects
;
5092 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
5094 t
= make_node (code PASS_MEM_STAT
);
5097 side_effects
= TREE_SIDE_EFFECTS (t
);
5104 TREE_SIDE_EFFECTS (t
) = side_effects
;
5105 TREE_THIS_VOLATILE (t
)
5106 = (TREE_CODE_CLASS (code
) == tcc_reference
5107 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5113 build5 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5114 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
5116 bool constant
, read_only
, side_effects
;
5119 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
5121 t
= make_node (code PASS_MEM_STAT
);
5124 side_effects
= TREE_SIDE_EFFECTS (t
);
5132 TREE_SIDE_EFFECTS (t
) = side_effects
;
5133 if (code
== TARGET_MEM_REF
)
5135 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
5137 tree o
= TREE_OPERAND (arg0
, 0);
5138 TREE_READONLY (t
) = TREE_READONLY (o
);
5139 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
5143 TREE_THIS_VOLATILE (t
)
5144 = (TREE_CODE_CLASS (code
) == tcc_reference
5145 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5150 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
5151 on the pointer PTR. */
5154 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
5156 poly_int64 offset
= 0;
5157 tree ptype
= TREE_TYPE (ptr
);
5159 /* For convenience allow addresses that collapse to a simple base
5161 if (TREE_CODE (ptr
) == ADDR_EXPR
5162 && (handled_component_p (TREE_OPERAND (ptr
, 0))
5163 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
5165 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
5167 if (TREE_CODE (ptr
) == MEM_REF
)
5169 offset
+= mem_ref_offset (ptr
).force_shwi ();
5170 ptr
= TREE_OPERAND (ptr
, 0);
5173 ptr
= build_fold_addr_expr (ptr
);
5174 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
5176 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
5177 ptr
, build_int_cst (ptype
, offset
));
5178 SET_EXPR_LOCATION (tem
, loc
);
5182 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5185 mem_ref_offset (const_tree t
)
5187 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t
, 1)),
5191 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5192 offsetted by OFFSET units. */
5195 build_invariant_address (tree type
, tree base
, poly_int64 offset
)
5197 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
5198 build_fold_addr_expr (base
),
5199 build_int_cst (ptr_type_node
, offset
));
5200 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
5201 recompute_tree_invariant_for_addr_expr (addr
);
5205 /* Similar except don't specify the TREE_TYPE
5206 and leave the TREE_SIDE_EFFECTS as 0.
5207 It is permissible for arguments to be null,
5208 or even garbage if their values do not matter. */
5211 build_nt (enum tree_code code
, ...)
5218 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5222 t
= make_node (code
);
5223 length
= TREE_CODE_LENGTH (code
);
5225 for (i
= 0; i
< length
; i
++)
5226 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
5232 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5236 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
5241 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
5242 CALL_EXPR_FN (ret
) = fn
;
5243 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
5244 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
5245 CALL_EXPR_ARG (ret
, ix
) = t
;
5249 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5251 We do NOT enter this node in any sort of symbol table.
5253 LOC is the location of the decl.
5255 layout_decl is used to set up the decl's storage layout.
5256 Other slots are initialized to 0 or null pointers. */
5259 build_decl (location_t loc
, enum tree_code code
, tree name
,
5260 tree type MEM_STAT_DECL
)
5264 t
= make_node (code PASS_MEM_STAT
);
5265 DECL_SOURCE_LOCATION (t
) = loc
;
5267 /* if (type == error_mark_node)
5268 type = integer_type_node; */
5269 /* That is not done, deliberately, so that having error_mark_node
5270 as the type can suppress useless errors in the use of this variable. */
5272 DECL_NAME (t
) = name
;
5273 TREE_TYPE (t
) = type
;
5275 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
5281 /* Builds and returns function declaration with NAME and TYPE. */
5284 build_fn_decl (const char *name
, tree type
)
5286 tree id
= get_identifier (name
);
5287 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
5289 DECL_EXTERNAL (decl
) = 1;
5290 TREE_PUBLIC (decl
) = 1;
5291 DECL_ARTIFICIAL (decl
) = 1;
5292 TREE_NOTHROW (decl
) = 1;
5297 vec
<tree
, va_gc
> *all_translation_units
;
5299 /* Builds a new translation-unit decl with name NAME, queues it in the
5300 global list of translation-unit decls and returns it. */
5303 build_translation_unit_decl (tree name
)
5305 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
5307 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
5308 vec_safe_push (all_translation_units
, tu
);
5313 /* BLOCK nodes are used to represent the structure of binding contours
5314 and declarations, once those contours have been exited and their contents
5315 compiled. This information is used for outputting debugging info. */
5318 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
5320 tree block
= make_node (BLOCK
);
5322 BLOCK_VARS (block
) = vars
;
5323 BLOCK_SUBBLOCKS (block
) = subblocks
;
5324 BLOCK_SUPERCONTEXT (block
) = supercontext
;
5325 BLOCK_CHAIN (block
) = chain
;
5330 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5332 LOC is the location to use in tree T. */
5335 protected_set_expr_location (tree t
, location_t loc
)
5337 if (CAN_HAVE_LOCATION_P (t
))
5338 SET_EXPR_LOCATION (t
, loc
);
5339 else if (t
&& TREE_CODE (t
) == STATEMENT_LIST
)
5341 t
= expr_single (t
);
5342 if (t
&& CAN_HAVE_LOCATION_P (t
))
5343 SET_EXPR_LOCATION (t
, loc
);
5347 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5348 UNKNOWN_LOCATION. */
5351 protected_set_expr_location_if_unset (tree t
, location_t loc
)
5353 t
= expr_single (t
);
5354 if (t
&& !EXPR_HAS_LOCATION (t
))
5355 protected_set_expr_location (t
, loc
);
5358 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5359 of the various TYPE_QUAL values. */
5362 set_type_quals (tree type
, int type_quals
)
5364 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
5365 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
5366 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
5367 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
5368 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
5371 /* Returns true iff CAND and BASE have equivalent language-specific
5375 check_lang_type (const_tree cand
, const_tree base
)
5377 if (lang_hooks
.types
.type_hash_eq
== NULL
)
5379 /* type_hash_eq currently only applies to these types. */
5380 if (TREE_CODE (cand
) != FUNCTION_TYPE
5381 && TREE_CODE (cand
) != METHOD_TYPE
)
5383 return lang_hooks
.types
.type_hash_eq (cand
, base
);
5386 /* This function checks to see if TYPE matches the size one of the built-in
5387 atomic types, and returns that core atomic type. */
5390 find_atomic_core_type (const_tree type
)
5392 tree base_atomic_type
;
5394 /* Only handle complete types. */
5395 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
5398 switch (tree_to_uhwi (TYPE_SIZE (type
)))
5401 base_atomic_type
= atomicQI_type_node
;
5405 base_atomic_type
= atomicHI_type_node
;
5409 base_atomic_type
= atomicSI_type_node
;
5413 base_atomic_type
= atomicDI_type_node
;
5417 base_atomic_type
= atomicTI_type_node
;
5421 base_atomic_type
= NULL_TREE
;
5424 return base_atomic_type
;
5427 /* Returns true iff unqualified CAND and BASE are equivalent. */
5430 check_base_type (const_tree cand
, const_tree base
)
5432 if (TYPE_NAME (cand
) != TYPE_NAME (base
)
5433 /* Apparently this is needed for Objective-C. */
5434 || TYPE_CONTEXT (cand
) != TYPE_CONTEXT (base
)
5435 || !attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5436 TYPE_ATTRIBUTES (base
)))
5438 /* Check alignment. */
5439 if (TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
5440 && TYPE_USER_ALIGN (cand
) == TYPE_USER_ALIGN (base
))
5442 /* Atomic types increase minimal alignment. We must to do so as well
5443 or we get duplicated canonical types. See PR88686. */
5444 if ((TYPE_QUALS (cand
) & TYPE_QUAL_ATOMIC
))
5446 /* See if this object can map to a basic atomic type. */
5447 tree atomic_type
= find_atomic_core_type (cand
);
5448 if (atomic_type
&& TYPE_ALIGN (atomic_type
) == TYPE_ALIGN (cand
))
5454 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5457 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
5459 return (TYPE_QUALS (cand
) == type_quals
5460 && check_base_type (cand
, base
)
5461 && check_lang_type (cand
, base
));
5464 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5467 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
5469 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
5470 && TYPE_NAME (cand
) == TYPE_NAME (base
)
5471 /* Apparently this is needed for Objective-C. */
5472 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
5473 /* Check alignment. */
5474 && TYPE_ALIGN (cand
) == align
5475 /* Check this is a user-aligned type as build_aligned_type
5477 && TYPE_USER_ALIGN (cand
)
5478 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5479 TYPE_ATTRIBUTES (base
))
5480 && check_lang_type (cand
, base
));
5483 /* Return a version of the TYPE, qualified as indicated by the
5484 TYPE_QUALS, if one exists. If no qualified version exists yet,
5485 return NULL_TREE. */
5488 get_qualified_type (tree type
, int type_quals
)
5490 if (TYPE_QUALS (type
) == type_quals
)
5493 tree mv
= TYPE_MAIN_VARIANT (type
);
5494 if (check_qualified_type (mv
, type
, type_quals
))
5497 /* Search the chain of variants to see if there is already one there just
5498 like the one we need to have. If so, use that existing one. We must
5499 preserve the TYPE_NAME, since there is code that depends on this. */
5500 for (tree
*tp
= &TYPE_NEXT_VARIANT (mv
); *tp
; tp
= &TYPE_NEXT_VARIANT (*tp
))
5501 if (check_qualified_type (*tp
, type
, type_quals
))
5503 /* Put the found variant at the head of the variant list so
5504 frequently searched variants get found faster. The C++ FE
5505 benefits greatly from this. */
5507 *tp
= TYPE_NEXT_VARIANT (t
);
5508 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (mv
);
5509 TYPE_NEXT_VARIANT (mv
) = t
;
5516 /* Like get_qualified_type, but creates the type if it does not
5517 exist. This function never returns NULL_TREE. */
5520 build_qualified_type (tree type
, int type_quals MEM_STAT_DECL
)
5524 /* See if we already have the appropriate qualified variant. */
5525 t
= get_qualified_type (type
, type_quals
);
5527 /* If not, build it. */
5530 t
= build_variant_type_copy (type PASS_MEM_STAT
);
5531 set_type_quals (t
, type_quals
);
5533 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
5535 /* See if this object can map to a basic atomic type. */
5536 tree atomic_type
= find_atomic_core_type (type
);
5539 /* Ensure the alignment of this type is compatible with
5540 the required alignment of the atomic type. */
5541 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
5542 SET_TYPE_ALIGN (t
, TYPE_ALIGN (atomic_type
));
5546 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5547 /* Propagate structural equality. */
5548 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5549 else if (TYPE_CANONICAL (type
) != type
)
5550 /* Build the underlying canonical type, since it is different
5553 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
5554 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
5557 /* T is its own canonical type. */
5558 TYPE_CANONICAL (t
) = t
;
5565 /* Create a variant of type T with alignment ALIGN. */
5568 build_aligned_type (tree type
, unsigned int align
)
5572 if (TYPE_PACKED (type
)
5573 || TYPE_ALIGN (type
) == align
)
5576 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
5577 if (check_aligned_type (t
, type
, align
))
5580 t
= build_variant_type_copy (type
);
5581 SET_TYPE_ALIGN (t
, align
);
5582 TYPE_USER_ALIGN (t
) = 1;
5587 /* Create a new distinct copy of TYPE. The new type is made its own
5588 MAIN_VARIANT. If TYPE requires structural equality checks, the
5589 resulting type requires structural equality checks; otherwise, its
5590 TYPE_CANONICAL points to itself. */
5593 build_distinct_type_copy (tree type MEM_STAT_DECL
)
5595 tree t
= copy_node (type PASS_MEM_STAT
);
5597 TYPE_POINTER_TO (t
) = 0;
5598 TYPE_REFERENCE_TO (t
) = 0;
5600 /* Set the canonical type either to a new equivalence class, or
5601 propagate the need for structural equality checks. */
5602 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5603 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5605 TYPE_CANONICAL (t
) = t
;
5607 /* Make it its own variant. */
5608 TYPE_MAIN_VARIANT (t
) = t
;
5609 TYPE_NEXT_VARIANT (t
) = 0;
5611 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5612 whose TREE_TYPE is not t. This can also happen in the Ada
5613 frontend when using subtypes. */
5618 /* Create a new variant of TYPE, equivalent but distinct. This is so
5619 the caller can modify it. TYPE_CANONICAL for the return type will
5620 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5621 are considered equal by the language itself (or that both types
5622 require structural equality checks). */
5625 build_variant_type_copy (tree type MEM_STAT_DECL
)
5627 tree t
, m
= TYPE_MAIN_VARIANT (type
);
5629 t
= build_distinct_type_copy (type PASS_MEM_STAT
);
5631 /* Since we're building a variant, assume that it is a non-semantic
5632 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5633 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
5634 /* Type variants have no alias set defined. */
5635 TYPE_ALIAS_SET (t
) = -1;
5637 /* Add the new type to the chain of variants of TYPE. */
5638 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
5639 TYPE_NEXT_VARIANT (m
) = t
;
5640 TYPE_MAIN_VARIANT (t
) = m
;
5645 /* Return true if the from tree in both tree maps are equal. */
5648 tree_map_base_eq (const void *va
, const void *vb
)
5650 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
5651 *const b
= (const struct tree_map_base
*) vb
;
5652 return (a
->from
== b
->from
);
5655 /* Hash a from tree in a tree_base_map. */
5658 tree_map_base_hash (const void *item
)
5660 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
5663 /* Return true if this tree map structure is marked for garbage collection
5664 purposes. We simply return true if the from tree is marked, so that this
5665 structure goes away when the from tree goes away. */
5668 tree_map_base_marked_p (const void *p
)
5670 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
5673 /* Hash a from tree in a tree_map. */
5676 tree_map_hash (const void *item
)
5678 return (((const struct tree_map
*) item
)->hash
);
5681 /* Hash a from tree in a tree_decl_map. */
5684 tree_decl_map_hash (const void *item
)
5686 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
5689 /* Return the initialization priority for DECL. */
5692 decl_init_priority_lookup (tree decl
)
5694 symtab_node
*snode
= symtab_node::get (decl
);
5697 return DEFAULT_INIT_PRIORITY
;
5699 snode
->get_init_priority ();
5702 /* Return the finalization priority for DECL. */
5705 decl_fini_priority_lookup (tree decl
)
5707 cgraph_node
*node
= cgraph_node::get (decl
);
5710 return DEFAULT_INIT_PRIORITY
;
5712 node
->get_fini_priority ();
5715 /* Set the initialization priority for DECL to PRIORITY. */
5718 decl_init_priority_insert (tree decl
, priority_type priority
)
5720 struct symtab_node
*snode
;
5722 if (priority
== DEFAULT_INIT_PRIORITY
)
5724 snode
= symtab_node::get (decl
);
5728 else if (VAR_P (decl
))
5729 snode
= varpool_node::get_create (decl
);
5731 snode
= cgraph_node::get_create (decl
);
5732 snode
->set_init_priority (priority
);
5735 /* Set the finalization priority for DECL to PRIORITY. */
5738 decl_fini_priority_insert (tree decl
, priority_type priority
)
5740 struct cgraph_node
*node
;
5742 if (priority
== DEFAULT_INIT_PRIORITY
)
5744 node
= cgraph_node::get (decl
);
5749 node
= cgraph_node::get_create (decl
);
5750 node
->set_fini_priority (priority
);
5753 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5756 print_debug_expr_statistics (void)
5758 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5759 (long) debug_expr_for_decl
->size (),
5760 (long) debug_expr_for_decl
->elements (),
5761 debug_expr_for_decl
->collisions ());
5764 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5767 print_value_expr_statistics (void)
5769 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5770 (long) value_expr_for_decl
->size (),
5771 (long) value_expr_for_decl
->elements (),
5772 value_expr_for_decl
->collisions ());
5775 /* Lookup a debug expression for FROM, and return it if we find one. */
5778 decl_debug_expr_lookup (tree from
)
5780 struct tree_decl_map
*h
, in
;
5781 in
.base
.from
= from
;
5783 h
= debug_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5789 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5792 decl_debug_expr_insert (tree from
, tree to
)
5794 struct tree_decl_map
*h
;
5796 h
= ggc_alloc
<tree_decl_map
> ();
5797 h
->base
.from
= from
;
5799 *debug_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5802 /* Lookup a value expression for FROM, and return it if we find one. */
5805 decl_value_expr_lookup (tree from
)
5807 struct tree_decl_map
*h
, in
;
5808 in
.base
.from
= from
;
5810 h
= value_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5816 /* Insert a mapping FROM->TO in the value expression hashtable. */
5819 decl_value_expr_insert (tree from
, tree to
)
5821 struct tree_decl_map
*h
;
5823 h
= ggc_alloc
<tree_decl_map
> ();
5824 h
->base
.from
= from
;
5826 *value_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5829 /* Lookup a vector of debug arguments for FROM, and return it if we
5833 decl_debug_args_lookup (tree from
)
5835 struct tree_vec_map
*h
, in
;
5837 if (!DECL_HAS_DEBUG_ARGS_P (from
))
5839 gcc_checking_assert (debug_args_for_decl
!= NULL
);
5840 in
.base
.from
= from
;
5841 h
= debug_args_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5847 /* Insert a mapping FROM->empty vector of debug arguments in the value
5848 expression hashtable. */
5851 decl_debug_args_insert (tree from
)
5853 struct tree_vec_map
*h
;
5856 if (DECL_HAS_DEBUG_ARGS_P (from
))
5857 return decl_debug_args_lookup (from
);
5858 if (debug_args_for_decl
== NULL
)
5859 debug_args_for_decl
= hash_table
<tree_vec_map_cache_hasher
>::create_ggc (64);
5860 h
= ggc_alloc
<tree_vec_map
> ();
5861 h
->base
.from
= from
;
5863 loc
= debug_args_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
);
5865 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
5869 /* Hashing of types so that we don't make duplicates.
5870 The entry point is `type_hash_canon'. */
5872 /* Generate the default hash code for TYPE. This is designed for
5873 speed, rather than maximum entropy. */
5876 type_hash_canon_hash (tree type
)
5878 inchash::hash hstate
;
5880 hstate
.add_int (TREE_CODE (type
));
5882 if (TREE_TYPE (type
))
5883 hstate
.add_object (TYPE_HASH (TREE_TYPE (type
)));
5885 for (tree t
= TYPE_ATTRIBUTES (type
); t
; t
= TREE_CHAIN (t
))
5886 /* Just the identifier is adequate to distinguish. */
5887 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t
)));
5889 switch (TREE_CODE (type
))
5892 hstate
.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type
)));
5895 for (tree t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
5896 if (TREE_VALUE (t
) != error_mark_node
)
5897 hstate
.add_object (TYPE_HASH (TREE_VALUE (t
)));
5901 hstate
.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type
)));
5906 if (TYPE_DOMAIN (type
))
5907 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (type
)));
5908 if (!AGGREGATE_TYPE_P (TREE_TYPE (type
)))
5910 unsigned typeless
= TYPE_TYPELESS_STORAGE (type
);
5911 hstate
.add_object (typeless
);
5918 tree t
= TYPE_MAX_VALUE (type
);
5920 t
= TYPE_MIN_VALUE (type
);
5921 for (int i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
5922 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
5927 case FIXED_POINT_TYPE
:
5929 unsigned prec
= TYPE_PRECISION (type
);
5930 hstate
.add_object (prec
);
5935 hstate
.add_poly_int (TYPE_VECTOR_SUBPARTS (type
));
5942 return hstate
.end ();
5945 /* These are the Hashtable callback functions. */
5947 /* Returns true iff the types are equivalent. */
5950 type_cache_hasher::equal (type_hash
*a
, type_hash
*b
)
5952 /* First test the things that are the same for all types. */
5953 if (a
->hash
!= b
->hash
5954 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
5955 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
5956 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
5957 TYPE_ATTRIBUTES (b
->type
))
5958 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
5959 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
5962 /* Be careful about comparing arrays before and after the element type
5963 has been completed; don't compare TYPE_ALIGN unless both types are
5965 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
5966 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
5967 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
5970 switch (TREE_CODE (a
->type
))
5976 case REFERENCE_TYPE
:
5981 return known_eq (TYPE_VECTOR_SUBPARTS (a
->type
),
5982 TYPE_VECTOR_SUBPARTS (b
->type
));
5985 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
5986 && !(TYPE_VALUES (a
->type
)
5987 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
5988 && TYPE_VALUES (b
->type
)
5989 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
5990 && type_list_equal (TYPE_VALUES (a
->type
),
5991 TYPE_VALUES (b
->type
))))
5999 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6001 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6002 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6003 TYPE_MAX_VALUE (b
->type
)))
6004 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6005 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6006 TYPE_MIN_VALUE (b
->type
))));
6008 case FIXED_POINT_TYPE
:
6009 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6012 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6015 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6016 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6017 || (TYPE_ARG_TYPES (a
->type
)
6018 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6019 && TYPE_ARG_TYPES (b
->type
)
6020 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6021 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6022 TYPE_ARG_TYPES (b
->type
)))))
6026 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6027 where the flag should be inherited from the element type
6028 and can change after ARRAY_TYPEs are created; on non-aggregates
6029 compare it and hash it, scalars will never have that flag set
6030 and we need to differentiate between arrays created by different
6031 front-ends or middle-end created arrays. */
6032 return (TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
)
6033 && (AGGREGATE_TYPE_P (TREE_TYPE (a
->type
))
6034 || (TYPE_TYPELESS_STORAGE (a
->type
)
6035 == TYPE_TYPELESS_STORAGE (b
->type
))));
6039 case QUAL_UNION_TYPE
:
6040 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6041 || (TYPE_FIELDS (a
->type
)
6042 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6043 && TYPE_FIELDS (b
->type
)
6044 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6045 && type_list_equal (TYPE_FIELDS (a
->type
),
6046 TYPE_FIELDS (b
->type
))));
6049 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6050 || (TYPE_ARG_TYPES (a
->type
)
6051 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6052 && TYPE_ARG_TYPES (b
->type
)
6053 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6054 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6055 TYPE_ARG_TYPES (b
->type
))))
6063 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6064 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6069 /* Given TYPE, and HASHCODE its hash code, return the canonical
6070 object for an identical type if one already exists.
6071 Otherwise, return TYPE, and record it as the canonical object.
6073 To use this function, first create a type of the sort you want.
6074 Then compute its hash code from the fields of the type that
6075 make it different from other similar types.
6076 Then call this function and use the value. */
6079 type_hash_canon (unsigned int hashcode
, tree type
)
6084 /* The hash table only contains main variants, so ensure that's what we're
6086 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6088 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6089 must call that routine before comparing TYPE_ALIGNs. */
6095 loc
= type_hash_table
->find_slot_with_hash (&in
, hashcode
, INSERT
);
6098 tree t1
= ((type_hash
*) *loc
)->type
;
6099 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
6101 if (TYPE_UID (type
) + 1 == next_type_uid
)
6103 /* Free also min/max values and the cache for integer
6104 types. This can't be done in free_node, as LTO frees
6105 those on its own. */
6106 if (TREE_CODE (type
) == INTEGER_TYPE
)
6108 if (TYPE_MIN_VALUE (type
)
6109 && TREE_TYPE (TYPE_MIN_VALUE (type
)) == type
)
6111 /* Zero is always in TYPE_CACHED_VALUES. */
6112 if (! TYPE_UNSIGNED (type
))
6113 int_cst_hash_table
->remove_elt (TYPE_MIN_VALUE (type
));
6114 ggc_free (TYPE_MIN_VALUE (type
));
6116 if (TYPE_MAX_VALUE (type
)
6117 && TREE_TYPE (TYPE_MAX_VALUE (type
)) == type
)
6119 int_cst_hash_table
->remove_elt (TYPE_MAX_VALUE (type
));
6120 ggc_free (TYPE_MAX_VALUE (type
));
6122 if (TYPE_CACHED_VALUES_P (type
))
6123 ggc_free (TYPE_CACHED_VALUES (type
));
6130 struct type_hash
*h
;
6132 h
= ggc_alloc
<type_hash
> ();
6142 print_type_hash_statistics (void)
6144 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6145 (long) type_hash_table
->size (),
6146 (long) type_hash_table
->elements (),
6147 type_hash_table
->collisions ());
6150 /* Given two lists of types
6151 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6152 return 1 if the lists contain the same types in the same order.
6153 Also, the TREE_PURPOSEs must match. */
6156 type_list_equal (const_tree l1
, const_tree l2
)
6160 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6161 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6162 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6163 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6164 && (TREE_TYPE (TREE_PURPOSE (t1
))
6165 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6171 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6172 given by TYPE. If the argument list accepts variable arguments,
6173 then this function counts only the ordinary arguments. */
6176 type_num_arguments (const_tree fntype
)
6180 for (tree t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
6181 /* If the function does not take a variable number of arguments,
6182 the last element in the list will have type `void'. */
6183 if (VOID_TYPE_P (TREE_VALUE (t
)))
6191 /* Return the type of the function TYPE's argument ARGNO if known.
6192 For vararg function's where ARGNO refers to one of the variadic
6193 arguments return null. Otherwise, return a void_type_node for
6194 out-of-bounds ARGNO. */
6197 type_argument_type (const_tree fntype
, unsigned argno
)
6199 /* Treat zero the same as an out-of-bounds argument number. */
6201 return void_type_node
;
6203 function_args_iterator iter
;
6207 FOREACH_FUNCTION_ARGS (fntype
, argtype
, iter
)
6209 /* A vararg function's argument list ends in a null. Otherwise,
6210 an ordinary function's argument list ends with void. Return
6211 null if ARGNO refers to a vararg argument, void_type_node if
6212 it's out of bounds, and the formal argument type otherwise. */
6216 if (i
== argno
|| VOID_TYPE_P (argtype
))
6225 /* Nonzero if integer constants T1 and T2
6226 represent the same constant value. */
6229 tree_int_cst_equal (const_tree t1
, const_tree t2
)
6234 if (t1
== 0 || t2
== 0)
6237 STRIP_ANY_LOCATION_WRAPPER (t1
);
6238 STRIP_ANY_LOCATION_WRAPPER (t2
);
6240 if (TREE_CODE (t1
) == INTEGER_CST
6241 && TREE_CODE (t2
) == INTEGER_CST
6242 && wi::to_widest (t1
) == wi::to_widest (t2
))
6248 /* Return true if T is an INTEGER_CST whose numerical value (extended
6249 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6252 tree_fits_shwi_p (const_tree t
)
6254 return (t
!= NULL_TREE
6255 && TREE_CODE (t
) == INTEGER_CST
6256 && wi::fits_shwi_p (wi::to_widest (t
)));
6259 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6260 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6263 tree_fits_poly_int64_p (const_tree t
)
6267 if (POLY_INT_CST_P (t
))
6269 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6270 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t
, i
))))
6274 return (TREE_CODE (t
) == INTEGER_CST
6275 && wi::fits_shwi_p (wi::to_widest (t
)));
6278 /* Return true if T is an INTEGER_CST whose numerical value (extended
6279 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6282 tree_fits_uhwi_p (const_tree t
)
6284 return (t
!= NULL_TREE
6285 && TREE_CODE (t
) == INTEGER_CST
6286 && wi::fits_uhwi_p (wi::to_widest (t
)));
6289 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6290 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6293 tree_fits_poly_uint64_p (const_tree t
)
6297 if (POLY_INT_CST_P (t
))
6299 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6300 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t
, i
))))
6304 return (TREE_CODE (t
) == INTEGER_CST
6305 && wi::fits_uhwi_p (wi::to_widest (t
)));
6308 /* T is an INTEGER_CST whose numerical value (extended according to
6309 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6313 tree_to_shwi (const_tree t
)
6315 gcc_assert (tree_fits_shwi_p (t
));
6316 return TREE_INT_CST_LOW (t
);
6319 /* T is an INTEGER_CST whose numerical value (extended according to
6320 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6323 unsigned HOST_WIDE_INT
6324 tree_to_uhwi (const_tree t
)
6326 gcc_assert (tree_fits_uhwi_p (t
));
6327 return TREE_INT_CST_LOW (t
);
6330 /* Return the most significant (sign) bit of T. */
6333 tree_int_cst_sign_bit (const_tree t
)
6335 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
6337 return wi::extract_uhwi (wi::to_wide (t
), bitno
, 1);
6340 /* Return an indication of the sign of the integer constant T.
6341 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6342 Note that -1 will never be returned if T's type is unsigned. */
6345 tree_int_cst_sgn (const_tree t
)
6347 if (wi::to_wide (t
) == 0)
6349 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
6351 else if (wi::neg_p (wi::to_wide (t
)))
6357 /* Return the minimum number of bits needed to represent VALUE in a
6358 signed or unsigned type, UNSIGNEDP says which. */
6361 tree_int_cst_min_precision (tree value
, signop sgn
)
6363 /* If the value is negative, compute its negative minus 1. The latter
6364 adjustment is because the absolute value of the largest negative value
6365 is one larger than the largest positive value. This is equivalent to
6366 a bit-wise negation, so use that operation instead. */
6368 if (tree_int_cst_sgn (value
) < 0)
6369 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
6371 /* Return the number of bits needed, taking into account the fact
6372 that we need one more bit for a signed than unsigned type.
6373 If value is 0 or -1, the minimum precision is 1 no matter
6374 whether unsignedp is true or false. */
6376 if (integer_zerop (value
))
6379 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
6382 /* Return truthvalue of whether T1 is the same tree structure as T2.
6383 Return 1 if they are the same.
6384 Return 0 if they are understandably different.
6385 Return -1 if either contains tree structure not understood by
6389 simple_cst_equal (const_tree t1
, const_tree t2
)
6391 enum tree_code code1
, code2
;
6397 if (t1
== 0 || t2
== 0)
6400 /* For location wrappers to be the same, they must be at the same
6401 source location (and wrap the same thing). */
6402 if (location_wrapper_p (t1
) && location_wrapper_p (t2
))
6404 if (EXPR_LOCATION (t1
) != EXPR_LOCATION (t2
))
6406 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6409 code1
= TREE_CODE (t1
);
6410 code2
= TREE_CODE (t2
);
6412 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
6414 if (CONVERT_EXPR_CODE_P (code2
)
6415 || code2
== NON_LVALUE_EXPR
)
6416 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6418 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
6421 else if (CONVERT_EXPR_CODE_P (code2
)
6422 || code2
== NON_LVALUE_EXPR
)
6423 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
6431 return wi::to_widest (t1
) == wi::to_widest (t2
);
6434 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
6437 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
6440 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
6441 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
6442 TREE_STRING_LENGTH (t1
)));
6446 unsigned HOST_WIDE_INT idx
;
6447 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
6448 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
6450 if (vec_safe_length (v1
) != vec_safe_length (v2
))
6453 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
6454 /* ??? Should we handle also fields here? */
6455 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
6461 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6464 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
6467 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
6470 const_tree arg1
, arg2
;
6471 const_call_expr_arg_iterator iter1
, iter2
;
6472 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
6473 arg2
= first_const_call_expr_arg (t2
, &iter2
);
6475 arg1
= next_const_call_expr_arg (&iter1
),
6476 arg2
= next_const_call_expr_arg (&iter2
))
6478 cmp
= simple_cst_equal (arg1
, arg2
);
6482 return arg1
== arg2
;
6486 /* Special case: if either target is an unallocated VAR_DECL,
6487 it means that it's going to be unified with whatever the
6488 TARGET_EXPR is really supposed to initialize, so treat it
6489 as being equivalent to anything. */
6490 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
6491 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
6492 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
6493 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
6494 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
6495 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
6498 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6503 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
6505 case WITH_CLEANUP_EXPR
:
6506 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6510 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
6513 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
6514 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6525 if (POLY_INT_CST_P (t1
))
6526 /* A false return means maybe_ne rather than known_ne. */
6527 return known_eq (poly_widest_int::from (poly_int_cst_value (t1
),
6528 TYPE_SIGN (TREE_TYPE (t1
))),
6529 poly_widest_int::from (poly_int_cst_value (t2
),
6530 TYPE_SIGN (TREE_TYPE (t2
))));
6534 /* This general rule works for most tree codes. All exceptions should be
6535 handled above. If this is a language-specific tree code, we can't
6536 trust what might be in the operand, so say we don't know
6538 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
6541 switch (TREE_CODE_CLASS (code1
))
6545 case tcc_comparison
:
6546 case tcc_expression
:
6550 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
6552 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
6564 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6565 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6566 than U, respectively. */
6569 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
6571 if (tree_int_cst_sgn (t
) < 0)
6573 else if (!tree_fits_uhwi_p (t
))
6575 else if (TREE_INT_CST_LOW (t
) == u
)
6577 else if (TREE_INT_CST_LOW (t
) < u
)
6583 /* Return true if SIZE represents a constant size that is in bounds of
6584 what the middle-end and the backend accepts (covering not more than
6585 half of the address-space).
6586 When PERR is non-null, set *PERR on failure to the description of
6587 why SIZE is not valid. */
6590 valid_constant_size_p (const_tree size
, cst_size_error
*perr
/* = NULL */)
6592 if (POLY_INT_CST_P (size
))
6594 if (TREE_OVERFLOW (size
))
6596 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
6597 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size
, i
)))
6602 cst_size_error error
;
6606 if (TREE_CODE (size
) != INTEGER_CST
)
6608 *perr
= cst_size_not_constant
;
6612 if (TREE_OVERFLOW_P (size
))
6614 *perr
= cst_size_overflow
;
6618 if (tree_int_cst_sgn (size
) < 0)
6620 *perr
= cst_size_negative
;
6623 if (!tree_fits_uhwi_p (size
)
6624 || (wi::to_widest (TYPE_MAX_VALUE (sizetype
))
6625 < wi::to_widest (size
) * 2))
6627 *perr
= cst_size_too_big
;
6634 /* Return the precision of the type, or for a complex or vector type the
6635 precision of the type of its elements. */
6638 element_precision (const_tree type
)
6641 type
= TREE_TYPE (type
);
6642 enum tree_code code
= TREE_CODE (type
);
6643 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
6644 type
= TREE_TYPE (type
);
6646 return TYPE_PRECISION (type
);
6649 /* Return true if CODE represents an associative tree code. Otherwise
6652 associative_tree_code (enum tree_code code
)
6671 /* Return true if CODE represents a commutative tree code. Otherwise
6674 commutative_tree_code (enum tree_code code
)
6680 case MULT_HIGHPART_EXPR
:
6688 case UNORDERED_EXPR
:
6692 case TRUTH_AND_EXPR
:
6693 case TRUTH_XOR_EXPR
:
6695 case WIDEN_MULT_EXPR
:
6696 case VEC_WIDEN_MULT_HI_EXPR
:
6697 case VEC_WIDEN_MULT_LO_EXPR
:
6698 case VEC_WIDEN_MULT_EVEN_EXPR
:
6699 case VEC_WIDEN_MULT_ODD_EXPR
:
6708 /* Return true if CODE represents a ternary tree code for which the
6709 first two operands are commutative. Otherwise return false. */
6711 commutative_ternary_tree_code (enum tree_code code
)
6715 case WIDEN_MULT_PLUS_EXPR
:
6716 case WIDEN_MULT_MINUS_EXPR
:
6726 /* Returns true if CODE can overflow. */
6729 operation_can_overflow (enum tree_code code
)
6737 /* Can overflow in various ways. */
6739 case TRUNC_DIV_EXPR
:
6740 case EXACT_DIV_EXPR
:
6741 case FLOOR_DIV_EXPR
:
6743 /* For INT_MIN / -1. */
6750 /* These operators cannot overflow. */
6755 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6756 ftrapv doesn't generate trapping insns for CODE. */
6759 operation_no_trapping_overflow (tree type
, enum tree_code code
)
6761 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
6763 /* We don't generate instructions that trap on overflow for complex or vector
6765 if (!INTEGRAL_TYPE_P (type
))
6768 if (!TYPE_OVERFLOW_TRAPS (type
))
6778 /* These operators can overflow, and -ftrapv generates trapping code for
6781 case TRUNC_DIV_EXPR
:
6782 case EXACT_DIV_EXPR
:
6783 case FLOOR_DIV_EXPR
:
6786 /* These operators can overflow, but -ftrapv does not generate trapping
6790 /* These operators cannot overflow. */
6795 /* Constructors for pointer, array and function types.
6796 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6797 constructed by language-dependent code, not here.) */
6799 /* Construct, lay out and return the type of pointers to TO_TYPE with
6800 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6801 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6802 indicate this type can reference all of memory. If such a type has
6803 already been constructed, reuse it. */
6806 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
6810 bool could_alias
= can_alias_all
;
6812 if (to_type
== error_mark_node
)
6813 return error_mark_node
;
6815 if (mode
== VOIDmode
)
6817 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6818 mode
= targetm
.addr_space
.pointer_mode (as
);
6821 /* If the pointed-to type has the may_alias attribute set, force
6822 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6823 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6824 can_alias_all
= true;
6826 /* In some cases, languages will have things that aren't a POINTER_TYPE
6827 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6828 In that case, return that type without regard to the rest of our
6831 ??? This is a kludge, but consistent with the way this function has
6832 always operated and there doesn't seem to be a good way to avoid this
6834 if (TYPE_POINTER_TO (to_type
) != 0
6835 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
6836 return TYPE_POINTER_TO (to_type
);
6838 /* First, if we already have a type for pointers to TO_TYPE and it's
6839 the proper mode, use it. */
6840 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
6841 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6844 t
= make_node (POINTER_TYPE
);
6846 TREE_TYPE (t
) = to_type
;
6847 SET_TYPE_MODE (t
, mode
);
6848 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6849 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
6850 TYPE_POINTER_TO (to_type
) = t
;
6852 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6853 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6854 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6855 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6857 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
6860 /* Lay out the type. This function has many callers that are concerned
6861 with expression-construction, and this simplifies them all. */
6867 /* By default build pointers in ptr_mode. */
6870 build_pointer_type (tree to_type
)
6872 return build_pointer_type_for_mode (to_type
, VOIDmode
, false);
6875 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6878 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
6882 bool could_alias
= can_alias_all
;
6884 if (to_type
== error_mark_node
)
6885 return error_mark_node
;
6887 if (mode
== VOIDmode
)
6889 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6890 mode
= targetm
.addr_space
.pointer_mode (as
);
6893 /* If the pointed-to type has the may_alias attribute set, force
6894 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6895 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6896 can_alias_all
= true;
6898 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6899 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6900 In that case, return that type without regard to the rest of our
6903 ??? This is a kludge, but consistent with the way this function has
6904 always operated and there doesn't seem to be a good way to avoid this
6906 if (TYPE_REFERENCE_TO (to_type
) != 0
6907 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
6908 return TYPE_REFERENCE_TO (to_type
);
6910 /* First, if we already have a type for pointers to TO_TYPE and it's
6911 the proper mode, use it. */
6912 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
6913 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6916 t
= make_node (REFERENCE_TYPE
);
6918 TREE_TYPE (t
) = to_type
;
6919 SET_TYPE_MODE (t
, mode
);
6920 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6921 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
6922 TYPE_REFERENCE_TO (to_type
) = t
;
6924 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6925 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6926 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6927 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6929 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
6938 /* Build the node for the type of references-to-TO_TYPE by default
6942 build_reference_type (tree to_type
)
6944 return build_reference_type_for_mode (to_type
, VOIDmode
, false);
6947 #define MAX_INT_CACHED_PREC \
6948 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6949 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
6951 /* Builds a signed or unsigned integer type of precision PRECISION.
6952 Used for C bitfields whose precision does not match that of
6953 built-in target types. */
6955 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
6961 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
6963 if (precision
<= MAX_INT_CACHED_PREC
)
6965 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
6970 itype
= make_node (INTEGER_TYPE
);
6971 TYPE_PRECISION (itype
) = precision
;
6974 fixup_unsigned_type (itype
);
6976 fixup_signed_type (itype
);
6978 inchash::hash hstate
;
6979 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
6980 ret
= type_hash_canon (hstate
.end (), itype
);
6981 if (precision
<= MAX_INT_CACHED_PREC
)
6982 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
6987 #define MAX_BOOL_CACHED_PREC \
6988 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6989 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
6991 /* Builds a boolean type of precision PRECISION.
6992 Used for boolean vectors to choose proper vector element size. */
6994 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
6998 if (precision
<= MAX_BOOL_CACHED_PREC
)
7000 type
= nonstandard_boolean_type_cache
[precision
];
7005 type
= make_node (BOOLEAN_TYPE
);
7006 TYPE_PRECISION (type
) = precision
;
7007 fixup_signed_type (type
);
7009 if (precision
<= MAX_INT_CACHED_PREC
)
7010 nonstandard_boolean_type_cache
[precision
] = type
;
7015 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7016 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7017 is true, reuse such a type that has already been constructed. */
7020 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7022 tree itype
= make_node (INTEGER_TYPE
);
7024 TREE_TYPE (itype
) = type
;
7026 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7027 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7029 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7030 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7031 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7032 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7033 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
7034 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7035 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
7040 if ((TYPE_MIN_VALUE (itype
)
7041 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7042 || (TYPE_MAX_VALUE (itype
)
7043 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7045 /* Since we cannot reliably merge this type, we need to compare it using
7046 structural equality checks. */
7047 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7051 hashval_t hash
= type_hash_canon_hash (itype
);
7052 itype
= type_hash_canon (hash
, itype
);
7057 /* Wrapper around build_range_type_1 with SHARED set to true. */
7060 build_range_type (tree type
, tree lowval
, tree highval
)
7062 return build_range_type_1 (type
, lowval
, highval
, true);
7065 /* Wrapper around build_range_type_1 with SHARED set to false. */
7068 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7070 return build_range_type_1 (type
, lowval
, highval
, false);
7073 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7074 MAXVAL should be the maximum value in the domain
7075 (one less than the length of the array).
7077 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7078 We don't enforce this limit, that is up to caller (e.g. language front end).
7079 The limit exists because the result is a signed type and we don't handle
7080 sizes that use more than one HOST_WIDE_INT. */
7083 build_index_type (tree maxval
)
7085 return build_range_type (sizetype
, size_zero_node
, maxval
);
7088 /* Return true if the debug information for TYPE, a subtype, should be emitted
7089 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7090 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7091 debug info and doesn't reflect the source code. */
7094 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7096 tree base_type
= TREE_TYPE (type
), low
, high
;
7098 /* Subrange types have a base type which is an integral type. */
7099 if (!INTEGRAL_TYPE_P (base_type
))
7102 /* Get the real bounds of the subtype. */
7103 if (lang_hooks
.types
.get_subrange_bounds
)
7104 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7107 low
= TYPE_MIN_VALUE (type
);
7108 high
= TYPE_MAX_VALUE (type
);
7111 /* If the type and its base type have the same representation and the same
7112 name, then the type is not a subrange but a copy of the base type. */
7113 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7114 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7115 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7116 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7117 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7118 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7128 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7129 and number of elements specified by the range of values of INDEX_TYPE.
7130 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7131 If SHARED is true, reuse such a type that has already been constructed.
7132 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7135 build_array_type_1 (tree elt_type
, tree index_type
, bool typeless_storage
,
7136 bool shared
, bool set_canonical
)
7140 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7142 error ("arrays of functions are not meaningful");
7143 elt_type
= integer_type_node
;
7146 t
= make_node (ARRAY_TYPE
);
7147 TREE_TYPE (t
) = elt_type
;
7148 TYPE_DOMAIN (t
) = index_type
;
7149 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7150 TYPE_TYPELESS_STORAGE (t
) = typeless_storage
;
7155 hashval_t hash
= type_hash_canon_hash (t
);
7156 t
= type_hash_canon (hash
, t
);
7159 if (TYPE_CANONICAL (t
) == t
&& set_canonical
)
7161 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7162 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
7164 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7165 else if (TYPE_CANONICAL (elt_type
) != elt_type
7166 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7168 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7170 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7171 typeless_storage
, shared
, set_canonical
);
7177 /* Wrapper around build_array_type_1 with SHARED set to true. */
7180 build_array_type (tree elt_type
, tree index_type
, bool typeless_storage
)
7183 build_array_type_1 (elt_type
, index_type
, typeless_storage
, true, true);
7186 /* Wrapper around build_array_type_1 with SHARED set to false. */
7189 build_nonshared_array_type (tree elt_type
, tree index_type
)
7191 return build_array_type_1 (elt_type
, index_type
, false, false, true);
7194 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7198 build_array_type_nelts (tree elt_type
, poly_uint64 nelts
)
7200 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7203 /* Recursively examines the array elements of TYPE, until a non-array
7204 element type is found. */
7207 strip_array_types (tree type
)
7209 while (TREE_CODE (type
) == ARRAY_TYPE
)
7210 type
= TREE_TYPE (type
);
7215 /* Computes the canonical argument types from the argument type list
7218 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7219 on entry to this function, or if any of the ARGTYPES are
7222 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7223 true on entry to this function, or if any of the ARGTYPES are
7226 Returns a canonical argument list, which may be ARGTYPES when the
7227 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7228 true) or would not differ from ARGTYPES. */
7231 maybe_canonicalize_argtypes (tree argtypes
,
7232 bool *any_structural_p
,
7233 bool *any_noncanonical_p
)
7236 bool any_noncanonical_argtypes_p
= false;
7238 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7240 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7241 /* Fail gracefully by stating that the type is structural. */
7242 *any_structural_p
= true;
7243 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7244 *any_structural_p
= true;
7245 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7246 || TREE_PURPOSE (arg
))
7247 /* If the argument has a default argument, we consider it
7248 non-canonical even though the type itself is canonical.
7249 That way, different variants of function and method types
7250 with default arguments will all point to the variant with
7251 no defaults as their canonical type. */
7252 any_noncanonical_argtypes_p
= true;
7255 if (*any_structural_p
)
7258 if (any_noncanonical_argtypes_p
)
7260 /* Build the canonical list of argument types. */
7261 tree canon_argtypes
= NULL_TREE
;
7262 bool is_void
= false;
7264 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7266 if (arg
== void_list_node
)
7269 canon_argtypes
= tree_cons (NULL_TREE
,
7270 TYPE_CANONICAL (TREE_VALUE (arg
)),
7274 canon_argtypes
= nreverse (canon_argtypes
);
7276 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7278 /* There is a non-canonical type. */
7279 *any_noncanonical_p
= true;
7280 return canon_argtypes
;
7283 /* The canonical argument types are the same as ARGTYPES. */
7287 /* Construct, lay out and return
7288 the type of functions returning type VALUE_TYPE
7289 given arguments of types ARG_TYPES.
7290 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7291 are data type nodes for the arguments of the function.
7292 If such a type has already been constructed, reuse it. */
7295 build_function_type (tree value_type
, tree arg_types
)
7298 inchash::hash hstate
;
7299 bool any_structural_p
, any_noncanonical_p
;
7300 tree canon_argtypes
;
7302 gcc_assert (arg_types
!= error_mark_node
);
7304 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
7306 error ("function return type cannot be function");
7307 value_type
= integer_type_node
;
7310 /* Make a node of the sort we want. */
7311 t
= make_node (FUNCTION_TYPE
);
7312 TREE_TYPE (t
) = value_type
;
7313 TYPE_ARG_TYPES (t
) = arg_types
;
7315 /* If we already have such a type, use the old one. */
7316 hashval_t hash
= type_hash_canon_hash (t
);
7317 t
= type_hash_canon (hash
, t
);
7319 /* Set up the canonical type. */
7320 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
7321 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
7322 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
7324 &any_noncanonical_p
);
7325 if (any_structural_p
)
7326 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7327 else if (any_noncanonical_p
)
7328 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
7331 if (!COMPLETE_TYPE_P (t
))
7336 /* Build a function type. The RETURN_TYPE is the type returned by the
7337 function. If VAARGS is set, no void_type_node is appended to the
7338 list. ARGP must be always be terminated be a NULL_TREE. */
7341 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
7345 t
= va_arg (argp
, tree
);
7346 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
7347 args
= tree_cons (NULL_TREE
, t
, args
);
7352 if (args
!= NULL_TREE
)
7353 args
= nreverse (args
);
7354 gcc_assert (last
!= void_list_node
);
7356 else if (args
== NULL_TREE
)
7357 args
= void_list_node
;
7361 args
= nreverse (args
);
7362 TREE_CHAIN (last
) = void_list_node
;
7364 args
= build_function_type (return_type
, args
);
7369 /* Build a function type. The RETURN_TYPE is the type returned by the
7370 function. If additional arguments are provided, they are
7371 additional argument types. The list of argument types must always
7372 be terminated by NULL_TREE. */
7375 build_function_type_list (tree return_type
, ...)
7380 va_start (p
, return_type
);
7381 args
= build_function_type_list_1 (false, return_type
, p
);
7386 /* Build a variable argument function type. The RETURN_TYPE is the
7387 type returned by the function. If additional arguments are provided,
7388 they are additional argument types. The list of argument types must
7389 always be terminated by NULL_TREE. */
7392 build_varargs_function_type_list (tree return_type
, ...)
7397 va_start (p
, return_type
);
7398 args
= build_function_type_list_1 (true, return_type
, p
);
7404 /* Build a function type. RETURN_TYPE is the type returned by the
7405 function; VAARGS indicates whether the function takes varargs. The
7406 function takes N named arguments, the types of which are provided in
7410 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
7414 tree t
= vaargs
? NULL_TREE
: void_list_node
;
7416 for (i
= n
- 1; i
>= 0; i
--)
7417 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
7419 return build_function_type (return_type
, t
);
7422 /* Build a function type. RETURN_TYPE is the type returned by the
7423 function. The function takes N named arguments, the types of which
7424 are provided in ARG_TYPES. */
7427 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7429 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
7432 /* Build a variable argument function type. RETURN_TYPE is the type
7433 returned by the function. The function takes N named arguments, the
7434 types of which are provided in ARG_TYPES. */
7437 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7439 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
7442 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7443 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7444 for the method. An implicit additional parameter (of type
7445 pointer-to-BASETYPE) is added to the ARGTYPES. */
7448 build_method_type_directly (tree basetype
,
7454 bool any_structural_p
, any_noncanonical_p
;
7455 tree canon_argtypes
;
7457 /* Make a node of the sort we want. */
7458 t
= make_node (METHOD_TYPE
);
7460 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7461 TREE_TYPE (t
) = rettype
;
7462 ptype
= build_pointer_type (basetype
);
7464 /* The actual arglist for this function includes a "hidden" argument
7465 which is "this". Put it into the list of argument types. */
7466 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
7467 TYPE_ARG_TYPES (t
) = argtypes
;
7469 /* If we already have such a type, use the old one. */
7470 hashval_t hash
= type_hash_canon_hash (t
);
7471 t
= type_hash_canon (hash
, t
);
7473 /* Set up the canonical type. */
7475 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7476 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
7478 = (TYPE_CANONICAL (basetype
) != basetype
7479 || TYPE_CANONICAL (rettype
) != rettype
);
7480 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
7482 &any_noncanonical_p
);
7483 if (any_structural_p
)
7484 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7485 else if (any_noncanonical_p
)
7487 = build_method_type_directly (TYPE_CANONICAL (basetype
),
7488 TYPE_CANONICAL (rettype
),
7490 if (!COMPLETE_TYPE_P (t
))
7496 /* Construct, lay out and return the type of methods belonging to class
7497 BASETYPE and whose arguments and values are described by TYPE.
7498 If that type exists already, reuse it.
7499 TYPE must be a FUNCTION_TYPE node. */
7502 build_method_type (tree basetype
, tree type
)
7504 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
7506 return build_method_type_directly (basetype
,
7508 TYPE_ARG_TYPES (type
));
7511 /* Construct, lay out and return the type of offsets to a value
7512 of type TYPE, within an object of type BASETYPE.
7513 If a suitable offset type exists already, reuse it. */
7516 build_offset_type (tree basetype
, tree type
)
7520 /* Make a node of the sort we want. */
7521 t
= make_node (OFFSET_TYPE
);
7523 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7524 TREE_TYPE (t
) = type
;
7526 /* If we already have such a type, use the old one. */
7527 hashval_t hash
= type_hash_canon_hash (t
);
7528 t
= type_hash_canon (hash
, t
);
7530 if (!COMPLETE_TYPE_P (t
))
7533 if (TYPE_CANONICAL (t
) == t
)
7535 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7536 || TYPE_STRUCTURAL_EQUALITY_P (type
))
7537 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7538 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
7539 || TYPE_CANONICAL (type
) != type
)
7541 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
7542 TYPE_CANONICAL (type
));
7548 /* Create a complex type whose components are COMPONENT_TYPE.
7550 If NAMED is true, the type is given a TYPE_NAME. We do not always
7551 do so because this creates a DECL node and thus make the DECL_UIDs
7552 dependent on the type canonicalization hashtable, which is GC-ed,
7553 so the DECL_UIDs would not be stable wrt garbage collection. */
7556 build_complex_type (tree component_type
, bool named
)
7558 gcc_assert (INTEGRAL_TYPE_P (component_type
)
7559 || SCALAR_FLOAT_TYPE_P (component_type
)
7560 || FIXED_POINT_TYPE_P (component_type
));
7562 /* Make a node of the sort we want. */
7563 tree probe
= make_node (COMPLEX_TYPE
);
7565 TREE_TYPE (probe
) = TYPE_MAIN_VARIANT (component_type
);
7567 /* If we already have such a type, use the old one. */
7568 hashval_t hash
= type_hash_canon_hash (probe
);
7569 tree t
= type_hash_canon (hash
, probe
);
7573 /* We created a new type. The hash insertion will have laid
7574 out the type. We need to check the canonicalization and
7575 maybe set the name. */
7576 gcc_checking_assert (COMPLETE_TYPE_P (t
)
7578 && TYPE_CANONICAL (t
) == t
);
7580 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t
)))
7581 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7582 else if (TYPE_CANONICAL (TREE_TYPE (t
)) != TREE_TYPE (t
))
7584 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t
)), named
);
7586 /* We need to create a name, since complex is a fundamental type. */
7589 const char *name
= NULL
;
7591 if (TREE_TYPE (t
) == char_type_node
)
7592 name
= "complex char";
7593 else if (TREE_TYPE (t
) == signed_char_type_node
)
7594 name
= "complex signed char";
7595 else if (TREE_TYPE (t
) == unsigned_char_type_node
)
7596 name
= "complex unsigned char";
7597 else if (TREE_TYPE (t
) == short_integer_type_node
)
7598 name
= "complex short int";
7599 else if (TREE_TYPE (t
) == short_unsigned_type_node
)
7600 name
= "complex short unsigned int";
7601 else if (TREE_TYPE (t
) == integer_type_node
)
7602 name
= "complex int";
7603 else if (TREE_TYPE (t
) == unsigned_type_node
)
7604 name
= "complex unsigned int";
7605 else if (TREE_TYPE (t
) == long_integer_type_node
)
7606 name
= "complex long int";
7607 else if (TREE_TYPE (t
) == long_unsigned_type_node
)
7608 name
= "complex long unsigned int";
7609 else if (TREE_TYPE (t
) == long_long_integer_type_node
)
7610 name
= "complex long long int";
7611 else if (TREE_TYPE (t
) == long_long_unsigned_type_node
)
7612 name
= "complex long long unsigned int";
7615 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
7616 get_identifier (name
), t
);
7620 return build_qualified_type (t
, TYPE_QUALS (component_type
));
7623 /* If TYPE is a real or complex floating-point type and the target
7624 does not directly support arithmetic on TYPE then return the wider
7625 type to be used for arithmetic on TYPE. Otherwise, return
7629 excess_precision_type (tree type
)
7631 /* The target can give two different responses to the question of
7632 which excess precision mode it would like depending on whether we
7633 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7635 enum excess_precision_type requested_type
7636 = (flag_excess_precision
== EXCESS_PRECISION_FAST
7637 ? EXCESS_PRECISION_TYPE_FAST
7638 : EXCESS_PRECISION_TYPE_STANDARD
);
7640 enum flt_eval_method target_flt_eval_method
7641 = targetm
.c
.excess_precision (requested_type
);
7643 /* The target should not ask for unpredictable float evaluation (though
7644 it might advertise that implicitly the evaluation is unpredictable,
7645 but we don't care about that here, it will have been reported
7646 elsewhere). If it does ask for unpredictable evaluation, we have
7647 nothing to do here. */
7648 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
7650 /* Nothing to do. The target has asked for all types we know about
7651 to be computed with their native precision and range. */
7652 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
7655 /* The target will promote this type in a target-dependent way, so excess
7656 precision ought to leave it alone. */
7657 if (targetm
.promoted_type (type
) != NULL_TREE
)
7660 machine_mode float16_type_mode
= (float16_type_node
7661 ? TYPE_MODE (float16_type_node
)
7663 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
7664 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
7666 switch (TREE_CODE (type
))
7670 machine_mode type_mode
= TYPE_MODE (type
);
7671 switch (target_flt_eval_method
)
7673 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7674 if (type_mode
== float16_type_mode
)
7675 return float_type_node
;
7677 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7678 if (type_mode
== float16_type_mode
7679 || type_mode
== float_type_mode
)
7680 return double_type_node
;
7682 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7683 if (type_mode
== float16_type_mode
7684 || type_mode
== float_type_mode
7685 || type_mode
== double_type_mode
)
7686 return long_double_type_node
;
7695 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
7697 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
7698 switch (target_flt_eval_method
)
7700 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7701 if (type_mode
== float16_type_mode
)
7702 return complex_float_type_node
;
7704 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7705 if (type_mode
== float16_type_mode
7706 || type_mode
== float_type_mode
)
7707 return complex_double_type_node
;
7709 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7710 if (type_mode
== float16_type_mode
7711 || type_mode
== float_type_mode
7712 || type_mode
== double_type_mode
)
7713 return complex_long_double_type_node
;
/* NOTE(review): this excerpt is extraction-garbled -- statements are split
   across lines and several interior lines (declarations of WIN and UNS,
   braces, return statements) are missing.  Text is preserved verbatim;
   verify any change against the master copy of this file.  */
7727 /* Return OP, stripped of any conversions to wider types as much as is safe.
7728 Converting the value back to OP's type makes a value equivalent to OP.
7730 If FOR_TYPE is nonzero, we return a value which, if converted to
7731 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7733 OP must have integer, real or enumeral type. Pointers are not allowed!
7735 There are some cases where the obvious value we could return
7736 would regenerate to OP if converted to OP's type,
7737 but would not extend like OP to wider types.
7738 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7739 For example, if OP is (unsigned short)(signed char)-1,
7740 we avoid returning (signed char)-1 if FOR_TYPE is int,
7741 even though extending that to an unsigned short would regenerate OP,
7742 since the result of extending (signed char)-1 to (int)
7743 is different from (int) OP. */
7746 get_unwidened (tree op
, tree for_type
)
7748 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7749 tree type
= TREE_TYPE (op
);
7751 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
7753 = (for_type
!= 0 && for_type
!= type
7754 && final_prec
> TYPE_PRECISION (type
)
7755 && TYPE_UNSIGNED (type
));
/* Walk down nested conversions, stripping those that are safe to remove.  */
7758 while (CONVERT_EXPR_P (op
))
7762 /* TYPE_PRECISION on vector types has different meaning
7763 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7764 so avoid them here. */
7765 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
7768 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
7769 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
7771 /* Truncations are many-one so cannot be removed.
7772 Unless we are later going to truncate down even farther. */
7774 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
7777 /* See what's inside this conversion. If we decide to strip it,
7779 op
= TREE_OPERAND (op
, 0);
7781 /* If we have not stripped any zero-extensions (uns is 0),
7782 we can strip any kind of extension.
7783 If we have previously stripped a zero-extension,
7784 only zero-extensions can safely be stripped.
7785 Any extension can be stripped if the bits it would produce
7786 are all going to be discarded later by truncating to FOR_TYPE. */
7790 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
7792 /* TYPE_UNSIGNED says whether this is a zero-extension.
7793 Let's avoid computing it if it does not affect WIN
7794 and if UNS will not be needed again. */
7796 || CONVERT_EXPR_P (op
))
7797 && TYPE_UNSIGNED (TREE_TYPE (op
)))
7805 /* If we finally reach a constant see if it fits in something smaller and
7806 in that case convert it. */
7807 if (TREE_CODE (win
) == INTEGER_CST
)
7809 tree wtype
= TREE_TYPE (win
);
7810 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
7812 prec
= MAX (prec
, final_prec
);
7813 if (prec
< TYPE_PRECISION (wtype
))
7815 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
7816 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
7817 win
= fold_convert (t
, win
);
7824 /* Return OP or a simpler expression for a narrower value
7825 which can be sign-extended or zero-extended to give back OP.
7826 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7827 or 0 if the value should be sign-extended. */
7830 get_narrower (tree op
, int *unsignedp_ptr
)
7835 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
))
;
/* NOTE(review): COMPOUND_EXPR handling below descends to the rightmost
   operand, recurses, then rebuilds the COMPOUND_EXPR chain from the
   saved operands in V -- several lines of that loop body are missing
   from this excerpt.  */
7837 if (TREE_CODE (op
) == COMPOUND_EXPR
)
7840 op
= TREE_OPERAND (op
, 1);
7841 while (TREE_CODE (op
) == COMPOUND_EXPR
);
7842 tree ret
= get_narrower (op
, unsignedp_ptr
);
7845 auto_vec
<tree
, 16> v
;
7847 for (op
= win
; TREE_CODE (op
) == COMPOUND_EXPR
;
7848 op
= TREE_OPERAND (op
, 1))
7850 FOR_EACH_VEC_ELT_REVERSE (v
, i
, op
)
7851 ret
= build2_loc (EXPR_LOCATION (op
), COMPOUND_EXPR
,
7852 TREE_TYPE (ret
), TREE_OPERAND (op
, 0),
7856 while (TREE_CODE (op
) == NOP_EXPR
)
7859 = (TYPE_PRECISION (TREE_TYPE (op
))
7860 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
7862 /* Truncations are many-one so cannot be removed. */
7866 /* See what's inside this conversion. If we decide to strip it,
7871 op
= TREE_OPERAND (op
, 0);
7872 /* An extension: the outermost one can be stripped,
7873 but remember whether it is zero or sign extension. */
7875 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7876 /* Otherwise, if a sign extension has been stripped,
7877 only sign extensions can now be stripped;
7878 if a zero extension has been stripped, only zero-extensions. */
7879 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
7883 else /* bitschange == 0 */
7885 /* A change in nominal type can always be stripped, but we must
7886 preserve the unsignedness. */
7888 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7890 op
= TREE_OPERAND (op
, 0);
7891 /* Keep trying to narrow, but don't assign op to win if it
7892 would turn an integral type into something else. */
7893 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
7900 if (TREE_CODE (op
) == COMPONENT_REF
7901 /* Since type_for_size always gives an integer type. */
7902 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
7903 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
7904 /* Ensure field is laid out already. */
7905 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
7906 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
7908 unsigned HOST_WIDE_INT innerprec
7909 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
7910 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
7911 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
7912 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
7914 /* We can get this structure field in a narrower type that fits it,
7915 but the resulting extension to its nominal type (a fullword type)
7916 must satisfy the same conditions as for other extensions.
7918 Do this only for fields that are aligned (not bit-fields),
7919 because when bit-field insns will be used there is no
7920 advantage in doing this. */
7922 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
7923 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
7924 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
7928 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
7929 win
= fold_convert (type
, op
);
7933 *unsignedp_ptr
= uns
;
7937 /* Return true if integer constant C has a value that is permissible
7938 for TYPE, an integral type. */
7941 int_fits_type_p (const_tree c
, const_tree type
)
7943 tree type_low_bound
, type_high_bound
;
7944 bool ok_for_low_bound
, ok_for_high_bound
;
7945 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
7947 /* Non-standard boolean types can have arbitrary precision but various
7948 transformations assume that they can only take values 0 and +/-1. */
7949 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7950 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
7953 type_low_bound
= TYPE_MIN_VALUE (type
);
7954 type_high_bound
= TYPE_MAX_VALUE (type
);
7956 /* If at least one bound of the type is a constant integer, we can check
7957 ourselves and maybe make a decision. If no such decision is possible, but
7958 this type is a subtype, try checking against that. Otherwise, use
7959 fits_to_tree_p, which checks against the precision.
7961 Compute the status for each possibly constant bound, and return if we see
7962 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
7963 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
7964 for "constant known to fit". */
7966 /* Check if c >= type_low_bound. */
7967 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
7969 if (tree_int_cst_lt (c
, type_low_bound
))
7971 ok_for_low_bound
= true;
7974 ok_for_low_bound
= false;
7976 /* Check if c <= type_high_bound. */
7977 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
7979 if (tree_int_cst_lt (type_high_bound
, c
))
7981 ok_for_high_bound
= true;
7984 ok_for_high_bound
= false;
7986 /* If the constant fits both bounds, the result is known. */
7987 if (ok_for_low_bound
&& ok_for_high_bound
)
7990 /* Perform some generic filtering which may allow making a decision
7991 even if the bounds are not constant. First, negative integers
7992 never fit in unsigned types, */
7993 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
7996 /* Second, narrower types always fit in wider ones. */
7997 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8000 /* Third, unsigned integers with top bit set never fit signed types. */
8001 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8003 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
8004 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8006 /* When a tree_cst is converted to a wide-int, the precision
8007 is taken from the type. However, if the precision of the
8008 mode underneath the type is smaller than that, it is
8009 possible that the value will not fit. The test below
8010 fails if any bit is set between the sign bit of the
8011 underlying mode and the top bit of the type. */
8012 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8015 else if (wi::neg_p (wi::to_wide (c
)))
8019 /* If we haven't been able to decide at this point, there is nothing more we
8020 can check ourselves here. Look at the base type if we have one and it
8021 has the same precision. */
8022 if (TREE_CODE (type
) == INTEGER_TYPE
8023 && TREE_TYPE (type
) != 0
8024 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8026 type
= TREE_TYPE (type
);
8030 /* Or to fits_to_tree_p, if nothing else. */
8031 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
8034 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8035 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8036 represented (assuming two's-complement arithmetic) within the bit
8037 precision of the type are returned instead. */
8040 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8042 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8043 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8044 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
8047 if (TYPE_UNSIGNED (type
))
8048 mpz_set_ui (min
, 0);
8051 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8052 wi::to_mpz (mn
, min
, SIGNED
);
8056 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8057 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8058 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
8061 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8062 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8066 /* Return true if VAR is an automatic variable. */
8069 auto_var_p (const_tree var
)
8071 return ((((VAR_P (var
) && ! DECL_EXTERNAL (var
))
8072 || TREE_CODE (var
) == PARM_DECL
)
8073 && ! TREE_STATIC (var
))
8074 || TREE_CODE (var
) == RESULT_DECL
);
8077 /* Return true if VAR is an automatic variable defined in function FN. */
8080 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8082 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8083 && (auto_var_p (var
)
8084 || TREE_CODE (var
) == LABEL_DECL
));
/* NOTE(review): extraction-garbled excerpt -- interior lines (returns,
   braces, some case labels) are missing.  Kept verbatim; verify against
   the master copy before editing.  */
8087 /* Subprogram of following function. Called by walk_tree.
8089 Return *TP if it is an automatic variable or parameter of the
8090 function passed in as DATA. */
8093 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8095 tree fn
= (tree
) data
;
8100 else if (DECL_P (*tp
)
8101 && auto_var_in_fn_p (*tp
, fn
))
8107 /* Returns true if T is, contains, or refers to a type with variable
8108 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8109 arguments, but not the return type. If FN is nonzero, only return
8110 true if a modifier of the type or position of FN is a variable or
8111 parameter inside FN.
8113 This concept is more general than that of C99 'variably modified types':
8114 in C99, a struct type is never variably modified because a VLA may not
8115 appear as a structure member. However, in GNU C code like:
8117 struct S { int i[f()]; };
8119 is valid, and other languages may define similar constructs. */
8122 variably_modified_type_p (tree type
, tree fn
)
8126 /* Test if T is either variable (if FN is zero) or an expression containing
8127 a variable in FN. If TYPE isn't gimplified, return true also if
8128 gimplify_one_sizepos would gimplify the expression into a local
8130 #define RETURN_TRUE_IF_VAR(T) \
8131 do { tree _t = (T); \
8132 if (_t != NULL_TREE \
8133 && _t != error_mark_node \
8134 && !CONSTANT_CLASS_P (_t) \
8135 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8137 || (!TYPE_SIZES_GIMPLIFIED (type) \
8138 && (TREE_CODE (_t) != VAR_DECL \
8139 && !CONTAINS_PLACEHOLDER_P (_t))) \
8140 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8141 return true; } while (0)
8143 if (type
== error_mark_node
)
8146 /* If TYPE itself has variable size, it is variably modified. */
8147 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8148 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8150 switch (TREE_CODE (type
))
8153 case REFERENCE_TYPE
:
8155 /* Ada can have pointer types referring to themselves indirectly. */
8156 if (TREE_VISITED (type
))
8158 TREE_VISITED (type
) = true;
8159 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8161 TREE_VISITED (type
) = false;
8164 TREE_VISITED (type
) = false;
8169 /* If TYPE is a function type, it is variably modified if the
8170 return type is variably modified. */
8171 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8177 case FIXED_POINT_TYPE
:
8180 /* Scalar types are variably modified if their end points
8182 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8183 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8188 case QUAL_UNION_TYPE
:
8189 /* We can't see if any of the fields are variably-modified by the
8190 definition we normally use, since that would produce infinite
8191 recursion via pointers. */
8192 /* This is variably modified if some field's type is. */
8193 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8194 if (TREE_CODE (t
) == FIELD_DECL
)
8196 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8197 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8198 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8200 /* If the type is a qualified union, then the DECL_QUALIFIER
8201 of fields can also be an expression containing a variable. */
8202 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8203 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8205 /* If the field is a qualified union, then it's only a container
8206 for what's inside so we look into it. That's necessary in LTO
8207 mode because the sizes of the field tested above have been set
8208 to PLACEHOLDER_EXPRs by free_lang_data. */
8209 if (TREE_CODE (TREE_TYPE (t
)) == QUAL_UNION_TYPE
8210 && variably_modified_type_p (TREE_TYPE (t
), fn
))
8216 /* Do not call ourselves to avoid infinite recursion. This is
8217 variably modified if the element type is. */
8218 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8219 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8226 /* The current language may have other cases to check, but in general,
8227 all other types are not variably modified. */
8228 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8230 #undef RETURN_TRUE_IF_VAR
8233 /* Given a DECL or TYPE, return the scope in which it was declared, or
8234 NULL_TREE if there is no containing scope. */
8237 get_containing_scope (const_tree t
)
8239 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
/* NOTE(review): extraction-garbled excerpt -- interior lines (local
   declarations, else branches, final returns) are missing.  Kept
   verbatim; verify against the master copy before editing.  */
8242 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8245 get_ultimate_context (const_tree decl
)
8247 while (decl
&& TREE_CODE (decl
) != TRANSLATION_UNIT_DECL
)
8249 if (TREE_CODE (decl
) == BLOCK
)
8250 decl
= BLOCK_SUPERCONTEXT (decl
);
8252 decl
= get_containing_scope (decl
);
8257 /* Return the innermost context enclosing DECL that is
8258 a FUNCTION_DECL, or zero if none. */
8261 decl_function_context (const_tree decl
)
8265 if (TREE_CODE (decl
) == ERROR_MARK
)
8268 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8269 where we look up the function at runtime. Such functions always take
8270 a first argument of type 'pointer to real context'.
8272 C++ should really be fixed to use DECL_CONTEXT for the real context,
8273 and use something else for the "virtual context". */
8274 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VIRTUAL_P (decl
))
8277 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8279 context
= DECL_CONTEXT (decl
);
8281 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8283 if (TREE_CODE (context
) == BLOCK
)
8284 context
= BLOCK_SUPERCONTEXT (context
);
8286 context
= get_containing_scope (context
);
8292 /* Return the innermost context enclosing DECL that is
8293 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8294 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8297 decl_type_context (const_tree decl
)
8299 tree context
= DECL_CONTEXT (decl
);
8302 switch (TREE_CODE (context
))
8304 case NAMESPACE_DECL
:
8305 case TRANSLATION_UNIT_DECL
:
8310 case QUAL_UNION_TYPE
:
8315 context
= DECL_CONTEXT (context
);
8319 context
= BLOCK_SUPERCONTEXT (context
);
8329 /* CALL is a CALL_EXPR. Return the declaration for the function
8330 called, or NULL_TREE if the called function cannot be
8334 get_callee_fndecl (const_tree call
)
8338 if (call
== error_mark_node
)
8339 return error_mark_node
;
8341 /* It's invalid to call this function with anything but a
8343 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8345 /* The first operand to the CALL is the address of the function
8347 addr
= CALL_EXPR_FN (call
);
8349 /* If there is no function, return early. */
8350 if (addr
== NULL_TREE
)
8355 /* If this is a readonly function pointer, extract its initial value. */
8356 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8357 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8358 && DECL_INITIAL (addr
))
8359 addr
= DECL_INITIAL (addr
);
8361 /* If the address is just `&f' for some function `f', then we know
8362 that `f' is being called. */
8363 if (TREE_CODE (addr
) == ADDR_EXPR
8364 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8365 return TREE_OPERAND (addr
, 0);
8367 /* We couldn't figure out what was being called. */
8371 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8372 return the associated function code, otherwise return CFN_LAST. */
8375 get_call_combined_fn (const_tree call
)
8377 /* It's invalid to call this function with anything but a CALL_EXPR. */
8378 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8380 if (!CALL_EXPR_FN (call
))
8381 return as_combined_fn (CALL_EXPR_IFN (call
));
8383 tree fndecl
= get_callee_fndecl (call
);
8384 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
8385 return as_combined_fn (DECL_FUNCTION_CODE (fndecl
));
8390 /* Comparator of indices based on tree_node_counts. */
8393 tree_nodes_cmp (const void *p1
, const void *p2
)
8395 const unsigned *n1
= (const unsigned *)p1
;
8396 const unsigned *n2
= (const unsigned *)p2
;
8398 return tree_node_counts
[*n1
] - tree_node_counts
[*n2
];
8401 /* Comparator of indices based on tree_code_counts. */
8404 tree_codes_cmp (const void *p1
, const void *p2
)
8406 const unsigned *n1
= (const unsigned *)p1
;
8407 const unsigned *n2
= (const unsigned *)p2
;
8409 return tree_code_counts
[*n1
] - tree_code_counts
[*n2
];
/* NOTE(review): extraction-garbled excerpt -- braces, else branches,
   local declarations and returns are missing from several functions
   below.  Kept verbatim; verify against the master copy before editing.  */
8412 #define TREE_MEM_USAGE_SPACES 40
8414 /* Print debugging information about tree nodes generated during the compile,
8415 and any language-specific information. */
8418 dump_tree_statistics (void)
8420 if (GATHER_STATISTICS
)
8422 uint64_t total_nodes
, total_bytes
;
8423 fprintf (stderr
, "\nKind Nodes Bytes\n");
8424 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8425 total_nodes
= total_bytes
= 0;
/* Sort node-kind indices by count before printing.  */
8428 auto_vec
<unsigned> indices (all_kinds
);
8429 for (unsigned i
= 0; i
< all_kinds
; i
++)
8430 indices
.quick_push (i
);
8431 indices
.qsort (tree_nodes_cmp
);
8433 for (unsigned i
= 0; i
< (int) all_kinds
; i
++)
8435 unsigned j
= indices
[i
];
8436 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n",
8437 tree_node_kind_names
[j
], SIZE_AMOUNT (tree_node_counts
[j
]),
8438 SIZE_AMOUNT (tree_node_sizes
[j
]));
8439 total_nodes
+= tree_node_counts
[j
];
8440 total_bytes
+= tree_node_sizes
[j
];
8442 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8443 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n", "Total",
8444 SIZE_AMOUNT (total_nodes
), SIZE_AMOUNT (total_bytes
));
8445 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8449 fprintf (stderr
, "Code Nodes\n");
8450 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
/* Same idea for per-tree-code counts.  */
8452 auto_vec
<unsigned> indices (MAX_TREE_CODES
);
8453 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8454 indices
.quick_push (i
);
8455 indices
.qsort (tree_codes_cmp
);
8457 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8459 unsigned j
= indices
[i
];
8460 fprintf (stderr
, "%-32s %6" PRIu64
"%c\n",
8461 get_tree_code_name ((enum tree_code
) j
),
8462 SIZE_AMOUNT (tree_code_counts
[j
]));
8464 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8465 fprintf (stderr
, "\n");
8466 ssanames_print_statistics ();
8467 fprintf (stderr
, "\n");
8468 phinodes_print_statistics ();
8469 fprintf (stderr
, "\n");
8473 fprintf (stderr
, "(No per-node statistics)\n");
8475 print_type_hash_statistics ();
8476 print_debug_expr_statistics ();
8477 print_value_expr_statistics ();
8478 lang_hooks
.print_statistics ();
8481 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8483 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8486 crc32_unsigned_n (unsigned chksum
, unsigned value
, unsigned bytes
)
8488 /* This relies on the raw feedback's top 4 bits being zero. */
8489 #define FEEDBACK(X) ((X) * 0x04c11db7)
8490 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8491 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8492 static const unsigned syndromes
[16] =
8494 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8495 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8496 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8497 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
/* Process the value four bits at a time, MSB first.  */
8502 value
<<= (32 - bytes
* 8);
8503 for (unsigned ix
= bytes
* 2; ix
--; value
<<= 4)
8505 unsigned feedback
= syndromes
[((value
^ chksum
) >> 28) & 0xf];
8507 chksum
= (chksum
<< 4) ^ feedback
;
8513 /* Generate a crc32 of a string. */
8516 crc32_string (unsigned chksum
, const char *string
)
8519 chksum
= crc32_byte (chksum
, *string
);
8524 /* P is a string that will be used in a symbol. Mask out any characters
8525 that are not valid in that context. */
8528 clean_symbol_name (char *p
)
8532 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8535 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8542 static GTY(()) unsigned anon_cnt
= 0; /* Saved for PCH. */
8544 /* Create a unique anonymous identifier. The identifier is still a
8545 valid assembly label. */
8551 #if !defined (NO_DOT_IN_LABEL)
8553 #elif !defined (NO_DOLLAR_IN_LABEL)
8561 int len
= snprintf (buf
, sizeof (buf
), fmt
, anon_cnt
++);
8562 gcc_checking_assert (len
< int (sizeof (buf
)));
8564 tree id
= get_identifier_with_length (buf
, len
);
8565 IDENTIFIER_ANON_P (id
) = true;
8570 /* Generate a name for a special-purpose function.
8571 The generated name may need to be unique across the whole link.
8572 Changes to this function may also require corresponding changes to
8573 xstrdup_mask_random.
8574 TYPE is some string to identify the purpose of this function to the
8575 linker or collect2; it must start with an uppercase letter,
8577 I - for constructors
8579 N - for C++ anonymous namespaces
8580 F - for DWARF unwind frame information. */
8583 get_file_function_name (const char *type
)
8589 /* If we already have a name we know to be unique, just use that. */
8590 if (first_global_object_name
)
8591 p
= q
= ASTRDUP (first_global_object_name
);
8592 /* If the target is handling the constructors/destructors, they
8593 will be local to this file and the name is only necessary for
8595 We also assign sub_I and sub_D suffixes to constructors called from
8596 the global static constructors. These are always local. */
8597 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
8598 || (startswith (type
, "sub_")
8599 && (type
[4] == 'I' || type
[4] == 'D')))
8601 const char *file
= main_input_filename
;
8603 file
= LOCATION_FILE (input_location
);
8604 /* Just use the file's basename, because the full pathname
8605 might be quite long. */
8606 p
= q
= ASTRDUP (lbasename (file
));
8610 /* Otherwise, the name must be unique across the entire link.
8611 We don't have anything that we know to be unique to this translation
8612 unit, so use what we do have and throw in some randomness. */
8614 const char *name
= weak_global_object_name
;
8615 const char *file
= main_input_filename
;
8620 file
= LOCATION_FILE (input_location
);
8622 len
= strlen (file
);
8623 q
= (char *) alloca (9 + 19 + len
+ 1);
8624 memcpy (q
, file
, len
+ 1);
8626 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
8627 crc32_string (0, name
), get_random_seed (false));
8632 clean_symbol_name (q
);
8633 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
8636 /* Set up the name of the file-level functions we may need.
8637 Use a global object (which is already required to be unique over
8638 the program) rather than the file name (which imposes extra
8640 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
8642 return get_identifier (buf
);
/* NOTE(review): extraction-garbled excerpt of the ENABLE_TREE_CHECKING
   diagnostic helpers -- braces, va_end calls and some declarations are
   missing.  Kept verbatim; verify against the master copy before
   editing.  */
8645 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8647 /* Complain that the tree code of NODE does not match the expected 0
8648 terminated list of trailing codes. The trailing code list can be
8649 empty, for a more vague error message. FILE, LINE, and FUNCTION
8650 are of the caller. */
8653 tree_check_failed (const_tree node
, const char *file
,
8654 int line
, const char *function
, ...)
8658 unsigned length
= 0;
8659 enum tree_code code
;
/* First pass over the varargs: size the message buffer.  */
8661 va_start (args
, function
)
;
8662 while ((code
= (enum tree_code
) va_arg (args
, int)))
8663 length
+= 4 + strlen (get_tree_code_name (code
));
/* Second pass: build the "expected A or B ..." message.  */
8668 va_start (args
, function
)
;
8669 length
+= strlen ("expected ");
8670 buffer
= tmp
= (char *) alloca (length
);
8672 while ((code
= (enum tree_code
) va_arg (args
, int)))
8674 const char *prefix
= length
? " or " : "expected ";
8676 strcpy (tmp
+ length
, prefix
);
8677 length
+= strlen (prefix
);
8678 strcpy (tmp
+ length
, get_tree_code_name (code
));
8679 length
+= strlen (get_tree_code_name (code
));
8684 buffer
= "unexpected node";
8686 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8687 buffer
, get_tree_code_name (TREE_CODE (node
)),
8688 function
, trim_filename (file
), line
);
8691 /* Complain that the tree code of NODE does match the expected 0
8692 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8696 tree_not_check_failed (const_tree node
, const char *file
,
8697 int line
, const char *function
, ...)
8701 unsigned length
= 0;
8702 enum tree_code code
;
8704 va_start (args
, function
)
;
8705 while ((code
= (enum tree_code
) va_arg (args
, int)))
8706 length
+= 4 + strlen (get_tree_code_name (code
));
8708 va_start (args
, function
)
;
8709 buffer
= (char *) alloca (length
);
8711 while ((code
= (enum tree_code
) va_arg (args
, int)))
8715 strcpy (buffer
+ length
, " or ");
8718 strcpy (buffer
+ length
, get_tree_code_name (code
));
8719 length
+= strlen (get_tree_code_name (code
));
8723 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8724 buffer
, get_tree_code_name (TREE_CODE (node
)),
8725 function
, trim_filename (file
), line
);
8728 /* Similar to tree_check_failed, except that we check for a class of tree
8729 code, given in CL. */
8732 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8733 const char *file
, int line
, const char *function
)
8736 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8737 TREE_CODE_CLASS_STRING (cl
),
8738 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8739 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8742 /* Similar to tree_check_failed, except that instead of specifying a
8743 dozen codes, use the knowledge that they're all sequential. */
8746 tree_range_check_failed (const_tree node
, const char *file
, int line
,
8747 const char *function
, enum tree_code c1
,
8751 unsigned length
= 0;
8754 for (c
= c1
; c
<= c2
; ++c
)
8755 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
8757 length
+= strlen ("expected ");
8758 buffer
= (char *) alloca (length
);
8761 for (c
= c1
; c
<= c2
; ++c
)
8763 const char *prefix
= length
? " or " : "expected ";
8765 strcpy (buffer
+ length
, prefix
);
8766 length
+= strlen (prefix
);
8767 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
8768 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
8771 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8772 buffer
, get_tree_code_name (TREE_CODE (node
)),
8773 function
, trim_filename (file
), line
);
8777 /* Similar to tree_check_failed, except that we check that a tree does
8778 not have the specified code, given in CL. */
8781 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8782 const char *file
, int line
, const char *function
)
8785 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8786 TREE_CODE_CLASS_STRING (cl
),
8787 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8788 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8792 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8795 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
8796 const char *function
, enum omp_clause_code code
)
8798 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8800 omp_clause_code_name
[code
],
8801 get_tree_code_name (TREE_CODE (node
)),
8802 function
, trim_filename (file
), line
);
8806 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8809 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
8810 const char *function
, enum omp_clause_code c1
,
8811 enum omp_clause_code c2
)
8814 unsigned length
= 0;
8817 for (c
= c1
; c
<= c2
; ++c
)
8818 length
+= 4 + strlen (omp_clause_code_name
[c
]);
8820 length
+= strlen ("expected ");
8821 buffer
= (char *) alloca (length
);
8824 for (c
= c1
; c
<= c2
; ++c
)
8826 const char *prefix
= length
? " or " : "expected ";
8828 strcpy (buffer
+ length
, prefix
);
8829 length
+= strlen (prefix
);
8830 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
8831 length
+= strlen (omp_clause_code_name
[c
]);
8834 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8835 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
8836 function
, trim_filename (file
), line
);
8840 #undef DEFTREESTRUCT
8841 #define DEFTREESTRUCT(VAL, NAME) NAME,
8843 static const char *ts_enum_names
[] = {
8844 #include "treestruct.def"
8846 #undef DEFTREESTRUCT
8848 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8850 /* Similar to tree_class_check_failed, except that we check for
8851 whether CODE contains the tree structure identified by EN. */
8854 tree_contains_struct_check_failed (const_tree node
,
8855 const enum tree_node_structure_enum en
,
8856 const char *file
, int line
,
8857 const char *function
)
8860 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8862 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8866 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8867 (dynamically sized) vector. */
8870 tree_int_cst_elt_check_failed (int idx
, int len
, const char *file
, int line
,
8871 const char *function
)
8874 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8876 idx
+ 1, len
, function
, trim_filename (file
), line
);
8879 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8880 (dynamically sized) vector. */
8883 tree_vec_elt_check_failed (int idx
, int len
, const char *file
, int line
,
8884 const char *function
)
8887 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8888 idx
+ 1, len
, function
, trim_filename (file
), line
);
8891 /* Similar to above, except that the check is for the bounds of the operand
8892 vector of an expression node EXP. */
8895 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
8896 int line
, const char *function
)
8898 enum tree_code code
= TREE_CODE (exp
);
8900 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8901 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
8902 function
, trim_filename (file
), line
);
8905 /* Similar to above, except that the check is for the number of
8906 operands of an OMP_CLAUSE node. */
8909 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
8910 int line
, const char *function
)
8913 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8914 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
8915 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
8916 trim_filename (file
), line
);
8918 #endif /* ENABLE_TREE_CHECKING */
/* NOTE(review): extraction-garbled excerpt -- braces, a layout_type
   call and the final returns appear to be missing.  Kept verbatim;
   verify against the master copy before editing.  */
8920 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
8921 and mapped to the machine mode MODE. Initialize its fields and build
8922 the information necessary for debugging output. */
8925 make_vector_type (tree innertype
, poly_int64 nunits
, machine_mode mode
)
8928 tree mv_innertype
= TYPE_MAIN_VARIANT (innertype
);
8930 t
= make_node (VECTOR_TYPE
);
8931 TREE_TYPE (t
) = mv_innertype
;
8932 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
8933 SET_TYPE_MODE (t
, mode
);
8935 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype
) || in_lto_p
)
8936 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8937 else if ((TYPE_CANONICAL (mv_innertype
) != innertype
8938 || mode
!= VOIDmode
)
8939 && !VECTOR_BOOLEAN_TYPE_P (t
))
8941 = make_vector_type (TYPE_CANONICAL (mv_innertype
), nunits
, VOIDmode
);
8945 hashval_t hash
= type_hash_canon_hash (t
);
8946 t
= type_hash_canon (hash
, t
);
8948 /* We have built a main variant, based on the main variant of the
8949 inner type. Use it to build the variant we return. */
8950 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
8951 && TREE_TYPE (t
) != innertype
)
8952 return build_type_attribute_qual_variant (t
,
8953 TYPE_ATTRIBUTES (innertype
),
8954 TYPE_QUALS (innertype
));
/* Return a standard integer type node of SIZE bits, reusing the
   predefined nodes when SIZE matches a standard C type.  */
8960 make_or_reuse_type (unsigned size
, int unsignedp
)
8964 if (size
== INT_TYPE_SIZE
)
8965 return unsignedp
? unsigned_type_node
: integer_type_node
;
8966 if (size
== CHAR_TYPE_SIZE
)
8967 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
8968 if (size
== SHORT_TYPE_SIZE
)
8969 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
8970 if (size
== LONG_TYPE_SIZE
)
8971 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
8972 if (size
== LONG_LONG_TYPE_SIZE
)
8973 return (unsignedp
? long_long_unsigned_type_node
8974 : long_long_integer_type_node
);
8976 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
8977 if (size
== int_n_data
[i
].bitsize
8978 && int_n_enabled_p
[i
])
8979 return (unsignedp
? int_n_trees
[i
].unsigned_type
8980 : int_n_trees
[i
].signed_type
);
8983 return make_unsigned_type (size
);
8985 return make_signed_type (size
);
8988 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
8991 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
8995 if (size
== SHORT_FRACT_TYPE_SIZE
)
8996 return unsignedp
? sat_unsigned_short_fract_type_node
8997 : sat_short_fract_type_node
;
8998 if (size
== FRACT_TYPE_SIZE
)
8999 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9000 if (size
== LONG_FRACT_TYPE_SIZE
)
9001 return unsignedp
? sat_unsigned_long_fract_type_node
9002 : sat_long_fract_type_node
;
9003 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9004 return unsignedp
? sat_unsigned_long_long_fract_type_node
9005 : sat_long_long_fract_type_node
;
9009 if (size
== SHORT_FRACT_TYPE_SIZE
)
9010 return unsignedp
? unsigned_short_fract_type_node
9011 : short_fract_type_node
;
9012 if (size
== FRACT_TYPE_SIZE
)
9013 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9014 if (size
== LONG_FRACT_TYPE_SIZE
)
9015 return unsignedp
? unsigned_long_fract_type_node
9016 : long_fract_type_node
;
9017 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9018 return unsignedp
? unsigned_long_long_fract_type_node
9019 : long_long_fract_type_node
;
9022 return make_fract_type (size
, unsignedp
, satp
);
9025 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9028 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9032 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9033 return unsignedp
? sat_unsigned_short_accum_type_node
9034 : sat_short_accum_type_node
;
9035 if (size
== ACCUM_TYPE_SIZE
)
9036 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9037 if (size
== LONG_ACCUM_TYPE_SIZE
)
9038 return unsignedp
? sat_unsigned_long_accum_type_node
9039 : sat_long_accum_type_node
;
9040 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9041 return unsignedp
? sat_unsigned_long_long_accum_type_node
9042 : sat_long_long_accum_type_node
;
9046 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9047 return unsignedp
? unsigned_short_accum_type_node
9048 : short_accum_type_node
;
9049 if (size
== ACCUM_TYPE_SIZE
)
9050 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9051 if (size
== LONG_ACCUM_TYPE_SIZE
)
9052 return unsignedp
? unsigned_long_accum_type_node
9053 : long_accum_type_node
;
9054 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9055 return unsignedp
? unsigned_long_long_accum_type_node
9056 : long_long_accum_type_node
;
9059 return make_accum_type (size
, unsignedp
, satp
);
9063 /* Create an atomic variant node for TYPE. This routine is called
9064 during initialization of data types to create the 5 basic atomic
9065 types. The generic build_variant_type function requires these to
9066 already be set up in order to function properly, so cannot be
9067 called from there. If ALIGN is non-zero, then ensure alignment is
9068 overridden to this value. */
9071 build_atomic_base (tree type
, unsigned int align
)
9075 /* Make sure its not already registered. */
9076 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9079 t
= build_variant_type_copy (type
);
9080 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9083 SET_TYPE_ALIGN (t
, align
);
9088 /* Information about the _FloatN and _FloatNx types. This must be in
9089 the same order as the corresponding TI_* enum values. */
9090 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
9102 /* Create nodes for all integer types (and error_mark_node) using the sizes
9103 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9106 build_common_tree_nodes (bool signed_char
)
9110 error_mark_node
= make_node (ERROR_MARK
);
9111 TREE_TYPE (error_mark_node
) = error_mark_node
;
9113 initialize_sizetypes ();
9115 /* Define both `signed char' and `unsigned char'. */
9116 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9117 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9118 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9119 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9121 /* Define `char', which is like either `signed char' or `unsigned char'
9122 but not the same as either. */
9125 ? make_signed_type (CHAR_TYPE_SIZE
)
9126 : make_unsigned_type (CHAR_TYPE_SIZE
));
9127 TYPE_STRING_FLAG (char_type_node
) = 1;
9129 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9130 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9131 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9132 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9133 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9134 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9135 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9136 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9138 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9140 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9141 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9143 if (int_n_enabled_p
[i
])
9145 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9146 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9150 /* Define a boolean type. This type only represents boolean values but
9151 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9152 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9153 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9154 TYPE_PRECISION (boolean_type_node
) = 1;
9155 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9157 /* Define what type to use for size_t. */
9158 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9159 size_type_node
= unsigned_type_node
;
9160 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9161 size_type_node
= long_unsigned_type_node
;
9162 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9163 size_type_node
= long_long_unsigned_type_node
;
9164 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9165 size_type_node
= short_unsigned_type_node
;
9170 size_type_node
= NULL_TREE
;
9171 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9172 if (int_n_enabled_p
[i
])
9174 char name
[50], altname
[50];
9175 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9176 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
9178 if (strcmp (name
, SIZE_TYPE
) == 0
9179 || strcmp (altname
, SIZE_TYPE
) == 0)
9181 size_type_node
= int_n_trees
[i
].unsigned_type
;
9184 if (size_type_node
== NULL_TREE
)
9188 /* Define what type to use for ptrdiff_t. */
9189 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
9190 ptrdiff_type_node
= integer_type_node
;
9191 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
9192 ptrdiff_type_node
= long_integer_type_node
;
9193 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
9194 ptrdiff_type_node
= long_long_integer_type_node
;
9195 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
9196 ptrdiff_type_node
= short_integer_type_node
;
9199 ptrdiff_type_node
= NULL_TREE
;
9200 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9201 if (int_n_enabled_p
[i
])
9203 char name
[50], altname
[50];
9204 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
9205 sprintf (altname
, "__int%d__", int_n_data
[i
].bitsize
);
9207 if (strcmp (name
, PTRDIFF_TYPE
) == 0
9208 || strcmp (altname
, PTRDIFF_TYPE
) == 0)
9209 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
9211 if (ptrdiff_type_node
== NULL_TREE
)
9215 /* Fill in the rest of the sized types. Reuse existing type nodes
9217 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9218 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9219 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9220 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9221 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9223 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9224 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9225 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9226 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9227 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9229 /* Don't call build_qualified type for atomics. That routine does
9230 special processing for atomics, and until they are initialized
9231 it's better not to make that call.
9233 Check to see if there is a target override for atomic types. */
9235 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9236 targetm
.atomic_align_for_mode (QImode
));
9237 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9238 targetm
.atomic_align_for_mode (HImode
));
9239 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9240 targetm
.atomic_align_for_mode (SImode
));
9241 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9242 targetm
.atomic_align_for_mode (DImode
));
9243 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9244 targetm
.atomic_align_for_mode (TImode
));
9246 access_public_node
= get_identifier ("public");
9247 access_protected_node
= get_identifier ("protected");
9248 access_private_node
= get_identifier ("private");
9250 /* Define these next since types below may used them. */
9251 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9252 integer_one_node
= build_int_cst (integer_type_node
, 1);
9253 integer_three_node
= build_int_cst (integer_type_node
, 3);
9254 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9256 size_zero_node
= size_int (0);
9257 size_one_node
= size_int (1);
9258 bitsize_zero_node
= bitsize_int (0);
9259 bitsize_one_node
= bitsize_int (1);
9260 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9262 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9263 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9265 void_type_node
= make_node (VOID_TYPE
);
9266 layout_type (void_type_node
);
9268 /* We are not going to have real types in C with less than byte alignment,
9269 so we might as well not have any types that claim to have it. */
9270 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
9271 TYPE_USER_ALIGN (void_type_node
) = 0;
9273 void_node
= make_node (VOID_CST
);
9274 TREE_TYPE (void_node
) = void_type_node
;
9276 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9277 layout_type (TREE_TYPE (null_pointer_node
));
9279 ptr_type_node
= build_pointer_type (void_type_node
);
9281 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9282 for (unsigned i
= 0;
9283 i
< sizeof (builtin_structptr_types
) / sizeof (builtin_structptr_type
);
9285 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
9287 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9289 float_type_node
= make_node (REAL_TYPE
);
9290 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9291 layout_type (float_type_node
);
9293 double_type_node
= make_node (REAL_TYPE
);
9294 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9295 layout_type (double_type_node
);
9297 long_double_type_node
= make_node (REAL_TYPE
);
9298 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9299 layout_type (long_double_type_node
);
9301 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9303 int n
= floatn_nx_types
[i
].n
;
9304 bool extended
= floatn_nx_types
[i
].extended
;
9305 scalar_float_mode mode
;
9306 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
9308 int precision
= GET_MODE_PRECISION (mode
);
9309 /* Work around the rs6000 KFmode having precision 113 not
9311 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
9312 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
9313 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
9315 gcc_assert (min_precision
== n
);
9316 if (precision
< min_precision
)
9317 precision
= min_precision
;
9318 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
9319 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
9320 layout_type (FLOATN_NX_TYPE_NODE (i
));
9321 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
9324 float_ptr_type_node
= build_pointer_type (float_type_node
);
9325 double_ptr_type_node
= build_pointer_type (double_type_node
);
9326 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9327 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9329 /* Fixed size integer types. */
9330 uint16_type_node
= make_or_reuse_type (16, 1);
9331 uint32_type_node
= make_or_reuse_type (32, 1);
9332 uint64_type_node
= make_or_reuse_type (64, 1);
9333 if (targetm
.scalar_mode_supported_p (TImode
))
9334 uint128_type_node
= make_or_reuse_type (128, 1);
9336 /* Decimal float types. */
9337 if (targetm
.decimal_float_supported_p ())
9339 dfloat32_type_node
= make_node (REAL_TYPE
);
9340 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9341 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9342 layout_type (dfloat32_type_node
);
9344 dfloat64_type_node
= make_node (REAL_TYPE
);
9345 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9346 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9347 layout_type (dfloat64_type_node
);
9349 dfloat128_type_node
= make_node (REAL_TYPE
);
9350 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9351 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9352 layout_type (dfloat128_type_node
);
9355 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
9356 complex_float_type_node
= build_complex_type (float_type_node
, true);
9357 complex_double_type_node
= build_complex_type (double_type_node
, true);
9358 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
9361 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9363 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
9364 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
9365 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
9368 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9369 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9370 sat_ ## KIND ## _type_node = \
9371 make_sat_signed_ ## KIND ## _type (SIZE); \
9372 sat_unsigned_ ## KIND ## _type_node = \
9373 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9374 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9375 unsigned_ ## KIND ## _type_node = \
9376 make_unsigned_ ## KIND ## _type (SIZE);
9378 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9379 sat_ ## WIDTH ## KIND ## _type_node = \
9380 make_sat_signed_ ## KIND ## _type (SIZE); \
9381 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9382 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9383 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9384 unsigned_ ## WIDTH ## KIND ## _type_node = \
9385 make_unsigned_ ## KIND ## _type (SIZE);
9387 /* Make fixed-point type nodes based on four different widths. */
9388 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9389 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9390 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9391 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9392 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9394 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9395 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9396 NAME ## _type_node = \
9397 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9398 u ## NAME ## _type_node = \
9399 make_or_reuse_unsigned_ ## KIND ## _type \
9400 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9401 sat_ ## NAME ## _type_node = \
9402 make_or_reuse_sat_signed_ ## KIND ## _type \
9403 (GET_MODE_BITSIZE (MODE ## mode)); \
9404 sat_u ## NAME ## _type_node = \
9405 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9406 (GET_MODE_BITSIZE (U ## MODE ## mode));
9408 /* Fixed-point type and mode nodes. */
9409 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9410 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9411 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9412 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9413 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9414 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9415 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9416 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9417 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9418 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9419 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9422 tree t
= targetm
.build_builtin_va_list ();
9424 /* Many back-ends define record types without setting TYPE_NAME.
9425 If we copied the record type here, we'd keep the original
9426 record type without a name. This breaks name mangling. So,
9427 don't copy record types and let c_common_nodes_and_builtins()
9428 declare the type to be __builtin_va_list. */
9429 if (TREE_CODE (t
) != RECORD_TYPE
)
9430 t
= build_variant_type_copy (t
);
9432 va_list_type_node
= t
;
9435 /* SCEV analyzer global shared trees. */
9436 chrec_dont_know
= make_node (SCEV_NOT_KNOWN
);
9437 TREE_TYPE (chrec_dont_know
) = void_type_node
;
9438 chrec_known
= make_node (SCEV_KNOWN
);
9439 TREE_TYPE (chrec_known
) = void_type_node
;
9442 /* Modify DECL for given flags.
9443 TM_PURE attribute is set only on types, so the function will modify
9444 DECL's type when ECF_TM_PURE is used. */
9447 set_call_expr_flags (tree decl
, int flags
)
9449 if (flags
& ECF_NOTHROW
)
9450 TREE_NOTHROW (decl
) = 1;
9451 if (flags
& ECF_CONST
)
9452 TREE_READONLY (decl
) = 1;
9453 if (flags
& ECF_PURE
)
9454 DECL_PURE_P (decl
) = 1;
9455 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9456 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9457 if (flags
& ECF_NOVOPS
)
9458 DECL_IS_NOVOPS (decl
) = 1;
9459 if (flags
& ECF_NORETURN
)
9460 TREE_THIS_VOLATILE (decl
) = 1;
9461 if (flags
& ECF_MALLOC
)
9462 DECL_IS_MALLOC (decl
) = 1;
9463 if (flags
& ECF_RETURNS_TWICE
)
9464 DECL_IS_RETURNS_TWICE (decl
) = 1;
9465 if (flags
& ECF_LEAF
)
9466 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9467 NULL
, DECL_ATTRIBUTES (decl
));
9468 if (flags
& ECF_COLD
)
9469 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("cold"),
9470 NULL
, DECL_ATTRIBUTES (decl
));
9471 if (flags
& ECF_RET1
)
9472 DECL_ATTRIBUTES (decl
)
9473 = tree_cons (get_identifier ("fn spec"),
9474 build_tree_list (NULL_TREE
, build_string (2, "1 ")),
9475 DECL_ATTRIBUTES (decl
));
9476 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9477 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9478 /* Looping const or pure is implied by noreturn.
9479 There is currently no way to declare looping const or looping pure alone. */
9480 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9481 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9485 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9488 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9489 const char *library_name
, int ecf_flags
)
9493 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9494 library_name
, NULL_TREE
);
9495 set_call_expr_flags (decl
, ecf_flags
);
9497 set_builtin_decl (code
, decl
, true);
9500 /* Call this function after instantiating all builtins that the language
9501 front end cares about. This will build the rest of the builtins
9502 and internal functions that are relied upon by the tree optimizers and
9506 build_common_builtin_nodes (void)
9511 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
9512 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
9514 ftype
= build_function_type (void_type_node
, void_list_node
);
9515 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9516 local_define_builtin ("__builtin_unreachable", ftype
,
9517 BUILT_IN_UNREACHABLE
,
9518 "__builtin_unreachable",
9519 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9520 | ECF_CONST
| ECF_COLD
);
9521 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
9522 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
9524 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
9527 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9528 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9530 ftype
= build_function_type_list (ptr_type_node
,
9531 ptr_type_node
, const_ptr_type_node
,
9532 size_type_node
, NULL_TREE
);
9534 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9535 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9536 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9537 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9538 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9539 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9542 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9544 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9545 const_ptr_type_node
, size_type_node
,
9547 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9548 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9551 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9553 ftype
= build_function_type_list (ptr_type_node
,
9554 ptr_type_node
, integer_type_node
,
9555 size_type_node
, NULL_TREE
);
9556 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9557 "memset", ECF_NOTHROW
| ECF_LEAF
);
9560 /* If we're checking the stack, `alloca' can throw. */
9561 const int alloca_flags
9562 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
9564 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9566 ftype
= build_function_type_list (ptr_type_node
,
9567 size_type_node
, NULL_TREE
);
9568 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9569 "alloca", alloca_flags
);
9572 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9573 size_type_node
, NULL_TREE
);
9574 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9575 BUILT_IN_ALLOCA_WITH_ALIGN
,
9576 "__builtin_alloca_with_align",
9579 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9580 size_type_node
, size_type_node
, NULL_TREE
);
9581 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
9582 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
9583 "__builtin_alloca_with_align_and_max",
9586 ftype
= build_function_type_list (void_type_node
,
9587 ptr_type_node
, ptr_type_node
,
9588 ptr_type_node
, NULL_TREE
);
9589 local_define_builtin ("__builtin_init_trampoline", ftype
,
9590 BUILT_IN_INIT_TRAMPOLINE
,
9591 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9592 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9593 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9594 "__builtin_init_heap_trampoline",
9595 ECF_NOTHROW
| ECF_LEAF
);
9596 local_define_builtin ("__builtin_init_descriptor", ftype
,
9597 BUILT_IN_INIT_DESCRIPTOR
,
9598 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
9600 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
9601 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
9602 BUILT_IN_ADJUST_TRAMPOLINE
,
9603 "__builtin_adjust_trampoline",
9604 ECF_CONST
| ECF_NOTHROW
);
9605 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
9606 BUILT_IN_ADJUST_DESCRIPTOR
,
9607 "__builtin_adjust_descriptor",
9608 ECF_CONST
| ECF_NOTHROW
);
9610 ftype
= build_function_type_list (void_type_node
,
9611 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9612 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE
))
9613 local_define_builtin ("__builtin___clear_cache", ftype
,
9614 BUILT_IN_CLEAR_CACHE
,
9618 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
9619 BUILT_IN_NONLOCAL_GOTO
,
9620 "__builtin_nonlocal_goto",
9621 ECF_NORETURN
| ECF_NOTHROW
);
9623 ftype
= build_function_type_list (void_type_node
,
9624 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9625 local_define_builtin ("__builtin_setjmp_setup", ftype
,
9626 BUILT_IN_SETJMP_SETUP
,
9627 "__builtin_setjmp_setup", ECF_NOTHROW
);
9629 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9630 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
9631 BUILT_IN_SETJMP_RECEIVER
,
9632 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
9634 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
9635 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
9636 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
9638 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9639 local_define_builtin ("__builtin_stack_restore", ftype
,
9640 BUILT_IN_STACK_RESTORE
,
9641 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
9643 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9644 const_ptr_type_node
, size_type_node
,
9646 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
9647 "__builtin_memcmp_eq",
9648 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9650 local_define_builtin ("__builtin_strncmp_eq", ftype
, BUILT_IN_STRNCMP_EQ
,
9651 "__builtin_strncmp_eq",
9652 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9654 local_define_builtin ("__builtin_strcmp_eq", ftype
, BUILT_IN_STRCMP_EQ
,
9655 "__builtin_strcmp_eq",
9656 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9658 /* If there's a possibility that we might use the ARM EABI, build the
9659 alternate __cxa_end_cleanup node used to resume from C++. */
9660 if (targetm
.arm_eabi_unwinder
)
9662 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
9663 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
9664 BUILT_IN_CXA_END_CLEANUP
,
9665 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
9668 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9669 local_define_builtin ("__builtin_unwind_resume", ftype
,
9670 BUILT_IN_UNWIND_RESUME
,
9671 ((targetm_common
.except_unwind_info (&global_options
)
9673 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9676 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
9678 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
9680 local_define_builtin ("__builtin_return_address", ftype
,
9681 BUILT_IN_RETURN_ADDRESS
,
9682 "__builtin_return_address",
9686 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
9687 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9689 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
9690 ptr_type_node
, NULL_TREE
);
9691 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
9692 local_define_builtin ("__cyg_profile_func_enter", ftype
,
9693 BUILT_IN_PROFILE_FUNC_ENTER
,
9694 "__cyg_profile_func_enter", 0);
9695 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9696 local_define_builtin ("__cyg_profile_func_exit", ftype
,
9697 BUILT_IN_PROFILE_FUNC_EXIT
,
9698 "__cyg_profile_func_exit", 0);
9701 /* The exception object and filter values from the runtime. The argument
9702 must be zero before exception lowering, i.e. from the front end. After
9703 exception lowering, it will be the region number for the exception
9704 landing pad. These functions are PURE instead of CONST to prevent
9705 them from being hoisted past the exception edge that will initialize
9706 its value in the landing pad. */
9707 ftype
= build_function_type_list (ptr_type_node
,
9708 integer_type_node
, NULL_TREE
);
9709 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
9710 /* Only use TM_PURE if we have TM language support. */
9711 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
9712 ecf_flags
|= ECF_TM_PURE
;
9713 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
9714 "__builtin_eh_pointer", ecf_flags
);
9716 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
9717 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
9718 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
9719 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9721 ftype
= build_function_type_list (void_type_node
,
9722 integer_type_node
, integer_type_node
,
9724 local_define_builtin ("__builtin_eh_copy_values", ftype
,
9725 BUILT_IN_EH_COPY_VALUES
,
9726 "__builtin_eh_copy_values", ECF_NOTHROW
);
9728 /* Complex multiplication and division. These are handled as builtins
9729 rather than optabs because emit_library_call_value doesn't support
9730 complex. Further, we can do slightly better with folding these
9731 beasties if the real and complex parts of the arguments are separate. */
9735 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
9737 char mode_name_buf
[4], *q
;
9739 enum built_in_function mcode
, dcode
;
9740 tree type
, inner_type
;
9741 const char *prefix
= "__";
9743 if (targetm
.libfunc_gnu_prefix
)
9746 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
9749 inner_type
= TREE_TYPE (type
);
9751 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
9752 inner_type
, inner_type
, NULL_TREE
);
9754 mcode
= ((enum built_in_function
)
9755 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9756 dcode
= ((enum built_in_function
)
9757 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9759 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
9763 /* For -ftrapping-math these should throw from a former
9764 -fnon-call-exception stmt. */
9765 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
9767 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
9768 built_in_names
[mcode
],
9769 ECF_CONST
| ECF_LEAF
);
9771 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
9773 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
9774 built_in_names
[dcode
],
9775 ECF_CONST
| ECF_LEAF
);
9779 init_internal_fns ();
9782 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9785 If we requested a pointer to a vector, build up the pointers that
9786 we stripped off while looking for the inner type. Similarly for
9787 return values from functions.
9789 The argument TYPE is the top of the chain, and BOTTOM is the
9790 new type which we will point to. */
9793 reconstruct_complex_type (tree type
, tree bottom
)
9797 if (TREE_CODE (type
) == POINTER_TYPE
)
9799 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9800 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
9801 TYPE_REF_CAN_ALIAS_ALL (type
));
9803 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
9805 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9806 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
9807 TYPE_REF_CAN_ALIAS_ALL (type
));
9809 else if (TREE_CODE (type
) == ARRAY_TYPE
)
9811 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9812 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
9814 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
9816 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9817 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
9819 else if (TREE_CODE (type
) == METHOD_TYPE
)
9821 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9822 /* The build_method_type_directly() routine prepends 'this' to argument list,
9823 so we must compensate by getting rid of it. */
9825 = build_method_type_directly
9826 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
9828 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
9830 else if (TREE_CODE (type
) == OFFSET_TYPE
)
9832 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9833 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
9838 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
9842 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
9845 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
9848 unsigned int bitsize
;
9850 switch (GET_MODE_CLASS (mode
))
9852 case MODE_VECTOR_BOOL
:
9853 case MODE_VECTOR_INT
:
9854 case MODE_VECTOR_FLOAT
:
9855 case MODE_VECTOR_FRACT
:
9856 case MODE_VECTOR_UFRACT
:
9857 case MODE_VECTOR_ACCUM
:
9858 case MODE_VECTOR_UACCUM
:
9859 nunits
= GET_MODE_NUNITS (mode
);
9863 /* Check that there are no leftover bits. */
9864 bitsize
= GET_MODE_BITSIZE (as_a
<scalar_int_mode
> (mode
));
9865 gcc_assert (bitsize
% TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
9866 nunits
= bitsize
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
9873 return make_vector_type (innertype
, nunits
, mode
);
9876 /* Similarly, but takes the inner type and number of units, which must be
9880 build_vector_type (tree innertype
, poly_int64 nunits
)
9882 return make_vector_type (innertype
, nunits
, VOIDmode
);
9885 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9888 build_truth_vector_type_for_mode (poly_uint64 nunits
, machine_mode mask_mode
)
9890 gcc_assert (mask_mode
!= BLKmode
);
9892 unsigned HOST_WIDE_INT esize
;
9893 if (VECTOR_MODE_P (mask_mode
))
9895 poly_uint64 vsize
= GET_MODE_BITSIZE (mask_mode
);
9896 esize
= vector_element_size (vsize
, nunits
);
9901 tree bool_type
= build_nonstandard_boolean_type (esize
);
9903 return make_vector_type (bool_type
, nunits
, mask_mode
);
9906 /* Build a vector type that holds one boolean result for each element of
9907 vector type VECTYPE. The public interface for this operation is
9911 build_truth_vector_type_for (tree vectype
)
9913 machine_mode vector_mode
= TYPE_MODE (vectype
);
9914 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
9916 machine_mode mask_mode
;
9917 if (VECTOR_MODE_P (vector_mode
)
9918 && targetm
.vectorize
.get_mask_mode (vector_mode
).exists (&mask_mode
))
9919 return build_truth_vector_type_for_mode (nunits
, mask_mode
);
9921 poly_uint64 vsize
= tree_to_poly_uint64 (TYPE_SIZE (vectype
));
9922 unsigned HOST_WIDE_INT esize
= vector_element_size (vsize
, nunits
);
9923 tree bool_type
= build_nonstandard_boolean_type (esize
);
9925 return make_vector_type (bool_type
, nunits
, VOIDmode
);
9928 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
9932 build_opaque_vector_type (tree innertype
, poly_int64 nunits
)
9934 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
9936 /* We always build the non-opaque variant before the opaque one,
9937 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
9938 cand
= TYPE_NEXT_VARIANT (t
);
9940 && TYPE_VECTOR_OPAQUE (cand
)
9941 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
9943 /* Othewise build a variant type and make sure to queue it after
9944 the non-opaque type. */
9945 cand
= build_distinct_type_copy (t
);
9946 TYPE_VECTOR_OPAQUE (cand
) = true;
9947 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
9948 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
9949 TYPE_NEXT_VARIANT (t
) = cand
;
9950 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
9954 /* Return the value of element I of VECTOR_CST T as a wide_int. */
9956 static poly_wide_int
9957 vector_cst_int_elt (const_tree t
, unsigned int i
)
9959 /* First handle elements that are directly encoded. */
9960 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
9961 if (i
< encoded_nelts
)
9962 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, i
));
9964 /* Identify the pattern that contains element I and work out the index of
9965 the last encoded element for that pattern. */
9966 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
9967 unsigned int pattern
= i
% npatterns
;
9968 unsigned int count
= i
/ npatterns
;
9969 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
9971 /* If there are no steps, the final encoded value is the right one. */
9972 if (!VECTOR_CST_STEPPED_P (t
))
9973 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, final_i
));
9975 /* Otherwise work out the value from the last two encoded elements. */
9976 tree v1
= VECTOR_CST_ENCODED_ELT (t
, final_i
- npatterns
);
9977 tree v2
= VECTOR_CST_ENCODED_ELT (t
, final_i
);
9978 poly_wide_int diff
= wi::to_poly_wide (v2
) - wi::to_poly_wide (v1
);
9979 return wi::to_poly_wide (v2
) + (count
- 2) * diff
;
9982 /* Return the value of element I of VECTOR_CST T. */
9985 vector_cst_elt (const_tree t
, unsigned int i
)
9987 /* First handle elements that are directly encoded. */
9988 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
9989 if (i
< encoded_nelts
)
9990 return VECTOR_CST_ENCODED_ELT (t
, i
);
9992 /* If there are no steps, the final encoded value is the right one. */
9993 if (!VECTOR_CST_STEPPED_P (t
))
9995 /* Identify the pattern that contains element I and work out the index of
9996 the last encoded element for that pattern. */
9997 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
9998 unsigned int pattern
= i
% npatterns
;
9999 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10000 return VECTOR_CST_ENCODED_ELT (t
, final_i
);
10003 /* Otherwise work out the value from the last two encoded elements. */
10004 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t
)),
10005 vector_cst_int_elt (t
, i
));
10008 /* Given an initializer INIT, return TRUE if INIT is zero or some
10009 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10010 null, set *NONZERO if and only if INIT is known not to be all
10011 zeros. The combination of return value of false and *NONZERO
10012 false implies that INIT may but need not be all zeros. Other
10013 combinations indicate definitive answers. */
10016 initializer_zerop (const_tree init
, bool *nonzero
/* = NULL */)
10022 /* Conservatively clear NONZERO and set it only if INIT is definitely
10028 unsigned HOST_WIDE_INT off
= 0;
10030 switch (TREE_CODE (init
))
10033 if (integer_zerop (init
))
10040 /* ??? Note that this is not correct for C4X float formats. There,
10041 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10042 negative exponent. */
10043 if (real_zerop (init
)
10044 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
)))
10051 if (fixed_zerop (init
))
10058 if (integer_zerop (init
)
10059 || (real_zerop (init
)
10060 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10061 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
)))))
10068 if (VECTOR_CST_NPATTERNS (init
) == 1
10069 && VECTOR_CST_DUPLICATE_P (init
)
10070 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)))
10078 if (TREE_CLOBBER_P (init
))
10081 unsigned HOST_WIDE_INT idx
;
10084 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10085 if (!initializer_zerop (elt
, nonzero
))
10093 tree arg
= TREE_OPERAND (init
, 0);
10094 if (TREE_CODE (arg
) != ADDR_EXPR
)
10096 tree offset
= TREE_OPERAND (init
, 1);
10097 if (TREE_CODE (offset
) != INTEGER_CST
10098 || !tree_fits_uhwi_p (offset
))
10100 off
= tree_to_uhwi (offset
);
10103 arg
= TREE_OPERAND (arg
, 0);
10104 if (TREE_CODE (arg
) != STRING_CST
)
10108 /* Fall through. */
10112 gcc_assert (off
<= INT_MAX
);
10115 int n
= TREE_STRING_LENGTH (init
);
10119 /* We need to loop through all elements to handle cases like
10120 "\0" and "\0foobar". */
10121 for (i
= 0; i
< n
; ++i
)
10122 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10136 /* Return true if EXPR is an initializer expression in which every element
10137 is a constant that is numerically equal to 0 or 1. The elements do not
10138 need to be equal to each other. */
10141 initializer_each_zero_or_onep (const_tree expr
)
10143 STRIP_ANY_LOCATION_WRAPPER (expr
);
10145 switch (TREE_CODE (expr
))
10148 return integer_zerop (expr
) || integer_onep (expr
);
10151 return real_zerop (expr
) || real_onep (expr
);
10155 unsigned HOST_WIDE_INT nelts
= vector_cst_encoded_nelts (expr
);
10156 if (VECTOR_CST_STEPPED_P (expr
)
10157 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
)).is_constant (&nelts
))
10160 for (unsigned int i
= 0; i
< nelts
; ++i
)
10162 tree elt
= vector_cst_elt (expr
, i
);
10163 if (!initializer_each_zero_or_onep (elt
))
10175 /* Check if vector VEC consists of all the equal elements and
10176 that the number of elements corresponds to the type of VEC.
10177 The function returns first element of the vector
10178 or NULL_TREE if the vector is not uniform. */
10180 uniform_vector_p (const_tree vec
)
10183 unsigned HOST_WIDE_INT i
, nelts
;
10185 if (vec
== NULL_TREE
)
10188 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10190 if (TREE_CODE (vec
) == VEC_DUPLICATE_EXPR
)
10191 return TREE_OPERAND (vec
, 0);
10193 else if (TREE_CODE (vec
) == VECTOR_CST
)
10195 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
10196 return VECTOR_CST_ENCODED_ELT (vec
, 0);
10200 else if (TREE_CODE (vec
) == CONSTRUCTOR
10201 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)).is_constant (&nelts
))
10203 first
= error_mark_node
;
10205 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10212 if (!operand_equal_p (first
, t
, 0))
10224 /* If the argument is INTEGER_CST, return it. If the argument is vector
10225 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10227 Look through location wrappers. */
10230 uniform_integer_cst_p (tree t
)
10232 STRIP_ANY_LOCATION_WRAPPER (t
);
10234 if (TREE_CODE (t
) == INTEGER_CST
)
10237 if (VECTOR_TYPE_P (TREE_TYPE (t
)))
10239 t
= uniform_vector_p (t
);
10240 if (t
&& TREE_CODE (t
) == INTEGER_CST
)
10247 /* If VECTOR_CST T has a single nonzero element, return the index of that
10248 element, otherwise return -1. */
10251 single_nonzero_element (const_tree t
)
10253 unsigned HOST_WIDE_INT nelts
;
10254 unsigned int repeat_nelts
;
10255 if (VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10256 repeat_nelts
= nelts
;
10257 else if (VECTOR_CST_NELTS_PER_PATTERN (t
) == 2)
10259 nelts
= vector_cst_encoded_nelts (t
);
10260 repeat_nelts
= VECTOR_CST_NPATTERNS (t
);
10266 for (unsigned int i
= 0; i
< nelts
; ++i
)
10268 tree elt
= vector_cst_elt (t
, i
);
10269 if (!integer_zerop (elt
) && !real_zerop (elt
))
10271 if (res
>= 0 || i
>= repeat_nelts
)
10279 /* Build an empty statement at location LOC. */
10282 build_empty_stmt (location_t loc
)
10284 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10285 SET_EXPR_LOCATION (t
, loc
);
10290 /* Build an OpenMP clause with code CODE. LOC is the location of the
10294 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10299 length
= omp_clause_num_ops
[code
];
10300 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10302 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10304 t
= (tree
) ggc_internal_alloc (size
);
10305 memset (t
, 0, size
);
10306 TREE_SET_CODE (t
, OMP_CLAUSE
);
10307 OMP_CLAUSE_SET_CODE (t
, code
);
10308 OMP_CLAUSE_LOCATION (t
) = loc
;
10313 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10314 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10315 Except for the CODE and operand count field, other storage for the
10316 object is initialized to zeros. */
10319 build_vl_exp (enum tree_code code
, int len MEM_STAT_DECL
)
10322 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10324 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10325 gcc_assert (len
>= 1);
10327 record_node_allocation_statistics (code
, length
);
10329 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10331 TREE_SET_CODE (t
, code
);
10333 /* Can't use TREE_OPERAND to store the length because if checking is
10334 enabled, it will try to check the length before we store it. :-P */
10335 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10340 /* Helper function for build_call_* functions; build a CALL_EXPR with
10341 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10342 the argument slots. */
10345 build_call_1 (tree return_type
, tree fn
, int nargs
)
10349 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10350 TREE_TYPE (t
) = return_type
;
10351 CALL_EXPR_FN (t
) = fn
;
10352 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10357 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10358 FN and a null static chain slot. NARGS is the number of call arguments
10359 which are specified as "..." arguments. */
10362 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10366 va_start (args
, nargs
);
10367 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10372 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10373 FN and a null static chain slot. NARGS is the number of call arguments
10374 which are specified as a va_list ARGS. */
10377 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10382 t
= build_call_1 (return_type
, fn
, nargs
);
10383 for (i
= 0; i
< nargs
; i
++)
10384 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10385 process_call_operands (t
);
10389 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10390 FN and a null static chain slot. NARGS is the number of call arguments
10391 which are specified as a tree array ARGS. */
10394 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10395 int nargs
, const tree
*args
)
10400 t
= build_call_1 (return_type
, fn
, nargs
);
10401 for (i
= 0; i
< nargs
; i
++)
10402 CALL_EXPR_ARG (t
, i
) = args
[i
];
10403 process_call_operands (t
);
10404 SET_EXPR_LOCATION (t
, loc
);
10408 /* Like build_call_array, but takes a vec. */
10411 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10416 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10417 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10418 CALL_EXPR_ARG (ret
, ix
) = t
;
10419 process_call_operands (ret
);
10423 /* Conveniently construct a function call expression. FNDECL names the
10424 function to be called and N arguments are passed in the array
10428 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10430 tree fntype
= TREE_TYPE (fndecl
);
10431 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10433 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10436 /* Conveniently construct a function call expression. FNDECL names the
10437 function to be called and the arguments are passed in the vector
10441 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10443 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10444 vec_safe_address (vec
));
10448 /* Conveniently construct a function call expression. FNDECL names the
10449 function to be called, N is the number of arguments, and the "..."
10450 parameters are the argument expressions. */
10453 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10456 tree
*argarray
= XALLOCAVEC (tree
, n
);
10460 for (i
= 0; i
< n
; i
++)
10461 argarray
[i
] = va_arg (ap
, tree
);
10463 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10466 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10467 varargs macros aren't supported by all bootstrap compilers. */
10470 build_call_expr (tree fndecl
, int n
, ...)
10473 tree
*argarray
= XALLOCAVEC (tree
, n
);
10477 for (i
= 0; i
< n
; i
++)
10478 argarray
[i
] = va_arg (ap
, tree
);
10480 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10483 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10484 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10485 It will get gimplified later into an ordinary internal function. */
10488 build_call_expr_internal_loc_array (location_t loc
, internal_fn ifn
,
10489 tree type
, int n
, const tree
*args
)
10491 tree t
= build_call_1 (type
, NULL_TREE
, n
);
10492 for (int i
= 0; i
< n
; ++i
)
10493 CALL_EXPR_ARG (t
, i
) = args
[i
];
10494 SET_EXPR_LOCATION (t
, loc
);
10495 CALL_EXPR_IFN (t
) = ifn
;
10496 process_call_operands (t
);
10500 /* Build internal call expression. This is just like CALL_EXPR, except
10501 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10502 internal function. */
10505 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10506 tree type
, int n
, ...)
10509 tree
*argarray
= XALLOCAVEC (tree
, n
);
10513 for (i
= 0; i
< n
; i
++)
10514 argarray
[i
] = va_arg (ap
, tree
);
10516 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10519 /* Return a function call to FN, if the target is guaranteed to support it,
10522 N is the number of arguments, passed in the "...", and TYPE is the
10523 type of the return value. */
10526 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
10530 tree
*argarray
= XALLOCAVEC (tree
, n
);
10534 for (i
= 0; i
< n
; i
++)
10535 argarray
[i
] = va_arg (ap
, tree
);
10537 if (internal_fn_p (fn
))
10539 internal_fn ifn
= as_internal_fn (fn
);
10540 if (direct_internal_fn_p (ifn
))
10542 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
10543 if (!direct_internal_fn_supported_p (ifn
, types
,
10544 OPTIMIZE_FOR_BOTH
))
10547 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10551 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
10554 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10558 /* Return a function call to the appropriate builtin alloca variant.
10560 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10561 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10562 bound for SIZE in case it is not a fixed value. */
10565 build_alloca_call_expr (tree size
, unsigned int align
, HOST_WIDE_INT max_size
)
10569 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
);
10571 build_call_expr (t
, 3, size
, size_int (align
), size_int (max_size
));
10573 else if (align
> 0)
10575 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10576 return build_call_expr (t
, 2, size
, size_int (align
));
10580 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA
);
10581 return build_call_expr (t
, 1, size
);
10585 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10586 if SIZE == -1) and return a tree node representing char* pointer to
10587 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10588 the STRING_CST value is the LEN bytes at STR (the representation
10589 of the string, which may be wide). Otherwise it's all zeros. */
10592 build_string_literal (unsigned len
, const char *str
/* = NULL */,
10593 tree eltype
/* = char_type_node */,
10594 unsigned HOST_WIDE_INT size
/* = -1 */)
10596 tree t
= build_string (len
, str
);
10597 /* Set the maximum valid index based on the string length or SIZE. */
10598 unsigned HOST_WIDE_INT maxidx
10599 = (size
== HOST_WIDE_INT_M1U
? len
: size
) - 1;
10601 tree index
= build_index_type (size_int (maxidx
));
10602 eltype
= build_type_variant (eltype
, 1, 0);
10603 tree type
= build_array_type (eltype
, index
);
10604 TREE_TYPE (t
) = type
;
10605 TREE_CONSTANT (t
) = 1;
10606 TREE_READONLY (t
) = 1;
10607 TREE_STATIC (t
) = 1;
10609 type
= build_pointer_type (eltype
);
10610 t
= build1 (ADDR_EXPR
, type
,
10611 build4 (ARRAY_REF
, eltype
,
10612 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10618 /* Return true if T (assumed to be a DECL) must be assigned a memory
10622 needs_to_live_in_memory (const_tree t
)
10624 return (TREE_ADDRESSABLE (t
)
10625 || is_global_var (t
)
10626 || (TREE_CODE (t
) == RESULT_DECL
10627 && !DECL_BY_REFERENCE (t
)
10628 && aggregate_value_p (t
, current_function_decl
)));
10631 /* Return value of a constant X and sign-extend it. */
10634 int_cst_value (const_tree x
)
10636 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10637 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10639 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10640 gcc_assert (cst_and_fits_in_hwi (x
));
10642 if (bits
< HOST_BITS_PER_WIDE_INT
)
10644 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10646 val
|= HOST_WIDE_INT_M1U
<< (bits
- 1) << 1;
10648 val
&= ~(HOST_WIDE_INT_M1U
<< (bits
- 1) << 1);
10654 /* If TYPE is an integral or pointer type, return an integer type with
10655 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10656 if TYPE is already an integer type of signedness UNSIGNEDP.
10657 If TYPE is a floating-point type, return an integer type with the same
10658 bitsize and with the signedness given by UNSIGNEDP; this is useful
10659 when doing bit-level operations on a floating-point value. */
10662 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10664 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
) == unsignedp
)
10667 if (TREE_CODE (type
) == VECTOR_TYPE
)
10669 tree inner
= TREE_TYPE (type
);
10670 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10673 if (inner
== inner2
)
10675 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10678 if (TREE_CODE (type
) == COMPLEX_TYPE
)
10680 tree inner
= TREE_TYPE (type
);
10681 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10684 if (inner
== inner2
)
10686 return build_complex_type (inner2
);
10690 if (INTEGRAL_TYPE_P (type
)
10691 || POINTER_TYPE_P (type
)
10692 || TREE_CODE (type
) == OFFSET_TYPE
)
10693 bits
= TYPE_PRECISION (type
);
10694 else if (TREE_CODE (type
) == REAL_TYPE
)
10695 bits
= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type
));
10699 return build_nonstandard_integer_type (bits
, unsignedp
);
10702 /* If TYPE is an integral or pointer type, return an integer type with
10703 the same precision which is unsigned, or itself if TYPE is already an
10704 unsigned integer type. If TYPE is a floating-point type, return an
10705 unsigned integer type with the same bitsize as TYPE. */
10708 unsigned_type_for (tree type
)
10710 return signed_or_unsigned_type_for (1, type
);
10713 /* If TYPE is an integral or pointer type, return an integer type with
10714 the same precision which is signed, or itself if TYPE is already a
10715 signed integer type. If TYPE is a floating-point type, return a
10716 signed integer type with the same bitsize as TYPE. */
10719 signed_type_for (tree type
)
10721 return signed_or_unsigned_type_for (0, type
);
10724 /* If TYPE is a vector type, return a signed integer vector type with the
10725 same width and number of subparts. Otherwise return boolean_type_node. */
10728 truth_type_for (tree type
)
10730 if (TREE_CODE (type
) == VECTOR_TYPE
)
10732 if (VECTOR_BOOLEAN_TYPE_P (type
))
10734 return build_truth_vector_type_for (type
);
10737 return boolean_type_node
;
10740 /* Returns the largest value obtainable by casting something in INNER type to
10744 upper_bound_in_type (tree outer
, tree inner
)
10746 unsigned int det
= 0;
10747 unsigned oprec
= TYPE_PRECISION (outer
);
10748 unsigned iprec
= TYPE_PRECISION (inner
);
10751 /* Compute a unique number for every combination. */
10752 det
|= (oprec
> iprec
) ? 4 : 0;
10753 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10754 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10756 /* Determine the exponent to use. */
10761 /* oprec <= iprec, outer: signed, inner: don't care. */
10766 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10770 /* oprec > iprec, outer: signed, inner: signed. */
10774 /* oprec > iprec, outer: signed, inner: unsigned. */
10778 /* oprec > iprec, outer: unsigned, inner: signed. */
10782 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10786 gcc_unreachable ();
10789 return wide_int_to_tree (outer
,
10790 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10793 /* Returns the smallest value obtainable by casting something in INNER type to
10797 lower_bound_in_type (tree outer
, tree inner
)
10799 unsigned oprec
= TYPE_PRECISION (outer
);
10800 unsigned iprec
= TYPE_PRECISION (inner
);
10802 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10804 if (TYPE_UNSIGNED (outer
)
10805 /* If we are widening something of an unsigned type, OUTER type
10806 contains all values of INNER type. In particular, both INNER
10807 and OUTER types have zero in common. */
10808 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10809 return build_int_cst (outer
, 0);
10812 /* If we are widening a signed type to another signed type, we
10813 want to obtain -2^^(iprec-1). If we are keeping the
10814 precision or narrowing to a signed type, we want to obtain
10816 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
10817 return wide_int_to_tree (outer
,
10818 wi::mask (prec
- 1, true,
10819 TYPE_PRECISION (outer
)));
10823 /* Return nonzero if two operands that are suitable for PHI nodes are
10824 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10825 SSA_NAME or invariant. Note that this is strictly an optimization.
10826 That is, callers of this function can directly call operand_equal_p
10827 and get the same result, only slower. */
10830 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
10834 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
10836 return operand_equal_p (arg0
, arg1
, 0);
10839 /* Returns number of zeros at the end of binary representation of X. */
10842 num_ending_zeros (const_tree x
)
10844 return build_int_cst (TREE_TYPE (x
), wi::ctz (wi::to_wide (x
)));
10848 #define WALK_SUBTREE(NODE) \
10851 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10857 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10858 be walked whenever a type is seen in the tree. Rest of operands and return
10859 value are as for walk_tree. */
10862 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
10863 hash_set
<tree
> *pset
, walk_tree_lh lh
)
10865 tree result
= NULL_TREE
;
10867 switch (TREE_CODE (type
))
10870 case REFERENCE_TYPE
:
10872 /* We have to worry about mutually recursive pointers. These can't
10873 be written in C. They can in Ada. It's pathological, but
10874 there's an ACATS test (c38102a) that checks it. Deal with this
10875 by checking if we're pointing to another pointer, that one
10876 points to another pointer, that one does too, and we have no htab.
10877 If so, get a hash table. We check three levels deep to avoid
10878 the cost of the hash table if we don't need one. */
10879 if (POINTER_TYPE_P (TREE_TYPE (type
))
10880 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
10881 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
10884 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
10895 WALK_SUBTREE (TREE_TYPE (type
));
10899 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
10901 /* Fall through. */
10903 case FUNCTION_TYPE
:
10904 WALK_SUBTREE (TREE_TYPE (type
));
10908 /* We never want to walk into default arguments. */
10909 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
10910 WALK_SUBTREE (TREE_VALUE (arg
));
10915 /* Don't follow this nodes's type if a pointer for fear that
10916 we'll have infinite recursion. If we have a PSET, then we
10919 || (!POINTER_TYPE_P (TREE_TYPE (type
))
10920 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
10921 WALK_SUBTREE (TREE_TYPE (type
));
10922 WALK_SUBTREE (TYPE_DOMAIN (type
));
10926 WALK_SUBTREE (TREE_TYPE (type
));
10927 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
10937 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10938 called with the DATA and the address of each sub-tree. If FUNC returns a
10939 non-NULL value, the traversal is stopped, and the value returned by FUNC
10940 is returned. If PSET is non-NULL it is used to record the nodes visited,
10941 and to avoid visiting a node more than once. */
10944 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
10945 hash_set
<tree
> *pset
, walk_tree_lh lh
)
10947 enum tree_code code
;
10951 #define WALK_SUBTREE_TAIL(NODE) \
10955 goto tail_recurse; \
10960 /* Skip empty subtrees. */
10964 /* Don't walk the same tree twice, if the user has requested
10965 that we avoid doing so. */
10966 if (pset
&& pset
->add (*tp
))
10969 /* Call the function. */
10971 result
= (*func
) (tp
, &walk_subtrees
, data
);
10973 /* If we found something, return it. */
10977 code
= TREE_CODE (*tp
);
10979 /* Even if we didn't, FUNC may have decided that there was nothing
10980 interesting below this point in the tree. */
10981 if (!walk_subtrees
)
10983 /* But we still need to check our siblings. */
10984 if (code
== TREE_LIST
)
10985 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
10986 else if (code
== OMP_CLAUSE
)
10987 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10994 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
10995 if (result
|| !walk_subtrees
)
11002 case IDENTIFIER_NODE
:
11008 case PLACEHOLDER_EXPR
:
11012 /* None of these have subtrees other than those already walked
11017 WALK_SUBTREE (TREE_VALUE (*tp
));
11018 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11023 int len
= TREE_VEC_LENGTH (*tp
);
11028 /* Walk all elements but the first. */
11030 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11032 /* Now walk the first one as a tail call. */
11033 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11038 unsigned len
= vector_cst_encoded_nelts (*tp
);
11041 /* Walk all elements but the first. */
11043 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp
, len
));
11044 /* Now walk the first one as a tail call. */
11045 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp
, 0));
11049 WALK_SUBTREE (TREE_REALPART (*tp
));
11050 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11054 unsigned HOST_WIDE_INT idx
;
11055 constructor_elt
*ce
;
11057 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11059 WALK_SUBTREE (ce
->value
);
11064 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11069 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11071 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11072 into declarations that are just mentioned, rather than
11073 declared; they don't really belong to this part of the tree.
11074 And, we can see cycles: the initializer for a declaration
11075 can refer to the declaration itself. */
11076 WALK_SUBTREE (DECL_INITIAL (decl
));
11077 WALK_SUBTREE (DECL_SIZE (decl
));
11078 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11080 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11083 case STATEMENT_LIST
:
11085 tree_stmt_iterator i
;
11086 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11087 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11092 switch (OMP_CLAUSE_CODE (*tp
))
11094 case OMP_CLAUSE_GANG
:
11095 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11098 case OMP_CLAUSE_AFFINITY
:
11099 case OMP_CLAUSE_ASYNC
:
11100 case OMP_CLAUSE_WAIT
:
11101 case OMP_CLAUSE_WORKER
:
11102 case OMP_CLAUSE_VECTOR
:
11103 case OMP_CLAUSE_NUM_GANGS
:
11104 case OMP_CLAUSE_NUM_WORKERS
:
11105 case OMP_CLAUSE_VECTOR_LENGTH
:
11106 case OMP_CLAUSE_PRIVATE
:
11107 case OMP_CLAUSE_SHARED
:
11108 case OMP_CLAUSE_FIRSTPRIVATE
:
11109 case OMP_CLAUSE_COPYIN
:
11110 case OMP_CLAUSE_COPYPRIVATE
:
11111 case OMP_CLAUSE_FINAL
:
11112 case OMP_CLAUSE_IF
:
11113 case OMP_CLAUSE_NUM_THREADS
:
11114 case OMP_CLAUSE_SCHEDULE
:
11115 case OMP_CLAUSE_UNIFORM
:
11116 case OMP_CLAUSE_DEPEND
:
11117 case OMP_CLAUSE_NONTEMPORAL
:
11118 case OMP_CLAUSE_NUM_TEAMS
:
11119 case OMP_CLAUSE_THREAD_LIMIT
:
11120 case OMP_CLAUSE_DEVICE
:
11121 case OMP_CLAUSE_DIST_SCHEDULE
:
11122 case OMP_CLAUSE_SAFELEN
:
11123 case OMP_CLAUSE_SIMDLEN
:
11124 case OMP_CLAUSE_ORDERED
:
11125 case OMP_CLAUSE_PRIORITY
:
11126 case OMP_CLAUSE_GRAINSIZE
:
11127 case OMP_CLAUSE_NUM_TASKS
:
11128 case OMP_CLAUSE_HINT
:
11129 case OMP_CLAUSE_TO_DECLARE
:
11130 case OMP_CLAUSE_LINK
:
11131 case OMP_CLAUSE_DETACH
:
11132 case OMP_CLAUSE_USE_DEVICE_PTR
:
11133 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11134 case OMP_CLAUSE_IS_DEVICE_PTR
:
11135 case OMP_CLAUSE_INCLUSIVE
:
11136 case OMP_CLAUSE_EXCLUSIVE
:
11137 case OMP_CLAUSE__LOOPTEMP_
:
11138 case OMP_CLAUSE__REDUCTEMP_
:
11139 case OMP_CLAUSE__CONDTEMP_
:
11140 case OMP_CLAUSE__SCANTEMP_
:
11141 case OMP_CLAUSE__SIMDUID_
:
11142 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11145 case OMP_CLAUSE_INDEPENDENT
:
11146 case OMP_CLAUSE_NOWAIT
:
11147 case OMP_CLAUSE_DEFAULT
:
11148 case OMP_CLAUSE_UNTIED
:
11149 case OMP_CLAUSE_MERGEABLE
:
11150 case OMP_CLAUSE_PROC_BIND
:
11151 case OMP_CLAUSE_DEVICE_TYPE
:
11152 case OMP_CLAUSE_INBRANCH
:
11153 case OMP_CLAUSE_NOTINBRANCH
:
11154 case OMP_CLAUSE_FOR
:
11155 case OMP_CLAUSE_PARALLEL
:
11156 case OMP_CLAUSE_SECTIONS
:
11157 case OMP_CLAUSE_TASKGROUP
:
11158 case OMP_CLAUSE_NOGROUP
:
11159 case OMP_CLAUSE_THREADS
:
11160 case OMP_CLAUSE_SIMD
:
11161 case OMP_CLAUSE_DEFAULTMAP
:
11162 case OMP_CLAUSE_ORDER
:
11163 case OMP_CLAUSE_BIND
:
11164 case OMP_CLAUSE_AUTO
:
11165 case OMP_CLAUSE_SEQ
:
11166 case OMP_CLAUSE_TILE
:
11167 case OMP_CLAUSE__SIMT_
:
11168 case OMP_CLAUSE_IF_PRESENT
:
11169 case OMP_CLAUSE_FINALIZE
:
11170 case OMP_CLAUSE_NOHOST
:
11171 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11173 case OMP_CLAUSE_LASTPRIVATE
:
11174 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11175 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11176 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11178 case OMP_CLAUSE_COLLAPSE
:
11181 for (i
= 0; i
< 3; i
++)
11182 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11183 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11186 case OMP_CLAUSE_LINEAR
:
11187 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11188 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11189 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11190 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11192 case OMP_CLAUSE_ALIGNED
:
11193 case OMP_CLAUSE_ALLOCATE
:
11194 case OMP_CLAUSE_FROM
:
11195 case OMP_CLAUSE_TO
:
11196 case OMP_CLAUSE_MAP
:
11197 case OMP_CLAUSE__CACHE_
:
11198 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11199 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11200 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11202 case OMP_CLAUSE_REDUCTION
:
11203 case OMP_CLAUSE_TASK_REDUCTION
:
11204 case OMP_CLAUSE_IN_REDUCTION
:
11207 for (i
= 0; i
< 5; i
++)
11208 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11209 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11213 gcc_unreachable ();
11221 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11222 But, we only want to walk once. */
11223 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11224 for (i
= 0; i
< len
; ++i
)
11225 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11226 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11230 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11231 defining. We only want to walk into these fields of a type in this
11232 case and not in the general case of a mere reference to the type.
11234 The criterion is as follows: if the field can be an expression, it
11235 must be walked only here. This should be in keeping with the fields
11236 that are directly gimplified in gimplify_type_sizes in order for the
11237 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11238 variable-sized types.
11240 Note that DECLs get walked as part of processing the BIND_EXPR. */
11241 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11243 /* Call the function for the decl so e.g. copy_tree_body_r can
11244 replace it with the remapped one. */
11245 result
= (*func
) (&DECL_EXPR_DECL (*tp
), &walk_subtrees
, data
);
11246 if (result
|| !walk_subtrees
)
11249 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11250 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11253 /* Call the function for the type. See if it returns anything or
11254 doesn't want us to continue. If we are to continue, walk both
11255 the normal fields and those for the declaration case. */
11256 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11257 if (result
|| !walk_subtrees
)
11260 /* But do not walk a pointed-to type since it may itself need to
11261 be walked in the declaration case if it isn't anonymous. */
11262 if (!POINTER_TYPE_P (*type_p
))
11264 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11269 /* If this is a record type, also walk the fields. */
11270 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11274 for (field
= TYPE_FIELDS (*type_p
); field
;
11275 field
= DECL_CHAIN (field
))
11277 /* We'd like to look at the type of the field, but we can
11278 easily get infinite recursion. So assume it's pointed
11279 to elsewhere in the tree. Also, ignore things that
11281 if (TREE_CODE (field
) != FIELD_DECL
)
11284 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11285 WALK_SUBTREE (DECL_SIZE (field
));
11286 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11287 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11288 WALK_SUBTREE (DECL_QUALIFIER (field
));
11292 /* Same for scalar types. */
11293 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11294 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11295 || TREE_CODE (*type_p
) == INTEGER_TYPE
11296 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11297 || TREE_CODE (*type_p
) == REAL_TYPE
)
11299 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11300 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11303 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11304 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11309 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11313 /* Walk over all the sub-trees of this operand. */
11314 len
= TREE_OPERAND_LENGTH (*tp
);
11316 /* Go through the subtrees. We need to do this in forward order so
11317 that the scope of a FOR_EXPR is handled properly. */
11320 for (i
= 0; i
< len
- 1; ++i
)
11321 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11322 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11325 /* If this is a type, walk the needed fields in the type. */
11326 else if (TYPE_P (*tp
))
11327 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11331 /* We didn't find what we were looking for. */
11334 #undef WALK_SUBTREE_TAIL
11336 #undef WALK_SUBTREE
11338 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11341 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11346 hash_set
<tree
> pset
;
11347 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
11353 tree_block (tree t
)
11355 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11357 if (IS_EXPR_CODE_CLASS (c
))
11358 return LOCATION_BLOCK (t
->exp
.locus
);
11359 gcc_unreachable ();
11364 tree_set_block (tree t
, tree b
)
11366 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11368 if (IS_EXPR_CODE_CLASS (c
))
11370 t
->exp
.locus
= set_block (t
->exp
.locus
, b
);
11373 gcc_unreachable ();
11376 /* Create a nameless artificial label and put it in the current
11377 function context. The label has a location of LOC. Returns the
11378 newly created label. */
11381 create_artificial_label (location_t loc
)
11383 tree lab
= build_decl (loc
,
11384 LABEL_DECL
, NULL_TREE
, void_type_node
);
11386 DECL_ARTIFICIAL (lab
) = 1;
11387 DECL_IGNORED_P (lab
) = 1;
11388 DECL_CONTEXT (lab
) = current_function_decl
;
11392 /* Given a tree, try to return a useful variable name that we can use
11393 to prefix a temporary that is being assigned the value of the tree.
11394 I.E. given <temp> = &A, return A. */
11399 tree stripped_decl
;
11402 STRIP_NOPS (stripped_decl
);
11403 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11404 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11405 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11407 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11410 return IDENTIFIER_POINTER (name
);
11414 switch (TREE_CODE (stripped_decl
))
11417 return get_name (TREE_OPERAND (stripped_decl
, 0));
11424 /* Return true if TYPE has a variable argument list. */
11427 stdarg_p (const_tree fntype
)
11429 function_args_iterator args_iter
;
11430 tree n
= NULL_TREE
, t
;
11435 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11440 return n
!= NULL_TREE
&& n
!= void_type_node
;
11443 /* Return true if TYPE has a prototype. */
11446 prototype_p (const_tree fntype
)
11450 gcc_assert (fntype
!= NULL_TREE
);
11452 t
= TYPE_ARG_TYPES (fntype
);
11453 return (t
!= NULL_TREE
);
11456 /* If BLOCK is inlined from an __attribute__((__artificial__))
11457 routine, return pointer to location from where it has been
11460 block_nonartificial_location (tree block
)
11462 location_t
*ret
= NULL
;
11464 while (block
&& TREE_CODE (block
) == BLOCK
11465 && BLOCK_ABSTRACT_ORIGIN (block
))
11467 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11468 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11470 /* If AO is an artificial inline, point RET to the
11471 call site locus at which it has been inlined and continue
11472 the loop, in case AO's caller is also an artificial
11474 if (DECL_DECLARED_INLINE_P (ao
)
11475 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11476 ret
= &BLOCK_SOURCE_LOCATION (block
);
11480 else if (TREE_CODE (ao
) != BLOCK
)
11483 block
= BLOCK_SUPERCONTEXT (block
);
11489 /* If EXP is inlined from an __attribute__((__artificial__))
11490 function, return the location of the original call expression. */
11493 tree_nonartificial_location (tree exp
)
11495 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11500 return EXPR_LOCATION (exp
);
11503 /* Return the location into which EXP has been inlined. Analogous
11504 to tree_nonartificial_location() above but not limited to artificial
11505 functions declared inline. If SYSTEM_HEADER is true, return
11506 the macro expansion point of the location if it's in a system header */
11509 tree_inlined_location (tree exp
, bool system_header
/* = true */)
11511 location_t loc
= UNKNOWN_LOCATION
;
11513 tree block
= TREE_BLOCK (exp
);
11515 while (block
&& TREE_CODE (block
) == BLOCK
11516 && BLOCK_ABSTRACT_ORIGIN (block
))
11518 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11519 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11520 loc
= BLOCK_SOURCE_LOCATION (block
);
11521 else if (TREE_CODE (ao
) != BLOCK
)
11524 block
= BLOCK_SUPERCONTEXT (block
);
11527 if (loc
== UNKNOWN_LOCATION
)
11529 loc
= EXPR_LOCATION (exp
);
11531 /* Only consider macro expansion when the block traversal failed
11532 to find a location. Otherwise it's not relevant. */
11533 return expansion_point_location_if_in_system_header (loc
);
11539 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11542 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11545 cl_option_hasher::hash (tree x
)
11547 const_tree
const t
= x
;
11551 hashval_t hash
= 0;
11553 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11555 p
= (const char *)TREE_OPTIMIZATION (t
);
11556 len
= sizeof (struct cl_optimization
);
11559 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11560 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
11563 gcc_unreachable ();
11565 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11567 for (i
= 0; i
< len
; i
++)
11569 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11574 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11575 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11579 cl_option_hasher::equal (tree x
, tree y
)
11581 const_tree
const xt
= x
;
11582 const_tree
const yt
= y
;
11584 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11587 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11588 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt
),
11589 TREE_OPTIMIZATION (yt
));
11590 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11591 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
11592 TREE_TARGET_OPTION (yt
));
11594 gcc_unreachable ();
11597 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11600 build_optimization_node (struct gcc_options
*opts
,
11601 struct gcc_options
*opts_set
)
11605 /* Use the cache of optimization nodes. */
11607 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11610 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
11614 /* Insert this one into the hash table. */
11615 t
= cl_optimization_node
;
11618 /* Make a new node for next time round. */
11619 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11625 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11628 build_target_option_node (struct gcc_options
*opts
,
11629 struct gcc_options
*opts_set
)
11633 /* Use the cache of optimization nodes. */
11635 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11638 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
11642 /* Insert this one into the hash table. */
11643 t
= cl_target_option_node
;
11646 /* Make a new node for next time round. */
11647 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11653 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11654 so that they aren't saved during PCH writing. */
11657 prepare_target_option_nodes_for_pch (void)
11659 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
11660 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
11661 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
11662 TREE_TARGET_GLOBALS (*iter
) = NULL
;
11665 /* Determine the "ultimate origin" of a block. */
11668 block_ultimate_origin (const_tree block
)
11670 tree origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11672 if (origin
== NULL_TREE
)
11676 gcc_checking_assert ((DECL_P (origin
)
11677 && DECL_ORIGIN (origin
) == origin
)
11678 || BLOCK_ORIGIN (origin
) == origin
);
11683 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11687 tree_nop_conversion_p (const_tree outer_type
, const_tree inner_type
)
11689 /* Do not strip casts into or out of differing address spaces. */
11690 if (POINTER_TYPE_P (outer_type
)
11691 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type
)) != ADDR_SPACE_GENERIC
)
11693 if (!POINTER_TYPE_P (inner_type
)
11694 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
11695 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
))))
11698 else if (POINTER_TYPE_P (inner_type
)
11699 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)) != ADDR_SPACE_GENERIC
)
11701 /* We already know that outer_type is not a pointer with
11702 a non-generic address space. */
11706 /* Use precision rather then machine mode when we can, which gives
11707 the correct answer even for submode (bit-field) types. */
11708 if ((INTEGRAL_TYPE_P (outer_type
)
11709 || POINTER_TYPE_P (outer_type
)
11710 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11711 && (INTEGRAL_TYPE_P (inner_type
)
11712 || POINTER_TYPE_P (inner_type
)
11713 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11714 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11716 /* Otherwise fall back on comparing machine modes (e.g. for
11717 aggregate types, floats). */
11718 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11721 /* Return true iff conversion in EXP generates no instruction. Mark
11722 it inline so that we fully inline into the stripping functions even
11723 though we have two uses of this function. */
11726 tree_nop_conversion (const_tree exp
)
11728 tree outer_type
, inner_type
;
11730 if (location_wrapper_p (exp
))
11732 if (!CONVERT_EXPR_P (exp
)
11733 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11736 outer_type
= TREE_TYPE (exp
);
11737 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11738 if (!inner_type
|| inner_type
== error_mark_node
)
11741 return tree_nop_conversion_p (outer_type
, inner_type
);
11744 /* Return true iff conversion in EXP generates no instruction. Don't
11745 consider conversions changing the signedness. */
11748 tree_sign_nop_conversion (const_tree exp
)
11750 tree outer_type
, inner_type
;
11752 if (!tree_nop_conversion (exp
))
11755 outer_type
= TREE_TYPE (exp
);
11756 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11758 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11759 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11762 /* Strip conversions from EXP according to tree_nop_conversion and
11763 return the resulting expression. */
11766 tree_strip_nop_conversions (tree exp
)
11768 while (tree_nop_conversion (exp
))
11769 exp
= TREE_OPERAND (exp
, 0);
11773 /* Strip conversions from EXP according to tree_sign_nop_conversion
11774 and return the resulting expression. */
11777 tree_strip_sign_nop_conversions (tree exp
)
11779 while (tree_sign_nop_conversion (exp
))
11780 exp
= TREE_OPERAND (exp
, 0);
11784 /* Avoid any floating point extensions from EXP. */
11786 strip_float_extensions (tree exp
)
11788 tree sub
, expt
, subt
;
11790 /* For floating point constant look up the narrowest type that can hold
11791 it properly and handle it like (type)(narrowest_type)constant.
11792 This way we can optimize for instance a=a*2.0 where "a" is float
11793 but 2.0 is double constant. */
11794 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11796 REAL_VALUE_TYPE orig
;
11799 orig
= TREE_REAL_CST (exp
);
11800 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11801 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11802 type
= float_type_node
;
11803 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11804 > TYPE_PRECISION (double_type_node
)
11805 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11806 type
= double_type_node
;
11808 return build_real_truncate (type
, orig
);
11811 if (!CONVERT_EXPR_P (exp
))
11814 sub
= TREE_OPERAND (exp
, 0);
11815 subt
= TREE_TYPE (sub
);
11816 expt
= TREE_TYPE (exp
);
11818 if (!FLOAT_TYPE_P (subt
))
11821 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11824 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11827 return strip_float_extensions (sub
);
11830 /* Strip out all handled components that produce invariant
11834 strip_invariant_refs (const_tree op
)
11836 while (handled_component_p (op
))
11838 switch (TREE_CODE (op
))
11841 case ARRAY_RANGE_REF
:
11842 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
11843 || TREE_OPERAND (op
, 2) != NULL_TREE
11844 || TREE_OPERAND (op
, 3) != NULL_TREE
)
11848 case COMPONENT_REF
:
11849 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
11855 op
= TREE_OPERAND (op
, 0);
11861 static GTY(()) tree gcc_eh_personality_decl
;
11863 /* Return the GCC personality function decl. */
11866 lhd_gcc_personality (void)
11868 if (!gcc_eh_personality_decl
)
11869 gcc_eh_personality_decl
= build_personality_function ("gcc");
11870 return gcc_eh_personality_decl
;
11873 /* TARGET is a call target of GIMPLE call statement
11874 (obtained by gimple_call_fn). Return true if it is
11875 OBJ_TYPE_REF representing an virtual call of C++ method.
11876 (As opposed to OBJ_TYPE_REF representing objc calls
11877 through a cast where middle-end devirtualization machinery
11878 can't apply.) FOR_DUMP_P is true when being called from
11879 the dump routines. */
11882 virtual_method_call_p (const_tree target
, bool for_dump_p
)
11884 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
11886 tree t
= TREE_TYPE (target
);
11887 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
11889 if (TREE_CODE (t
) == FUNCTION_TYPE
)
11891 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
11892 /* If we do not have BINFO associated, it means that type was built
11893 without devirtualization enabled. Do not consider this a virtual
11895 if (!TYPE_BINFO (obj_type_ref_class (target
, for_dump_p
)))
11900 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11903 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
11906 tree base_binfo
, b
;
11908 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
11909 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
11910 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
11912 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
11917 /* Try to find a base info of BINFO that would have its field decl at offset
11918 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11919 found, return, otherwise return NULL_TREE. */
11922 get_binfo_at_offset (tree binfo
, poly_int64 offset
, tree expected_type
)
11924 tree type
= BINFO_TYPE (binfo
);
11928 HOST_WIDE_INT pos
, size
;
11932 if (types_same_for_odr (type
, expected_type
))
11934 if (maybe_lt (offset
, 0))
11937 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
11939 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
11942 pos
= int_bit_position (fld
);
11943 size
= tree_to_uhwi (DECL_SIZE (fld
));
11944 if (known_in_range_p (offset
, pos
, size
))
11947 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
11950 /* Offset 0 indicates the primary base, whose vtable contents are
11951 represented in the binfo for the derived class. */
11952 else if (maybe_ne (offset
, 0))
11954 tree found_binfo
= NULL
, base_binfo
;
11955 /* Offsets in BINFO are in bytes relative to the whole structure
11956 while POS is in bits relative to the containing field. */
11957 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
11960 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
11961 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
11962 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
11964 found_binfo
= base_binfo
;
11968 binfo
= found_binfo
;
11970 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
11974 type
= TREE_TYPE (fld
);
11979 /* Returns true if X is a typedef decl. */
11982 is_typedef_decl (const_tree x
)
11984 return (x
&& TREE_CODE (x
) == TYPE_DECL
11985 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
11988 /* Returns true iff TYPE is a type variant created for a typedef. */
11991 typedef_variant_p (const_tree type
)
11993 return is_typedef_decl (TYPE_NAME (type
));
11996 /* PR 84195: Replace control characters in "unescaped" with their
11997 escaped equivalents. Allow newlines if -fmessage-length has
11998 been set to a non-zero value. This is done here, rather than
11999 where the attribute is recorded as the message length can
12000 change between these two locations. */
12003 escaped_string::escape (const char *unescaped
)
12006 size_t i
, new_i
, len
;
12011 m_str
= const_cast<char *> (unescaped
);
12014 if (unescaped
== NULL
|| *unescaped
== 0)
12017 len
= strlen (unescaped
);
12021 for (i
= 0; i
< len
; i
++)
12023 char c
= unescaped
[i
];
12028 escaped
[new_i
++] = c
;
12032 if (c
!= '\n' || !pp_is_wrapping_line (global_dc
->printer
))
12034 if (escaped
== NULL
)
12036 /* We only allocate space for a new string if we
12037 actually encounter a control character that
12038 needs replacing. */
12039 escaped
= (char *) xmalloc (len
* 2 + 1);
12040 strncpy (escaped
, unescaped
, i
);
12044 escaped
[new_i
++] = '\\';
12048 case '\a': escaped
[new_i
++] = 'a'; break;
12049 case '\b': escaped
[new_i
++] = 'b'; break;
12050 case '\f': escaped
[new_i
++] = 'f'; break;
12051 case '\n': escaped
[new_i
++] = 'n'; break;
12052 case '\r': escaped
[new_i
++] = 'r'; break;
12053 case '\t': escaped
[new_i
++] = 't'; break;
12054 case '\v': escaped
[new_i
++] = 'v'; break;
12055 default: escaped
[new_i
++] = '?'; break;
12059 escaped
[new_i
++] = c
;
12064 escaped
[new_i
] = 0;
12070 /* Warn about a use of an identifier which was marked deprecated. Returns
12071 whether a warning was given. */
12074 warn_deprecated_use (tree node
, tree attr
)
12076 escaped_string msg
;
12078 if (node
== 0 || !warn_deprecated_decl
)
12084 attr
= DECL_ATTRIBUTES (node
);
12085 else if (TYPE_P (node
))
12087 tree decl
= TYPE_STUB_DECL (node
);
12089 attr
= lookup_attribute ("deprecated",
12090 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12095 attr
= lookup_attribute ("deprecated", attr
);
12098 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12103 auto_diagnostic_group d
;
12105 w
= warning (OPT_Wdeprecated_declarations
,
12106 "%qD is deprecated: %s", node
, (const char *) msg
);
12108 w
= warning (OPT_Wdeprecated_declarations
,
12109 "%qD is deprecated", node
);
12111 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12113 else if (TYPE_P (node
))
12115 tree what
= NULL_TREE
;
12116 tree decl
= TYPE_STUB_DECL (node
);
12118 if (TYPE_NAME (node
))
12120 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12121 what
= TYPE_NAME (node
);
12122 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12123 && DECL_NAME (TYPE_NAME (node
)))
12124 what
= DECL_NAME (TYPE_NAME (node
));
12127 auto_diagnostic_group d
;
12131 w
= warning (OPT_Wdeprecated_declarations
,
12132 "%qE is deprecated: %s", what
, (const char *) msg
);
12134 w
= warning (OPT_Wdeprecated_declarations
,
12135 "%qE is deprecated", what
);
12140 w
= warning (OPT_Wdeprecated_declarations
,
12141 "type is deprecated: %s", (const char *) msg
);
12143 w
= warning (OPT_Wdeprecated_declarations
,
12144 "type is deprecated");
12148 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12154 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12155 somewhere in it. */
12158 contains_bitfld_component_ref_p (const_tree ref
)
12160 while (handled_component_p (ref
))
12162 if (TREE_CODE (ref
) == COMPONENT_REF
12163 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12165 ref
= TREE_OPERAND (ref
, 0);
12171 /* Try to determine whether a TRY_CATCH expression can fall through.
12172 This is a subroutine of block_may_fallthru. */
12175 try_catch_may_fallthru (const_tree stmt
)
12177 tree_stmt_iterator i
;
12179 /* If the TRY block can fall through, the whole TRY_CATCH can
12181 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12184 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12185 switch (TREE_CODE (tsi_stmt (i
)))
12188 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12189 catch expression and a body. The whole TRY_CATCH may fall
12190 through iff any of the catch bodies falls through. */
12191 for (; !tsi_end_p (i
); tsi_next (&i
))
12193 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12198 case EH_FILTER_EXPR
:
12199 /* The exception filter expression only matters if there is an
12200 exception. If the exception does not match EH_FILTER_TYPES,
12201 we will execute EH_FILTER_FAILURE, and we will fall through
12202 if that falls through. If the exception does match
12203 EH_FILTER_TYPES, the stack unwinder will continue up the
12204 stack, so we will not fall through. We don't know whether we
12205 will throw an exception which matches EH_FILTER_TYPES or not,
12206 so we just ignore EH_FILTER_TYPES and assume that we might
12207 throw an exception which doesn't match. */
12208 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12211 /* This case represents statements to be executed when an
12212 exception occurs. Those statements are implicitly followed
12213 by a RESX statement to resume execution after the exception.
12214 So in this case the TRY_CATCH never falls through. */
12219 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12220 need not be 100% accurate; simply be conservative and return true if we
12221 don't know. This is used only to avoid stupidly generating extra code.
12222 If we're wrong, we'll just delete the extra code later. */
12225 block_may_fallthru (const_tree block
)
12227 /* This CONST_CAST is okay because expr_last returns its argument
12228 unmodified and we assign it to a const_tree. */
12229 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12231 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12235 /* Easy cases. If the last statement of the block implies
12236 control transfer, then we can't fall through. */
12240 /* If there is a default: label or case labels cover all possible
12241 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12242 to some case label in all cases and all we care is whether the
12243 SWITCH_BODY falls through. */
12244 if (SWITCH_ALL_CASES_P (stmt
))
12245 return block_may_fallthru (SWITCH_BODY (stmt
));
12249 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12251 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12254 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12256 case TRY_CATCH_EXPR
:
12257 return try_catch_may_fallthru (stmt
);
12259 case TRY_FINALLY_EXPR
:
12260 /* The finally clause is always executed after the try clause,
12261 so if it does not fall through, then the try-finally will not
12262 fall through. Otherwise, if the try clause does not fall
12263 through, then when the finally clause falls through it will
12264 resume execution wherever the try clause was going. So the
12265 whole try-finally will only fall through if both the try
12266 clause and the finally clause fall through. */
12267 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12268 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12271 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12274 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12275 stmt
= TREE_OPERAND (stmt
, 1);
12281 /* Functions that do not return do not fall through. */
12282 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12284 case CLEANUP_POINT_EXPR
:
12285 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12288 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12294 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
12309 /* Query whether EH is used for cleanups. */
12311 using_eh_for_cleanups_p (void)
12313 return using_eh_for_cleanups_flag
;
12316 /* Wrapper for tree_code_name to ensure that tree code is valid */
12318 get_tree_code_name (enum tree_code code
)
12320 const char *invalid
= "<invalid tree code>";
12322 /* The tree_code enum promotes to signed, but we could be getting
12323 invalid values, so force an unsigned comparison. */
12324 if (unsigned (code
) >= MAX_TREE_CODES
)
12326 if ((unsigned)code
== 0xa5a5)
12327 return "ggc_freed";
12331 return tree_code_name
[code
];
12334 /* Drops the TREE_OVERFLOW flag from T. */
12337 drop_tree_overflow (tree t
)
12339 gcc_checking_assert (TREE_OVERFLOW (t
));
12341 /* For tree codes with a sharing machinery re-build the result. */
12342 if (poly_int_tree_p (t
))
12343 return wide_int_to_tree (TREE_TYPE (t
), wi::to_poly_wide (t
));
12345 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12346 and canonicalize the result. */
12347 if (TREE_CODE (t
) == VECTOR_CST
)
12349 tree_vector_builder builder
;
12350 builder
.new_unary_operation (TREE_TYPE (t
), t
, true);
12351 unsigned int count
= builder
.encoded_nelts ();
12352 for (unsigned int i
= 0; i
< count
; ++i
)
12354 tree elt
= VECTOR_CST_ELT (t
, i
);
12355 if (TREE_OVERFLOW (elt
))
12356 elt
= drop_tree_overflow (elt
);
12357 builder
.quick_push (elt
);
12359 return builder
.build ();
12362 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12363 and drop the flag. */
12365 TREE_OVERFLOW (t
) = 0;
12367 /* For constants that contain nested constants, drop the flag
12368 from those as well. */
12369 if (TREE_CODE (t
) == COMPLEX_CST
)
12371 if (TREE_OVERFLOW (TREE_REALPART (t
)))
12372 TREE_REALPART (t
) = drop_tree_overflow (TREE_REALPART (t
));
12373 if (TREE_OVERFLOW (TREE_IMAGPART (t
)))
12374 TREE_IMAGPART (t
) = drop_tree_overflow (TREE_IMAGPART (t
));
12380 /* Given a memory reference expression T, return its base address.
12381 The base address of a memory reference expression is the main
12382 object being referenced. For instance, the base address for
12383 'array[i].fld[j]' is 'array'. You can think of this as stripping
12384 away the offset part from a memory address.
12386 This function calls handled_component_p to strip away all the inner
12387 parts of the memory reference until it reaches the base object. */
12390 get_base_address (tree t
)
12392 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12393 t
= TREE_OPERAND (t
, 0);
12394 while (handled_component_p (t
))
12395 t
= TREE_OPERAND (t
, 0);
12397 if ((TREE_CODE (t
) == MEM_REF
12398 || TREE_CODE (t
) == TARGET_MEM_REF
)
12399 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12400 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12405 /* Return a tree of sizetype representing the size, in bytes, of the element
12406 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12409 array_ref_element_size (tree exp
)
12411 tree aligned_size
= TREE_OPERAND (exp
, 3);
12412 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12413 location_t loc
= EXPR_LOCATION (exp
);
12415 /* If a size was specified in the ARRAY_REF, it's the size measured
12416 in alignment units of the element type. So multiply by that value. */
12419 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12420 sizetype from another type of the same width and signedness. */
12421 if (TREE_TYPE (aligned_size
) != sizetype
)
12422 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
12423 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
12424 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
12427 /* Otherwise, take the size from that of the element type. Substitute
12428 any PLACEHOLDER_EXPR that we have. */
12430 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
12433 /* Return a tree representing the lower bound of the array mentioned in
12434 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12437 array_ref_low_bound (tree exp
)
12439 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12441 /* If a lower bound is specified in EXP, use it. */
12442 if (TREE_OPERAND (exp
, 2))
12443 return TREE_OPERAND (exp
, 2);
12445 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12446 substituting for a PLACEHOLDER_EXPR as needed. */
12447 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12448 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
12450 /* Otherwise, return a zero of the appropriate type. */
12451 tree idxtype
= TREE_TYPE (TREE_OPERAND (exp
, 1));
12452 return (idxtype
== error_mark_node
12453 ? integer_zero_node
: build_int_cst (idxtype
, 0));
12456 /* Return a tree representing the upper bound of the array mentioned in
12457 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12460 array_ref_up_bound (tree exp
)
12462 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12464 /* If there is a domain type and it has an upper bound, use it, substituting
12465 for a PLACEHOLDER_EXPR as needed. */
12466 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
12467 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
12469 /* Otherwise fail. */
12473 /* Returns true if REF is an array reference, component reference,
12474 or memory reference to an array at the end of a structure.
12475 If this is the case, the array may be allocated larger
12476 than its upper bound implies. */
12479 array_at_struct_end_p (tree ref
)
12483 if (TREE_CODE (ref
) == ARRAY_REF
12484 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12486 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
12487 ref
= TREE_OPERAND (ref
, 0);
12489 else if (TREE_CODE (ref
) == COMPONENT_REF
12490 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
12491 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
12492 else if (TREE_CODE (ref
) == MEM_REF
)
12494 tree arg
= TREE_OPERAND (ref
, 0);
12495 if (TREE_CODE (arg
) == ADDR_EXPR
)
12496 arg
= TREE_OPERAND (arg
, 0);
12497 tree argtype
= TREE_TYPE (arg
);
12498 if (TREE_CODE (argtype
) == RECORD_TYPE
)
12500 if (tree fld
= last_field (argtype
))
12502 atype
= TREE_TYPE (fld
);
12503 if (TREE_CODE (atype
) != ARRAY_TYPE
)
12505 if (VAR_P (arg
) && DECL_SIZE (fld
))
12517 if (TREE_CODE (ref
) == STRING_CST
)
12520 tree ref_to_array
= ref
;
12521 while (handled_component_p (ref
))
12523 /* If the reference chain contains a component reference to a
12524 non-union type and there follows another field the reference
12525 is not at the end of a structure. */
12526 if (TREE_CODE (ref
) == COMPONENT_REF
)
12528 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
12530 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12531 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12532 nextf
= DECL_CHAIN (nextf
);
12537 /* If we have a multi-dimensional array we do not consider
12538 a non-innermost dimension as flex array if the whole
12539 multi-dimensional array is at struct end.
12540 Same for an array of aggregates with a trailing array
12542 else if (TREE_CODE (ref
) == ARRAY_REF
)
12544 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12546 /* If we view an underlying object as sth else then what we
12547 gathered up to now is what we have to rely on. */
12548 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
12551 gcc_unreachable ();
12553 ref
= TREE_OPERAND (ref
, 0);
12556 /* The array now is at struct end. Treat flexible arrays as
12557 always subject to extend, even into just padding constrained by
12558 an underlying decl. */
12559 if (! TYPE_SIZE (atype
)
12560 || ! TYPE_DOMAIN (atype
)
12561 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12564 /* If the reference is based on a declared entity, the size of the array
12565 is constrained by its given domain. (Do not trust commons PR/69368). */
12566 ref
= get_base_address (ref
);
12569 && !(flag_unconstrained_commons
12570 && VAR_P (ref
) && DECL_COMMON (ref
))
12571 && DECL_SIZE_UNIT (ref
)
12572 && TREE_CODE (DECL_SIZE_UNIT (ref
)) == INTEGER_CST
)
12574 /* Check whether the array domain covers all of the available
12577 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype
))) != INTEGER_CST
12578 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
12579 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
)
12581 if (! get_addr_base_and_unit_offset (ref_to_array
, &offset
))
12584 /* If at least one extra element fits it is a flexarray. */
12585 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12586 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
)))
12588 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype
))),
12589 wi::to_offset (DECL_SIZE_UNIT (ref
)) - offset
))
12598 /* Return a tree representing the offset, in bytes, of the field referenced
12599 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12602 component_ref_field_offset (tree exp
)
12604 tree aligned_offset
= TREE_OPERAND (exp
, 2);
12605 tree field
= TREE_OPERAND (exp
, 1);
12606 location_t loc
= EXPR_LOCATION (exp
);
12608 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12609 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12611 if (aligned_offset
)
12613 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12614 sizetype from another type of the same width and signedness. */
12615 if (TREE_TYPE (aligned_offset
) != sizetype
)
12616 aligned_offset
= fold_convert_loc (loc
, sizetype
, aligned_offset
);
12617 return size_binop_loc (loc
, MULT_EXPR
, aligned_offset
,
12618 size_int (DECL_OFFSET_ALIGN (field
)
12622 /* Otherwise, take the offset from that of the field. Substitute
12623 any PLACEHOLDER_EXPR that we have. */
12625 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
12628 /* Given the initializer INIT, return the initializer for the field
12629 DECL if it exists, otherwise null. Used to obtain the initializer
12630 for a flexible array member and determine its size. */
12633 get_initializer_for (tree init
, tree decl
)
12637 tree fld
, fld_init
;
12638 unsigned HOST_WIDE_INT i
;
12639 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), i
, fld
, fld_init
)
12644 if (TREE_CODE (fld
) == CONSTRUCTOR
)
12646 fld_init
= get_initializer_for (fld_init
, decl
);
12655 /* Determines the size of the member referenced by the COMPONENT_REF
12656 REF, using its initializer expression if necessary in order to
12657 determine the size of an initialized flexible array member.
12658 If non-null, set *ARK when REF refers to an interior zero-length
12659 array or a trailing one-element array.
12660 Returns the size as sizetype (which might be zero for an object
12661 with an uninitialized flexible array member) or null if the size
12662 cannot be determined. */
12665 component_ref_size (tree ref
, special_array_member
*sam
/* = NULL */)
12667 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
12669 special_array_member sambuf
;
12672 *sam
= special_array_member::none
;
12674 /* The object/argument referenced by the COMPONENT_REF and its type. */
12675 tree arg
= TREE_OPERAND (ref
, 0);
12676 tree argtype
= TREE_TYPE (arg
);
12677 /* The referenced member. */
12678 tree member
= TREE_OPERAND (ref
, 1);
12680 tree memsize
= DECL_SIZE_UNIT (member
);
12683 tree memtype
= TREE_TYPE (member
);
12684 if (TREE_CODE (memtype
) != ARRAY_TYPE
)
12685 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12686 to the type of a class with a virtual base which doesn't
12687 reflect the size of the virtual's members (see pr97595).
12688 If that's the case fail for now and implement something
12689 more robust in the future. */
12690 return (tree_int_cst_equal (memsize
, TYPE_SIZE_UNIT (memtype
))
12691 ? memsize
: NULL_TREE
);
12693 bool trailing
= array_at_struct_end_p (ref
);
12694 bool zero_length
= integer_zerop (memsize
);
12695 if (!trailing
&& !zero_length
)
12696 /* MEMBER is either an interior array or is an array with
12697 more than one element. */
12703 *sam
= special_array_member::trail_0
;
12706 *sam
= special_array_member::int_0
;
12707 memsize
= NULL_TREE
;
12712 if (tree dom
= TYPE_DOMAIN (memtype
))
12713 if (tree min
= TYPE_MIN_VALUE (dom
))
12714 if (tree max
= TYPE_MAX_VALUE (dom
))
12715 if (TREE_CODE (min
) == INTEGER_CST
12716 && TREE_CODE (max
) == INTEGER_CST
)
12718 offset_int minidx
= wi::to_offset (min
);
12719 offset_int maxidx
= wi::to_offset (max
);
12720 offset_int neltsm1
= maxidx
- minidx
;
12722 /* MEMBER is an array with more than one element. */
12726 *sam
= special_array_member::trail_1
;
12729 /* For a reference to a zero- or one-element array member of a union
12730 use the size of the union instead of the size of the member. */
12731 if (TREE_CODE (argtype
) == UNION_TYPE
)
12732 memsize
= TYPE_SIZE_UNIT (argtype
);
12735 /* MEMBER is either a bona fide flexible array member, or a zero-length
12736 array member, or an array of length one treated as such. */
12738 /* If the reference is to a declared object and the member a true
12739 flexible array, try to determine its size from its initializer. */
12740 poly_int64 baseoff
= 0;
12741 tree base
= get_addr_base_and_unit_offset (ref
, &baseoff
);
12742 if (!base
|| !VAR_P (base
))
12744 if (*sam
!= special_array_member::int_0
)
12747 if (TREE_CODE (arg
) != COMPONENT_REF
)
12751 while (TREE_CODE (base
) == COMPONENT_REF
)
12752 base
= TREE_OPERAND (base
, 0);
12753 baseoff
= tree_to_poly_int64 (byte_position (TREE_OPERAND (ref
, 1)));
12756 /* BASE is the declared object of which MEMBER is either a member
12757 or that is cast to ARGTYPE (e.g., a char buffer used to store
12758 an ARGTYPE object). */
12759 tree basetype
= TREE_TYPE (base
);
12761 /* Determine the base type of the referenced object. If it's
12762 the same as ARGTYPE and MEMBER has a known size, return it. */
12763 tree bt
= basetype
;
12764 if (*sam
!= special_array_member::int_0
)
12765 while (TREE_CODE (bt
) == ARRAY_TYPE
)
12766 bt
= TREE_TYPE (bt
);
12767 bool typematch
= useless_type_conversion_p (argtype
, bt
);
12768 if (memsize
&& typematch
)
12771 memsize
= NULL_TREE
;
12774 /* MEMBER is a true flexible array member. Compute its size from
12775 the initializer of the BASE object if it has one. */
12776 if (tree init
= DECL_P (base
) ? DECL_INITIAL (base
) : NULL_TREE
)
12777 if (init
!= error_mark_node
)
12779 init
= get_initializer_for (init
, member
);
12782 memsize
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
12783 if (tree refsize
= TYPE_SIZE_UNIT (argtype
))
12785 /* Use the larger of the initializer size and the tail
12786 padding in the enclosing struct. */
12787 poly_int64 rsz
= tree_to_poly_int64 (refsize
);
12789 if (known_lt (tree_to_poly_int64 (memsize
), rsz
))
12790 memsize
= wide_int_to_tree (TREE_TYPE (memsize
), rsz
);
12802 && DECL_EXTERNAL (base
)
12804 && *sam
!= special_array_member::int_0
)
12805 /* The size of a flexible array member of an extern struct
12806 with no initializer cannot be determined (it's defined
12807 in another translation unit and can have an initializer
12808 with an arbitrary number of elements). */
12811 /* Use the size of the base struct or, for interior zero-length
12812 arrays, the size of the enclosing type. */
12813 memsize
= TYPE_SIZE_UNIT (bt
);
12815 else if (DECL_P (base
))
12816 /* Use the size of the BASE object (possibly an array of some
12817 other type such as char used to store the struct). */
12818 memsize
= DECL_SIZE_UNIT (base
);
12823 /* If the flexible array member has a known size use the greater
12824 of it and the tail padding in the enclosing struct.
12825 Otherwise, when the size of the flexible array member is unknown
12826 and the referenced object is not a struct, use the size of its
12827 type when known. This detects sizes of array buffers when cast
12828 to struct types with flexible array members. */
12831 poly_int64 memsz64
= memsize
? tree_to_poly_int64 (memsize
) : 0;
12832 if (known_lt (baseoff
, memsz64
))
12834 memsz64
-= baseoff
;
12835 return wide_int_to_tree (TREE_TYPE (memsize
), memsz64
);
12837 return size_zero_node
;
12840 /* Return "don't know" for an external non-array object since its
12841 flexible array member can be initialized to have any number of
12842 elements. Otherwise, return zero because the flexible array
12843 member has no elements. */
12844 return (DECL_P (base
)
12845 && DECL_EXTERNAL (base
)
12847 || TREE_CODE (basetype
) != ARRAY_TYPE
)
12848 ? NULL_TREE
: size_zero_node
);
12851 /* Return the machine mode of T. For vectors, returns the mode of the
12852 inner type. The main use case is to feed the result to HONOR_NANS,
12853 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12856 element_mode (const_tree t
)
12860 if (VECTOR_TYPE_P (t
) || TREE_CODE (t
) == COMPLEX_TYPE
)
12862 return TYPE_MODE (t
);
12865 /* Vector types need to re-check the target flags each time we report
12866 the machine mode. We need to do this because attribute target can
12867 change the result of vector_mode_supported_p and have_regs_of_mode
12868 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12869 change on a per-function basis. */
12870 /* ??? Possibly a better solution is to run through all the types
12871 referenced by a function and re-compute the TYPE_MODE once, rather
12872 than make the TYPE_MODE macro call a function. */
12875 vector_type_mode (const_tree t
)
12879 gcc_assert (TREE_CODE (t
) == VECTOR_TYPE
);
12881 mode
= t
->type_common
.mode
;
12882 if (VECTOR_MODE_P (mode
)
12883 && (!targetm
.vector_mode_supported_p (mode
)
12884 || !have_regs_of_mode
[mode
]))
12886 scalar_int_mode innermode
;
12888 /* For integers, try mapping it to a same-sized scalar mode. */
12889 if (is_int_mode (TREE_TYPE (t
)->type_common
.mode
, &innermode
))
12891 poly_int64 size
= (TYPE_VECTOR_SUBPARTS (t
)
12892 * GET_MODE_BITSIZE (innermode
));
12893 scalar_int_mode mode
;
12894 if (int_mode_for_size (size
, 0).exists (&mode
)
12895 && have_regs_of_mode
[mode
])
12905 /* Return the size in bits of each element of vector type TYPE. */
12908 vector_element_bits (const_tree type
)
12910 gcc_checking_assert (VECTOR_TYPE_P (type
));
12911 if (VECTOR_BOOLEAN_TYPE_P (type
))
12912 return TYPE_PRECISION (TREE_TYPE (type
));
12913 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type
)));
12916 /* Calculate the size in bits of each element of vector type TYPE
12917 and return the result as a tree of type bitsizetype. */
12920 vector_element_bits_tree (const_tree type
)
12922 gcc_checking_assert (VECTOR_TYPE_P (type
));
12923 if (VECTOR_BOOLEAN_TYPE_P (type
))
12924 return bitsize_int (vector_element_bits (type
));
12925 return TYPE_SIZE (TREE_TYPE (type
));
12928 /* Verify that basic properties of T match TV and thus T can be a variant of
12929 TV. TV should be the more specified variant (i.e. the main variant). */
12932 verify_type_variant (const_tree t
, tree tv
)
12934 /* Type variant can differ by:
12936 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12937 ENCODE_QUAL_ADDR_SPACE.
12938 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12939 in this case some values may not be set in the variant types
12940 (see TYPE_COMPLETE_P checks).
12941 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
12942 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
12943 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12944 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12945 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12946 this is necessary to make it possible to merge types form different TUs
12947 - arrays, pointers and references may have TREE_TYPE that is a variant
12948 of TREE_TYPE of their main variants.
12949 - aggregates may have new TYPE_FIELDS list that list variants of
12950 the main variant TYPE_FIELDS.
12951 - vector types may differ by TYPE_VECTOR_OPAQUE
12954 /* Convenience macro for matching individual fields. */
12955 #define verify_variant_match(flag) \
12957 if (flag (tv) != flag (t)) \
12959 error ("type variant differs by %s", #flag); \
12965 /* tree_base checks. */
12967 verify_variant_match (TREE_CODE
);
12968 /* FIXME: Ada builds non-artificial variants of artificial types. */
12970 if (TYPE_ARTIFICIAL (tv
))
12971 verify_variant_match (TYPE_ARTIFICIAL
);
12973 if (POINTER_TYPE_P (tv
))
12974 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
12975 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
12976 verify_variant_match (TYPE_UNSIGNED
);
12977 verify_variant_match (TYPE_PACKED
);
12978 if (TREE_CODE (t
) == REFERENCE_TYPE
)
12979 verify_variant_match (TYPE_REF_IS_RVALUE
);
12980 if (AGGREGATE_TYPE_P (t
))
12981 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
12983 verify_variant_match (TYPE_SATURATING
);
12984 /* FIXME: This check trigger during libstdc++ build. */
12986 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
))
12987 verify_variant_match (TYPE_FINAL_P
);
12990 /* tree_type_common checks. */
12992 if (COMPLETE_TYPE_P (t
))
12994 verify_variant_match (TYPE_MODE
);
12995 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
12996 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
12997 verify_variant_match (TYPE_SIZE
);
12998 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
12999 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
13000 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
13002 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
13003 TYPE_SIZE_UNIT (tv
), 0));
13004 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13006 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13007 debug_tree (TYPE_SIZE_UNIT (tv
));
13008 error ("type%'s %<TYPE_SIZE_UNIT%>");
13009 debug_tree (TYPE_SIZE_UNIT (t
));
13012 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13014 verify_variant_match (TYPE_PRECISION
);
13015 if (RECORD_OR_UNION_TYPE_P (t
))
13016 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13017 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13018 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
13019 /* During LTO we merge variant lists from diferent translation units
13020 that may differ BY TYPE_CONTEXT that in turn may point
13021 to TRANSLATION_UNIT_DECL.
13022 Ada also builds variants of types with different TYPE_CONTEXT. */
13024 if (!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
))
13025 verify_variant_match (TYPE_CONTEXT
);
13027 if (TREE_CODE (t
) == ARRAY_TYPE
|| TREE_CODE (t
) == INTEGER_TYPE
)
13028 verify_variant_match (TYPE_STRING_FLAG
);
13029 if (TREE_CODE (t
) == RECORD_TYPE
|| TREE_CODE (t
) == UNION_TYPE
)
13030 verify_variant_match (TYPE_CXX_ODR_P
);
13031 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13033 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13038 /* tree_type_non_common checks. */
13040 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13041 and dangle the pointer from time to time. */
13042 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13043 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13044 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13046 error ("type variant has different %<TYPE_VFIELD%>");
13050 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13051 || TREE_CODE (t
) == INTEGER_TYPE
13052 || TREE_CODE (t
) == BOOLEAN_TYPE
13053 || TREE_CODE (t
) == REAL_TYPE
13054 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13056 verify_variant_match (TYPE_MAX_VALUE
);
13057 verify_variant_match (TYPE_MIN_VALUE
);
13059 if (TREE_CODE (t
) == METHOD_TYPE
)
13060 verify_variant_match (TYPE_METHOD_BASETYPE
);
13061 if (TREE_CODE (t
) == OFFSET_TYPE
)
13062 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13063 if (TREE_CODE (t
) == ARRAY_TYPE
)
13064 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
13065 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13066 or even type's main variant. This is needed to make bootstrap pass
13067 and the bug seems new in GCC 5.
13068 C++ FE should be updated to make this consistent and we should check
13069 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13070 is a match with main variant.
13072 Also disable the check for Java for now because of parser hack that builds
13073 first an dummy BINFO and then sometimes replace it by real BINFO in some
13075 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13076 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
13077 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13078 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13079 at LTO time only. */
13080 && (in_lto_p
&& odr_type_p (t
)))
13082 error ("type variant has different %<TYPE_BINFO%>");
13084 error ("type variant%'s %<TYPE_BINFO%>");
13085 debug_tree (TYPE_BINFO (tv
));
13086 error ("type%'s %<TYPE_BINFO%>");
13087 debug_tree (TYPE_BINFO (t
));
13091 /* Check various uses of TYPE_VALUES_RAW. */
13092 if (TREE_CODE (t
) == ENUMERAL_TYPE
13093 && TYPE_VALUES (t
))
13094 verify_variant_match (TYPE_VALUES
);
13095 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13096 verify_variant_match (TYPE_DOMAIN
);
13097 /* Permit incomplete variants of complete type. While FEs may complete
13098 all variants, this does not happen for C++ templates in all cases. */
13099 else if (RECORD_OR_UNION_TYPE_P (t
)
13100 && COMPLETE_TYPE_P (t
)
13101 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
13105 /* Fortran builds qualified variants as new records with items of
13106 qualified type. Verify that they looks same. */
13107 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
13109 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13110 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
13111 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
13112 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
13113 /* FIXME: gfc_nonrestricted_type builds all types as variants
13114 with exception of pointer types. It deeply copies the type
13115 which means that we may end up with a variant type
13116 referring non-variant pointer. We may change it to
13117 produce types as variants, too, like
13118 objc_get_protocol_qualified_type does. */
13119 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
13120 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
13121 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
13125 error ("type variant has different %<TYPE_FIELDS%>");
13127 error ("first mismatch is field");
13129 error ("and field");
13134 else if ((TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
))
13135 verify_variant_match (TYPE_ARG_TYPES
);
13136 /* For C++ the qualified variant of array type is really an array type
13137 of qualified TREE_TYPE.
13138 objc builds variants of pointer where pointer to type is a variant, too
13139 in objc_get_protocol_qualified_type. */
13140 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
13141 && ((TREE_CODE (t
) != ARRAY_TYPE
13142 && !POINTER_TYPE_P (t
))
13143 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
13144 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
13146 error ("type variant has different %<TREE_TYPE%>");
13148 error ("type variant%'s %<TREE_TYPE%>");
13149 debug_tree (TREE_TYPE (tv
));
13150 error ("type%'s %<TREE_TYPE%>");
13151 debug_tree (TREE_TYPE (t
));
13154 if (type_with_alias_set_p (t
)
13155 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13157 error ("type is not compatible with its variant");
13159 error ("type variant%'s %<TREE_TYPE%>");
13160 debug_tree (TREE_TYPE (tv
));
13161 error ("type%'s %<TREE_TYPE%>");
13162 debug_tree (TREE_TYPE (t
));
13166 #undef verify_variant_match
13170 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13171 the middle-end types_compatible_p function. It needs to avoid
13172 claiming types are different for types that should be treated
13173 the same with respect to TBAA. Canonical types are also used
13174 for IL consistency checks via the useless_type_conversion_p
13175 predicate which does not handle all type kinds itself but falls
13176 back to pointer-comparison of TYPE_CANONICAL for aggregates
13179 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13180 type calculation because we need to allow inter-operability between signed
13181 and unsigned variants. */
13184 type_with_interoperable_signedness (const_tree type
)
13186 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13187 signed char and unsigned char. Similarly fortran FE builds
13188 C_SIZE_T as signed type, while C defines it unsigned. */
13190 return tree_code_for_canonical_type_merging (TREE_CODE (type
))
13192 && (TYPE_PRECISION (type
) == TYPE_PRECISION (signed_char_type_node
)
13193 || TYPE_PRECISION (type
) == TYPE_PRECISION (size_type_node
));
13196 /* Return true iff T1 and T2 are structurally identical for what
13198 This function is used both by lto.c canonical type merging and by the
13199 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13200 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13201 only for LTO because only in these cases TYPE_CANONICAL equivalence
13202 correspond to one defined by gimple_canonical_types_compatible_p. */
13205 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13206 bool trust_type_canonical
)
13208 /* Type variants should be same as the main variant. When not doing sanity
13209 checking to verify this fact, go to main variants and save some work. */
13210 if (trust_type_canonical
)
13212 t1
= TYPE_MAIN_VARIANT (t1
);
13213 t2
= TYPE_MAIN_VARIANT (t2
);
13216 /* Check first for the obvious case of pointer identity. */
13220 /* Check that we have two types to compare. */
13221 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13224 /* We consider complete types always compatible with incomplete type.
13225 This does not make sense for canonical type calculation and thus we
13226 need to ensure that we are never called on it.
13228 FIXME: For more correctness the function probably should have three modes
13229 1) mode assuming that types are complete mathcing their structure
13230 2) mode allowing incomplete types but producing equivalence classes
13231 and thus ignoring all info from complete types
13232 3) mode allowing incomplete types to match complete but checking
13233 compatibility between complete types.
13235 1 and 2 can be used for canonical type calculation. 3 is the real
13236 definition of type compatibility that can be used i.e. for warnings during
13237 declaration merging. */
13239 gcc_assert (!trust_type_canonical
13240 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13242 /* If the types have been previously registered and found equal
13245 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13246 && trust_type_canonical
)
13248 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13249 they are always NULL, but they are set to non-NULL for types
13250 constructed by build_pointer_type and variants. In this case the
13251 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13252 all pointers are considered equal. Be sure to not return false
13254 gcc_checking_assert (canonical_type_used_p (t1
)
13255 && canonical_type_used_p (t2
));
13256 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
13259 /* For types where we do ODR based TBAA the canonical type is always
13260 set correctly, so we know that types are different if their
13261 canonical types does not match. */
13262 if (trust_type_canonical
13263 && (odr_type_p (t1
) && odr_based_tbaa_p (t1
))
13264 != (odr_type_p (t2
) && odr_based_tbaa_p (t2
)))
13267 /* Can't be the same type if the types don't have the same code. */
13268 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13269 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13272 /* Qualifiers do not matter for canonical type comparison purposes. */
13274 /* Void types and nullptr types are always the same. */
13275 if (TREE_CODE (t1
) == VOID_TYPE
13276 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13279 /* Can't be the same type if they have different mode. */
13280 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13283 /* Non-aggregate types can be handled cheaply. */
13284 if (INTEGRAL_TYPE_P (t1
)
13285 || SCALAR_FLOAT_TYPE_P (t1
)
13286 || FIXED_POINT_TYPE_P (t1
)
13287 || TREE_CODE (t1
) == VECTOR_TYPE
13288 || TREE_CODE (t1
) == COMPLEX_TYPE
13289 || TREE_CODE (t1
) == OFFSET_TYPE
13290 || POINTER_TYPE_P (t1
))
13292 /* Can't be the same type if they have different recision. */
13293 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
13296 /* In some cases the signed and unsigned types are required to be
13298 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13299 && !type_with_interoperable_signedness (t1
))
13302 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13303 interoperable with "signed char". Unless all frontends are revisited
13304 to agree on these types, we must ignore the flag completely. */
13306 /* Fortran standard define C_PTR type that is compatible with every
13307 C pointer. For this reason we need to glob all pointers into one.
13308 Still pointers in different address spaces are not compatible. */
13309 if (POINTER_TYPE_P (t1
))
13311 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
13312 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
13316 /* Tail-recurse to components. */
13317 if (TREE_CODE (t1
) == VECTOR_TYPE
13318 || TREE_CODE (t1
) == COMPLEX_TYPE
)
13319 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
13321 trust_type_canonical
);
13326 /* Do type-specific comparisons. */
13327 switch (TREE_CODE (t1
))
13330 /* Array types are the same if the element types are the same and
13331 the number of elements are the same. */
13332 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13333 trust_type_canonical
)
13334 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
13335 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
13336 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
13340 tree i1
= TYPE_DOMAIN (t1
);
13341 tree i2
= TYPE_DOMAIN (t2
);
13343 /* For an incomplete external array, the type domain can be
13344 NULL_TREE. Check this condition also. */
13345 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
13347 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
13351 tree min1
= TYPE_MIN_VALUE (i1
);
13352 tree min2
= TYPE_MIN_VALUE (i2
);
13353 tree max1
= TYPE_MAX_VALUE (i1
);
13354 tree max2
= TYPE_MAX_VALUE (i2
);
13356 /* The minimum/maximum values have to be the same. */
13359 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
13360 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
13361 || operand_equal_p (min1
, min2
, 0))))
13364 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
13365 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
13366 || operand_equal_p (max1
, max2
, 0)))))
13374 case FUNCTION_TYPE
:
13375 /* Function types are the same if the return type and arguments types
13377 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13378 trust_type_canonical
))
13381 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
13385 tree parms1
, parms2
;
13387 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
13389 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
13391 if (!gimple_canonical_types_compatible_p
13392 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
13393 trust_type_canonical
))
13397 if (parms1
|| parms2
)
13405 case QUAL_UNION_TYPE
:
13409 /* Don't try to compare variants of an incomplete type, before
13410 TYPE_FIELDS has been copied around. */
13411 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
13415 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
13418 /* For aggregate types, all the fields must be the same. */
13419 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
13421 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13423 /* Skip non-fields and zero-sized fields. */
13424 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
13426 && integer_zerop (DECL_SIZE (f1
)))))
13427 f1
= TREE_CHAIN (f1
);
13428 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
13430 && integer_zerop (DECL_SIZE (f2
)))))
13431 f2
= TREE_CHAIN (f2
);
13434 /* The fields must have the same name, offset and type. */
13435 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
13436 || !gimple_compare_field_offset (f1
, f2
)
13437 || !gimple_canonical_types_compatible_p
13438 (TREE_TYPE (f1
), TREE_TYPE (f2
),
13439 trust_type_canonical
))
13443 /* If one aggregate has more fields than the other, they
13444 are not the same. */
13452 /* Consider all types with language specific trees in them mutually
13453 compatible. This is executed only from verify_type and false
13454 positives can be tolerated. */
13455 gcc_assert (!in_lto_p
);
13460 /* Verify type T. */
13463 verify_type (const_tree t
)
13465 bool error_found
= false;
13466 tree mv
= TYPE_MAIN_VARIANT (t
);
13469 error ("main variant is not defined");
13470 error_found
= true;
13472 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
13474 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13476 error_found
= true;
13478 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
13479 error_found
= true;
13481 tree ct
= TYPE_CANONICAL (t
);
13484 else if (TYPE_CANONICAL (t
) != ct
)
13486 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13488 error_found
= true;
13490 /* Method and function types cannot be used to address memory and thus
13491 TYPE_CANONICAL really matters only for determining useless conversions.
13493 FIXME: C++ FE produce declarations of builtin functions that are not
13494 compatible with main variants. */
13495 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
13498 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13499 with variably sized arrays because their sizes possibly
13500 gimplified to different variables. */
13501 && !variably_modified_type_p (ct
, NULL
)
13502 && !gimple_canonical_types_compatible_p (t
, ct
, false)
13503 && COMPLETE_TYPE_P (t
))
13505 error ("%<TYPE_CANONICAL%> is not compatible");
13507 error_found
= true;
13510 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
13511 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
13513 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13515 error_found
= true;
13517 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
13519 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13521 debug_tree (TYPE_MAIN_VARIANT (ct
));
13522 error_found
= true;
13526 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13527 if (RECORD_OR_UNION_TYPE_P (t
))
13529 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13530 and danagle the pointer from time to time. */
13531 if (TYPE_VFIELD (t
)
13532 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
13533 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
13535 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13536 debug_tree (TYPE_VFIELD (t
));
13537 error_found
= true;
13540 else if (TREE_CODE (t
) == POINTER_TYPE
)
13542 if (TYPE_NEXT_PTR_TO (t
)
13543 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
13545 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13546 debug_tree (TYPE_NEXT_PTR_TO (t
));
13547 error_found
= true;
13550 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
13552 if (TYPE_NEXT_REF_TO (t
)
13553 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
13555 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13556 debug_tree (TYPE_NEXT_REF_TO (t
));
13557 error_found
= true;
13560 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13561 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13563 /* FIXME: The following check should pass:
13564 useless_type_conversion_p (const_cast <tree> (t),
13565 TREE_TYPE (TYPE_MIN_VALUE (t))
13566 but does not for C sizetypes in LTO. */
13569 /* Check various uses of TYPE_MAXVAL_RAW. */
13570 if (RECORD_OR_UNION_TYPE_P (t
))
13572 if (!TYPE_BINFO (t
))
13574 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
13576 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13577 debug_tree (TYPE_BINFO (t
));
13578 error_found
= true;
13580 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
13582 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13583 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
13584 error_found
= true;
13587 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13589 if (TYPE_METHOD_BASETYPE (t
)
13590 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
13591 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
13593 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13594 debug_tree (TYPE_METHOD_BASETYPE (t
));
13595 error_found
= true;
13598 else if (TREE_CODE (t
) == OFFSET_TYPE
)
13600 if (TYPE_OFFSET_BASETYPE (t
)
13601 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
13602 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
13604 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13605 debug_tree (TYPE_OFFSET_BASETYPE (t
));
13606 error_found
= true;
13609 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13610 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13612 /* FIXME: The following check should pass:
13613 useless_type_conversion_p (const_cast <tree> (t),
13614 TREE_TYPE (TYPE_MAX_VALUE (t))
13615 but does not for C sizetypes in LTO. */
13617 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13619 if (TYPE_ARRAY_MAX_SIZE (t
)
13620 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
13622 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13623 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
13624 error_found
= true;
13627 else if (TYPE_MAX_VALUE_RAW (t
))
13629 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13630 debug_tree (TYPE_MAX_VALUE_RAW (t
));
13631 error_found
= true;
13634 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
13636 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13637 debug_tree (TYPE_LANG_SLOT_1 (t
));
13638 error_found
= true;
13641 /* Check various uses of TYPE_VALUES_RAW. */
13642 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
13643 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
13645 tree value
= TREE_VALUE (l
);
13646 tree name
= TREE_PURPOSE (l
);
13648 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13649 CONST_DECL of ENUMERAL TYPE. */
13650 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
13652 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13653 debug_tree (value
);
13655 error_found
= true;
13657 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
13658 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
13660 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13662 debug_tree (value
);
13664 error_found
= true;
13666 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
13668 error ("enum value name is not %<IDENTIFIER_NODE%>");
13669 debug_tree (value
);
13671 error_found
= true;
13674 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13676 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
13678 error ("array %<TYPE_DOMAIN%> is not integer type");
13679 debug_tree (TYPE_DOMAIN (t
));
13680 error_found
= true;
13683 else if (RECORD_OR_UNION_TYPE_P (t
))
13685 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
13687 error ("%<TYPE_FIELDS%> defined in incomplete type");
13688 error_found
= true;
13690 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
13692 /* TODO: verify properties of decls. */
13693 if (TREE_CODE (fld
) == FIELD_DECL
)
13695 else if (TREE_CODE (fld
) == TYPE_DECL
)
13697 else if (TREE_CODE (fld
) == CONST_DECL
)
13699 else if (VAR_P (fld
))
13701 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
13703 else if (TREE_CODE (fld
) == USING_DECL
)
13705 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
13709 error ("wrong tree in %<TYPE_FIELDS%> list");
13711 error_found
= true;
13715 else if (TREE_CODE (t
) == INTEGER_TYPE
13716 || TREE_CODE (t
) == BOOLEAN_TYPE
13717 || TREE_CODE (t
) == OFFSET_TYPE
13718 || TREE_CODE (t
) == REFERENCE_TYPE
13719 || TREE_CODE (t
) == NULLPTR_TYPE
13720 || TREE_CODE (t
) == POINTER_TYPE
)
13722 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
13724 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13726 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
13727 error_found
= true;
13729 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
13731 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13732 debug_tree (TYPE_CACHED_VALUES (t
));
13733 error_found
= true;
13735 /* Verify just enough of cache to ensure that no one copied it to new type.
13736 All copying should go by copy_node that should clear it. */
13737 else if (TYPE_CACHED_VALUES_P (t
))
13740 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
13741 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
13742 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
13744 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13745 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
13746 error_found
= true;
13751 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13752 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
13754 /* C++ FE uses TREE_PURPOSE to store initial values. */
13755 if (TREE_PURPOSE (l
) && in_lto_p
)
13757 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13759 error_found
= true;
13761 if (!TYPE_P (TREE_VALUE (l
)))
13763 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13765 error_found
= true;
13768 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
13770 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13771 debug_tree (TYPE_VALUES_RAW (t
));
13772 error_found
= true;
13774 if (TREE_CODE (t
) != INTEGER_TYPE
13775 && TREE_CODE (t
) != BOOLEAN_TYPE
13776 && TREE_CODE (t
) != OFFSET_TYPE
13777 && TREE_CODE (t
) != REFERENCE_TYPE
13778 && TREE_CODE (t
) != NULLPTR_TYPE
13779 && TREE_CODE (t
) != POINTER_TYPE
13780 && TYPE_CACHED_VALUES_P (t
))
13782 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13783 error_found
= true;
13786 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13787 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13789 if (TREE_CODE (t
) == METHOD_TYPE
13790 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
13792 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13793 error_found
= true;
13798 debug_tree (const_cast <tree
> (t
));
13799 internal_error ("%qs failed", __func__
);
13804 /* Return 1 if ARG interpreted as signed in its precision is known to be
13805 always positive or 2 if ARG is known to be always negative, or 3 if
13806 ARG may be positive or negative. */
13809 get_range_pos_neg (tree arg
)
13811 if (arg
== error_mark_node
)
13814 int prec
= TYPE_PRECISION (TREE_TYPE (arg
));
13816 if (TREE_CODE (arg
) == INTEGER_CST
)
13818 wide_int w
= wi::sext (wi::to_wide (arg
), prec
);
13824 while (CONVERT_EXPR_P (arg
)
13825 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
13826 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg
, 0))) <= prec
)
13828 arg
= TREE_OPERAND (arg
, 0);
13829 /* Narrower value zero extended into wider type
13830 will always result in positive values. */
13831 if (TYPE_UNSIGNED (TREE_TYPE (arg
))
13832 && TYPE_PRECISION (TREE_TYPE (arg
)) < prec
)
13834 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
13839 if (TREE_CODE (arg
) != SSA_NAME
)
13842 while (!get_global_range_query ()->range_of_expr (r
, arg
) || r
.kind () != VR_RANGE
)
13844 gimple
*g
= SSA_NAME_DEF_STMT (arg
);
13845 if (is_gimple_assign (g
)
13846 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g
)))
13848 tree t
= gimple_assign_rhs1 (g
);
13849 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
13850 && TYPE_PRECISION (TREE_TYPE (t
)) <= prec
)
13852 if (TYPE_UNSIGNED (TREE_TYPE (t
))
13853 && TYPE_PRECISION (TREE_TYPE (t
)) < prec
)
13855 prec
= TYPE_PRECISION (TREE_TYPE (t
));
13864 if (TYPE_UNSIGNED (TREE_TYPE (arg
)))
13866 /* For unsigned values, the "positive" range comes
13867 below the "negative" range. */
13868 if (!wi::neg_p (wi::sext (r
.upper_bound (), prec
), SIGNED
))
13870 if (wi::neg_p (wi::sext (r
.lower_bound (), prec
), SIGNED
))
13875 if (!wi::neg_p (wi::sext (r
.lower_bound (), prec
), SIGNED
))
13877 if (wi::neg_p (wi::sext (r
.upper_bound (), prec
), SIGNED
))
13886 /* Return true if ARG is marked with the nonnull attribute in the
13887 current function signature. */
13890 nonnull_arg_p (const_tree arg
)
13892 tree t
, attrs
, fntype
;
13893 unsigned HOST_WIDE_INT arg_num
;
13895 gcc_assert (TREE_CODE (arg
) == PARM_DECL
13896 && (POINTER_TYPE_P (TREE_TYPE (arg
))
13897 || TREE_CODE (TREE_TYPE (arg
)) == OFFSET_TYPE
));
13899 /* The static chain decl is always non null. */
13900 if (arg
== cfun
->static_chain_decl
)
13903 /* THIS argument of method is always non-NULL. */
13904 if (TREE_CODE (TREE_TYPE (cfun
->decl
)) == METHOD_TYPE
13905 && arg
== DECL_ARGUMENTS (cfun
->decl
)
13906 && flag_delete_null_pointer_checks
)
13909 /* Values passed by reference are always non-NULL. */
13910 if (TREE_CODE (TREE_TYPE (arg
)) == REFERENCE_TYPE
13911 && flag_delete_null_pointer_checks
)
13914 fntype
= TREE_TYPE (cfun
->decl
);
13915 for (attrs
= TYPE_ATTRIBUTES (fntype
); attrs
; attrs
= TREE_CHAIN (attrs
))
13917 attrs
= lookup_attribute ("nonnull", attrs
);
13919 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13920 if (attrs
== NULL_TREE
)
13923 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13924 if (TREE_VALUE (attrs
) == NULL_TREE
)
13927 /* Get the position number for ARG in the function signature. */
13928 for (arg_num
= 1, t
= DECL_ARGUMENTS (cfun
->decl
);
13930 t
= DECL_CHAIN (t
), arg_num
++)
13936 gcc_assert (t
== arg
);
13938 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13939 for (t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
13941 if (compare_tree_int (TREE_VALUE (t
), arg_num
) == 0)
13949 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13953 set_block (location_t loc
, tree block
)
13955 location_t pure_loc
= get_pure_location (loc
);
13956 source_range src_range
= get_range_from_loc (line_table
, loc
);
13957 return COMBINE_LOCATION_DATA (line_table
, pure_loc
, src_range
, block
);
13961 set_source_range (tree expr
, location_t start
, location_t finish
)
13963 source_range src_range
;
13964 src_range
.m_start
= start
;
13965 src_range
.m_finish
= finish
;
13966 return set_source_range (expr
, src_range
);
13970 set_source_range (tree expr
, source_range src_range
)
13972 if (!EXPR_P (expr
))
13973 return UNKNOWN_LOCATION
;
13975 location_t pure_loc
= get_pure_location (EXPR_LOCATION (expr
));
13976 location_t adhoc
= COMBINE_LOCATION_DATA (line_table
,
13980 SET_EXPR_LOCATION (expr
, adhoc
);
13984 /* Return EXPR, potentially wrapped with a node expression LOC,
13985 if !CAN_HAVE_LOCATION_P (expr).
13987 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
13988 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
13990 Wrapper nodes can be identified using location_wrapper_p. */
13993 maybe_wrap_with_location (tree expr
, location_t loc
)
13997 if (loc
== UNKNOWN_LOCATION
)
13999 if (CAN_HAVE_LOCATION_P (expr
))
14001 /* We should only be adding wrappers for constants and for decls,
14002 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14003 gcc_assert (CONSTANT_CLASS_P (expr
)
14005 || EXCEPTIONAL_CLASS_P (expr
));
14007 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14008 any impact of the wrapper nodes. */
14009 if (EXCEPTIONAL_CLASS_P (expr
))
14012 /* Compiler-generated temporary variables don't need a wrapper. */
14013 if (DECL_P (expr
) && DECL_ARTIFICIAL (expr
) && DECL_IGNORED_P (expr
))
14016 /* If any auto_suppress_location_wrappers are active, don't create
14018 if (suppress_location_wrappers
> 0)
14022 = (((CONSTANT_CLASS_P (expr
) && TREE_CODE (expr
) != STRING_CST
)
14023 || (TREE_CODE (expr
) == CONST_DECL
&& !TREE_STATIC (expr
)))
14024 ? NON_LVALUE_EXPR
: VIEW_CONVERT_EXPR
);
14025 tree wrapper
= build1_loc (loc
, code
, TREE_TYPE (expr
), expr
);
14026 /* Mark this node as being a wrapper. */
14027 EXPR_LOCATION_WRAPPER_P (wrapper
) = 1;
/* While this counter is non-zero, maybe_wrap_with_location returns EXPR
   without creating a wrapper node.  NOTE(review): presumably incremented
   and decremented by auto_suppress_location_wrappers (declared in
   tree.h) -- confirm against the header.  */
int suppress_location_wrappers;
14033 /* Return the name of combined function FN, for debugging purposes. */
14036 combined_fn_name (combined_fn fn
)
14038 if (builtin_fn_p (fn
))
14040 tree fndecl
= builtin_decl_explicit (as_builtin_fn (fn
));
14041 return IDENTIFIER_POINTER (DECL_NAME (fndecl
));
14044 return internal_fn_name (as_internal_fn (fn
));
14047 /* Return a bitmap with a bit set corresponding to each argument in
14048 a function call type FNTYPE declared with attribute nonnull,
14049 or null if none of the function's argument are nonnull. The caller
14050 must free the bitmap. */
14053 get_nonnull_args (const_tree fntype
)
14055 if (fntype
== NULL_TREE
)
14058 bitmap argmap
= NULL
;
14059 if (TREE_CODE (fntype
) == METHOD_TYPE
)
14061 /* The this pointer in C++ non-static member functions is
14062 implicitly nonnull whether or not it's declared as such. */
14063 argmap
= BITMAP_ALLOC (NULL
);
14064 bitmap_set_bit (argmap
, 0);
14067 tree attrs
= TYPE_ATTRIBUTES (fntype
);
14071 /* A function declaration can specify multiple attribute nonnull,
14072 each with zero or more arguments. The loop below creates a bitmap
14073 representing a union of all the arguments. An empty (but non-null)
14074 bitmap means that all arguments have been declaraed nonnull. */
14075 for ( ; attrs
; attrs
= TREE_CHAIN (attrs
))
14077 attrs
= lookup_attribute ("nonnull", attrs
);
14082 argmap
= BITMAP_ALLOC (NULL
);
14084 if (!TREE_VALUE (attrs
))
14086 /* Clear the bitmap in case a previous attribute nonnull
14087 set it and this one overrides it for all arguments. */
14088 bitmap_clear (argmap
);
14092 /* Iterate over the indices of the format arguments declared nonnull
14093 and set a bit for each. */
14094 for (tree idx
= TREE_VALUE (attrs
); idx
; idx
= TREE_CHAIN (idx
))
14096 unsigned int val
= TREE_INT_CST_LOW (TREE_VALUE (idx
)) - 1;
14097 bitmap_set_bit (argmap
, val
);
14104 /* Returns true if TYPE is a type where it and all of its subobjects
14105 (recursively) are of structure, union, or array type. */
14108 is_empty_type (const_tree type
)
14110 if (RECORD_OR_UNION_TYPE_P (type
))
14112 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
14113 if (TREE_CODE (field
) == FIELD_DECL
14114 && !DECL_PADDING_P (field
)
14115 && !is_empty_type (TREE_TYPE (field
)))
14119 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14120 return (integer_minus_onep (array_type_nelts (type
))
14121 || TYPE_DOMAIN (type
) == NULL_TREE
14122 || is_empty_type (TREE_TYPE (type
)));
14126 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14127 that shouldn't be passed via stack. */
14130 default_is_empty_record (const_tree type
)
14132 if (!abi_version_at_least (12))
14135 if (type
== error_mark_node
)
14138 if (TREE_ADDRESSABLE (type
))
14141 return is_empty_type (TYPE_MAIN_VARIANT (type
));
14144 /* Determine whether TYPE is a structure with a flexible array member,
14145 or a union containing such a structure (possibly recursively). */
14148 flexible_array_type_p (const_tree type
)
14151 switch (TREE_CODE (type
))
14155 for (x
= TYPE_FIELDS (type
); x
!= NULL_TREE
; x
= DECL_CHAIN (x
))
14156 if (TREE_CODE (x
) == FIELD_DECL
)
14158 if (last
== NULL_TREE
)
14160 if (TREE_CODE (TREE_TYPE (last
)) == ARRAY_TYPE
14161 && TYPE_SIZE (TREE_TYPE (last
)) == NULL_TREE
14162 && TYPE_DOMAIN (TREE_TYPE (last
)) != NULL_TREE
14163 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last
))) == NULL_TREE
)
14167 for (x
= TYPE_FIELDS (type
); x
!= NULL_TREE
; x
= DECL_CHAIN (x
))
14169 if (TREE_CODE (x
) == FIELD_DECL
14170 && flexible_array_type_p (TREE_TYPE (x
)))
14179 /* Like int_size_in_bytes, but handle empty records specially. */
14182 arg_int_size_in_bytes (const_tree type
)
14184 return TYPE_EMPTY_P (type
) ? 0 : int_size_in_bytes (type
);
14187 /* Like size_in_bytes, but handle empty records specially. */
14190 arg_size_in_bytes (const_tree type
)
14192 return TYPE_EMPTY_P (type
) ? size_zero_node
: size_in_bytes (type
);
14195 /* Return true if an expression with CODE has to have the same result type as
14196 its first operand. */
14199 expr_type_first_operand_type_p (tree_code code
)
14212 case TRUNC_DIV_EXPR
:
14213 case CEIL_DIV_EXPR
:
14214 case FLOOR_DIV_EXPR
:
14215 case ROUND_DIV_EXPR
:
14216 case TRUNC_MOD_EXPR
:
14217 case CEIL_MOD_EXPR
:
14218 case FLOOR_MOD_EXPR
:
14219 case ROUND_MOD_EXPR
:
14221 case EXACT_DIV_EXPR
:
14239 /* Return a typenode for the "standard" C type with a given name. */
14241 get_typenode_from_name (const char *name
)
14243 if (name
== NULL
|| *name
== '\0')
14246 if (strcmp (name
, "char") == 0)
14247 return char_type_node
;
14248 if (strcmp (name
, "unsigned char") == 0)
14249 return unsigned_char_type_node
;
14250 if (strcmp (name
, "signed char") == 0)
14251 return signed_char_type_node
;
14253 if (strcmp (name
, "short int") == 0)
14254 return short_integer_type_node
;
14255 if (strcmp (name
, "short unsigned int") == 0)
14256 return short_unsigned_type_node
;
14258 if (strcmp (name
, "int") == 0)
14259 return integer_type_node
;
14260 if (strcmp (name
, "unsigned int") == 0)
14261 return unsigned_type_node
;
14263 if (strcmp (name
, "long int") == 0)
14264 return long_integer_type_node
;
14265 if (strcmp (name
, "long unsigned int") == 0)
14266 return long_unsigned_type_node
;
14268 if (strcmp (name
, "long long int") == 0)
14269 return long_long_integer_type_node
;
14270 if (strcmp (name
, "long long unsigned int") == 0)
14271 return long_long_unsigned_type_node
;
14273 gcc_unreachable ();
14276 /* List of pointer types used to declare builtins before we have seen their
14279 Keep the size up to date in tree.h ! */
14280 const builtin_structptr_type builtin_structptr_types
[6] =
14282 { fileptr_type_node
, ptr_type_node
, "FILE" },
14283 { const_tm_ptr_type_node
, const_ptr_type_node
, "tm" },
14284 { fenv_t_ptr_type_node
, ptr_type_node
, "fenv_t" },
14285 { const_fenv_t_ptr_type_node
, const_ptr_type_node
, "fenv_t" },
14286 { fexcept_t_ptr_type_node
, ptr_type_node
, "fexcept_t" },
14287 { const_fexcept_t_ptr_type_node
, const_ptr_type_node
, "fexcept_t" }
14290 /* Return the maximum object size. */
14293 max_object_size (void)
14295 /* To do: Make this a configurable parameter. */
14296 return TYPE_MAX_VALUE (ptrdiff_type_node
);
14299 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14300 parameter default to false and that weeds out error_mark_node. */
14303 verify_type_context (location_t loc
, type_context_kind context
,
14304 const_tree type
, bool silent_p
)
14306 if (type
== error_mark_node
)
14309 gcc_assert (TYPE_P (type
));
14310 return (!targetm
.verify_type_context
14311 || targetm
.verify_type_context (loc
, context
, type
, silent_p
));
14314 /* Return that NEW_ASM and DELETE_ASM name a valid pair of new and
14315 delete operators. */
14318 valid_new_delete_pair_p (tree new_asm
, tree delete_asm
)
14320 const char *new_name
= IDENTIFIER_POINTER (new_asm
);
14321 const char *delete_name
= IDENTIFIER_POINTER (delete_asm
);
14322 unsigned int new_len
= IDENTIFIER_LENGTH (new_asm
);
14323 unsigned int delete_len
= IDENTIFIER_LENGTH (delete_asm
);
14325 if (new_len
< 5 || delete_len
< 6)
14327 if (new_name
[0] == '_')
14328 ++new_name
, --new_len
;
14329 if (new_name
[0] == '_')
14330 ++new_name
, --new_len
;
14331 if (delete_name
[0] == '_')
14332 ++delete_name
, --delete_len
;
14333 if (delete_name
[0] == '_')
14334 ++delete_name
, --delete_len
;
14335 if (new_len
< 4 || delete_len
< 5)
14337 /* *_len is now just the length after initial underscores. */
14338 if (new_name
[0] != 'Z' || new_name
[1] != 'n')
14340 if (delete_name
[0] != 'Z' || delete_name
[1] != 'd')
14342 /* _Znw must match _Zdl, _Zna must match _Zda. */
14343 if ((new_name
[2] != 'w' || delete_name
[2] != 'l')
14344 && (new_name
[2] != 'a' || delete_name
[2] != 'a'))
14346 /* 'j', 'm' and 'y' correspond to size_t. */
14347 if (new_name
[3] != 'j' && new_name
[3] != 'm' && new_name
[3] != 'y')
14349 if (delete_name
[3] != 'P' || delete_name
[4] != 'v')
14352 || (new_len
== 18 && !memcmp (new_name
+ 4, "RKSt9nothrow_t", 14)))
14354 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14355 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14356 if (delete_len
== 5)
14358 if (delete_len
== 6 && delete_name
[5] == new_name
[3])
14360 if (delete_len
== 19 && !memcmp (delete_name
+ 5, "RKSt9nothrow_t", 14))
14363 else if ((new_len
== 19 && !memcmp (new_name
+ 4, "St11align_val_t", 15))
14365 && !memcmp (new_name
+ 4, "St11align_val_tRKSt9nothrow_t", 29)))
14367 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14368 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or or
14369 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14370 if (delete_len
== 20 && !memcmp (delete_name
+ 5, "St11align_val_t", 15))
14372 if (delete_len
== 21
14373 && delete_name
[5] == new_name
[3]
14374 && !memcmp (delete_name
+ 6, "St11align_val_t", 15))
14376 if (delete_len
== 34
14377 && !memcmp (delete_name
+ 5, "St11align_val_tRKSt9nothrow_t", 29))
14385 namespace selftest
{
14387 /* Selftests for tree. */
14389 /* Verify that integer constants are sane. */
14392 test_integer_constants ()
14394 ASSERT_TRUE (integer_type_node
!= NULL
);
14395 ASSERT_TRUE (build_int_cst (integer_type_node
, 0) != NULL
);
14397 tree type
= integer_type_node
;
14399 tree zero
= build_zero_cst (type
);
14400 ASSERT_EQ (INTEGER_CST
, TREE_CODE (zero
));
14401 ASSERT_EQ (type
, TREE_TYPE (zero
));
14403 tree one
= build_int_cst (type
, 1);
14404 ASSERT_EQ (INTEGER_CST
, TREE_CODE (one
));
14405 ASSERT_EQ (type
, TREE_TYPE (zero
));
14408 /* Verify identifiers. */
14411 test_identifiers ()
14413 tree identifier
= get_identifier ("foo");
14414 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier
));
14415 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier
));
14418 /* Verify LABEL_DECL. */
14423 tree identifier
= get_identifier ("err");
14424 tree label_decl
= build_decl (UNKNOWN_LOCATION
, LABEL_DECL
,
14425 identifier
, void_type_node
);
14426 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl
));
14427 ASSERT_FALSE (FORCED_LABEL (label_decl
));
14430 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14431 are given by VALS. */
14434 build_vector (tree type
, const vec
<tree
> &vals MEM_STAT_DECL
)
14436 gcc_assert (known_eq (vals
.length (), TYPE_VECTOR_SUBPARTS (type
)));
14437 tree_vector_builder
builder (type
, vals
.length (), 1);
14438 builder
.splice (vals
);
14439 return builder
.build ();
14442 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14445 check_vector_cst (const vec
<tree
> &expected
, tree actual
)
14447 ASSERT_KNOWN_EQ (expected
.length (),
14448 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
14449 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
14450 ASSERT_EQ (wi::to_wide (expected
[i
]),
14451 wi::to_wide (vector_cst_elt (actual
, i
)));
14454 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14455 and that its elements match EXPECTED. */
14458 check_vector_cst_duplicate (const vec
<tree
> &expected
, tree actual
,
14459 unsigned int npatterns
)
14461 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14462 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14463 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
14464 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
14465 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14466 check_vector_cst (expected
, actual
);
14469 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14470 and NPATTERNS background elements, and that its elements match
14474 check_vector_cst_fill (const vec
<tree
> &expected
, tree actual
,
14475 unsigned int npatterns
)
14477 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14478 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14479 ASSERT_EQ (2 * npatterns
, vector_cst_encoded_nelts (actual
));
14480 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14481 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14482 check_vector_cst (expected
, actual
);
14485 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14486 and that its elements match EXPECTED. */
14489 check_vector_cst_stepped (const vec
<tree
> &expected
, tree actual
,
14490 unsigned int npatterns
)
14492 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14493 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14494 ASSERT_EQ (3 * npatterns
, vector_cst_encoded_nelts (actual
));
14495 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14496 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual
));
14497 check_vector_cst (expected
, actual
);
14500 /* Test the creation of VECTOR_CSTs. */
14503 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO
)
14505 auto_vec
<tree
, 8> elements (8);
14506 elements
.quick_grow (8);
14507 tree element_type
= build_nonstandard_integer_type (16, true);
14508 tree vector_type
= build_vector_type (element_type
, 8);
14510 /* Test a simple linear series with a base of 0 and a step of 1:
14511 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14512 for (unsigned int i
= 0; i
< 8; ++i
)
14513 elements
[i
] = build_int_cst (element_type
, i
);
14514 tree vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14515 check_vector_cst_stepped (elements
, vector
, 1);
14517 /* Try the same with the first element replaced by 100:
14518 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14519 elements
[0] = build_int_cst (element_type
, 100);
14520 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14521 check_vector_cst_stepped (elements
, vector
, 1);
14523 /* Try a series that wraps around.
14524 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14525 for (unsigned int i
= 1; i
< 8; ++i
)
14526 elements
[i
] = build_int_cst (element_type
, (65530 + i
) & 0xffff);
14527 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14528 check_vector_cst_stepped (elements
, vector
, 1);
14530 /* Try a downward series:
14531 { 100, 79, 78, 77, 76, 75, 75, 73 }. */
14532 for (unsigned int i
= 1; i
< 8; ++i
)
14533 elements
[i
] = build_int_cst (element_type
, 80 - i
);
14534 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14535 check_vector_cst_stepped (elements
, vector
, 1);
14537 /* Try two interleaved series with different bases and steps:
14538 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14539 elements
[1] = build_int_cst (element_type
, 53);
14540 for (unsigned int i
= 2; i
< 8; i
+= 2)
14542 elements
[i
] = build_int_cst (element_type
, 70 - i
* 2);
14543 elements
[i
+ 1] = build_int_cst (element_type
, 200 + i
* 3);
14545 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14546 check_vector_cst_stepped (elements
, vector
, 2);
14548 /* Try a duplicated value:
14549 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14550 for (unsigned int i
= 1; i
< 8; ++i
)
14551 elements
[i
] = elements
[0];
14552 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14553 check_vector_cst_duplicate (elements
, vector
, 1);
14555 /* Try an interleaved duplicated value:
14556 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14557 elements
[1] = build_int_cst (element_type
, 55);
14558 for (unsigned int i
= 2; i
< 8; ++i
)
14559 elements
[i
] = elements
[i
- 2];
14560 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14561 check_vector_cst_duplicate (elements
, vector
, 2);
14563 /* Try a duplicated value with 2 exceptions
14564 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14565 elements
[0] = build_int_cst (element_type
, 41);
14566 elements
[1] = build_int_cst (element_type
, 97);
14567 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14568 check_vector_cst_fill (elements
, vector
, 2);
14570 /* Try with and without a step
14571 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14572 for (unsigned int i
= 3; i
< 8; i
+= 2)
14573 elements
[i
] = build_int_cst (element_type
, i
* 7);
14574 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14575 check_vector_cst_stepped (elements
, vector
, 2);
14577 /* Try a fully-general constant:
14578 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14579 elements
[5] = build_int_cst (element_type
, 9990);
14580 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14581 check_vector_cst_fill (elements
, vector
, 4);
14584 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14585 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14586 modifying its argument in-place. */
14589 check_strip_nops (tree node
, tree expected
)
14592 ASSERT_EQ (expected
, node
);
14595 /* Verify location wrappers. */
14598 test_location_wrappers ()
14600 location_t loc
= BUILTINS_LOCATION
;
14602 ASSERT_EQ (NULL_TREE
, maybe_wrap_with_location (NULL_TREE
, loc
));
14604 /* Wrapping a constant. */
14605 tree int_cst
= build_int_cst (integer_type_node
, 42);
14606 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst
));
14607 ASSERT_FALSE (location_wrapper_p (int_cst
));
14609 tree wrapped_int_cst
= maybe_wrap_with_location (int_cst
, loc
);
14610 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst
));
14611 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_cst
));
14612 ASSERT_EQ (int_cst
, tree_strip_any_location_wrapper (wrapped_int_cst
));
14614 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14615 ASSERT_EQ (int_cst
, maybe_wrap_with_location (int_cst
, UNKNOWN_LOCATION
));
14617 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14618 tree cast
= build1 (NOP_EXPR
, char_type_node
, int_cst
);
14619 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast
));
14620 ASSERT_EQ (cast
, maybe_wrap_with_location (cast
, loc
));
14622 /* Wrapping a STRING_CST. */
14623 tree string_cst
= build_string (4, "foo");
14624 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst
));
14625 ASSERT_FALSE (location_wrapper_p (string_cst
));
14627 tree wrapped_string_cst
= maybe_wrap_with_location (string_cst
, loc
);
14628 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst
));
14629 ASSERT_EQ (VIEW_CONVERT_EXPR
, TREE_CODE (wrapped_string_cst
));
14630 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_string_cst
));
14631 ASSERT_EQ (string_cst
, tree_strip_any_location_wrapper (wrapped_string_cst
));
14634 /* Wrapping a variable. */
14635 tree int_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
14636 get_identifier ("some_int_var"),
14637 integer_type_node
);
14638 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var
));
14639 ASSERT_FALSE (location_wrapper_p (int_var
));
14641 tree wrapped_int_var
= maybe_wrap_with_location (int_var
, loc
);
14642 ASSERT_TRUE (location_wrapper_p (wrapped_int_var
));
14643 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_var
));
14644 ASSERT_EQ (int_var
, tree_strip_any_location_wrapper (wrapped_int_var
));
14646 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14648 tree r_cast
= build1 (NON_LVALUE_EXPR
, integer_type_node
, int_var
);
14649 ASSERT_FALSE (location_wrapper_p (r_cast
));
14650 ASSERT_EQ (r_cast
, tree_strip_any_location_wrapper (r_cast
));
14652 /* Verify that STRIP_NOPS removes wrappers. */
14653 check_strip_nops (wrapped_int_cst
, int_cst
);
14654 check_strip_nops (wrapped_string_cst
, string_cst
);
14655 check_strip_nops (wrapped_int_var
, int_var
);
14658 /* Test various tree predicates. Verify that location wrappers don't
14659 affect the results. */
14664 /* Build various constants and wrappers around them. */
14666 location_t loc
= BUILTINS_LOCATION
;
14668 tree i_0
= build_int_cst (integer_type_node
, 0);
14669 tree wr_i_0
= maybe_wrap_with_location (i_0
, loc
);
14671 tree i_1
= build_int_cst (integer_type_node
, 1);
14672 tree wr_i_1
= maybe_wrap_with_location (i_1
, loc
);
14674 tree i_m1
= build_int_cst (integer_type_node
, -1);
14675 tree wr_i_m1
= maybe_wrap_with_location (i_m1
, loc
);
14677 tree f_0
= build_real_from_int_cst (float_type_node
, i_0
);
14678 tree wr_f_0
= maybe_wrap_with_location (f_0
, loc
);
14679 tree f_1
= build_real_from_int_cst (float_type_node
, i_1
);
14680 tree wr_f_1
= maybe_wrap_with_location (f_1
, loc
);
14681 tree f_m1
= build_real_from_int_cst (float_type_node
, i_m1
);
14682 tree wr_f_m1
= maybe_wrap_with_location (f_m1
, loc
);
14684 tree c_i_0
= build_complex (NULL_TREE
, i_0
, i_0
);
14685 tree c_i_1
= build_complex (NULL_TREE
, i_1
, i_0
);
14686 tree c_i_m1
= build_complex (NULL_TREE
, i_m1
, i_0
);
14688 tree c_f_0
= build_complex (NULL_TREE
, f_0
, f_0
);
14689 tree c_f_1
= build_complex (NULL_TREE
, f_1
, f_0
);
14690 tree c_f_m1
= build_complex (NULL_TREE
, f_m1
, f_0
);
14692 /* TODO: vector constants. */
14694 /* Test integer_onep. */
14695 ASSERT_FALSE (integer_onep (i_0
));
14696 ASSERT_FALSE (integer_onep (wr_i_0
));
14697 ASSERT_TRUE (integer_onep (i_1
));
14698 ASSERT_TRUE (integer_onep (wr_i_1
));
14699 ASSERT_FALSE (integer_onep (i_m1
));
14700 ASSERT_FALSE (integer_onep (wr_i_m1
));
14701 ASSERT_FALSE (integer_onep (f_0
));
14702 ASSERT_FALSE (integer_onep (wr_f_0
));
14703 ASSERT_FALSE (integer_onep (f_1
));
14704 ASSERT_FALSE (integer_onep (wr_f_1
));
14705 ASSERT_FALSE (integer_onep (f_m1
));
14706 ASSERT_FALSE (integer_onep (wr_f_m1
));
14707 ASSERT_FALSE (integer_onep (c_i_0
));
14708 ASSERT_TRUE (integer_onep (c_i_1
));
14709 ASSERT_FALSE (integer_onep (c_i_m1
));
14710 ASSERT_FALSE (integer_onep (c_f_0
));
14711 ASSERT_FALSE (integer_onep (c_f_1
));
14712 ASSERT_FALSE (integer_onep (c_f_m1
));
14714 /* Test integer_zerop. */
14715 ASSERT_TRUE (integer_zerop (i_0
));
14716 ASSERT_TRUE (integer_zerop (wr_i_0
));
14717 ASSERT_FALSE (integer_zerop (i_1
));
14718 ASSERT_FALSE (integer_zerop (wr_i_1
));
14719 ASSERT_FALSE (integer_zerop (i_m1
));
14720 ASSERT_FALSE (integer_zerop (wr_i_m1
));
14721 ASSERT_FALSE (integer_zerop (f_0
));
14722 ASSERT_FALSE (integer_zerop (wr_f_0
));
14723 ASSERT_FALSE (integer_zerop (f_1
));
14724 ASSERT_FALSE (integer_zerop (wr_f_1
));
14725 ASSERT_FALSE (integer_zerop (f_m1
));
14726 ASSERT_FALSE (integer_zerop (wr_f_m1
));
14727 ASSERT_TRUE (integer_zerop (c_i_0
));
14728 ASSERT_FALSE (integer_zerop (c_i_1
));
14729 ASSERT_FALSE (integer_zerop (c_i_m1
));
14730 ASSERT_FALSE (integer_zerop (c_f_0
));
14731 ASSERT_FALSE (integer_zerop (c_f_1
));
14732 ASSERT_FALSE (integer_zerop (c_f_m1
));
14734 /* Test integer_all_onesp. */
14735 ASSERT_FALSE (integer_all_onesp (i_0
));
14736 ASSERT_FALSE (integer_all_onesp (wr_i_0
));
14737 ASSERT_FALSE (integer_all_onesp (i_1
));
14738 ASSERT_FALSE (integer_all_onesp (wr_i_1
));
14739 ASSERT_TRUE (integer_all_onesp (i_m1
));
14740 ASSERT_TRUE (integer_all_onesp (wr_i_m1
));
14741 ASSERT_FALSE (integer_all_onesp (f_0
));
14742 ASSERT_FALSE (integer_all_onesp (wr_f_0
));
14743 ASSERT_FALSE (integer_all_onesp (f_1
));
14744 ASSERT_FALSE (integer_all_onesp (wr_f_1
));
14745 ASSERT_FALSE (integer_all_onesp (f_m1
));
14746 ASSERT_FALSE (integer_all_onesp (wr_f_m1
));
14747 ASSERT_FALSE (integer_all_onesp (c_i_0
));
14748 ASSERT_FALSE (integer_all_onesp (c_i_1
));
14749 ASSERT_FALSE (integer_all_onesp (c_i_m1
));
14750 ASSERT_FALSE (integer_all_onesp (c_f_0
));
14751 ASSERT_FALSE (integer_all_onesp (c_f_1
));
14752 ASSERT_FALSE (integer_all_onesp (c_f_m1
));
14754 /* Test integer_minus_onep. */
14755 ASSERT_FALSE (integer_minus_onep (i_0
));
14756 ASSERT_FALSE (integer_minus_onep (wr_i_0
));
14757 ASSERT_FALSE (integer_minus_onep (i_1
));
14758 ASSERT_FALSE (integer_minus_onep (wr_i_1
));
14759 ASSERT_TRUE (integer_minus_onep (i_m1
));
14760 ASSERT_TRUE (integer_minus_onep (wr_i_m1
));
14761 ASSERT_FALSE (integer_minus_onep (f_0
));
14762 ASSERT_FALSE (integer_minus_onep (wr_f_0
));
14763 ASSERT_FALSE (integer_minus_onep (f_1
));
14764 ASSERT_FALSE (integer_minus_onep (wr_f_1
));
14765 ASSERT_FALSE (integer_minus_onep (f_m1
));
14766 ASSERT_FALSE (integer_minus_onep (wr_f_m1
));
14767 ASSERT_FALSE (integer_minus_onep (c_i_0
));
14768 ASSERT_FALSE (integer_minus_onep (c_i_1
));
14769 ASSERT_TRUE (integer_minus_onep (c_i_m1
));
14770 ASSERT_FALSE (integer_minus_onep (c_f_0
));
14771 ASSERT_FALSE (integer_minus_onep (c_f_1
));
14772 ASSERT_FALSE (integer_minus_onep (c_f_m1
));
14774 /* Test integer_each_onep. */
14775 ASSERT_FALSE (integer_each_onep (i_0
));
14776 ASSERT_FALSE (integer_each_onep (wr_i_0
));
14777 ASSERT_TRUE (integer_each_onep (i_1
));
14778 ASSERT_TRUE (integer_each_onep (wr_i_1
));
14779 ASSERT_FALSE (integer_each_onep (i_m1
));
14780 ASSERT_FALSE (integer_each_onep (wr_i_m1
));
14781 ASSERT_FALSE (integer_each_onep (f_0
));
14782 ASSERT_FALSE (integer_each_onep (wr_f_0
));
14783 ASSERT_FALSE (integer_each_onep (f_1
));
14784 ASSERT_FALSE (integer_each_onep (wr_f_1
));
14785 ASSERT_FALSE (integer_each_onep (f_m1
));
14786 ASSERT_FALSE (integer_each_onep (wr_f_m1
));
14787 ASSERT_FALSE (integer_each_onep (c_i_0
));
14788 ASSERT_FALSE (integer_each_onep (c_i_1
));
14789 ASSERT_FALSE (integer_each_onep (c_i_m1
));
14790 ASSERT_FALSE (integer_each_onep (c_f_0
));
14791 ASSERT_FALSE (integer_each_onep (c_f_1
));
14792 ASSERT_FALSE (integer_each_onep (c_f_m1
));
14794 /* Test integer_truep. */
14795 ASSERT_FALSE (integer_truep (i_0
));
14796 ASSERT_FALSE (integer_truep (wr_i_0
));
14797 ASSERT_TRUE (integer_truep (i_1
));
14798 ASSERT_TRUE (integer_truep (wr_i_1
));
14799 ASSERT_FALSE (integer_truep (i_m1
));
14800 ASSERT_FALSE (integer_truep (wr_i_m1
));
14801 ASSERT_FALSE (integer_truep (f_0
));
14802 ASSERT_FALSE (integer_truep (wr_f_0
));
14803 ASSERT_FALSE (integer_truep (f_1
));
14804 ASSERT_FALSE (integer_truep (wr_f_1
));
14805 ASSERT_FALSE (integer_truep (f_m1
));
14806 ASSERT_FALSE (integer_truep (wr_f_m1
));
14807 ASSERT_FALSE (integer_truep (c_i_0
));
14808 ASSERT_TRUE (integer_truep (c_i_1
));
14809 ASSERT_FALSE (integer_truep (c_i_m1
));
14810 ASSERT_FALSE (integer_truep (c_f_0
));
14811 ASSERT_FALSE (integer_truep (c_f_1
));
14812 ASSERT_FALSE (integer_truep (c_f_m1
));
14814 /* Test integer_nonzerop. */
14815 ASSERT_FALSE (integer_nonzerop (i_0
));
14816 ASSERT_FALSE (integer_nonzerop (wr_i_0
));
14817 ASSERT_TRUE (integer_nonzerop (i_1
));
14818 ASSERT_TRUE (integer_nonzerop (wr_i_1
));
14819 ASSERT_TRUE (integer_nonzerop (i_m1
));
14820 ASSERT_TRUE (integer_nonzerop (wr_i_m1
));
14821 ASSERT_FALSE (integer_nonzerop (f_0
));
14822 ASSERT_FALSE (integer_nonzerop (wr_f_0
));
14823 ASSERT_FALSE (integer_nonzerop (f_1
));
14824 ASSERT_FALSE (integer_nonzerop (wr_f_1
));
14825 ASSERT_FALSE (integer_nonzerop (f_m1
));
14826 ASSERT_FALSE (integer_nonzerop (wr_f_m1
));
14827 ASSERT_FALSE (integer_nonzerop (c_i_0
));
14828 ASSERT_TRUE (integer_nonzerop (c_i_1
));
14829 ASSERT_TRUE (integer_nonzerop (c_i_m1
));
14830 ASSERT_FALSE (integer_nonzerop (c_f_0
));
14831 ASSERT_FALSE (integer_nonzerop (c_f_1
));
14832 ASSERT_FALSE (integer_nonzerop (c_f_m1
));
14834 /* Test real_zerop. */
14835 ASSERT_FALSE (real_zerop (i_0
));
14836 ASSERT_FALSE (real_zerop (wr_i_0
));
14837 ASSERT_FALSE (real_zerop (i_1
));
14838 ASSERT_FALSE (real_zerop (wr_i_1
));
14839 ASSERT_FALSE (real_zerop (i_m1
));
14840 ASSERT_FALSE (real_zerop (wr_i_m1
));
14841 ASSERT_TRUE (real_zerop (f_0
));
14842 ASSERT_TRUE (real_zerop (wr_f_0
));
14843 ASSERT_FALSE (real_zerop (f_1
));
14844 ASSERT_FALSE (real_zerop (wr_f_1
));
14845 ASSERT_FALSE (real_zerop (f_m1
));
14846 ASSERT_FALSE (real_zerop (wr_f_m1
));
14847 ASSERT_FALSE (real_zerop (c_i_0
));
14848 ASSERT_FALSE (real_zerop (c_i_1
));
14849 ASSERT_FALSE (real_zerop (c_i_m1
));
14850 ASSERT_TRUE (real_zerop (c_f_0
));
14851 ASSERT_FALSE (real_zerop (c_f_1
));
14852 ASSERT_FALSE (real_zerop (c_f_m1
));
14854 /* Test real_onep. */
14855 ASSERT_FALSE (real_onep (i_0
));
14856 ASSERT_FALSE (real_onep (wr_i_0
));
14857 ASSERT_FALSE (real_onep (i_1
));
14858 ASSERT_FALSE (real_onep (wr_i_1
));
14859 ASSERT_FALSE (real_onep (i_m1
));
14860 ASSERT_FALSE (real_onep (wr_i_m1
));
14861 ASSERT_FALSE (real_onep (f_0
));
14862 ASSERT_FALSE (real_onep (wr_f_0
));
14863 ASSERT_TRUE (real_onep (f_1
));
14864 ASSERT_TRUE (real_onep (wr_f_1
));
14865 ASSERT_FALSE (real_onep (f_m1
));
14866 ASSERT_FALSE (real_onep (wr_f_m1
));
14867 ASSERT_FALSE (real_onep (c_i_0
));
14868 ASSERT_FALSE (real_onep (c_i_1
));
14869 ASSERT_FALSE (real_onep (c_i_m1
));
14870 ASSERT_FALSE (real_onep (c_f_0
));
14871 ASSERT_TRUE (real_onep (c_f_1
));
14872 ASSERT_FALSE (real_onep (c_f_m1
));
14874 /* Test real_minus_onep. */
14875 ASSERT_FALSE (real_minus_onep (i_0
));
14876 ASSERT_FALSE (real_minus_onep (wr_i_0
));
14877 ASSERT_FALSE (real_minus_onep (i_1
));
14878 ASSERT_FALSE (real_minus_onep (wr_i_1
));
14879 ASSERT_FALSE (real_minus_onep (i_m1
));
14880 ASSERT_FALSE (real_minus_onep (wr_i_m1
));
14881 ASSERT_FALSE (real_minus_onep (f_0
));
14882 ASSERT_FALSE (real_minus_onep (wr_f_0
));
14883 ASSERT_FALSE (real_minus_onep (f_1
));
14884 ASSERT_FALSE (real_minus_onep (wr_f_1
));
14885 ASSERT_TRUE (real_minus_onep (f_m1
));
14886 ASSERT_TRUE (real_minus_onep (wr_f_m1
));
14887 ASSERT_FALSE (real_minus_onep (c_i_0
));
14888 ASSERT_FALSE (real_minus_onep (c_i_1
));
14889 ASSERT_FALSE (real_minus_onep (c_i_m1
));
14890 ASSERT_FALSE (real_minus_onep (c_f_0
));
14891 ASSERT_FALSE (real_minus_onep (c_f_1
));
14892 ASSERT_TRUE (real_minus_onep (c_f_m1
));
14895 ASSERT_TRUE (zerop (i_0
));
14896 ASSERT_TRUE (zerop (wr_i_0
));
14897 ASSERT_FALSE (zerop (i_1
));
14898 ASSERT_FALSE (zerop (wr_i_1
));
14899 ASSERT_FALSE (zerop (i_m1
));
14900 ASSERT_FALSE (zerop (wr_i_m1
));
14901 ASSERT_TRUE (zerop (f_0
));
14902 ASSERT_TRUE (zerop (wr_f_0
));
14903 ASSERT_FALSE (zerop (f_1
));
14904 ASSERT_FALSE (zerop (wr_f_1
));
14905 ASSERT_FALSE (zerop (f_m1
));
14906 ASSERT_FALSE (zerop (wr_f_m1
));
14907 ASSERT_TRUE (zerop (c_i_0
));
14908 ASSERT_FALSE (zerop (c_i_1
));
14909 ASSERT_FALSE (zerop (c_i_m1
));
14910 ASSERT_TRUE (zerop (c_f_0
));
14911 ASSERT_FALSE (zerop (c_f_1
));
14912 ASSERT_FALSE (zerop (c_f_m1
));
14914 /* Test tree_expr_nonnegative_p. */
14915 ASSERT_TRUE (tree_expr_nonnegative_p (i_0
));
14916 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0
));
14917 ASSERT_TRUE (tree_expr_nonnegative_p (i_1
));
14918 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1
));
14919 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1
));
14920 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1
));
14921 ASSERT_TRUE (tree_expr_nonnegative_p (f_0
));
14922 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0
));
14923 ASSERT_TRUE (tree_expr_nonnegative_p (f_1
));
14924 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1
));
14925 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1
));
14926 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1
));
14927 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0
));
14928 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1
));
14929 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1
));
14930 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0
));
14931 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1
));
14932 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1
));
14934 /* Test tree_expr_nonzero_p. */
14935 ASSERT_FALSE (tree_expr_nonzero_p (i_0
));
14936 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0
));
14937 ASSERT_TRUE (tree_expr_nonzero_p (i_1
));
14938 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1
));
14939 ASSERT_TRUE (tree_expr_nonzero_p (i_m1
));
14940 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1
));
14942 /* Test integer_valued_real_p. */
14943 ASSERT_FALSE (integer_valued_real_p (i_0
));
14944 ASSERT_TRUE (integer_valued_real_p (f_0
));
14945 ASSERT_TRUE (integer_valued_real_p (wr_f_0
));
14946 ASSERT_TRUE (integer_valued_real_p (f_1
));
14947 ASSERT_TRUE (integer_valued_real_p (wr_f_1
));
14949 /* Test integer_pow2p. */
14950 ASSERT_FALSE (integer_pow2p (i_0
));
14951 ASSERT_TRUE (integer_pow2p (i_1
));
14952 ASSERT_TRUE (integer_pow2p (wr_i_1
));
14954 /* Test uniform_integer_cst_p. */
14955 ASSERT_TRUE (uniform_integer_cst_p (i_0
));
14956 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0
));
14957 ASSERT_TRUE (uniform_integer_cst_p (i_1
));
14958 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1
));
14959 ASSERT_TRUE (uniform_integer_cst_p (i_m1
));
14960 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1
));
14961 ASSERT_FALSE (uniform_integer_cst_p (f_0
));
14962 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0
));
14963 ASSERT_FALSE (uniform_integer_cst_p (f_1
));
14964 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1
));
14965 ASSERT_FALSE (uniform_integer_cst_p (f_m1
));
14966 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1
));
14967 ASSERT_FALSE (uniform_integer_cst_p (c_i_0
));
14968 ASSERT_FALSE (uniform_integer_cst_p (c_i_1
));
14969 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1
));
14970 ASSERT_FALSE (uniform_integer_cst_p (c_f_0
));
14971 ASSERT_FALSE (uniform_integer_cst_p (c_f_1
));
14972 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1
));
14975 /* Check that string escaping works correctly. */
14978 test_escaped_strings (void)
14981 escaped_string msg
;
14984 /* ASSERT_STREQ does not accept NULL as a valid test
14985 result, so we have to use ASSERT_EQ instead. */
14986 ASSERT_EQ (NULL
, (const char *) msg
);
14989 ASSERT_STREQ ("", (const char *) msg
);
14991 msg
.escape ("foobar");
14992 ASSERT_STREQ ("foobar", (const char *) msg
);
14994 /* Ensure that we have -fmessage-length set to 0. */
14995 saved_cutoff
= pp_line_cutoff (global_dc
->printer
);
14996 pp_line_cutoff (global_dc
->printer
) = 0;
14998 msg
.escape ("foo\nbar");
14999 ASSERT_STREQ ("foo\\nbar", (const char *) msg
);
15001 msg
.escape ("\a\b\f\n\r\t\v");
15002 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg
);
15004 /* Now repeat the tests with -fmessage-length set to 5. */
15005 pp_line_cutoff (global_dc
->printer
) = 5;
15007 /* Note that the newline is not translated into an escape. */
15008 msg
.escape ("foo\nbar");
15009 ASSERT_STREQ ("foo\nbar", (const char *) msg
);
15011 msg
.escape ("\a\b\f\n\r\t\v");
15012 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg
);
15014 /* Restore the original message length setting. */
15015 pp_line_cutoff (global_dc
->printer
) = saved_cutoff
;
15018 /* Run all of the selftests within this file. */
15023 test_integer_constants ();
15024 test_identifiers ();
15026 test_vector_cst_patterns ();
15027 test_location_wrappers ();
15028 test_predicates ();
15029 test_escaped_strings ();
15032 } // namespace selftest
15034 #endif /* CHECKING_P */
15036 #include "gt-tree.h"