1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
64 #include "stringpool.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
72 /* Tree code classes. */
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
77 const enum tree_code_class tree_code_type
[] = {
78 #include "all-tree.def"
82 #undef END_OF_BASE_TREE_CODES
84 /* Table indexed by tree code giving number of expression
85 operands beyond the fixed part of the node structure.
86 Not used for types or decls. */
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
91 const unsigned char tree_code_length
[] = {
92 #include "all-tree.def"
96 #undef END_OF_BASE_TREE_CODES
98 /* Names of tree components.
99 Used for printing out the tree and error messages. */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
103 static const char *const tree_code_name
[] = {
104 #include "all-tree.def"
108 #undef END_OF_BASE_TREE_CODES
110 /* Each tree code class has an associated string representation.
111 These must correspond to the tree_code_class entries. */
113 const char *const tree_code_class_strings
[] =
128 /* obstack.[ch] explicitly declined to prototype this. */
129 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
131 /* Statistics-gathering stuff. */
133 static uint64_t tree_code_counts
[MAX_TREE_CODES
];
134 uint64_t tree_node_counts
[(int) all_kinds
];
135 uint64_t tree_node_sizes
[(int) all_kinds
];
137 /* Keep in sync with tree.h:enum tree_node_kind. */
138 static const char * const tree_node_kind_names
[] = {
157 /* Unique id for next decl created. */
158 static GTY(()) int next_decl_uid
;
159 /* Unique id for next type created. */
160 static GTY(()) unsigned next_type_uid
= 1;
161 /* Unique id for next debug decl created. Use negative numbers,
162 to catch erroneous uses. */
163 static GTY(()) int next_debug_decl_uid
;
165 /* Since we cannot rehash a type after it is in the table, we have to
166 keep the hash code. */
168 struct GTY((for_user
)) type_hash
{
173 /* Initial size of the hash table (rounded to next prime). */
174 #define TYPE_HASH_INITIAL_SIZE 1000
176 struct type_cache_hasher
: ggc_cache_ptr_hash
<type_hash
>
178 static hashval_t
hash (type_hash
*t
) { return t
->hash
; }
179 static bool equal (type_hash
*a
, type_hash
*b
);
182 keep_cache_entry (type_hash
*&t
)
184 return ggc_marked_p (t
->type
);
188 /* Now here is the hash table. When recording a type, it is added to
189 the slot whose index is the hash code. Note that the hash table is
190 used for several kinds of types (function types, array types and
191 array index range types, for now). While all these live in the
192 same table, they are completely independent, and the hash code is
193 computed differently for each of these. */
195 static GTY ((cache
)) hash_table
<type_cache_hasher
> *type_hash_table
;
197 /* Hash table and temporary node for larger integer const values. */
198 static GTY (()) tree int_cst_node
;
200 struct int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
202 static hashval_t
hash (tree t
);
203 static bool equal (tree x
, tree y
);
206 static GTY ((cache
)) hash_table
<int_cst_hasher
> *int_cst_hash_table
;
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209 for a given value. */
210 struct poly_int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
212 typedef std::pair
<tree
, const poly_wide_int
*> compare_type
;
213 static hashval_t
hash (tree t
);
214 static bool equal (tree x
, const compare_type
&y
);
217 static GTY ((cache
)) hash_table
<poly_int_cst_hasher
> *poly_int_cst_hash_table
;
219 /* Hash table for optimization flags and target option flags. Use the same
220 hash table for both sets of options. Nodes for building the current
221 optimization and target option nodes. The assumption is most of the time
222 the options created will already be in the hash table, so we avoid
223 allocating and freeing up a node repeatably. */
224 static GTY (()) tree cl_optimization_node
;
225 static GTY (()) tree cl_target_option_node
;
227 struct cl_option_hasher
: ggc_cache_ptr_hash
<tree_node
>
229 static hashval_t
hash (tree t
);
230 static bool equal (tree x
, tree y
);
233 static GTY ((cache
)) hash_table
<cl_option_hasher
> *cl_option_hash_table
;
235 /* General tree->tree mapping structure for use in hash tables. */
239 hash_table
<tree_decl_map_cache_hasher
> *debug_expr_for_decl
;
242 hash_table
<tree_decl_map_cache_hasher
> *value_expr_for_decl
;
244 struct tree_vec_map_cache_hasher
: ggc_cache_ptr_hash
<tree_vec_map
>
246 static hashval_t
hash (tree_vec_map
*m
) { return DECL_UID (m
->base
.from
); }
249 equal (tree_vec_map
*a
, tree_vec_map
*b
)
251 return a
->base
.from
== b
->base
.from
;
255 keep_cache_entry (tree_vec_map
*&m
)
257 return ggc_marked_p (m
->base
.from
);
262 hash_table
<tree_vec_map_cache_hasher
> *debug_args_for_decl
;
264 static void set_type_quals (tree
, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
269 tree global_trees
[TI_MAX
];
270 tree integer_types
[itk_none
];
272 bool int_n_enabled_p
[NUM_INT_N_ENTS
];
273 struct int_n_trees_t int_n_trees
[NUM_INT_N_ENTS
];
275 bool tree_contains_struct
[MAX_TREE_CODES
][64];
277 /* Number of operands for each OpenMP clause. */
278 unsigned const char omp_clause_num_ops
[] =
280 0, /* OMP_CLAUSE_ERROR */
281 1, /* OMP_CLAUSE_PRIVATE */
282 1, /* OMP_CLAUSE_SHARED */
283 1, /* OMP_CLAUSE_FIRSTPRIVATE */
284 2, /* OMP_CLAUSE_LASTPRIVATE */
285 5, /* OMP_CLAUSE_REDUCTION */
286 5, /* OMP_CLAUSE_TASK_REDUCTION */
287 5, /* OMP_CLAUSE_IN_REDUCTION */
288 1, /* OMP_CLAUSE_COPYIN */
289 1, /* OMP_CLAUSE_COPYPRIVATE */
290 3, /* OMP_CLAUSE_LINEAR */
291 2, /* OMP_CLAUSE_ALIGNED */
292 2, /* OMP_CLAUSE_ALLOCATE */
293 1, /* OMP_CLAUSE_DEPEND */
294 1, /* OMP_CLAUSE_NONTEMPORAL */
295 1, /* OMP_CLAUSE_UNIFORM */
296 1, /* OMP_CLAUSE_TO_DECLARE */
297 1, /* OMP_CLAUSE_LINK */
298 1, /* OMP_CLAUSE_DETACH */
299 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
300 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
301 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
302 1, /* OMP_CLAUSE_INCLUSIVE */
303 1, /* OMP_CLAUSE_EXCLUSIVE */
304 2, /* OMP_CLAUSE_FROM */
305 2, /* OMP_CLAUSE_TO */
306 2, /* OMP_CLAUSE_MAP */
307 2, /* OMP_CLAUSE__CACHE_ */
308 2, /* OMP_CLAUSE_GANG */
309 1, /* OMP_CLAUSE_ASYNC */
310 1, /* OMP_CLAUSE_WAIT */
311 0, /* OMP_CLAUSE_AUTO */
312 0, /* OMP_CLAUSE_SEQ */
313 1, /* OMP_CLAUSE__LOOPTEMP_ */
314 1, /* OMP_CLAUSE__REDUCTEMP_ */
315 1, /* OMP_CLAUSE__CONDTEMP_ */
316 1, /* OMP_CLAUSE__SCANTEMP_ */
317 1, /* OMP_CLAUSE_IF */
318 1, /* OMP_CLAUSE_NUM_THREADS */
319 1, /* OMP_CLAUSE_SCHEDULE */
320 0, /* OMP_CLAUSE_NOWAIT */
321 1, /* OMP_CLAUSE_ORDERED */
322 0, /* OMP_CLAUSE_DEFAULT */
323 3, /* OMP_CLAUSE_COLLAPSE */
324 0, /* OMP_CLAUSE_UNTIED */
325 1, /* OMP_CLAUSE_FINAL */
326 0, /* OMP_CLAUSE_MERGEABLE */
327 1, /* OMP_CLAUSE_DEVICE */
328 1, /* OMP_CLAUSE_DIST_SCHEDULE */
329 0, /* OMP_CLAUSE_INBRANCH */
330 0, /* OMP_CLAUSE_NOTINBRANCH */
331 1, /* OMP_CLAUSE_NUM_TEAMS */
332 1, /* OMP_CLAUSE_THREAD_LIMIT */
333 0, /* OMP_CLAUSE_PROC_BIND */
334 1, /* OMP_CLAUSE_SAFELEN */
335 1, /* OMP_CLAUSE_SIMDLEN */
336 0, /* OMP_CLAUSE_DEVICE_TYPE */
337 0, /* OMP_CLAUSE_FOR */
338 0, /* OMP_CLAUSE_PARALLEL */
339 0, /* OMP_CLAUSE_SECTIONS */
340 0, /* OMP_CLAUSE_TASKGROUP */
341 1, /* OMP_CLAUSE_PRIORITY */
342 1, /* OMP_CLAUSE_GRAINSIZE */
343 1, /* OMP_CLAUSE_NUM_TASKS */
344 0, /* OMP_CLAUSE_NOGROUP */
345 0, /* OMP_CLAUSE_THREADS */
346 0, /* OMP_CLAUSE_SIMD */
347 1, /* OMP_CLAUSE_HINT */
348 0, /* OMP_CLAUSE_DEFAULTMAP */
349 0, /* OMP_CLAUSE_ORDER */
350 0, /* OMP_CLAUSE_BIND */
351 1, /* OMP_CLAUSE__SIMDUID_ */
352 0, /* OMP_CLAUSE__SIMT_ */
353 0, /* OMP_CLAUSE_INDEPENDENT */
354 1, /* OMP_CLAUSE_WORKER */
355 1, /* OMP_CLAUSE_VECTOR */
356 1, /* OMP_CLAUSE_NUM_GANGS */
357 1, /* OMP_CLAUSE_NUM_WORKERS */
358 1, /* OMP_CLAUSE_VECTOR_LENGTH */
359 3, /* OMP_CLAUSE_TILE */
360 0, /* OMP_CLAUSE_IF_PRESENT */
361 0, /* OMP_CLAUSE_FINALIZE */
364 const char * const omp_clause_code_name
[] =
451 /* Return the tree node structure used by tree code CODE. */
453 static inline enum tree_node_structure_enum
454 tree_node_structure_for_code (enum tree_code code
)
456 switch (TREE_CODE_CLASS (code
))
458 case tcc_declaration
:
461 case CONST_DECL
: return TS_CONST_DECL
;
462 case DEBUG_EXPR_DECL
: return TS_DECL_WRTL
;
463 case FIELD_DECL
: return TS_FIELD_DECL
;
464 case FUNCTION_DECL
: return TS_FUNCTION_DECL
;
465 case LABEL_DECL
: return TS_LABEL_DECL
;
466 case PARM_DECL
: return TS_PARM_DECL
;
467 case RESULT_DECL
: return TS_RESULT_DECL
;
468 case TRANSLATION_UNIT_DECL
: return TS_TRANSLATION_UNIT_DECL
;
469 case TYPE_DECL
: return TS_TYPE_DECL
;
470 case VAR_DECL
: return TS_VAR_DECL
;
471 default: return TS_DECL_NON_COMMON
;
474 case tcc_type
: return TS_TYPE_NON_COMMON
;
482 case tcc_vl_exp
: return TS_EXP
;
484 default: /* tcc_constant and tcc_exceptional */
490 /* tcc_constant cases. */
491 case COMPLEX_CST
: return TS_COMPLEX
;
492 case FIXED_CST
: return TS_FIXED_CST
;
493 case INTEGER_CST
: return TS_INT_CST
;
494 case POLY_INT_CST
: return TS_POLY_INT_CST
;
495 case REAL_CST
: return TS_REAL_CST
;
496 case STRING_CST
: return TS_STRING
;
497 case VECTOR_CST
: return TS_VECTOR
;
498 case VOID_CST
: return TS_TYPED
;
500 /* tcc_exceptional cases. */
501 case BLOCK
: return TS_BLOCK
;
502 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
503 case ERROR_MARK
: return TS_COMMON
;
504 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
505 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
506 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
507 case PLACEHOLDER_EXPR
: return TS_COMMON
;
508 case SSA_NAME
: return TS_SSA_NAME
;
509 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
510 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
511 case TREE_BINFO
: return TS_BINFO
;
512 case TREE_LIST
: return TS_LIST
;
513 case TREE_VEC
: return TS_VEC
;
521 /* Initialize tree_contains_struct to describe the hierarchy of tree
525 initialize_tree_contains_struct (void)
529 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
532 enum tree_node_structure_enum ts_code
;
534 code
= (enum tree_code
) i
;
535 ts_code
= tree_node_structure_for_code (code
);
537 /* Mark the TS structure itself. */
538 tree_contains_struct
[code
][ts_code
] = 1;
540 /* Mark all the structures that TS is derived from. */
545 case TS_OPTIMIZATION
:
546 case TS_TARGET_OPTION
:
552 case TS_POLY_INT_CST
:
561 case TS_STATEMENT_LIST
:
562 MARK_TS_TYPED (code
);
566 case TS_DECL_MINIMAL
:
572 MARK_TS_COMMON (code
);
575 case TS_TYPE_WITH_LANG_SPECIFIC
:
576 MARK_TS_TYPE_COMMON (code
);
579 case TS_TYPE_NON_COMMON
:
580 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
584 MARK_TS_DECL_MINIMAL (code
);
589 MARK_TS_DECL_COMMON (code
);
592 case TS_DECL_NON_COMMON
:
593 MARK_TS_DECL_WITH_VIS (code
);
596 case TS_DECL_WITH_VIS
:
600 MARK_TS_DECL_WRTL (code
);
604 MARK_TS_DECL_COMMON (code
);
608 MARK_TS_DECL_WITH_VIS (code
);
612 case TS_FUNCTION_DECL
:
613 MARK_TS_DECL_NON_COMMON (code
);
616 case TS_TRANSLATION_UNIT_DECL
:
617 MARK_TS_DECL_COMMON (code
);
625 /* Basic consistency checks for attributes used in fold. */
626 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
627 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
628 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
629 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
630 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
631 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
632 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
633 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
634 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
635 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
636 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
637 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
638 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
639 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
640 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
641 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
642 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
643 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
644 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
645 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
646 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
647 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
648 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
649 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
650 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
651 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
652 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
653 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
654 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
655 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
656 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
657 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
658 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
659 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
660 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
661 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
662 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
663 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
664 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
665 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
674 /* Initialize the hash table of types. */
676 = hash_table
<type_cache_hasher
>::create_ggc (TYPE_HASH_INITIAL_SIZE
);
679 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
682 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
684 int_cst_hash_table
= hash_table
<int_cst_hasher
>::create_ggc (1024);
686 poly_int_cst_hash_table
= hash_table
<poly_int_cst_hasher
>::create_ggc (64);
688 int_cst_node
= make_int_cst (1, 1);
690 cl_option_hash_table
= hash_table
<cl_option_hasher
>::create_ggc (64);
692 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
693 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
695 /* Initialize the tree_contains_struct array. */
696 initialize_tree_contains_struct ();
697 lang_hooks
.init_ts ();
701 /* The name of the object as the assembler will see it (but before any
702 translations made by ASM_OUTPUT_LABELREF). Often this is the same
703 as DECL_NAME. It is an IDENTIFIER_NODE. */
705 decl_assembler_name (tree decl
)
707 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
708 lang_hooks
.set_decl_assembler_name (decl
);
709 return DECL_ASSEMBLER_NAME_RAW (decl
);
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713 (either of which may be NULL). Inform the FE, if this changes the
717 overwrite_decl_assembler_name (tree decl
, tree name
)
719 if (DECL_ASSEMBLER_NAME_RAW (decl
) != name
)
720 lang_hooks
.overwrite_decl_assembler_name (decl
, name
);
723 /* Return true if DECL may need an assembler name to be set. */
726 need_assembler_name_p (tree decl
)
728 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
729 Rule merging. This makes type_odr_p to return true on those types during
730 LTO and by comparing the mangled name, we can say what types are intended
731 to be equivalent across compilation unit.
733 We do not store names of type_in_anonymous_namespace_p.
735 Record, union and enumeration type have linkage that allows use
736 to check type_in_anonymous_namespace_p. We do not mangle compound types
737 that always can be compared structurally.
739 Similarly for builtin types, we compare properties of their main variant.
740 A special case are integer types where mangling do make differences
741 between char/signed char/unsigned char etc. Storing name for these makes
742 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
743 See cp/mangle.c:write_builtin_type for details. */
745 if (TREE_CODE (decl
) == TYPE_DECL
)
748 && decl
== TYPE_NAME (TREE_TYPE (decl
))
749 && TYPE_MAIN_VARIANT (TREE_TYPE (decl
)) == TREE_TYPE (decl
)
750 && !TYPE_ARTIFICIAL (TREE_TYPE (decl
))
751 && ((TREE_CODE (TREE_TYPE (decl
)) != RECORD_TYPE
752 && TREE_CODE (TREE_TYPE (decl
)) != UNION_TYPE
)
753 || TYPE_CXX_ODR_P (TREE_TYPE (decl
)))
754 && (type_with_linkage_p (TREE_TYPE (decl
))
755 || TREE_CODE (TREE_TYPE (decl
)) == INTEGER_TYPE
)
756 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
757 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
760 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
761 if (!VAR_OR_FUNCTION_DECL_P (decl
))
764 /* If DECL already has its assembler name set, it does not need a
766 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
767 || DECL_ASSEMBLER_NAME_SET_P (decl
))
770 /* Abstract decls do not need an assembler name. */
771 if (DECL_ABSTRACT_P (decl
))
774 /* For VAR_DECLs, only static, public and external symbols need an
777 && !TREE_STATIC (decl
)
778 && !TREE_PUBLIC (decl
)
779 && !DECL_EXTERNAL (decl
))
782 if (TREE_CODE (decl
) == FUNCTION_DECL
)
784 /* Do not set assembler name on builtins. Allow RTL expansion to
785 decide whether to expand inline or via a regular call. */
786 if (fndecl_built_in_p (decl
)
787 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
790 /* Functions represented in the callgraph need an assembler name. */
791 if (cgraph_node::get (decl
) != NULL
)
794 /* Unused and not public functions don't need an assembler name. */
795 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
802 /* If T needs an assembler name, have one created for it. */
805 assign_assembler_name_if_needed (tree t
)
807 if (need_assembler_name_p (t
))
809 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
810 diagnostics that use input_location to show locus
811 information. The problem here is that, at this point,
812 input_location is generally anchored to the end of the file
813 (since the parser is long gone), so we don't have a good
814 position to pin it to.
816 To alleviate this problem, this uses the location of T's
817 declaration. Examples of this are
818 testsuite/g++.dg/template/cond2.C and
819 testsuite/g++.dg/template/pr35240.C. */
820 location_t saved_location
= input_location
;
821 input_location
= DECL_SOURCE_LOCATION (t
);
823 decl_assembler_name (t
);
825 input_location
= saved_location
;
829 /* When the target supports COMDAT groups, this indicates which group the
830 DECL is associated with. This can be either an IDENTIFIER_NODE or a
831 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
833 decl_comdat_group (const_tree node
)
835 struct symtab_node
*snode
= symtab_node::get (node
);
838 return snode
->get_comdat_group ();
841 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
843 decl_comdat_group_id (const_tree node
)
845 struct symtab_node
*snode
= symtab_node::get (node
);
848 return snode
->get_comdat_group_id ();
851 /* When the target supports named section, return its name as IDENTIFIER_NODE
852 or NULL if it is in no section. */
854 decl_section_name (const_tree node
)
856 struct symtab_node
*snode
= symtab_node::get (node
);
859 return snode
->get_section ();
862 /* Set section name of NODE to VALUE (that is expected to be
865 set_decl_section_name (tree node
, const char *value
)
867 struct symtab_node
*snode
;
871 snode
= symtab_node::get (node
);
875 else if (VAR_P (node
))
876 snode
= varpool_node::get_create (node
);
878 snode
= cgraph_node::get_create (node
);
879 snode
->set_section (value
);
882 /* Set section name of NODE to match the section name of OTHER.
884 set_decl_section_name (decl, other) is equivalent to
885 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
888 set_decl_section_name (tree decl
, const_tree other
)
890 struct symtab_node
*other_node
= symtab_node::get (other
);
893 struct symtab_node
*decl_node
;
895 decl_node
= varpool_node::get_create (decl
);
897 decl_node
= cgraph_node::get_create (decl
);
898 decl_node
->set_section (*other_node
);
902 struct symtab_node
*decl_node
= symtab_node::get (decl
);
905 decl_node
->set_section (NULL
);
909 /* Return TLS model of a variable NODE. */
911 decl_tls_model (const_tree node
)
913 struct varpool_node
*snode
= varpool_node::get (node
);
915 return TLS_MODEL_NONE
;
916 return snode
->tls_model
;
919 /* Set TLS model of variable NODE to MODEL. */
921 set_decl_tls_model (tree node
, enum tls_model model
)
923 struct varpool_node
*vnode
;
925 if (model
== TLS_MODEL_NONE
)
927 vnode
= varpool_node::get (node
);
932 vnode
= varpool_node::get_create (node
);
933 vnode
->tls_model
= model
;
936 /* Compute the number of bytes occupied by a tree with code CODE.
937 This function cannot be used for nodes that have variable sizes,
938 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
940 tree_code_size (enum tree_code code
)
942 switch (TREE_CODE_CLASS (code
))
944 case tcc_declaration
: /* A decl node */
947 case FIELD_DECL
: return sizeof (tree_field_decl
);
948 case PARM_DECL
: return sizeof (tree_parm_decl
);
949 case VAR_DECL
: return sizeof (tree_var_decl
);
950 case LABEL_DECL
: return sizeof (tree_label_decl
);
951 case RESULT_DECL
: return sizeof (tree_result_decl
);
952 case CONST_DECL
: return sizeof (tree_const_decl
);
953 case TYPE_DECL
: return sizeof (tree_type_decl
);
954 case FUNCTION_DECL
: return sizeof (tree_function_decl
);
955 case DEBUG_EXPR_DECL
: return sizeof (tree_decl_with_rtl
);
956 case TRANSLATION_UNIT_DECL
: return sizeof (tree_translation_unit_decl
);
959 case NAMELIST_DECL
: return sizeof (tree_decl_non_common
);
961 gcc_checking_assert (code
>= NUM_TREE_CODES
);
962 return lang_hooks
.tree_size (code
);
965 case tcc_type
: /* a type node */
977 case FIXED_POINT_TYPE
:
983 case QUAL_UNION_TYPE
:
987 case LANG_TYPE
: return sizeof (tree_type_non_common
);
989 gcc_checking_assert (code
>= NUM_TREE_CODES
);
990 return lang_hooks
.tree_size (code
);
993 case tcc_reference
: /* a reference */
994 case tcc_expression
: /* an expression */
995 case tcc_statement
: /* an expression with side effects */
996 case tcc_comparison
: /* a comparison expression */
997 case tcc_unary
: /* a unary arithmetic expression */
998 case tcc_binary
: /* a binary arithmetic expression */
999 return (sizeof (struct tree_exp
)
1000 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
1002 case tcc_constant
: /* a constant */
1005 case VOID_CST
: return sizeof (tree_typed
);
1006 case INTEGER_CST
: gcc_unreachable ();
1007 case POLY_INT_CST
: return sizeof (tree_poly_int_cst
);
1008 case REAL_CST
: return sizeof (tree_real_cst
);
1009 case FIXED_CST
: return sizeof (tree_fixed_cst
);
1010 case COMPLEX_CST
: return sizeof (tree_complex
);
1011 case VECTOR_CST
: gcc_unreachable ();
1012 case STRING_CST
: gcc_unreachable ();
1014 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1015 return lang_hooks
.tree_size (code
);
1018 case tcc_exceptional
: /* something random, like an identifier. */
1021 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
1022 case TREE_LIST
: return sizeof (tree_list
);
1025 case PLACEHOLDER_EXPR
: return sizeof (tree_common
);
1027 case TREE_VEC
: gcc_unreachable ();
1028 case OMP_CLAUSE
: gcc_unreachable ();
1030 case SSA_NAME
: return sizeof (tree_ssa_name
);
1032 case STATEMENT_LIST
: return sizeof (tree_statement_list
);
1033 case BLOCK
: return sizeof (struct tree_block
);
1034 case CONSTRUCTOR
: return sizeof (tree_constructor
);
1035 case OPTIMIZATION_NODE
: return sizeof (tree_optimization_option
);
1036 case TARGET_OPTION_NODE
: return sizeof (tree_target_option
);
1039 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1040 return lang_hooks
.tree_size (code
);
1048 /* Compute the number of bytes occupied by NODE. This routine only
1049 looks at TREE_CODE, except for those nodes that have variable sizes. */
1051 tree_size (const_tree node
)
1053 const enum tree_code code
= TREE_CODE (node
);
1057 return (sizeof (struct tree_int_cst
)
1058 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
1061 return (offsetof (struct tree_binfo
, base_binfos
)
1063 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
1066 return (sizeof (struct tree_vec
)
1067 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
1070 return (sizeof (struct tree_vector
)
1071 + (vector_cst_encoded_nelts (node
) - 1) * sizeof (tree
));
1074 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
1077 return (sizeof (struct tree_omp_clause
)
1078 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
1082 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
1083 return (sizeof (struct tree_exp
)
1084 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
1086 return tree_code_size (code
);
1090 /* Return tree node kind based on tree CODE. */
1092 static tree_node_kind
1093 get_stats_node_kind (enum tree_code code
)
1095 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1099 case tcc_declaration
: /* A decl node */
1101 case tcc_type
: /* a type node */
1103 case tcc_statement
: /* an expression with side effects */
1105 case tcc_reference
: /* a reference */
1107 case tcc_expression
: /* an expression */
1108 case tcc_comparison
: /* a comparison expression */
1109 case tcc_unary
: /* a unary arithmetic expression */
1110 case tcc_binary
: /* a binary arithmetic expression */
1112 case tcc_constant
: /* a constant */
1114 case tcc_exceptional
: /* something random, like an identifier. */
1117 case IDENTIFIER_NODE
:
1124 return ssa_name_kind
;
1130 return omp_clause_kind
;
1142 /* Record interesting allocation statistics for a tree node with CODE
1146 record_node_allocation_statistics (enum tree_code code
, size_t length
)
1148 if (!GATHER_STATISTICS
)
1151 tree_node_kind kind
= get_stats_node_kind (code
);
1153 tree_code_counts
[(int) code
]++;
1154 tree_node_counts
[(int) kind
]++;
1155 tree_node_sizes
[(int) kind
] += length
;
1158 /* Allocate and return a new UID from the DECL_UID namespace. */
1161 allocate_decl_uid (void)
1163 return next_decl_uid
++;
1166 /* Return a newly allocated node of code CODE. For decl and type
1167 nodes, some other fields are initialized. The rest of the node is
1168 initialized to zero. This function cannot be used for TREE_VEC,
1169 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1172 Achoo! I got a code in the node. */
1175 make_node (enum tree_code code MEM_STAT_DECL
)
1178 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1179 size_t length
= tree_code_size (code
);
1181 record_node_allocation_statistics (code
, length
);
1183 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1184 TREE_SET_CODE (t
, code
);
1189 if (code
!= DEBUG_BEGIN_STMT
)
1190 TREE_SIDE_EFFECTS (t
) = 1;
1193 case tcc_declaration
:
1194 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
1196 if (code
== FUNCTION_DECL
)
1198 SET_DECL_ALIGN (t
, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
));
1199 SET_DECL_MODE (t
, FUNCTION_MODE
);
1202 SET_DECL_ALIGN (t
, 1);
1204 DECL_SOURCE_LOCATION (t
) = input_location
;
1205 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
1206 DECL_UID (t
) = --next_debug_decl_uid
;
1209 DECL_UID (t
) = allocate_decl_uid ();
1210 SET_DECL_PT_UID (t
, -1);
1212 if (TREE_CODE (t
) == LABEL_DECL
)
1213 LABEL_DECL_UID (t
) = -1;
1218 TYPE_UID (t
) = next_type_uid
++;
1219 SET_TYPE_ALIGN (t
, BITS_PER_UNIT
);
1220 TYPE_USER_ALIGN (t
) = 0;
1221 TYPE_MAIN_VARIANT (t
) = t
;
1222 TYPE_CANONICAL (t
) = t
;
1224 /* Default to no attributes for type, but let target change that. */
1225 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
1226 targetm
.set_default_type_attributes (t
);
1228 /* We have not yet computed the alias set for this type. */
1229 TYPE_ALIAS_SET (t
) = -1;
1233 TREE_CONSTANT (t
) = 1;
1236 case tcc_expression
:
1242 case PREDECREMENT_EXPR
:
1243 case PREINCREMENT_EXPR
:
1244 case POSTDECREMENT_EXPR
:
1245 case POSTINCREMENT_EXPR
:
1246 /* All of these have side-effects, no matter what their
1248 TREE_SIDE_EFFECTS (t
) = 1;
1256 case tcc_exceptional
:
1259 case TARGET_OPTION_NODE
:
1260 TREE_TARGET_OPTION(t
)
1261 = ggc_cleared_alloc
<struct cl_target_option
> ();
1264 case OPTIMIZATION_NODE
:
1265 TREE_OPTIMIZATION (t
)
1266 = ggc_cleared_alloc
<struct cl_optimization
> ();
1275 /* Other classes need no special treatment. */
1282 /* Free tree node. */
1285 free_node (tree node
)
1287 enum tree_code code
= TREE_CODE (node
);
1288 if (GATHER_STATISTICS
)
1290 enum tree_node_kind kind
= get_stats_node_kind (code
);
1292 gcc_checking_assert (tree_code_counts
[(int) TREE_CODE (node
)] != 0);
1293 gcc_checking_assert (tree_node_counts
[(int) kind
] != 0);
1294 gcc_checking_assert (tree_node_sizes
[(int) kind
] >= tree_size (node
));
1296 tree_code_counts
[(int) TREE_CODE (node
)]--;
1297 tree_node_counts
[(int) kind
]--;
1298 tree_node_sizes
[(int) kind
] -= tree_size (node
);
1300 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
1301 vec_free (CONSTRUCTOR_ELTS (node
));
1302 else if (code
== BLOCK
)
1303 vec_free (BLOCK_NONLOCALIZED_VARS (node
));
1304 else if (code
== TREE_BINFO
)
1305 vec_free (BINFO_BASE_ACCESSES (node
));
1306 else if (code
== OPTIMIZATION_NODE
)
1307 cl_optimization_option_free (TREE_OPTIMIZATION (node
));
1308 else if (code
== TARGET_OPTION_NODE
)
1309 cl_target_option_free (TREE_TARGET_OPTION (node
));
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs have their own copy machinery (tree-iterator).  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Give the copy its own uid; debug decls count downwards.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not associated with the original's body or
	     symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the embedded option blob; the copy must not share it.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1402 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1403 For example, this can copy a list made of TREE_LIST nodes. */
1406 copy_list (tree list
)
1414 head
= prev
= copy_node (list
);
1415 next
= TREE_CHAIN (list
);
1418 TREE_CHAIN (prev
) = copy_node (next
);
1419 prev
= TREE_CHAIN (prev
);
1420 next
= TREE_CHAIN (next
);
/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set: one more element than the precision requires,
     so the value reads as non-negative when widened.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}
/* Return a new INTEGER_CST with value CST and type TYPE.  The node is
   freshly allocated (not shared); callers that want sharing go through
   wide_int_to_tree.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Signed value is negative but the type is unsigned: fill the
	 extension elements with all-ones, masked in the topmost one.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Zero-extend the top element so the stored representation is
	 canonical for the unsigned type.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
/* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.
   The node is freshly allocated; sharing is handled by
   build_poly_int_cst.  */

static tree
build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
			CXX_MEM_STAT_INFO)
{
  size_t length = sizeof (struct tree_poly_int_cst);
  record_node_allocation_statistics (POLY_INT_CST, length);

  tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, POLY_INT_CST);
  TREE_CONSTANT (t) = 1;
  TREE_TYPE (t) = type;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    POLY_INT_CST_COEFF (t, i) = coeffs[i];
  return t;
}
/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst (tree type, poly_int64 cst)
{
  /* Support legacy code that passes a null type.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}

/* Create a constant tree that contains CST zero-extended to TYPE.  */

tree
build_int_cstu (tree type, poly_uint64 cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create a constant tree that contains CST sign-extended to TYPE.
   Unlike build_int_cst, TYPE must be non-null.  */

tree
build_int_cst_type (tree type, poly_int64 cst)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}

/* Construct a tree in type TYPE with the value given by CST.  Signedness
   of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Non-constant poly value: mark every coefficient as
		 overflowed as well as the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  The hash combines the
   type's uid with every stored HOST_WIDE_INT element.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);

  return code;
}

/* Return nonzero if the value represented by X (an INTEGER_CST tree node)
   is the same as that given by Y, which is also an INTEGER_CST: same
   type, same element counts, and same element values.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}
/* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
   SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
   number of slots that can be cached for the type.  Returns the cached
   (shared) INTEGER_CST, creating it on first use.  */

static tree
cache_wide_int_in_type_cache (tree type, const wide_int &cst,
			      int slot, int max_slots)
{
  gcc_checking_assert (slot >= 0);
  /* Initialize cache.  */
  if (!TYPE_CACHED_VALUES_P (type))
    {
      TYPE_CACHED_VALUES_P (type) = 1;
      TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
    }
  tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
  if (!t)
    {
      /* Create a new shared int.  */
      t = build_new_int_cst (type, cst);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
    }
  return t;
}
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;		/* Slot in the per-type cache, or -1.  */
  int limit = 0;	/* Size of the per-type cache for TYPE.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	ggc_free (nt);
    }

  return t;
}
/* Hash a POLY_INT_CST T: combine the type's uid with each coefficient.  */

hashval_t
poly_int_cst_hasher::hash (tree t)
{
  inchash::hash hstate;

  hstate.add_int (TYPE_UID (TREE_TYPE (t)));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));

  return hstate.end ();
}

/* Return true if the POLY_INT_CST X matches Y, a (type, poly_wide_int *)
   pair: same type and coefficient-wise equal values.  */

bool
poly_int_cst_hasher::equal (tree x, const compare_type &y)
{
  if (TREE_TYPE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
      return false;
  return true;
}
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  The result is shared through
   poly_int_cst_hash_table.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  if (*slot == NULL_TREE)
    {
      /* Not cached yet: build the (shared) coefficient trees and the
	 POLY_INT_CST itself.  */
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1854 /* Create a constant tree with value VALUE in type TYPE. */
1857 wide_int_to_tree (tree type
, const poly_wide_int_ref
&value
)
1859 if (value
.is_constant ())
1860 return wide_int_to_tree_1 (type
, value
.coeffs
[0]);
1861 return build_poly_int_cst (type
, value
);
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache [0, N).  */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache [-1, N).  */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members, so do not cache plain integers here.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
/* Builds an integer constant in TYPE such that lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
					   TYPE_PRECISION (type)));
}

/* Checks that X is integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  return (TREE_CODE (x) == INTEGER_CST
	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
}
/* Build a newly constructed VECTOR_CST with the given values of
   (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.
   The element slots are zero-initialized; TREE_TYPE is not set.  */

static tree
make_vector (unsigned log2_npatterns,
	     unsigned int nelts_per_pattern MEM_STAT_DECL)
{
  tree t;
  gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
  unsigned npatterns = 1 << log2_npatterns;
  unsigned encoded_nelts = npatterns * nelts_per_pattern;
  /* struct tree_vector already contains room for one element, hence
     the "- 1" in the trailing-array size.  */
  unsigned length = (sizeof (struct tree_vector)
		     + (encoded_nelts - 1) * sizeof (tree));

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;
  VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
  VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;

  return t;
}
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  Nested VECTOR_CST
   elements are flattened; missing trailing elements are zero-filled.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Pad out short constructors with zeros of the element type.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
/* Build a vector of type VECTYPE where all the elements are SCs.  */

tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A constant splat is encoded as a single-pattern VECTOR_CST.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    /* Variable-length vectors need a VEC_DUPLICATE_EXPR.  */
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Non-constant scalar: build a CONSTRUCTOR repeating SC.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
2112 /* If TYPE is not a vector type, just return SC, otherwise return
2113 build_vector_from_val (TYPE, SC). */
2116 build_uniform_cst (tree type
, tree sc
)
2118 if (!VECTOR_TYPE_P (type
))
2121 return build_vector_from_val (type
, sc
);
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  if (integer_zerop (step))
    /* Zero step degenerates to a uniform vector.  */
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      /* Encode the series as one pattern with three elements; the
	 builder extrapolates the arithmetic progression.  */
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      /* Switch to an unsigned integer element of the same width.  */
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* One pattern, three elements: enough for the builder to encode the
     whole arithmetic series.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* COUNT patterns of two elements each cover the A...AB...B layout.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */

void
recompute_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = true;
  bool side_effects_p = false;
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
	 the usual case is to scan all the elements.  Hence a single
	 loop for both const and side effects, rather than one loop
	 each (with early outs).  */
      if (!TREE_CONSTANT (val))
	constant_p = false;
      if (TREE_SIDE_EFFECTS (val))
	side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;
}
/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C; aborts via internal_error on a mismatch.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      if (constant_p && !TREE_CONSTANT (val))
	internal_error ("non-constant element in constant CONSTRUCTOR");
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  Takes ownership of VALS; the
   node's TREE_CONSTANT/TREE_SIDE_EFFECTS are derived from the
   elements.  */

tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
{
  tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  recompute_constructor_flags (c);

  return c;
}
/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */

tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS (a chain of TREE_LIST nodes:
   TREE_PURPOSE supplies the index, TREE_VALUE the value).  */

tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
   fields in the constructor remain null.  */

tree
build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
{
  vec<constructor_elt, va_gc> *v = NULL;

  for (tree t : vals)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs in the variadic
   arguments (each pair is two tree arguments: index, then value).  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}
2321 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2324 build_clobber (tree type
)
2326 tree clobber
= build_constructor (type
, NULL
);
2327 TREE_THIS_VOLATILE (clobber
) = true;
/* Return a new FIXED_CST node whose type is TYPE and value is F.
   F is copied into GC-managed storage owned by the node.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}
/* Return a new REAL_CST node whose type is TYPE and value is D.
   D is copied into GC-managed storage owned by the node.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}

/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}
/* Return a REAL_VALUE_TYPE whose type is TYPE
   and whose value is the integer value of the INTEGER_CST node I.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}

/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.
   The overflow flag of I is propagated to the result.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}

/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value I which has sign SGN.  */

tree
build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, TYPE_MODE (type), i, sgn);
  return build_real (type, d);
}
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  tree s = (tree) ggc_internal_alloc (size);

  /* Only the header is zeroed; the payload is filled below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  /* Always NUL-terminate the stored bytes, beyond LEN.  */
  s->string.str[len] = '\0';

  return s;
}
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  gcc_assert (CONSTANT_CLASS_P (real));
  gcc_assert (CONSTANT_CLASS_P (imag));

  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  /* Either part overflowing marks the whole constant as overflowed.  */
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}

/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
2489 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2490 element is set to 1. In particular, this is 1 + i for complex types. */
2493 build_each_one_cst (tree type
)
2495 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2497 tree scalar
= build_one_cst (TREE_TYPE (type
));
2498 return build_complex (type
, scalar
, scalar
);
2501 return build_one_cst (type
);
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The multiplicative identity of complex numbers is 1 + 0i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2542 /* Return an integer of type TYPE containing all 1's in as much precision as
2543 it contains, or a complex or vector whose subparts are such integers. */
2546 build_all_ones_cst (tree type
)
2548 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2550 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
2551 return build_complex (type
, scalar
, scalar
);
2554 return build_minus_one_cst (type
);
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* -1 + 0i for complex types.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2597 /* Build 0 constant of type TYPE. This is used by constructor folding
2598 and thus the constant should be represented in memory by
2602 build_zero_cst (tree type
)
2604 switch (TREE_CODE (type
))
2606 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2607 case POINTER_TYPE
: case REFERENCE_TYPE
:
2608 case OFFSET_TYPE
: case NULLPTR_TYPE
:
2609 return build_int_cst (type
, 0);
2612 return build_real (type
, dconst0
);
2614 case FIXED_POINT_TYPE
:
2615 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
2619 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2621 return build_vector_from_val (type
, scalar
);
2626 tree zero
= build_zero_cst (TREE_TYPE (type
));
2628 return build_complex (type
, zero
, zero
);
2632 if (!AGGREGATE_TYPE_P (type
))
2633 return fold_convert (type
, integer_zero_node
);
2634 return build_constructor (type
, NULL
);
2639 /* Build a BINFO with LEN language slots. */
2642 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL
)
2645 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2646 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2648 record_node_allocation_statistics (TREE_BINFO
, length
);
2650 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2652 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2654 TREE_SET_CODE (t
, TREE_BINFO
);
2656 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2661 /* Create a CASE_LABEL_EXPR tree node and return it. */
2664 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2666 tree t
= make_node (CASE_LABEL_EXPR
);
2668 TREE_TYPE (t
) = void_type_node
;
2669 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2671 CASE_LOW (t
) = low_value
;
2672 CASE_HIGH (t
) = high_value
;
2673 CASE_LABEL (t
) = label_decl
;
2674 CASE_CHAIN (t
) = NULL_TREE
;
2679 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2680 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2681 The latter determines the length of the HOST_WIDE_INT vector. */
2684 make_int_cst (int len
, int ext_len MEM_STAT_DECL
)
2687 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2688 + sizeof (struct tree_int_cst
));
2691 record_node_allocation_statistics (INTEGER_CST
, length
);
2693 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2695 TREE_SET_CODE (t
, INTEGER_CST
);
2696 TREE_INT_CST_NUNITS (t
) = len
;
2697 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2698 /* to_offset can only be applied to trees that are offset_int-sized
2699 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2700 must be exactly the precision of offset_int and so LEN is correct. */
2701 if (ext_len
<= OFFSET_INT_ELTS
)
2702 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2704 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2706 TREE_CONSTANT (t
) = 1;
2711 /* Build a newly constructed TREE_VEC node of length LEN. */
2714 make_tree_vec (int len MEM_STAT_DECL
)
2717 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2719 record_node_allocation_statistics (TREE_VEC
, length
);
2721 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2723 TREE_SET_CODE (t
, TREE_VEC
);
2724 TREE_VEC_LENGTH (t
) = len
;
2729 /* Grow a TREE_VEC node to new length LEN. */
2732 grow_tree_vec (tree v
, int len MEM_STAT_DECL
)
2734 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2736 int oldlen
= TREE_VEC_LENGTH (v
);
2737 gcc_assert (len
> oldlen
);
2739 size_t oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2740 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2742 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2744 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2746 TREE_VEC_LENGTH (v
) = len
;
2751 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2752 fixed, and scalar, complex or vector. */
2755 zerop (const_tree expr
)
2757 return (integer_zerop (expr
)
2758 || real_zerop (expr
)
2759 || fixed_zerop (expr
));
2762 /* Return 1 if EXPR is the integer constant zero or a complex constant
2763 of zero, or a location wrapper for such a constant. */
2766 integer_zerop (const_tree expr
)
2768 STRIP_ANY_LOCATION_WRAPPER (expr
);
2770 switch (TREE_CODE (expr
))
2773 return wi::to_wide (expr
) == 0;
2775 return (integer_zerop (TREE_REALPART (expr
))
2776 && integer_zerop (TREE_IMAGPART (expr
)));
2778 return (VECTOR_CST_NPATTERNS (expr
) == 1
2779 && VECTOR_CST_DUPLICATE_P (expr
)
2780 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2786 /* Return 1 if EXPR is the integer constant one or the corresponding
2787 complex constant, or a location wrapper for such a constant. */
2790 integer_onep (const_tree expr
)
2792 STRIP_ANY_LOCATION_WRAPPER (expr
);
2794 switch (TREE_CODE (expr
))
2797 return wi::eq_p (wi::to_widest (expr
), 1);
2799 return (integer_onep (TREE_REALPART (expr
))
2800 && integer_zerop (TREE_IMAGPART (expr
)));
2802 return (VECTOR_CST_NPATTERNS (expr
) == 1
2803 && VECTOR_CST_DUPLICATE_P (expr
)
2804 && integer_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2810 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2811 return 1 if every piece is the integer constant one.
2812 Also return 1 for location wrappers for such a constant. */
2815 integer_each_onep (const_tree expr
)
2817 STRIP_ANY_LOCATION_WRAPPER (expr
);
2819 if (TREE_CODE (expr
) == COMPLEX_CST
)
2820 return (integer_onep (TREE_REALPART (expr
))
2821 && integer_onep (TREE_IMAGPART (expr
)));
2823 return integer_onep (expr
);
2826 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2827 it contains, or a complex or vector whose subparts are such integers,
2828 or a location wrapper for such a constant. */
2831 integer_all_onesp (const_tree expr
)
2833 STRIP_ANY_LOCATION_WRAPPER (expr
);
2835 if (TREE_CODE (expr
) == COMPLEX_CST
2836 && integer_all_onesp (TREE_REALPART (expr
))
2837 && integer_all_onesp (TREE_IMAGPART (expr
)))
2840 else if (TREE_CODE (expr
) == VECTOR_CST
)
2841 return (VECTOR_CST_NPATTERNS (expr
) == 1
2842 && VECTOR_CST_DUPLICATE_P (expr
)
2843 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2845 else if (TREE_CODE (expr
) != INTEGER_CST
)
2848 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
)
2849 == wi::to_wide (expr
));
2852 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2853 for such a constant. */
2856 integer_minus_onep (const_tree expr
)
2858 STRIP_ANY_LOCATION_WRAPPER (expr
);
2860 if (TREE_CODE (expr
) == COMPLEX_CST
)
2861 return (integer_all_onesp (TREE_REALPART (expr
))
2862 && integer_zerop (TREE_IMAGPART (expr
)));
2864 return integer_all_onesp (expr
);
2867 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2868 one bit on), or a location wrapper for such a constant. */
2871 integer_pow2p (const_tree expr
)
2873 STRIP_ANY_LOCATION_WRAPPER (expr
);
2875 if (TREE_CODE (expr
) == COMPLEX_CST
2876 && integer_pow2p (TREE_REALPART (expr
))
2877 && integer_zerop (TREE_IMAGPART (expr
)))
2880 if (TREE_CODE (expr
) != INTEGER_CST
)
2883 return wi::popcount (wi::to_wide (expr
)) == 1;
2886 /* Return 1 if EXPR is an integer constant other than zero or a
2887 complex constant other than zero, or a location wrapper for such a
2891 integer_nonzerop (const_tree expr
)
2893 STRIP_ANY_LOCATION_WRAPPER (expr
);
2895 return ((TREE_CODE (expr
) == INTEGER_CST
2896 && wi::to_wide (expr
) != 0)
2897 || (TREE_CODE (expr
) == COMPLEX_CST
2898 && (integer_nonzerop (TREE_REALPART (expr
))
2899 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2902 /* Return 1 if EXPR is the integer constant one. For vector,
2903 return 1 if every piece is the integer constant minus one
2904 (representing the value TRUE).
2905 Also return 1 for location wrappers for such a constant. */
2908 integer_truep (const_tree expr
)
2910 STRIP_ANY_LOCATION_WRAPPER (expr
);
2912 if (TREE_CODE (expr
) == VECTOR_CST
)
2913 return integer_all_onesp (expr
);
2914 return integer_onep (expr
);
2917 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2918 for such a constant. */
2921 fixed_zerop (const_tree expr
)
2923 STRIP_ANY_LOCATION_WRAPPER (expr
);
2925 return (TREE_CODE (expr
) == FIXED_CST
2926 && TREE_FIXED_CST (expr
).data
.is_zero ());
2929 /* Return the power of two represented by a tree node known to be a
2933 tree_log2 (const_tree expr
)
2935 if (TREE_CODE (expr
) == COMPLEX_CST
)
2936 return tree_log2 (TREE_REALPART (expr
));
2938 return wi::exact_log2 (wi::to_wide (expr
));
2941 /* Similar, but return the largest integer Y such that 2 ** Y is less
2942 than or equal to EXPR. */
2945 tree_floor_log2 (const_tree expr
)
2947 if (TREE_CODE (expr
) == COMPLEX_CST
)
2948 return tree_log2 (TREE_REALPART (expr
));
2950 return wi::floor_log2 (wi::to_wide (expr
));
2953 /* Return number of known trailing zero bits in EXPR, or, if the value of
2954 EXPR is known to be zero, the precision of it's type. */
2957 tree_ctz (const_tree expr
)
2959 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2960 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2963 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2964 switch (TREE_CODE (expr
))
2967 ret1
= wi::ctz (wi::to_wide (expr
));
2968 return MIN (ret1
, prec
);
2970 ret1
= wi::ctz (get_nonzero_bits (expr
));
2971 return MIN (ret1
, prec
);
2978 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2981 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2982 return MIN (ret1
, ret2
);
2983 case POINTER_PLUS_EXPR
:
2984 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2985 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2986 /* Second operand is sizetype, which could be in theory
2987 wider than pointer's precision. Make sure we never
2988 return more than prec. */
2989 ret2
= MIN (ret2
, prec
);
2990 return MIN (ret1
, ret2
);
2992 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2993 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2994 return MAX (ret1
, ret2
);
2996 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2997 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2998 return MIN (ret1
+ ret2
, prec
);
3000 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3001 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3002 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3004 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3005 return MIN (ret1
+ ret2
, prec
);
3009 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3010 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3012 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3013 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3018 case TRUNC_DIV_EXPR
:
3020 case FLOOR_DIV_EXPR
:
3021 case ROUND_DIV_EXPR
:
3022 case EXACT_DIV_EXPR
:
3023 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
3024 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
3026 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
3029 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3037 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3038 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
3040 return MIN (ret1
, prec
);
3042 return tree_ctz (TREE_OPERAND (expr
, 0));
3044 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
3047 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
3048 return MIN (ret1
, ret2
);
3050 return tree_ctz (TREE_OPERAND (expr
, 1));
3052 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
3053 if (ret1
> BITS_PER_UNIT
)
3055 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
3056 return MIN (ret1
, prec
);
3064 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3065 decimal float constants, so don't return 1 for them.
3066 Also return 1 for location wrappers around such a constant. */
3069 real_zerop (const_tree expr
)
3071 STRIP_ANY_LOCATION_WRAPPER (expr
);
3073 switch (TREE_CODE (expr
))
3076 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
3077 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3079 return real_zerop (TREE_REALPART (expr
))
3080 && real_zerop (TREE_IMAGPART (expr
));
3083 /* Don't simply check for a duplicate because the predicate
3084 accepts both +0.0 and -0.0. */
3085 unsigned count
= vector_cst_encoded_nelts (expr
);
3086 for (unsigned int i
= 0; i
< count
; ++i
)
3087 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3096 /* Return 1 if EXPR is the real constant one in real or complex form.
3097 Trailing zeroes matter for decimal float constants, so don't return
3099 Also return 1 for location wrappers around such a constant. */
3102 real_onep (const_tree expr
)
3104 STRIP_ANY_LOCATION_WRAPPER (expr
);
3106 switch (TREE_CODE (expr
))
3109 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
3110 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3112 return real_onep (TREE_REALPART (expr
))
3113 && real_zerop (TREE_IMAGPART (expr
));
3115 return (VECTOR_CST_NPATTERNS (expr
) == 1
3116 && VECTOR_CST_DUPLICATE_P (expr
)
3117 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3123 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3124 matter for decimal float constants, so don't return 1 for them.
3125 Also return 1 for location wrappers around such a constant. */
3128 real_minus_onep (const_tree expr
)
3130 STRIP_ANY_LOCATION_WRAPPER (expr
);
3132 switch (TREE_CODE (expr
))
3135 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
3136 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3138 return real_minus_onep (TREE_REALPART (expr
))
3139 && real_zerop (TREE_IMAGPART (expr
));
3141 return (VECTOR_CST_NPATTERNS (expr
) == 1
3142 && VECTOR_CST_DUPLICATE_P (expr
)
3143 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3149 /* Nonzero if EXP is a constant or a cast of a constant. */
3152 really_constant_p (const_tree exp
)
3154 /* This is not quite the same as STRIP_NOPS. It does more. */
3155 while (CONVERT_EXPR_P (exp
)
3156 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3157 exp
= TREE_OPERAND (exp
, 0);
3158 return TREE_CONSTANT (exp
);
3161 /* Return true if T holds a polynomial pointer difference, storing it in
3162 *VALUE if so. A true return means that T's precision is no greater
3163 than 64 bits, which is the largest address space we support, so *VALUE
3164 never loses precision. However, the signedness of the result does
3165 not necessarily match the signedness of T: sometimes an unsigned type
3166 like sizetype is used to encode a value that is actually negative. */
3169 ptrdiff_tree_p (const_tree t
, poly_int64_pod
*value
)
3173 if (TREE_CODE (t
) == INTEGER_CST
)
3175 if (!cst_and_fits_in_hwi (t
))
3177 *value
= int_cst_value (t
);
3180 if (POLY_INT_CST_P (t
))
3182 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3183 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t
, i
)))
3185 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3186 value
->coeffs
[i
] = int_cst_value (POLY_INT_CST_COEFF (t
, i
));
3193 tree_to_poly_int64 (const_tree t
)
3195 gcc_assert (tree_fits_poly_int64_p (t
));
3196 if (POLY_INT_CST_P (t
))
3197 return poly_int_cst_value (t
).force_shwi ();
3198 return TREE_INT_CST_LOW (t
);
3202 tree_to_poly_uint64 (const_tree t
)
3204 gcc_assert (tree_fits_poly_uint64_p (t
));
3205 if (POLY_INT_CST_P (t
))
3206 return poly_int_cst_value (t
).force_uhwi ();
3207 return TREE_INT_CST_LOW (t
);
3210 /* Return first list element whose TREE_VALUE is ELEM.
3211 Return 0 if ELEM is not in LIST. */
3214 value_member (tree elem
, tree list
)
3218 if (elem
== TREE_VALUE (list
))
3220 list
= TREE_CHAIN (list
);
3225 /* Return first list element whose TREE_PURPOSE is ELEM.
3226 Return 0 if ELEM is not in LIST. */
3229 purpose_member (const_tree elem
, tree list
)
3233 if (elem
== TREE_PURPOSE (list
))
3235 list
= TREE_CHAIN (list
);
3240 /* Return true if ELEM is in V. */
3243 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
3247 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
3253 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3257 chain_index (int idx
, tree chain
)
3259 for (; chain
&& idx
> 0; --idx
)
3260 chain
= TREE_CHAIN (chain
);
3264 /* Return nonzero if ELEM is part of the chain CHAIN. */
3267 chain_member (const_tree elem
, const_tree chain
)
3273 chain
= DECL_CHAIN (chain
);
3279 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3280 We expect a null pointer to mark the end of the chain.
3281 This is the Lisp primitive `length'. */
3284 list_length (const_tree t
)
3287 #ifdef ENABLE_TREE_CHECKING
3295 #ifdef ENABLE_TREE_CHECKING
3298 gcc_assert (p
!= q
);
3306 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3307 UNION_TYPE TYPE, or NULL_TREE if none. */
3310 first_field (const_tree type
)
3312 tree t
= TYPE_FIELDS (type
);
3313 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
3318 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3319 UNION_TYPE TYPE, or NULL_TREE if none. */
3322 last_field (const_tree type
)
3324 tree last
= NULL_TREE
;
3326 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= TREE_CHAIN (fld
))
3328 if (TREE_CODE (fld
) != FIELD_DECL
)
3337 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3338 by modifying the last node in chain 1 to point to chain 2.
3339 This is the Lisp primitive `nconc'. */
3342 chainon (tree op1
, tree op2
)
3351 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
3353 TREE_CHAIN (t1
) = op2
;
3355 #ifdef ENABLE_TREE_CHECKING
3358 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
3359 gcc_assert (t2
!= t1
);
3366 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3369 tree_last (tree chain
)
3373 while ((next
= TREE_CHAIN (chain
)))
3378 /* Reverse the order of elements in the chain T,
3379 and return the new head of the chain (old last element). */
3384 tree prev
= 0, decl
, next
;
3385 for (decl
= t
; decl
; decl
= next
)
3387 /* We shouldn't be using this function to reverse BLOCK chains; we
3388 have blocks_nreverse for that. */
3389 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
3390 next
= TREE_CHAIN (decl
);
3391 TREE_CHAIN (decl
) = prev
;
3397 /* Return a newly created TREE_LIST node whose
3398 purpose and value fields are PARM and VALUE. */
3401 build_tree_list (tree parm
, tree value MEM_STAT_DECL
)
3403 tree t
= make_node (TREE_LIST PASS_MEM_STAT
);
3404 TREE_PURPOSE (t
) = parm
;
3405 TREE_VALUE (t
) = value
;
3409 /* Build a chain of TREE_LIST nodes from a vector. */
3412 build_tree_list_vec (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
3414 tree ret
= NULL_TREE
;
3418 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
3420 *pp
= build_tree_list (NULL
, t PASS_MEM_STAT
);
3421 pp
= &TREE_CHAIN (*pp
);
3426 /* Return a newly created TREE_LIST node whose
3427 purpose and value fields are PURPOSE and VALUE
3428 and whose TREE_CHAIN is CHAIN. */
3431 tree_cons (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
3435 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
3436 memset (node
, 0, sizeof (struct tree_common
));
3438 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
3440 TREE_SET_CODE (node
, TREE_LIST
);
3441 TREE_CHAIN (node
) = chain
;
3442 TREE_PURPOSE (node
) = purpose
;
3443 TREE_VALUE (node
) = value
;
3447 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3451 ctor_to_vec (tree ctor
)
3453 vec
<tree
, va_gc
> *vec
;
3454 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
3458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
3459 vec
->quick_push (val
);
3464 /* Return the size nominally occupied by an object of type TYPE
3465 when it resides in memory. The value is measured in units of bytes,
3466 and its data type is that normally used for type sizes
3467 (which is the first type created by make_signed_type or
3468 make_unsigned_type). */
3471 size_in_bytes_loc (location_t loc
, const_tree type
)
3475 if (type
== error_mark_node
)
3476 return integer_zero_node
;
3478 type
= TYPE_MAIN_VARIANT (type
);
3479 t
= TYPE_SIZE_UNIT (type
);
3483 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
3484 return size_zero_node
;
3490 /* Return the size of TYPE (in bytes) as a wide integer
3491 or return -1 if the size can vary or is larger than an integer. */
3494 int_size_in_bytes (const_tree type
)
3498 if (type
== error_mark_node
)
3501 type
= TYPE_MAIN_VARIANT (type
);
3502 t
= TYPE_SIZE_UNIT (type
);
3504 if (t
&& tree_fits_uhwi_p (t
))
3505 return TREE_INT_CST_LOW (t
);
3510 /* Return the maximum size of TYPE (in bytes) as a wide integer
3511 or return -1 if the size can vary or is larger than an integer. */
3514 max_int_size_in_bytes (const_tree type
)
3516 HOST_WIDE_INT size
= -1;
3519 /* If this is an array type, check for a possible MAX_SIZE attached. */
3521 if (TREE_CODE (type
) == ARRAY_TYPE
)
3523 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3525 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3526 size
= tree_to_uhwi (size_tree
);
3529 /* If we still haven't been able to get a size, see if the language
3530 can compute a maximum size. */
3534 size_tree
= lang_hooks
.types
.max_size (type
);
3536 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3537 size
= tree_to_uhwi (size_tree
);
3543 /* Return the bit position of FIELD, in bits from the start of the record.
3544 This is a tree of type bitsizetype. */
3547 bit_position (const_tree field
)
3549 return bit_from_pos (DECL_FIELD_OFFSET (field
),
3550 DECL_FIELD_BIT_OFFSET (field
));
3553 /* Return the byte position of FIELD, in bytes from the start of the record.
3554 This is a tree of type sizetype. */
3557 byte_position (const_tree field
)
3559 return byte_from_pos (DECL_FIELD_OFFSET (field
),
3560 DECL_FIELD_BIT_OFFSET (field
));
3563 /* Likewise, but return as an integer. It must be representable in
3564 that way (since it could be a signed value, we don't have the
3565 option of returning -1 like int_size_in_byte can. */
3568 int_byte_position (const_tree field
)
3570 return tree_to_shwi (byte_position (field
));
3573 /* Return, as a tree node, the number of elements for TYPE (which is an
3574 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3577 array_type_nelts (const_tree type
)
3579 tree index_type
, min
, max
;
3581 /* If they did it with unspecified bounds, then we should have already
3582 given an error about it before we got here. */
3583 if (! TYPE_DOMAIN (type
))
3584 return error_mark_node
;
3586 index_type
= TYPE_DOMAIN (type
);
3587 min
= TYPE_MIN_VALUE (index_type
);
3588 max
= TYPE_MAX_VALUE (index_type
);
3590 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3593 /* zero sized arrays are represented from C FE as complete types with
3594 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3595 them as min 0, max -1. */
3596 if (COMPLETE_TYPE_P (type
)
3597 && integer_zerop (TYPE_SIZE (type
))
3598 && integer_zerop (min
))
3599 return build_int_cst (TREE_TYPE (min
), -1);
3601 return error_mark_node
;
3604 return (integer_zerop (min
)
3606 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3609 /* If arg is static -- a reference to an object in static storage -- then
3610 return the object. This is not the same as the C meaning of `static'.
3611 If arg isn't static, return NULL. */
3616 switch (TREE_CODE (arg
))
3619 /* Nested functions are static, even though taking their address will
3620 involve a trampoline as we unnest the nested function and create
3621 the trampoline on the tree level. */
3625 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3626 && ! DECL_THREAD_LOCAL_P (arg
)
3627 && ! DECL_DLLIMPORT_P (arg
)
3631 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3635 return TREE_STATIC (arg
) ? arg
: NULL
;
3642 /* If the thing being referenced is not a field, then it is
3643 something language specific. */
3644 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3646 /* If we are referencing a bitfield, we can't evaluate an
3647 ADDR_EXPR at compile time and so it isn't a constant. */
3648 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3651 return staticp (TREE_OPERAND (arg
, 0));
3657 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3660 case ARRAY_RANGE_REF
:
3661 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3662 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3663 return staticp (TREE_OPERAND (arg
, 0));
3667 case COMPOUND_LITERAL_EXPR
:
3668 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3678 /* Return whether OP is a DECL whose address is function-invariant. */
3681 decl_address_invariant_p (const_tree op
)
3683 /* The conditions below are slightly less strict than the one in
3686 switch (TREE_CODE (op
))
3695 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3696 || DECL_THREAD_LOCAL_P (op
)
3697 || DECL_CONTEXT (op
) == current_function_decl
3698 || decl_function_context (op
) == current_function_decl
)
3703 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3704 || decl_function_context (op
) == current_function_decl
)
3715 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3718 decl_address_ip_invariant_p (const_tree op
)
3720 /* The conditions below are slightly less strict than the one in
3723 switch (TREE_CODE (op
))
3731 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3732 && !DECL_DLLIMPORT_P (op
))
3733 || DECL_THREAD_LOCAL_P (op
))
3738 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3750 /* Return true if T is function-invariant (internal function, does
3751 not handle arithmetic; that's handled in skip_simple_arithmetic and
3752 tree_invariant_p). */
3755 tree_invariant_p_1 (tree t
)
3759 if (TREE_CONSTANT (t
)
3760 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3763 switch (TREE_CODE (t
))
3769 op
= TREE_OPERAND (t
, 0);
3770 while (handled_component_p (op
))
3772 switch (TREE_CODE (op
))
3775 case ARRAY_RANGE_REF
:
3776 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3777 || TREE_OPERAND (op
, 2) != NULL_TREE
3778 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3783 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3789 op
= TREE_OPERAND (op
, 0);
3792 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3801 /* Return true if T is function-invariant. */
3804 tree_invariant_p (tree t
)
3806 tree inner
= skip_simple_arithmetic (t
);
3807 return tree_invariant_p_1 (inner
);
3810 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3811 Do this to any expression which may be used in more than one place,
3812 but must be evaluated only once.
3814 Normally, expand_expr would reevaluate the expression each time.
3815 Calling save_expr produces something that is evaluated and recorded
3816 the first time expand_expr is called on it. Subsequent calls to
3817 expand_expr just reuse the recorded value.
3819 The call to expand_expr that generates code that actually computes
3820 the value is the first call *at compile time*. Subsequent calls
3821 *at compile time* generate code to use the saved value.
3822 This produces correct result provided that *at run time* control
3823 always flows through the insns made by the first expand_expr
3824 before reaching the other places where the save_expr was evaluated.
3825 You, the caller of save_expr, must make sure this is so.
3827 Constants, and certain read-only nodes, are returned with no
3828 SAVE_EXPR because that is safe. Expressions containing placeholders
3829 are not touched; see tree.def for an explanation of what these
3833 save_expr (tree expr
)
3837 /* If the tree evaluates to a constant, then we don't want to hide that
3838 fact (i.e. this allows further folding, and direct checks for constants).
3839 However, a read-only object that has side effects cannot be bypassed.
3840 Since it is no problem to reevaluate literals, we just return the
3842 inner
= skip_simple_arithmetic (expr
);
3843 if (TREE_CODE (inner
) == ERROR_MARK
)
3846 if (tree_invariant_p_1 (inner
))
3849 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3850 it means that the size or offset of some field of an object depends on
3851 the value within another field.
3853 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3854 and some variable since it would then need to be both evaluated once and
3855 evaluated more than once. Front-ends must assure this case cannot
3856 happen by surrounding any such subexpressions in their own SAVE_EXPR
3857 and forcing evaluation at the proper time. */
3858 if (contains_placeholder_p (inner
))
3861 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3863 /* This expression might be placed ahead of a jump to ensure that the
3864 value was computed on both sides of the jump. So make sure it isn't
3865 eliminated as dead. */
3866 TREE_SIDE_EFFECTS (expr
) = 1;
3870 /* Look inside EXPR into any simple arithmetic operations. Return the
3871 outermost non-arithmetic or non-invariant node. */
3874 skip_simple_arithmetic (tree expr
)
3876 /* We don't care about whether this can be used as an lvalue in this
3878 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3879 expr
= TREE_OPERAND (expr
, 0);
3881 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3882 a constant, it will be more efficient to not make another SAVE_EXPR since
3883 it will allow better simplification and GCSE will be able to merge the
3884 computations if they actually occur. */
3887 if (UNARY_CLASS_P (expr
))
3888 expr
= TREE_OPERAND (expr
, 0);
3889 else if (BINARY_CLASS_P (expr
))
3891 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3892 expr
= TREE_OPERAND (expr
, 0);
3893 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3894 expr
= TREE_OPERAND (expr
, 1);
3905 /* Look inside EXPR into simple arithmetic operations involving constants.
3906 Return the outermost non-arithmetic or non-constant node. */
3909 skip_simple_constant_arithmetic (tree expr
)
3911 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3912 expr
= TREE_OPERAND (expr
, 0);
3916 if (UNARY_CLASS_P (expr
))
3917 expr
= TREE_OPERAND (expr
, 0);
3918 else if (BINARY_CLASS_P (expr
))
3920 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3921 expr
= TREE_OPERAND (expr
, 0);
3922 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3923 expr
= TREE_OPERAND (expr
, 1);
3934 /* Return which tree structure is used by T. */
3936 enum tree_node_structure_enum
3937 tree_node_structure (const_tree t
)
3939 const enum tree_code code
= TREE_CODE (t
);
3940 return tree_node_structure_for_code (code
);
3943 /* Set various status flags when building a CALL_EXPR object T. */
3946 process_call_operands (tree t
)
3948 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3949 bool read_only
= false;
3950 int i
= call_expr_flags (t
);
3952 /* Calls have side-effects, except those to const or pure functions. */
3953 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3954 side_effects
= true;
3955 /* Propagate TREE_READONLY of arguments for const functions. */
3959 if (!side_effects
|| read_only
)
3960 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3962 tree op
= TREE_OPERAND (t
, i
);
3963 if (op
&& TREE_SIDE_EFFECTS (op
))
3964 side_effects
= true;
3965 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3969 TREE_SIDE_EFFECTS (t
) = side_effects
;
3970 TREE_READONLY (t
) = read_only
;
3973 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3974 size or offset that depends on a field within a record. */
3977 contains_placeholder_p (const_tree exp
)
3979 enum tree_code code
;
3984 code
= TREE_CODE (exp
);
3985 if (code
== PLACEHOLDER_EXPR
)
3988 switch (TREE_CODE_CLASS (code
))
3991 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3992 position computations since they will be converted into a
3993 WITH_RECORD_EXPR involving the reference, which will assume
3994 here will be valid. */
3995 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3997 case tcc_exceptional
:
3998 if (code
== TREE_LIST
)
3999 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
4000 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
4005 case tcc_comparison
:
4006 case tcc_expression
:
4010 /* Ignoring the first operand isn't quite right, but works best. */
4011 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
4014 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4015 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
4016 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
4019 /* The save_expr function never wraps anything containing
4020 a PLACEHOLDER_EXPR. */
4027 switch (TREE_CODE_LENGTH (code
))
4030 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4032 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4033 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
4044 const_call_expr_arg_iterator iter
;
4045 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
4046 if (CONTAINS_PLACEHOLDER_P (arg
))
4060 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4061 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4065 type_contains_placeholder_1 (const_tree type
)
4067 /* If the size contains a placeholder or the parent type (component type in
4068 the case of arrays) type involves a placeholder, this type does. */
4069 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
4070 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
4071 || (!POINTER_TYPE_P (type
)
4073 && type_contains_placeholder_p (TREE_TYPE (type
))))
4076 /* Now do type-specific checks. Note that the last part of the check above
4077 greatly limits what we have to do below. */
4078 switch (TREE_CODE (type
))
4087 case REFERENCE_TYPE
:
4096 case FIXED_POINT_TYPE
:
4097 /* Here we just check the bounds. */
4098 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
4099 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
4102 /* We have already checked the component type above, so just check
4103 the domain type. Flexible array members have a null domain. */
4104 return TYPE_DOMAIN (type
) ?
4105 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
4109 case QUAL_UNION_TYPE
:
4113 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4114 if (TREE_CODE (field
) == FIELD_DECL
4115 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
4116 || (TREE_CODE (type
) == QUAL_UNION_TYPE
4117 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
4118 || type_contains_placeholder_p (TREE_TYPE (field
))))
4129 /* Wrapper around above function used to cache its result. */
4132 type_contains_placeholder_p (tree type
)
4136 /* If the contains_placeholder_bits field has been initialized,
4137 then we know the answer. */
4138 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
4139 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
4141 /* Indicate that we've seen this type node, and the answer is false.
4142 This is what we want to return if we run into recursion via fields. */
4143 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
4145 /* Compute the real value. */
4146 result
= type_contains_placeholder_1 (type
);
4148 /* Store the real value. */
4149 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
4154 /* Push tree EXP onto vector QUEUE if it is not already present. */
4157 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
4162 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
4163 if (simple_cst_equal (iter
, exp
) == 1)
4167 queue
->safe_push (exp
);
4170 /* Given a tree EXP, find all occurrences of references to fields
4171 in a PLACEHOLDER_EXPR and place them in vector REFS without
4172 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4173 we assume here that EXP contains only arithmetic expressions
4174 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4178 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
4180 enum tree_code code
= TREE_CODE (exp
);
4184 /* We handle TREE_LIST and COMPONENT_REF separately. */
4185 if (code
== TREE_LIST
)
4187 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
4188 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
4190 else if (code
== COMPONENT_REF
)
4192 for (inner
= TREE_OPERAND (exp
, 0);
4193 REFERENCE_CLASS_P (inner
);
4194 inner
= TREE_OPERAND (inner
, 0))
4197 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4198 push_without_duplicates (exp
, refs
);
4200 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
4203 switch (TREE_CODE_CLASS (code
))
4208 case tcc_declaration
:
4209 /* Variables allocated to static storage can stay. */
4210 if (!TREE_STATIC (exp
))
4211 push_without_duplicates (exp
, refs
);
4214 case tcc_expression
:
4215 /* This is the pattern built in ada/make_aligning_type. */
4216 if (code
== ADDR_EXPR
4217 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4219 push_without_duplicates (exp
, refs
);
4225 case tcc_exceptional
:
4228 case tcc_comparison
:
4230 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4231 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4235 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4236 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4244 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4245 return a tree with all occurrences of references to F in a
4246 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4247 CONST_DECLs. Note that we assume here that EXP contains only
4248 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4249 occurring only in their argument list. */
4252 substitute_in_expr (tree exp
, tree f
, tree r
)
4254 enum tree_code code
= TREE_CODE (exp
);
4255 tree op0
, op1
, op2
, op3
;
4258 /* We handle TREE_LIST and COMPONENT_REF separately. */
4259 if (code
== TREE_LIST
)
4261 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4262 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4263 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4266 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4268 else if (code
== COMPONENT_REF
)
4272 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4273 and it is the right field, replace it with R. */
4274 for (inner
= TREE_OPERAND (exp
, 0);
4275 REFERENCE_CLASS_P (inner
);
4276 inner
= TREE_OPERAND (inner
, 0))
4280 op1
= TREE_OPERAND (exp
, 1);
4282 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4285 /* If this expression hasn't been completed let, leave it alone. */
4286 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4289 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4290 if (op0
== TREE_OPERAND (exp
, 0))
4294 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4297 switch (TREE_CODE_CLASS (code
))
4302 case tcc_declaration
:
4308 case tcc_expression
:
4314 case tcc_exceptional
:
4317 case tcc_comparison
:
4319 switch (TREE_CODE_LENGTH (code
))
4325 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4326 if (op0
== TREE_OPERAND (exp
, 0))
4329 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4333 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4334 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4336 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4339 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4343 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4344 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4345 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4347 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4348 && op2
== TREE_OPERAND (exp
, 2))
4351 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4355 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4356 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4357 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4358 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4360 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4361 && op2
== TREE_OPERAND (exp
, 2)
4362 && op3
== TREE_OPERAND (exp
, 3))
4366 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4378 new_tree
= NULL_TREE
;
4380 /* If we are trying to replace F with a constant or with another
4381 instance of one of the arguments of the call, inline back
4382 functions which do nothing else than computing a value from
4383 the arguments they are passed. This makes it possible to
4384 fold partially or entirely the replacement expression. */
4385 if (code
== CALL_EXPR
)
4387 bool maybe_inline
= false;
4388 if (CONSTANT_CLASS_P (r
))
4389 maybe_inline
= true;
4391 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4392 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4394 maybe_inline
= true;
4399 tree t
= maybe_inline_call_in_expr (exp
);
4401 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4405 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4407 tree op
= TREE_OPERAND (exp
, i
);
4408 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4412 new_tree
= copy_node (exp
);
4413 TREE_OPERAND (new_tree
, i
) = new_op
;
4419 new_tree
= fold (new_tree
);
4420 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4421 process_call_operands (new_tree
);
4432 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4434 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4435 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4440 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4441 for it within OBJ, a tree that is an object or a chain of references. */
4444 substitute_placeholder_in_expr (tree exp
, tree obj
)
4446 enum tree_code code
= TREE_CODE (exp
);
4447 tree op0
, op1
, op2
, op3
;
4450 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4451 in the chain of OBJ. */
4452 if (code
== PLACEHOLDER_EXPR
)
4454 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4457 for (elt
= obj
; elt
!= 0;
4458 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4459 || TREE_CODE (elt
) == COND_EXPR
)
4460 ? TREE_OPERAND (elt
, 1)
4461 : (REFERENCE_CLASS_P (elt
)
4462 || UNARY_CLASS_P (elt
)
4463 || BINARY_CLASS_P (elt
)
4464 || VL_EXP_CLASS_P (elt
)
4465 || EXPRESSION_CLASS_P (elt
))
4466 ? TREE_OPERAND (elt
, 0) : 0))
4467 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4470 for (elt
= obj
; elt
!= 0;
4471 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4472 || TREE_CODE (elt
) == COND_EXPR
)
4473 ? TREE_OPERAND (elt
, 1)
4474 : (REFERENCE_CLASS_P (elt
)
4475 || UNARY_CLASS_P (elt
)
4476 || BINARY_CLASS_P (elt
)
4477 || VL_EXP_CLASS_P (elt
)
4478 || EXPRESSION_CLASS_P (elt
))
4479 ? TREE_OPERAND (elt
, 0) : 0))
4480 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4481 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4483 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4485 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4486 survives until RTL generation, there will be an error. */
4490 /* TREE_LIST is special because we need to look at TREE_VALUE
4491 and TREE_CHAIN, not TREE_OPERANDS. */
4492 else if (code
== TREE_LIST
)
4494 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4495 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4496 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4499 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4502 switch (TREE_CODE_CLASS (code
))
4505 case tcc_declaration
:
4508 case tcc_exceptional
:
4511 case tcc_comparison
:
4512 case tcc_expression
:
4515 switch (TREE_CODE_LENGTH (code
))
4521 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4522 if (op0
== TREE_OPERAND (exp
, 0))
4525 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4529 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4530 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4532 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4535 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4539 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4540 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4541 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4543 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4544 && op2
== TREE_OPERAND (exp
, 2))
4547 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4551 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4552 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4553 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4554 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4556 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4557 && op2
== TREE_OPERAND (exp
, 2)
4558 && op3
== TREE_OPERAND (exp
, 3))
4562 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4574 new_tree
= NULL_TREE
;
4576 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4578 tree op
= TREE_OPERAND (exp
, i
);
4579 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4583 new_tree
= copy_node (exp
);
4584 TREE_OPERAND (new_tree
, i
) = new_op
;
4590 new_tree
= fold (new_tree
);
4591 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4592 process_call_operands (new_tree
);
4603 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4605 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4606 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4612 /* Subroutine of stabilize_reference; this is called for subtrees of
4613 references. Any expression with side-effects must be put in a SAVE_EXPR
4614 to ensure that it is only evaluated once.
4616 We don't put SAVE_EXPR nodes around everything, because assigning very
4617 simple expressions to temporaries causes us to miss good opportunities
4618 for optimizations. Among other things, the opportunity to fold in the
4619 addition of a constant into an addressing mode often gets lost, e.g.
4620 "y[i+1] += x;". In general, we take the approach that we should not make
4621 an assignment unless we are forced into it - i.e., that any non-side effect
4622 operator should be allowed, and that cse should take care of coalescing
4623 multiple utterances of the same expression should that prove fruitful. */
4626 stabilize_reference_1 (tree e
)
4629 enum tree_code code
= TREE_CODE (e
);
4631 /* We cannot ignore const expressions because it might be a reference
4632 to a const array but whose index contains side-effects. But we can
4633 ignore things that are actual constant or that already have been
4634 handled by this function. */
4636 if (tree_invariant_p (e
))
4639 switch (TREE_CODE_CLASS (code
))
4641 case tcc_exceptional
:
4642 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4643 have side-effects. */
4644 if (code
== STATEMENT_LIST
)
4645 return save_expr (e
);
4648 case tcc_declaration
:
4649 case tcc_comparison
:
4651 case tcc_expression
:
4654 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4655 so that it will only be evaluated once. */
4656 /* The reference (r) and comparison (<) classes could be handled as
4657 below, but it is generally faster to only evaluate them once. */
4658 if (TREE_SIDE_EFFECTS (e
))
4659 return save_expr (e
);
4663 /* Constants need no processing. In fact, we should never reach
4668 /* Division is slow and tends to be compiled with jumps,
4669 especially the division by powers of 2 that is often
4670 found inside of an array reference. So do it just once. */
4671 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4672 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4673 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4674 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4675 return save_expr (e
);
4676 /* Recursively stabilize each operand. */
4677 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4678 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4682 /* Recursively stabilize each operand. */
4683 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4690 TREE_TYPE (result
) = TREE_TYPE (e
);
4691 TREE_READONLY (result
) = TREE_READONLY (e
);
4692 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4693 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4698 /* Stabilize a reference so that we can use it any number of times
4699 without causing its operands to be evaluated more than once.
4700 Returns the stabilized reference. This works by means of save_expr,
4701 so see the caveats in the comments about save_expr.
4703 Also allows conversion expressions whose operands are references.
4704 Any other kind of expression is returned unchanged. */
4707 stabilize_reference (tree ref
)
4710 enum tree_code code
= TREE_CODE (ref
);
4717 /* No action is needed in this case. */
4722 case FIX_TRUNC_EXPR
:
4723 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4727 result
= build_nt (INDIRECT_REF
,
4728 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4732 result
= build_nt (COMPONENT_REF
,
4733 stabilize_reference (TREE_OPERAND (ref
, 0)),
4734 TREE_OPERAND (ref
, 1), NULL_TREE
);
4738 result
= build_nt (BIT_FIELD_REF
,
4739 stabilize_reference (TREE_OPERAND (ref
, 0)),
4740 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4741 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
4745 result
= build_nt (ARRAY_REF
,
4746 stabilize_reference (TREE_OPERAND (ref
, 0)),
4747 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4748 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4751 case ARRAY_RANGE_REF
:
4752 result
= build_nt (ARRAY_RANGE_REF
,
4753 stabilize_reference (TREE_OPERAND (ref
, 0)),
4754 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4755 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4759 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4760 it wouldn't be ignored. This matters when dealing with
4762 return stabilize_reference_1 (ref
);
4764 /* If arg isn't a kind of lvalue we recognize, make no change.
4765 Caller should recognize the error for an invalid lvalue. */
4770 return error_mark_node
;
4773 TREE_TYPE (result
) = TREE_TYPE (ref
);
4774 TREE_READONLY (result
) = TREE_READONLY (ref
);
4775 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4776 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4781 /* Low-level constructors for expressions. */
4783 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4784 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4787 recompute_tree_invariant_for_addr_expr (tree t
)
4790 bool tc
= true, se
= false;
4792 gcc_assert (TREE_CODE (t
) == ADDR_EXPR
);
4794 /* We started out assuming this address is both invariant and constant, but
4795 does not have side effects. Now go down any handled components and see if
4796 any of them involve offsets that are either non-constant or non-invariant.
4797 Also check for side-effects.
4799 ??? Note that this code makes no attempt to deal with the case where
4800 taking the address of something causes a copy due to misalignment. */
4802 #define UPDATE_FLAGS(NODE) \
4803 do { tree _node = (NODE); \
4804 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4805 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4807 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4808 node
= TREE_OPERAND (node
, 0))
4810 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4811 array reference (probably made temporarily by the G++ front end),
4812 so ignore all the operands. */
4813 if ((TREE_CODE (node
) == ARRAY_REF
4814 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4815 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4817 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4818 if (TREE_OPERAND (node
, 2))
4819 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4820 if (TREE_OPERAND (node
, 3))
4821 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4823 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4824 FIELD_DECL, apparently. The G++ front end can put something else
4825 there, at least temporarily. */
4826 else if (TREE_CODE (node
) == COMPONENT_REF
4827 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4829 if (TREE_OPERAND (node
, 2))
4830 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4834 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4836 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4837 the address, since &(*a)->b is a form of addition. If it's a constant, the
4838 address is constant too. If it's a decl, its address is constant if the
4839 decl is static. Everything else is not constant and, furthermore,
4840 taking the address of a volatile variable is not volatile. */
4841 if (TREE_CODE (node
) == INDIRECT_REF
4842 || TREE_CODE (node
) == MEM_REF
)
4843 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4844 else if (CONSTANT_CLASS_P (node
))
4846 else if (DECL_P (node
))
4847 tc
&= (staticp (node
) != NULL_TREE
);
4851 se
|= TREE_SIDE_EFFECTS (node
);
4855 TREE_CONSTANT (t
) = tc
;
4856 TREE_SIDE_EFFECTS (t
) = se
;
4860 /* Build an expression of code CODE, data type TYPE, and operands as
4861 specified. Expressions and reference nodes can be created this way.
4862 Constants, decls, types and misc nodes cannot be.
4864 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4865 enough for all extant tree codes. */
4868 build0 (enum tree_code code
, tree tt MEM_STAT_DECL
)
4872 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4874 t
= make_node (code PASS_MEM_STAT
);
4881 build1 (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4883 int length
= sizeof (struct tree_exp
);
4886 record_node_allocation_statistics (code
, length
);
4888 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4890 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4892 memset (t
, 0, sizeof (struct tree_common
));
4894 TREE_SET_CODE (t
, code
);
4896 TREE_TYPE (t
) = type
;
4897 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4898 TREE_OPERAND (t
, 0) = node
;
4899 if (node
&& !TYPE_P (node
))
4901 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4902 TREE_READONLY (t
) = TREE_READONLY (node
);
4905 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4907 if (code
!= DEBUG_BEGIN_STMT
)
4908 TREE_SIDE_EFFECTS (t
) = 1;
4913 /* All of these have side-effects, no matter what their
4915 TREE_SIDE_EFFECTS (t
) = 1;
4916 TREE_READONLY (t
) = 0;
4920 /* Whether a dereference is readonly has nothing to do with whether
4921 its operand is readonly. */
4922 TREE_READONLY (t
) = 0;
4927 recompute_tree_invariant_for_addr_expr (t
);
4931 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4932 && node
&& !TYPE_P (node
)
4933 && TREE_CONSTANT (node
))
4934 TREE_CONSTANT (t
) = 1;
4935 if (TREE_CODE_CLASS (code
) == tcc_reference
4936 && node
&& TREE_THIS_VOLATILE (node
))
4937 TREE_THIS_VOLATILE (t
) = 1;
/* Shared helper for build2..build5: store operand N into T and fold its
   flags into the SIDE_EFFECTS / READ_ONLY / CONSTANT accumulators, but
   only for genuine expression operands (not types).  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N && !TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4960 build2 (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4962 bool constant
, read_only
, side_effects
, div_by_zero
;
4965 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4967 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4968 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4969 /* When sizetype precision doesn't match that of pointers
4970 we need to be able to build explicit extensions or truncations
4971 of the offset argument. */
4972 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4973 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4974 && TREE_CODE (arg1
) == INTEGER_CST
);
4976 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4977 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4978 && ptrofftype_p (TREE_TYPE (arg1
)));
4980 t
= make_node (code PASS_MEM_STAT
);
4983 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4984 result based on those same flags for the arguments. But if the
4985 arguments aren't really even `tree' expressions, we shouldn't be trying
4988 /* Expressions without side effects may be constant if their
4989 arguments are as well. */
4990 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4991 || TREE_CODE_CLASS (code
) == tcc_binary
);
4993 side_effects
= TREE_SIDE_EFFECTS (t
);
4997 case TRUNC_DIV_EXPR
:
4999 case FLOOR_DIV_EXPR
:
5000 case ROUND_DIV_EXPR
:
5001 case EXACT_DIV_EXPR
:
5003 case FLOOR_MOD_EXPR
:
5004 case ROUND_MOD_EXPR
:
5005 case TRUNC_MOD_EXPR
:
5006 div_by_zero
= integer_zerop (arg1
);
5009 div_by_zero
= false;
5015 TREE_SIDE_EFFECTS (t
) = side_effects
;
5016 if (code
== MEM_REF
)
5018 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
5020 tree o
= TREE_OPERAND (arg0
, 0);
5021 TREE_READONLY (t
) = TREE_READONLY (o
);
5022 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
5027 TREE_READONLY (t
) = read_only
;
5028 /* Don't mark X / 0 as constant. */
5029 TREE_CONSTANT (t
) = constant
&& !div_by_zero
;
5030 TREE_THIS_VOLATILE (t
)
5031 = (TREE_CODE_CLASS (code
) == tcc_reference
5032 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5040 build3 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5041 tree arg2 MEM_STAT_DECL
)
5043 bool constant
, read_only
, side_effects
;
5046 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
5047 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5049 t
= make_node (code PASS_MEM_STAT
);
5054 /* As a special exception, if COND_EXPR has NULL branches, we
5055 assume that it is a gimple statement and always consider
5056 it to have side effects. */
5057 if (code
== COND_EXPR
5058 && tt
== void_type_node
5059 && arg1
== NULL_TREE
5060 && arg2
== NULL_TREE
)
5061 side_effects
= true;
5063 side_effects
= TREE_SIDE_EFFECTS (t
);
5069 if (code
== COND_EXPR
)
5070 TREE_READONLY (t
) = read_only
;
5072 TREE_SIDE_EFFECTS (t
) = side_effects
;
5073 TREE_THIS_VOLATILE (t
)
5074 = (TREE_CODE_CLASS (code
) == tcc_reference
5075 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5081 build4 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5082 tree arg2
, tree arg3 MEM_STAT_DECL
)
5084 bool constant
, read_only
, side_effects
;
5087 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
5089 t
= make_node (code PASS_MEM_STAT
);
5092 side_effects
= TREE_SIDE_EFFECTS (t
);
5099 TREE_SIDE_EFFECTS (t
) = side_effects
;
5100 TREE_THIS_VOLATILE (t
)
5101 = (TREE_CODE_CLASS (code
) == tcc_reference
5102 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5108 build5 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5109 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
5111 bool constant
, read_only
, side_effects
;
5114 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
5116 t
= make_node (code PASS_MEM_STAT
);
5119 side_effects
= TREE_SIDE_EFFECTS (t
);
5127 TREE_SIDE_EFFECTS (t
) = side_effects
;
5128 if (code
== TARGET_MEM_REF
)
5130 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
5132 tree o
= TREE_OPERAND (arg0
, 0);
5133 TREE_READONLY (t
) = TREE_READONLY (o
);
5134 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
5138 TREE_THIS_VOLATILE (t
)
5139 = (TREE_CODE_CLASS (code
) == tcc_reference
5140 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5145 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
5146 on the pointer PTR. */
5149 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
5151 poly_int64 offset
= 0;
5152 tree ptype
= TREE_TYPE (ptr
);
5154 /* For convenience allow addresses that collapse to a simple base
5156 if (TREE_CODE (ptr
) == ADDR_EXPR
5157 && (handled_component_p (TREE_OPERAND (ptr
, 0))
5158 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
5160 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
5162 if (TREE_CODE (ptr
) == MEM_REF
)
5164 offset
+= mem_ref_offset (ptr
).force_shwi ();
5165 ptr
= TREE_OPERAND (ptr
, 0);
5168 ptr
= build_fold_addr_expr (ptr
);
5169 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
5171 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
5172 ptr
, build_int_cst (ptype
, offset
));
5173 SET_EXPR_LOCATION (tem
, loc
);
5177 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5180 mem_ref_offset (const_tree t
)
5182 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t
, 1)),
5186 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5187 offsetted by OFFSET units. */
5190 build_invariant_address (tree type
, tree base
, poly_int64 offset
)
5192 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
5193 build_fold_addr_expr (base
),
5194 build_int_cst (ptr_type_node
, offset
));
5195 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
5196 recompute_tree_invariant_for_addr_expr (addr
);
5200 /* Similar except don't specify the TREE_TYPE
5201 and leave the TREE_SIDE_EFFECTS as 0.
5202 It is permissible for arguments to be null,
5203 or even garbage if their values do not matter. */
5206 build_nt (enum tree_code code
, ...)
5213 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5217 t
= make_node (code
);
5218 length
= TREE_CODE_LENGTH (code
);
5220 for (i
= 0; i
< length
; i
++)
5221 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
5227 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5231 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
5236 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
5237 CALL_EXPR_FN (ret
) = fn
;
5238 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
5239 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
5240 CALL_EXPR_ARG (ret
, ix
) = t
;
5244 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5246 We do NOT enter this node in any sort of symbol table.
5248 LOC is the location of the decl.
5250 layout_decl is used to set up the decl's storage layout.
5251 Other slots are initialized to 0 or null pointers. */
5254 build_decl (location_t loc
, enum tree_code code
, tree name
,
5255 tree type MEM_STAT_DECL
)
5259 t
= make_node (code PASS_MEM_STAT
);
5260 DECL_SOURCE_LOCATION (t
) = loc
;
5262 /* if (type == error_mark_node)
5263 type = integer_type_node; */
5264 /* That is not done, deliberately, so that having error_mark_node
5265 as the type can suppress useless errors in the use of this variable. */
5267 DECL_NAME (t
) = name
;
5268 TREE_TYPE (t
) = type
;
5270 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
5276 /* Builds and returns function declaration with NAME and TYPE. */
5279 build_fn_decl (const char *name
, tree type
)
5281 tree id
= get_identifier (name
);
5282 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
5284 DECL_EXTERNAL (decl
) = 1;
5285 TREE_PUBLIC (decl
) = 1;
5286 DECL_ARTIFICIAL (decl
) = 1;
5287 TREE_NOTHROW (decl
) = 1;
5292 vec
<tree
, va_gc
> *all_translation_units
;
5294 /* Builds a new translation-unit decl with name NAME, queues it in the
5295 global list of translation-unit decls and returns it. */
5298 build_translation_unit_decl (tree name
)
5300 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
5302 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
5303 vec_safe_push (all_translation_units
, tu
);
5308 /* BLOCK nodes are used to represent the structure of binding contours
5309 and declarations, once those contours have been exited and their contents
5310 compiled. This information is used for outputting debugging info. */
5313 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
5315 tree block
= make_node (BLOCK
);
5317 BLOCK_VARS (block
) = vars
;
5318 BLOCK_SUBBLOCKS (block
) = subblocks
;
5319 BLOCK_SUPERCONTEXT (block
) = supercontext
;
5320 BLOCK_CHAIN (block
) = chain
;
5325 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5327 LOC is the location to use in tree T. */
5330 protected_set_expr_location (tree t
, location_t loc
)
5332 if (CAN_HAVE_LOCATION_P (t
))
5333 SET_EXPR_LOCATION (t
, loc
);
5334 else if (t
&& TREE_CODE (t
) == STATEMENT_LIST
)
5336 t
= expr_single (t
);
5337 if (t
&& CAN_HAVE_LOCATION_P (t
))
5338 SET_EXPR_LOCATION (t
, loc
);
5342 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5343 UNKNOWN_LOCATION. */
5346 protected_set_expr_location_if_unset (tree t
, location_t loc
)
5348 t
= expr_single (t
);
5349 if (t
&& !EXPR_HAS_LOCATION (t
))
5350 protected_set_expr_location (t
, loc
);
5353 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5354 of the various TYPE_QUAL values. */
5357 set_type_quals (tree type
, int type_quals
)
5359 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
5360 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
5361 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
5362 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
5363 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
5366 /* Returns true iff CAND and BASE have equivalent language-specific
5370 check_lang_type (const_tree cand
, const_tree base
)
5372 if (lang_hooks
.types
.type_hash_eq
== NULL
)
5374 /* type_hash_eq currently only applies to these types. */
5375 if (TREE_CODE (cand
) != FUNCTION_TYPE
5376 && TREE_CODE (cand
) != METHOD_TYPE
)
5378 return lang_hooks
.types
.type_hash_eq (cand
, base
);
5381 /* This function checks to see if TYPE matches the size one of the built-in
5382 atomic types, and returns that core atomic type. */
5385 find_atomic_core_type (const_tree type
)
5387 tree base_atomic_type
;
5389 /* Only handle complete types. */
5390 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
5393 switch (tree_to_uhwi (TYPE_SIZE (type
)))
5396 base_atomic_type
= atomicQI_type_node
;
5400 base_atomic_type
= atomicHI_type_node
;
5404 base_atomic_type
= atomicSI_type_node
;
5408 base_atomic_type
= atomicDI_type_node
;
5412 base_atomic_type
= atomicTI_type_node
;
5416 base_atomic_type
= NULL_TREE
;
5419 return base_atomic_type
;
5422 /* Returns true iff unqualified CAND and BASE are equivalent. */
5425 check_base_type (const_tree cand
, const_tree base
)
5427 if (TYPE_NAME (cand
) != TYPE_NAME (base
)
5428 /* Apparently this is needed for Objective-C. */
5429 || TYPE_CONTEXT (cand
) != TYPE_CONTEXT (base
)
5430 || !attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5431 TYPE_ATTRIBUTES (base
)))
5433 /* Check alignment. */
5434 if (TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
5435 && TYPE_USER_ALIGN (cand
) == TYPE_USER_ALIGN (base
))
5437 /* Atomic types increase minimal alignment. We must to do so as well
5438 or we get duplicated canonical types. See PR88686. */
5439 if ((TYPE_QUALS (cand
) & TYPE_QUAL_ATOMIC
))
5441 /* See if this object can map to a basic atomic type. */
5442 tree atomic_type
= find_atomic_core_type (cand
);
5443 if (atomic_type
&& TYPE_ALIGN (atomic_type
) == TYPE_ALIGN (cand
))
5449 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5452 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
5454 return (TYPE_QUALS (cand
) == type_quals
5455 && check_base_type (cand
, base
)
5456 && check_lang_type (cand
, base
));
5459 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5462 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
5464 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
5465 && TYPE_NAME (cand
) == TYPE_NAME (base
)
5466 /* Apparently this is needed for Objective-C. */
5467 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
5468 /* Check alignment. */
5469 && TYPE_ALIGN (cand
) == align
5470 /* Check this is a user-aligned type as build_aligned_type
5472 && TYPE_USER_ALIGN (cand
)
5473 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5474 TYPE_ATTRIBUTES (base
))
5475 && check_lang_type (cand
, base
));
5478 /* Return a version of the TYPE, qualified as indicated by the
5479 TYPE_QUALS, if one exists. If no qualified version exists yet,
5480 return NULL_TREE. */
5483 get_qualified_type (tree type
, int type_quals
)
5485 if (TYPE_QUALS (type
) == type_quals
)
5488 tree mv
= TYPE_MAIN_VARIANT (type
);
5489 if (check_qualified_type (mv
, type
, type_quals
))
5492 /* Search the chain of variants to see if there is already one there just
5493 like the one we need to have. If so, use that existing one. We must
5494 preserve the TYPE_NAME, since there is code that depends on this. */
5495 for (tree
*tp
= &TYPE_NEXT_VARIANT (mv
); *tp
; tp
= &TYPE_NEXT_VARIANT (*tp
))
5496 if (check_qualified_type (*tp
, type
, type_quals
))
5498 /* Put the found variant at the head of the variant list so
5499 frequently searched variants get found faster. The C++ FE
5500 benefits greatly from this. */
5502 *tp
= TYPE_NEXT_VARIANT (t
);
5503 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (mv
);
5504 TYPE_NEXT_VARIANT (mv
) = t
;
5511 /* Like get_qualified_type, but creates the type if it does not
5512 exist. This function never returns NULL_TREE. */
5515 build_qualified_type (tree type
, int type_quals MEM_STAT_DECL
)
5519 /* See if we already have the appropriate qualified variant. */
5520 t
= get_qualified_type (type
, type_quals
);
5522 /* If not, build it. */
5525 t
= build_variant_type_copy (type PASS_MEM_STAT
);
5526 set_type_quals (t
, type_quals
);
5528 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
5530 /* See if this object can map to a basic atomic type. */
5531 tree atomic_type
= find_atomic_core_type (type
);
5534 /* Ensure the alignment of this type is compatible with
5535 the required alignment of the atomic type. */
5536 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
5537 SET_TYPE_ALIGN (t
, TYPE_ALIGN (atomic_type
));
5541 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5542 /* Propagate structural equality. */
5543 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5544 else if (TYPE_CANONICAL (type
) != type
)
5545 /* Build the underlying canonical type, since it is different
5548 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
5549 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
5552 /* T is its own canonical type. */
5553 TYPE_CANONICAL (t
) = t
;
5560 /* Create a variant of type T with alignment ALIGN. */
5563 build_aligned_type (tree type
, unsigned int align
)
5567 if (TYPE_PACKED (type
)
5568 || TYPE_ALIGN (type
) == align
)
5571 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
5572 if (check_aligned_type (t
, type
, align
))
5575 t
= build_variant_type_copy (type
);
5576 SET_TYPE_ALIGN (t
, align
);
5577 TYPE_USER_ALIGN (t
) = 1;
5582 /* Create a new distinct copy of TYPE. The new type is made its own
5583 MAIN_VARIANT. If TYPE requires structural equality checks, the
5584 resulting type requires structural equality checks; otherwise, its
5585 TYPE_CANONICAL points to itself. */
5588 build_distinct_type_copy (tree type MEM_STAT_DECL
)
5590 tree t
= copy_node (type PASS_MEM_STAT
);
5592 TYPE_POINTER_TO (t
) = 0;
5593 TYPE_REFERENCE_TO (t
) = 0;
5595 /* Set the canonical type either to a new equivalence class, or
5596 propagate the need for structural equality checks. */
5597 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5598 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5600 TYPE_CANONICAL (t
) = t
;
5602 /* Make it its own variant. */
5603 TYPE_MAIN_VARIANT (t
) = t
;
5604 TYPE_NEXT_VARIANT (t
) = 0;
5606 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5607 whose TREE_TYPE is not t. This can also happen in the Ada
5608 frontend when using subtypes. */
5613 /* Create a new variant of TYPE, equivalent but distinct. This is so
5614 the caller can modify it. TYPE_CANONICAL for the return type will
5615 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5616 are considered equal by the language itself (or that both types
5617 require structural equality checks). */
5620 build_variant_type_copy (tree type MEM_STAT_DECL
)
5622 tree t
, m
= TYPE_MAIN_VARIANT (type
);
5624 t
= build_distinct_type_copy (type PASS_MEM_STAT
);
5626 /* Since we're building a variant, assume that it is a non-semantic
5627 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5628 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
5629 /* Type variants have no alias set defined. */
5630 TYPE_ALIAS_SET (t
) = -1;
5632 /* Add the new type to the chain of variants of TYPE. */
5633 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
5634 TYPE_NEXT_VARIANT (m
) = t
;
5635 TYPE_MAIN_VARIANT (t
) = m
;
5640 /* Return true if the from tree in both tree maps are equal. */
5643 tree_map_base_eq (const void *va
, const void *vb
)
5645 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
5646 *const b
= (const struct tree_map_base
*) vb
;
5647 return (a
->from
== b
->from
);
5650 /* Hash a from tree in a tree_base_map. */
5653 tree_map_base_hash (const void *item
)
5655 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
5658 /* Return true if this tree map structure is marked for garbage collection
5659 purposes. We simply return true if the from tree is marked, so that this
5660 structure goes away when the from tree goes away. */
5663 tree_map_base_marked_p (const void *p
)
5665 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
5668 /* Hash a from tree in a tree_map. */
5671 tree_map_hash (const void *item
)
5673 return (((const struct tree_map
*) item
)->hash
);
5676 /* Hash a from tree in a tree_decl_map. */
5679 tree_decl_map_hash (const void *item
)
5681 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
5684 /* Return the initialization priority for DECL. */
5687 decl_init_priority_lookup (tree decl
)
5689 symtab_node
*snode
= symtab_node::get (decl
);
5692 return DEFAULT_INIT_PRIORITY
;
5694 snode
->get_init_priority ();
5697 /* Return the finalization priority for DECL. */
5700 decl_fini_priority_lookup (tree decl
)
5702 cgraph_node
*node
= cgraph_node::get (decl
);
5705 return DEFAULT_INIT_PRIORITY
;
5707 node
->get_fini_priority ();
5710 /* Set the initialization priority for DECL to PRIORITY. */
5713 decl_init_priority_insert (tree decl
, priority_type priority
)
5715 struct symtab_node
*snode
;
5717 if (priority
== DEFAULT_INIT_PRIORITY
)
5719 snode
= symtab_node::get (decl
);
5723 else if (VAR_P (decl
))
5724 snode
= varpool_node::get_create (decl
);
5726 snode
= cgraph_node::get_create (decl
);
5727 snode
->set_init_priority (priority
);
5730 /* Set the finalization priority for DECL to PRIORITY. */
5733 decl_fini_priority_insert (tree decl
, priority_type priority
)
5735 struct cgraph_node
*node
;
5737 if (priority
== DEFAULT_INIT_PRIORITY
)
5739 node
= cgraph_node::get (decl
);
5744 node
= cgraph_node::get_create (decl
);
5745 node
->set_fini_priority (priority
);
5748 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5751 print_debug_expr_statistics (void)
5753 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5754 (long) debug_expr_for_decl
->size (),
5755 (long) debug_expr_for_decl
->elements (),
5756 debug_expr_for_decl
->collisions ());
5759 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5762 print_value_expr_statistics (void)
5764 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5765 (long) value_expr_for_decl
->size (),
5766 (long) value_expr_for_decl
->elements (),
5767 value_expr_for_decl
->collisions ());
5770 /* Lookup a debug expression for FROM, and return it if we find one. */
5773 decl_debug_expr_lookup (tree from
)
5775 struct tree_decl_map
*h
, in
;
5776 in
.base
.from
= from
;
5778 h
= debug_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5784 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5787 decl_debug_expr_insert (tree from
, tree to
)
5789 struct tree_decl_map
*h
;
5791 h
= ggc_alloc
<tree_decl_map
> ();
5792 h
->base
.from
= from
;
5794 *debug_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5797 /* Lookup a value expression for FROM, and return it if we find one. */
5800 decl_value_expr_lookup (tree from
)
5802 struct tree_decl_map
*h
, in
;
5803 in
.base
.from
= from
;
5805 h
= value_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5811 /* Insert a mapping FROM->TO in the value expression hashtable. */
5814 decl_value_expr_insert (tree from
, tree to
)
5816 struct tree_decl_map
*h
;
5818 h
= ggc_alloc
<tree_decl_map
> ();
5819 h
->base
.from
= from
;
5821 *value_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5824 /* Lookup a vector of debug arguments for FROM, and return it if we
5828 decl_debug_args_lookup (tree from
)
5830 struct tree_vec_map
*h
, in
;
5832 if (!DECL_HAS_DEBUG_ARGS_P (from
))
5834 gcc_checking_assert (debug_args_for_decl
!= NULL
);
5835 in
.base
.from
= from
;
5836 h
= debug_args_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5842 /* Insert a mapping FROM->empty vector of debug arguments in the value
5843 expression hashtable. */
5846 decl_debug_args_insert (tree from
)
5848 struct tree_vec_map
*h
;
5851 if (DECL_HAS_DEBUG_ARGS_P (from
))
5852 return decl_debug_args_lookup (from
);
5853 if (debug_args_for_decl
== NULL
)
5854 debug_args_for_decl
= hash_table
<tree_vec_map_cache_hasher
>::create_ggc (64);
5855 h
= ggc_alloc
<tree_vec_map
> ();
5856 h
->base
.from
= from
;
5858 loc
= debug_args_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
);
5860 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
5864 /* Hashing of types so that we don't make duplicates.
5865 The entry point is `type_hash_canon'. */
5867 /* Generate the default hash code for TYPE. This is designed for
5868 speed, rather than maximum entropy. */
5871 type_hash_canon_hash (tree type
)
5873 inchash::hash hstate
;
5875 hstate
.add_int (TREE_CODE (type
));
5877 if (TREE_TYPE (type
))
5878 hstate
.add_object (TYPE_HASH (TREE_TYPE (type
)));
5880 for (tree t
= TYPE_ATTRIBUTES (type
); t
; t
= TREE_CHAIN (t
))
5881 /* Just the identifier is adequate to distinguish. */
5882 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t
)));
5884 switch (TREE_CODE (type
))
5887 hstate
.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type
)));
5890 for (tree t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
5891 if (TREE_VALUE (t
) != error_mark_node
)
5892 hstate
.add_object (TYPE_HASH (TREE_VALUE (t
)));
5896 hstate
.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type
)));
5901 if (TYPE_DOMAIN (type
))
5902 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (type
)));
5903 if (!AGGREGATE_TYPE_P (TREE_TYPE (type
)))
5905 unsigned typeless
= TYPE_TYPELESS_STORAGE (type
);
5906 hstate
.add_object (typeless
);
5913 tree t
= TYPE_MAX_VALUE (type
);
5915 t
= TYPE_MIN_VALUE (type
);
5916 for (int i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
5917 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
5922 case FIXED_POINT_TYPE
:
5924 unsigned prec
= TYPE_PRECISION (type
);
5925 hstate
.add_object (prec
);
5930 hstate
.add_poly_int (TYPE_VECTOR_SUBPARTS (type
));
5937 return hstate
.end ();
5940 /* These are the Hashtable callback functions. */
5942 /* Returns true iff the types are equivalent. */
5945 type_cache_hasher::equal (type_hash
*a
, type_hash
*b
)
5947 /* First test the things that are the same for all types. */
5948 if (a
->hash
!= b
->hash
5949 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
5950 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
5951 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
5952 TYPE_ATTRIBUTES (b
->type
))
5953 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
5954 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
5957 /* Be careful about comparing arrays before and after the element type
5958 has been completed; don't compare TYPE_ALIGN unless both types are
5960 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
5961 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
5962 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
5965 switch (TREE_CODE (a
->type
))
5971 case REFERENCE_TYPE
:
5976 return known_eq (TYPE_VECTOR_SUBPARTS (a
->type
),
5977 TYPE_VECTOR_SUBPARTS (b
->type
));
5980 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
5981 && !(TYPE_VALUES (a
->type
)
5982 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
5983 && TYPE_VALUES (b
->type
)
5984 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
5985 && type_list_equal (TYPE_VALUES (a
->type
),
5986 TYPE_VALUES (b
->type
))))
5994 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
5996 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
5997 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
5998 TYPE_MAX_VALUE (b
->type
)))
5999 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6000 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6001 TYPE_MIN_VALUE (b
->type
))));
6003 case FIXED_POINT_TYPE
:
6004 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6007 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6010 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6011 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6012 || (TYPE_ARG_TYPES (a
->type
)
6013 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6014 && TYPE_ARG_TYPES (b
->type
)
6015 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6016 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6017 TYPE_ARG_TYPES (b
->type
)))))
6021 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6022 where the flag should be inherited from the element type
6023 and can change after ARRAY_TYPEs are created; on non-aggregates
6024 compare it and hash it, scalars will never have that flag set
6025 and we need to differentiate between arrays created by different
6026 front-ends or middle-end created arrays. */
6027 return (TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
)
6028 && (AGGREGATE_TYPE_P (TREE_TYPE (a
->type
))
6029 || (TYPE_TYPELESS_STORAGE (a
->type
)
6030 == TYPE_TYPELESS_STORAGE (b
->type
))));
6034 case QUAL_UNION_TYPE
:
6035 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6036 || (TYPE_FIELDS (a
->type
)
6037 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6038 && TYPE_FIELDS (b
->type
)
6039 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6040 && type_list_equal (TYPE_FIELDS (a
->type
),
6041 TYPE_FIELDS (b
->type
))));
6044 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6045 || (TYPE_ARG_TYPES (a
->type
)
6046 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6047 && TYPE_ARG_TYPES (b
->type
)
6048 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6049 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6050 TYPE_ARG_TYPES (b
->type
))))
6058 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6059 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6064 /* Given TYPE, and HASHCODE its hash code, return the canonical
6065 object for an identical type if one already exists.
6066 Otherwise, return TYPE, and record it as the canonical object.
6068 To use this function, first create a type of the sort you want.
6069 Then compute its hash code from the fields of the type that
6070 make it different from other similar types.
6071 Then call this function and use the value. */
6074 type_hash_canon (unsigned int hashcode
, tree type
)
6079 /* The hash table only contains main variants, so ensure that's what we're
6081 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6083 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6084 must call that routine before comparing TYPE_ALIGNs. */
6090 loc
= type_hash_table
->find_slot_with_hash (&in
, hashcode
, INSERT
);
6093 tree t1
= ((type_hash
*) *loc
)->type
;
6094 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
6096 if (TYPE_UID (type
) + 1 == next_type_uid
)
6098 /* Free also min/max values and the cache for integer
6099 types. This can't be done in free_node, as LTO frees
6100 those on its own. */
6101 if (TREE_CODE (type
) == INTEGER_TYPE
)
6103 if (TYPE_MIN_VALUE (type
)
6104 && TREE_TYPE (TYPE_MIN_VALUE (type
)) == type
)
6106 /* Zero is always in TYPE_CACHED_VALUES. */
6107 if (! TYPE_UNSIGNED (type
))
6108 int_cst_hash_table
->remove_elt (TYPE_MIN_VALUE (type
));
6109 ggc_free (TYPE_MIN_VALUE (type
));
6111 if (TYPE_MAX_VALUE (type
)
6112 && TREE_TYPE (TYPE_MAX_VALUE (type
)) == type
)
6114 int_cst_hash_table
->remove_elt (TYPE_MAX_VALUE (type
));
6115 ggc_free (TYPE_MAX_VALUE (type
));
6117 if (TYPE_CACHED_VALUES_P (type
))
6118 ggc_free (TYPE_CACHED_VALUES (type
));
6125 struct type_hash
*h
;
6127 h
= ggc_alloc
<type_hash
> ();
6137 print_type_hash_statistics (void)
6139 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6140 (long) type_hash_table
->size (),
6141 (long) type_hash_table
->elements (),
6142 type_hash_table
->collisions ());
6145 /* Given two lists of types
6146 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6147 return 1 if the lists contain the same types in the same order.
6148 Also, the TREE_PURPOSEs must match. */
6151 type_list_equal (const_tree l1
, const_tree l2
)
6155 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6156 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6157 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6158 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6159 && (TREE_TYPE (TREE_PURPOSE (t1
))
6160 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6166 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6167 given by TYPE. If the argument list accepts variable arguments,
6168 then this function counts only the ordinary arguments. */
6171 type_num_arguments (const_tree fntype
)
6175 for (tree t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
6176 /* If the function does not take a variable number of arguments,
6177 the last element in the list will have type `void'. */
6178 if (VOID_TYPE_P (TREE_VALUE (t
)))
6186 /* Return the type of the function TYPE's argument ARGNO if known.
6187 For vararg function's where ARGNO refers to one of the variadic
6188 arguments return null. Otherwise, return a void_type_node for
6189 out-of-bounds ARGNO. */
6192 type_argument_type (const_tree fntype
, unsigned argno
)
6194 /* Treat zero the same as an out-of-bounds argument number. */
6196 return void_type_node
;
6198 function_args_iterator iter
;
6202 FOREACH_FUNCTION_ARGS (fntype
, argtype
, iter
)
6204 /* A vararg function's argument list ends in a null. Otherwise,
6205 an ordinary function's argument list ends with void. Return
6206 null if ARGNO refers to a vararg argument, void_type_node if
6207 it's out of bounds, and the formal argument type otherwise. */
6211 if (i
== argno
|| VOID_TYPE_P (argtype
))
6220 /* Nonzero if integer constants T1 and T2
6221 represent the same constant value. */
6224 tree_int_cst_equal (const_tree t1
, const_tree t2
)
6229 if (t1
== 0 || t2
== 0)
6232 STRIP_ANY_LOCATION_WRAPPER (t1
);
6233 STRIP_ANY_LOCATION_WRAPPER (t2
);
6235 if (TREE_CODE (t1
) == INTEGER_CST
6236 && TREE_CODE (t2
) == INTEGER_CST
6237 && wi::to_widest (t1
) == wi::to_widest (t2
))
6243 /* Return true if T is an INTEGER_CST whose numerical value (extended
6244 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6247 tree_fits_shwi_p (const_tree t
)
6249 return (t
!= NULL_TREE
6250 && TREE_CODE (t
) == INTEGER_CST
6251 && wi::fits_shwi_p (wi::to_widest (t
)));
6254 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6255 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6258 tree_fits_poly_int64_p (const_tree t
)
6262 if (POLY_INT_CST_P (t
))
6264 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6265 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t
, i
))))
6269 return (TREE_CODE (t
) == INTEGER_CST
6270 && wi::fits_shwi_p (wi::to_widest (t
)));
6273 /* Return true if T is an INTEGER_CST whose numerical value (extended
6274 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6277 tree_fits_uhwi_p (const_tree t
)
6279 return (t
!= NULL_TREE
6280 && TREE_CODE (t
) == INTEGER_CST
6281 && wi::fits_uhwi_p (wi::to_widest (t
)));
6284 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6285 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6288 tree_fits_poly_uint64_p (const_tree t
)
6292 if (POLY_INT_CST_P (t
))
6294 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6295 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t
, i
))))
6299 return (TREE_CODE (t
) == INTEGER_CST
6300 && wi::fits_uhwi_p (wi::to_widest (t
)));
6303 /* T is an INTEGER_CST whose numerical value (extended according to
6304 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6308 tree_to_shwi (const_tree t
)
6310 gcc_assert (tree_fits_shwi_p (t
));
6311 return TREE_INT_CST_LOW (t
);
6314 /* T is an INTEGER_CST whose numerical value (extended according to
6315 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6318 unsigned HOST_WIDE_INT
6319 tree_to_uhwi (const_tree t
)
6321 gcc_assert (tree_fits_uhwi_p (t
));
6322 return TREE_INT_CST_LOW (t
);
6325 /* Return the most significant (sign) bit of T. */
6328 tree_int_cst_sign_bit (const_tree t
)
6330 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
6332 return wi::extract_uhwi (wi::to_wide (t
), bitno
, 1);
6335 /* Return an indication of the sign of the integer constant T.
6336 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6337 Note that -1 will never be returned if T's type is unsigned. */
6340 tree_int_cst_sgn (const_tree t
)
6342 if (wi::to_wide (t
) == 0)
6344 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
6346 else if (wi::neg_p (wi::to_wide (t
)))
6352 /* Return the minimum number of bits needed to represent VALUE in a
6353 signed or unsigned type, UNSIGNEDP says which. */
6356 tree_int_cst_min_precision (tree value
, signop sgn
)
6358 /* If the value is negative, compute its negative minus 1. The latter
6359 adjustment is because the absolute value of the largest negative value
6360 is one larger than the largest positive value. This is equivalent to
6361 a bit-wise negation, so use that operation instead. */
6363 if (tree_int_cst_sgn (value
) < 0)
6364 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
6366 /* Return the number of bits needed, taking into account the fact
6367 that we need one more bit for a signed than unsigned type.
6368 If value is 0 or -1, the minimum precision is 1 no matter
6369 whether unsignedp is true or false. */
6371 if (integer_zerop (value
))
6374 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
6377 /* Return truthvalue of whether T1 is the same tree structure as T2.
6378 Return 1 if they are the same.
6379 Return 0 if they are understandably different.
6380 Return -1 if either contains tree structure not understood by
6384 simple_cst_equal (const_tree t1
, const_tree t2
)
6386 enum tree_code code1
, code2
;
6392 if (t1
== 0 || t2
== 0)
6395 /* For location wrappers to be the same, they must be at the same
6396 source location (and wrap the same thing). */
6397 if (location_wrapper_p (t1
) && location_wrapper_p (t2
))
6399 if (EXPR_LOCATION (t1
) != EXPR_LOCATION (t2
))
6401 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6404 code1
= TREE_CODE (t1
);
6405 code2
= TREE_CODE (t2
);
6407 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
6409 if (CONVERT_EXPR_CODE_P (code2
)
6410 || code2
== NON_LVALUE_EXPR
)
6411 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6413 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
6416 else if (CONVERT_EXPR_CODE_P (code2
)
6417 || code2
== NON_LVALUE_EXPR
)
6418 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
6426 return wi::to_widest (t1
) == wi::to_widest (t2
);
6429 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
6432 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
6435 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
6436 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
6437 TREE_STRING_LENGTH (t1
)));
6441 unsigned HOST_WIDE_INT idx
;
6442 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
6443 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
6445 if (vec_safe_length (v1
) != vec_safe_length (v2
))
6448 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
6449 /* ??? Should we handle also fields here? */
6450 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
6456 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6459 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
6462 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
6465 const_tree arg1
, arg2
;
6466 const_call_expr_arg_iterator iter1
, iter2
;
6467 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
6468 arg2
= first_const_call_expr_arg (t2
, &iter2
);
6470 arg1
= next_const_call_expr_arg (&iter1
),
6471 arg2
= next_const_call_expr_arg (&iter2
))
6473 cmp
= simple_cst_equal (arg1
, arg2
);
6477 return arg1
== arg2
;
6481 /* Special case: if either target is an unallocated VAR_DECL,
6482 it means that it's going to be unified with whatever the
6483 TARGET_EXPR is really supposed to initialize, so treat it
6484 as being equivalent to anything. */
6485 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
6486 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
6487 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
6488 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
6489 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
6490 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
6493 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6498 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
6500 case WITH_CLEANUP_EXPR
:
6501 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6505 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
6508 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
6509 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6520 if (POLY_INT_CST_P (t1
))
6521 /* A false return means maybe_ne rather than known_ne. */
6522 return known_eq (poly_widest_int::from (poly_int_cst_value (t1
),
6523 TYPE_SIGN (TREE_TYPE (t1
))),
6524 poly_widest_int::from (poly_int_cst_value (t2
),
6525 TYPE_SIGN (TREE_TYPE (t2
))));
6529 /* This general rule works for most tree codes. All exceptions should be
6530 handled above. If this is a language-specific tree code, we can't
6531 trust what might be in the operand, so say we don't know
6533 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
6536 switch (TREE_CODE_CLASS (code1
))
6540 case tcc_comparison
:
6541 case tcc_expression
:
6545 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
6547 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
6559 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6560 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6561 than U, respectively. */
6564 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
6566 if (tree_int_cst_sgn (t
) < 0)
6568 else if (!tree_fits_uhwi_p (t
))
6570 else if (TREE_INT_CST_LOW (t
) == u
)
6572 else if (TREE_INT_CST_LOW (t
) < u
)
6578 /* Return true if SIZE represents a constant size that is in bounds of
6579 what the middle-end and the backend accepts (covering not more than
6580 half of the address-space).
6581 When PERR is non-null, set *PERR on failure to the description of
6582 why SIZE is not valid. */
6585 valid_constant_size_p (const_tree size
, cst_size_error
*perr
/* = NULL */)
6587 if (POLY_INT_CST_P (size
))
6589 if (TREE_OVERFLOW (size
))
6591 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
6592 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size
, i
)))
6597 cst_size_error error
;
6601 if (TREE_CODE (size
) != INTEGER_CST
)
6603 *perr
= cst_size_not_constant
;
6607 if (TREE_OVERFLOW_P (size
))
6609 *perr
= cst_size_overflow
;
6613 if (tree_int_cst_sgn (size
) < 0)
6615 *perr
= cst_size_negative
;
6618 if (!tree_fits_uhwi_p (size
)
6619 || (wi::to_widest (TYPE_MAX_VALUE (sizetype
))
6620 < wi::to_widest (size
) * 2))
6622 *perr
= cst_size_too_big
;
6629 /* Return the precision of the type, or for a complex or vector type the
6630 precision of the type of its elements. */
6633 element_precision (const_tree type
)
6636 type
= TREE_TYPE (type
);
6637 enum tree_code code
= TREE_CODE (type
);
6638 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
6639 type
= TREE_TYPE (type
);
6641 return TYPE_PRECISION (type
);
6644 /* Return true if CODE represents an associative tree code. Otherwise
6647 associative_tree_code (enum tree_code code
)
6666 /* Return true if CODE represents a commutative tree code. Otherwise
6669 commutative_tree_code (enum tree_code code
)
6675 case MULT_HIGHPART_EXPR
:
6683 case UNORDERED_EXPR
:
6687 case TRUTH_AND_EXPR
:
6688 case TRUTH_XOR_EXPR
:
6690 case WIDEN_MULT_EXPR
:
6691 case VEC_WIDEN_MULT_HI_EXPR
:
6692 case VEC_WIDEN_MULT_LO_EXPR
:
6693 case VEC_WIDEN_MULT_EVEN_EXPR
:
6694 case VEC_WIDEN_MULT_ODD_EXPR
:
6703 /* Return true if CODE represents a ternary tree code for which the
6704 first two operands are commutative. Otherwise return false. */
6706 commutative_ternary_tree_code (enum tree_code code
)
6710 case WIDEN_MULT_PLUS_EXPR
:
6711 case WIDEN_MULT_MINUS_EXPR
:
6721 /* Returns true if CODE can overflow. */
6724 operation_can_overflow (enum tree_code code
)
6732 /* Can overflow in various ways. */
6734 case TRUNC_DIV_EXPR
:
6735 case EXACT_DIV_EXPR
:
6736 case FLOOR_DIV_EXPR
:
6738 /* For INT_MIN / -1. */
6745 /* These operators cannot overflow. */
6750 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6751 ftrapv doesn't generate trapping insns for CODE. */
6754 operation_no_trapping_overflow (tree type
, enum tree_code code
)
6756 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
6758 /* We don't generate instructions that trap on overflow for complex or vector
6760 if (!INTEGRAL_TYPE_P (type
))
6763 if (!TYPE_OVERFLOW_TRAPS (type
))
6773 /* These operators can overflow, and -ftrapv generates trapping code for
6776 case TRUNC_DIV_EXPR
:
6777 case EXACT_DIV_EXPR
:
6778 case FLOOR_DIV_EXPR
:
6781 /* These operators can overflow, but -ftrapv does not generate trapping
6785 /* These operators cannot overflow. */
/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */
6794 /* Construct, lay out and return the type of pointers to TO_TYPE with
6795 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
6796 reference all of memory. If such a type has already been
6797 constructed, reuse it. */
6800 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
6804 bool could_alias
= can_alias_all
;
6806 if (to_type
== error_mark_node
)
6807 return error_mark_node
;
6809 /* If the pointed-to type has the may_alias attribute set, force
6810 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6811 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6812 can_alias_all
= true;
6814 /* In some cases, languages will have things that aren't a POINTER_TYPE
6815 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6816 In that case, return that type without regard to the rest of our
6819 ??? This is a kludge, but consistent with the way this function has
6820 always operated and there doesn't seem to be a good way to avoid this
6822 if (TYPE_POINTER_TO (to_type
) != 0
6823 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
6824 return TYPE_POINTER_TO (to_type
);
6826 /* First, if we already have a type for pointers to TO_TYPE and it's
6827 the proper mode, use it. */
6828 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
6829 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6832 t
= make_node (POINTER_TYPE
);
6834 TREE_TYPE (t
) = to_type
;
6835 SET_TYPE_MODE (t
, mode
);
6836 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6837 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
6838 TYPE_POINTER_TO (to_type
) = t
;
6840 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6841 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6842 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6843 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6845 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
6848 /* Lay out the type. This function has many callers that are concerned
6849 with expression-construction, and this simplifies them all. */
6855 /* By default build pointers in ptr_mode. */
6858 build_pointer_type (tree to_type
)
6860 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
6861 : TYPE_ADDR_SPACE (to_type
);
6862 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
6863 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
6866 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6869 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
6873 bool could_alias
= can_alias_all
;
6875 if (to_type
== error_mark_node
)
6876 return error_mark_node
;
6878 /* If the pointed-to type has the may_alias attribute set, force
6879 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6880 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6881 can_alias_all
= true;
6883 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6884 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6885 In that case, return that type without regard to the rest of our
6888 ??? This is a kludge, but consistent with the way this function has
6889 always operated and there doesn't seem to be a good way to avoid this
6891 if (TYPE_REFERENCE_TO (to_type
) != 0
6892 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
6893 return TYPE_REFERENCE_TO (to_type
);
6895 /* First, if we already have a type for pointers to TO_TYPE and it's
6896 the proper mode, use it. */
6897 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
6898 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6901 t
= make_node (REFERENCE_TYPE
);
6903 TREE_TYPE (t
) = to_type
;
6904 SET_TYPE_MODE (t
, mode
);
6905 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6906 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
6907 TYPE_REFERENCE_TO (to_type
) = t
;
6909 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6910 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6911 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6912 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6914 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
6923 /* Build the node for the type of references-to-TO_TYPE by default
6927 build_reference_type (tree to_type
)
6929 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
6930 : TYPE_ADDR_SPACE (to_type
);
6931 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
6932 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
6935 #define MAX_INT_CACHED_PREC \
6936 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6937 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
6939 /* Builds a signed or unsigned integer type of precision PRECISION.
6940 Used for C bitfields whose precision does not match that of
6941 built-in target types. */
6943 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
6949 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
6951 if (precision
<= MAX_INT_CACHED_PREC
)
6953 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
6958 itype
= make_node (INTEGER_TYPE
);
6959 TYPE_PRECISION (itype
) = precision
;
6962 fixup_unsigned_type (itype
);
6964 fixup_signed_type (itype
);
6966 inchash::hash hstate
;
6967 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
6968 ret
= type_hash_canon (hstate
.end (), itype
);
6969 if (precision
<= MAX_INT_CACHED_PREC
)
6970 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
6975 #define MAX_BOOL_CACHED_PREC \
6976 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6977 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
6979 /* Builds a boolean type of precision PRECISION.
6980 Used for boolean vectors to choose proper vector element size. */
6982 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
6986 if (precision
<= MAX_BOOL_CACHED_PREC
)
6988 type
= nonstandard_boolean_type_cache
[precision
];
6993 type
= make_node (BOOLEAN_TYPE
);
6994 TYPE_PRECISION (type
) = precision
;
6995 fixup_signed_type (type
);
6997 if (precision
<= MAX_INT_CACHED_PREC
)
6998 nonstandard_boolean_type_cache
[precision
] = type
;
7003 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7004 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7005 is true, reuse such a type that has already been constructed. */
7008 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7010 tree itype
= make_node (INTEGER_TYPE
);
7012 TREE_TYPE (itype
) = type
;
7014 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7015 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7017 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7018 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7019 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7020 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7021 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
7022 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7023 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
7028 if ((TYPE_MIN_VALUE (itype
)
7029 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7030 || (TYPE_MAX_VALUE (itype
)
7031 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7033 /* Since we cannot reliably merge this type, we need to compare it using
7034 structural equality checks. */
7035 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7039 hashval_t hash
= type_hash_canon_hash (itype
);
7040 itype
= type_hash_canon (hash
, itype
);
7045 /* Wrapper around build_range_type_1 with SHARED set to true. */
7048 build_range_type (tree type
, tree lowval
, tree highval
)
7050 return build_range_type_1 (type
, lowval
, highval
, true);
7053 /* Wrapper around build_range_type_1 with SHARED set to false. */
7056 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7058 return build_range_type_1 (type
, lowval
, highval
, false);
7061 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7062 MAXVAL should be the maximum value in the domain
7063 (one less than the length of the array).
7065 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7066 We don't enforce this limit, that is up to caller (e.g. language front end).
7067 The limit exists because the result is a signed type and we don't handle
7068 sizes that use more than one HOST_WIDE_INT. */
7071 build_index_type (tree maxval
)
7073 return build_range_type (sizetype
, size_zero_node
, maxval
);
7076 /* Return true if the debug information for TYPE, a subtype, should be emitted
7077 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7078 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7079 debug info and doesn't reflect the source code. */
7082 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7084 tree base_type
= TREE_TYPE (type
), low
, high
;
7086 /* Subrange types have a base type which is an integral type. */
7087 if (!INTEGRAL_TYPE_P (base_type
))
7090 /* Get the real bounds of the subtype. */
7091 if (lang_hooks
.types
.get_subrange_bounds
)
7092 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7095 low
= TYPE_MIN_VALUE (type
);
7096 high
= TYPE_MAX_VALUE (type
);
7099 /* If the type and its base type have the same representation and the same
7100 name, then the type is not a subrange but a copy of the base type. */
7101 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7102 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7103 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7104 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7105 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7106 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7116 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7117 and number of elements specified by the range of values of INDEX_TYPE.
7118 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7119 If SHARED is true, reuse such a type that has already been constructed.
7120 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7123 build_array_type_1 (tree elt_type
, tree index_type
, bool typeless_storage
,
7124 bool shared
, bool set_canonical
)
7128 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7130 error ("arrays of functions are not meaningful");
7131 elt_type
= integer_type_node
;
7134 t
= make_node (ARRAY_TYPE
);
7135 TREE_TYPE (t
) = elt_type
;
7136 TYPE_DOMAIN (t
) = index_type
;
7137 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7138 TYPE_TYPELESS_STORAGE (t
) = typeless_storage
;
7143 hashval_t hash
= type_hash_canon_hash (t
);
7144 t
= type_hash_canon (hash
, t
);
7147 if (TYPE_CANONICAL (t
) == t
&& set_canonical
)
7149 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7150 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
7152 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7153 else if (TYPE_CANONICAL (elt_type
) != elt_type
7154 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7156 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7158 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7159 typeless_storage
, shared
, set_canonical
);
7165 /* Wrapper around build_array_type_1 with SHARED set to true. */
7168 build_array_type (tree elt_type
, tree index_type
, bool typeless_storage
)
7171 build_array_type_1 (elt_type
, index_type
, typeless_storage
, true, true);
7174 /* Wrapper around build_array_type_1 with SHARED set to false. */
7177 build_nonshared_array_type (tree elt_type
, tree index_type
)
7179 return build_array_type_1 (elt_type
, index_type
, false, false, true);
7182 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7186 build_array_type_nelts (tree elt_type
, poly_uint64 nelts
)
7188 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7191 /* Recursively examines the array elements of TYPE, until a non-array
7192 element type is found. */
7195 strip_array_types (tree type
)
7197 while (TREE_CODE (type
) == ARRAY_TYPE
)
7198 type
= TREE_TYPE (type
);
7203 /* Computes the canonical argument types from the argument type list
7206 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7207 on entry to this function, or if any of the ARGTYPES are
7210 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7211 true on entry to this function, or if any of the ARGTYPES are
7214 Returns a canonical argument list, which may be ARGTYPES when the
7215 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7216 true) or would not differ from ARGTYPES. */
7219 maybe_canonicalize_argtypes (tree argtypes
,
7220 bool *any_structural_p
,
7221 bool *any_noncanonical_p
)
7224 bool any_noncanonical_argtypes_p
= false;
7226 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7228 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7229 /* Fail gracefully by stating that the type is structural. */
7230 *any_structural_p
= true;
7231 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7232 *any_structural_p
= true;
7233 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7234 || TREE_PURPOSE (arg
))
7235 /* If the argument has a default argument, we consider it
7236 non-canonical even though the type itself is canonical.
7237 That way, different variants of function and method types
7238 with default arguments will all point to the variant with
7239 no defaults as their canonical type. */
7240 any_noncanonical_argtypes_p
= true;
7243 if (*any_structural_p
)
7246 if (any_noncanonical_argtypes_p
)
7248 /* Build the canonical list of argument types. */
7249 tree canon_argtypes
= NULL_TREE
;
7250 bool is_void
= false;
7252 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7254 if (arg
== void_list_node
)
7257 canon_argtypes
= tree_cons (NULL_TREE
,
7258 TYPE_CANONICAL (TREE_VALUE (arg
)),
7262 canon_argtypes
= nreverse (canon_argtypes
);
7264 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7266 /* There is a non-canonical type. */
7267 *any_noncanonical_p
= true;
7268 return canon_argtypes
;
7271 /* The canonical argument types are the same as ARGTYPES. */
7275 /* Construct, lay out and return
7276 the type of functions returning type VALUE_TYPE
7277 given arguments of types ARG_TYPES.
7278 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7279 are data type nodes for the arguments of the function.
7280 If such a type has already been constructed, reuse it. */
7283 build_function_type (tree value_type
, tree arg_types
)
7286 inchash::hash hstate
;
7287 bool any_structural_p
, any_noncanonical_p
;
7288 tree canon_argtypes
;
7290 gcc_assert (arg_types
!= error_mark_node
);
7292 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
7294 error ("function return type cannot be function");
7295 value_type
= integer_type_node
;
7298 /* Make a node of the sort we want. */
7299 t
= make_node (FUNCTION_TYPE
);
7300 TREE_TYPE (t
) = value_type
;
7301 TYPE_ARG_TYPES (t
) = arg_types
;
7303 /* If we already have such a type, use the old one. */
7304 hashval_t hash
= type_hash_canon_hash (t
);
7305 t
= type_hash_canon (hash
, t
);
7307 /* Set up the canonical type. */
7308 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
7309 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
7310 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
7312 &any_noncanonical_p
);
7313 if (any_structural_p
)
7314 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7315 else if (any_noncanonical_p
)
7316 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
7319 if (!COMPLETE_TYPE_P (t
))
7324 /* Build a function type. The RETURN_TYPE is the type returned by the
7325 function. If VAARGS is set, no void_type_node is appended to the
7326 list. ARGP must be always be terminated be a NULL_TREE. */
7329 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
7333 t
= va_arg (argp
, tree
);
7334 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
7335 args
= tree_cons (NULL_TREE
, t
, args
);
7340 if (args
!= NULL_TREE
)
7341 args
= nreverse (args
);
7342 gcc_assert (last
!= void_list_node
);
7344 else if (args
== NULL_TREE
)
7345 args
= void_list_node
;
7349 args
= nreverse (args
);
7350 TREE_CHAIN (last
) = void_list_node
;
7352 args
= build_function_type (return_type
, args
);
7357 /* Build a function type. The RETURN_TYPE is the type returned by the
7358 function. If additional arguments are provided, they are
7359 additional argument types. The list of argument types must always
7360 be terminated by NULL_TREE. */
7363 build_function_type_list (tree return_type
, ...)
7368 va_start (p
, return_type
);
7369 args
= build_function_type_list_1 (false, return_type
, p
);
7374 /* Build a variable argument function type. The RETURN_TYPE is the
7375 type returned by the function. If additional arguments are provided,
7376 they are additional argument types. The list of argument types must
7377 always be terminated by NULL_TREE. */
7380 build_varargs_function_type_list (tree return_type
, ...)
7385 va_start (p
, return_type
);
7386 args
= build_function_type_list_1 (true, return_type
, p
);
7392 /* Build a function type. RETURN_TYPE is the type returned by the
7393 function; VAARGS indicates whether the function takes varargs. The
7394 function takes N named arguments, the types of which are provided in
7398 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
7402 tree t
= vaargs
? NULL_TREE
: void_list_node
;
7404 for (i
= n
- 1; i
>= 0; i
--)
7405 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
7407 return build_function_type (return_type
, t
);
7410 /* Build a function type. RETURN_TYPE is the type returned by the
7411 function. The function takes N named arguments, the types of which
7412 are provided in ARG_TYPES. */
7415 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7417 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
7420 /* Build a variable argument function type. RETURN_TYPE is the type
7421 returned by the function. The function takes N named arguments, the
7422 types of which are provided in ARG_TYPES. */
7425 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7427 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
7430 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7431 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7432 for the method. An implicit additional parameter (of type
7433 pointer-to-BASETYPE) is added to the ARGTYPES. */
7436 build_method_type_directly (tree basetype
,
7442 bool any_structural_p
, any_noncanonical_p
;
7443 tree canon_argtypes
;
7445 /* Make a node of the sort we want. */
7446 t
= make_node (METHOD_TYPE
);
7448 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7449 TREE_TYPE (t
) = rettype
;
7450 ptype
= build_pointer_type (basetype
);
7452 /* The actual arglist for this function includes a "hidden" argument
7453 which is "this". Put it into the list of argument types. */
7454 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
7455 TYPE_ARG_TYPES (t
) = argtypes
;
7457 /* If we already have such a type, use the old one. */
7458 hashval_t hash
= type_hash_canon_hash (t
);
7459 t
= type_hash_canon (hash
, t
);
7461 /* Set up the canonical type. */
7463 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7464 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
7466 = (TYPE_CANONICAL (basetype
) != basetype
7467 || TYPE_CANONICAL (rettype
) != rettype
);
7468 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
7470 &any_noncanonical_p
);
7471 if (any_structural_p
)
7472 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7473 else if (any_noncanonical_p
)
7475 = build_method_type_directly (TYPE_CANONICAL (basetype
),
7476 TYPE_CANONICAL (rettype
),
7478 if (!COMPLETE_TYPE_P (t
))
7484 /* Construct, lay out and return the type of methods belonging to class
7485 BASETYPE and whose arguments and values are described by TYPE.
7486 If that type exists already, reuse it.
7487 TYPE must be a FUNCTION_TYPE node. */
7490 build_method_type (tree basetype
, tree type
)
7492 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
7494 return build_method_type_directly (basetype
,
7496 TYPE_ARG_TYPES (type
));
7499 /* Construct, lay out and return the type of offsets to a value
7500 of type TYPE, within an object of type BASETYPE.
7501 If a suitable offset type exists already, reuse it. */
7504 build_offset_type (tree basetype
, tree type
)
7508 /* Make a node of the sort we want. */
7509 t
= make_node (OFFSET_TYPE
);
7511 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7512 TREE_TYPE (t
) = type
;
7514 /* If we already have such a type, use the old one. */
7515 hashval_t hash
= type_hash_canon_hash (t
);
7516 t
= type_hash_canon (hash
, t
);
7518 if (!COMPLETE_TYPE_P (t
))
7521 if (TYPE_CANONICAL (t
) == t
)
7523 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7524 || TYPE_STRUCTURAL_EQUALITY_P (type
))
7525 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7526 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
7527 || TYPE_CANONICAL (type
) != type
)
7529 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
7530 TYPE_CANONICAL (type
));
7536 /* Create a complex type whose components are COMPONENT_TYPE.
7538 If NAMED is true, the type is given a TYPE_NAME. We do not always
7539 do so because this creates a DECL node and thus make the DECL_UIDs
7540 dependent on the type canonicalization hashtable, which is GC-ed,
7541 so the DECL_UIDs would not be stable wrt garbage collection. */
7544 build_complex_type (tree component_type
, bool named
)
7546 gcc_assert (INTEGRAL_TYPE_P (component_type
)
7547 || SCALAR_FLOAT_TYPE_P (component_type
)
7548 || FIXED_POINT_TYPE_P (component_type
));
7550 /* Make a node of the sort we want. */
7551 tree probe
= make_node (COMPLEX_TYPE
);
7553 TREE_TYPE (probe
) = TYPE_MAIN_VARIANT (component_type
);
7555 /* If we already have such a type, use the old one. */
7556 hashval_t hash
= type_hash_canon_hash (probe
);
7557 tree t
= type_hash_canon (hash
, probe
);
7561 /* We created a new type. The hash insertion will have laid
7562 out the type. We need to check the canonicalization and
7563 maybe set the name. */
7564 gcc_checking_assert (COMPLETE_TYPE_P (t
)
7566 && TYPE_CANONICAL (t
) == t
);
7568 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t
)))
7569 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7570 else if (TYPE_CANONICAL (TREE_TYPE (t
)) != TREE_TYPE (t
))
7572 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t
)), named
);
7574 /* We need to create a name, since complex is a fundamental type. */
7577 const char *name
= NULL
;
7579 if (TREE_TYPE (t
) == char_type_node
)
7580 name
= "complex char";
7581 else if (TREE_TYPE (t
) == signed_char_type_node
)
7582 name
= "complex signed char";
7583 else if (TREE_TYPE (t
) == unsigned_char_type_node
)
7584 name
= "complex unsigned char";
7585 else if (TREE_TYPE (t
) == short_integer_type_node
)
7586 name
= "complex short int";
7587 else if (TREE_TYPE (t
) == short_unsigned_type_node
)
7588 name
= "complex short unsigned int";
7589 else if (TREE_TYPE (t
) == integer_type_node
)
7590 name
= "complex int";
7591 else if (TREE_TYPE (t
) == unsigned_type_node
)
7592 name
= "complex unsigned int";
7593 else if (TREE_TYPE (t
) == long_integer_type_node
)
7594 name
= "complex long int";
7595 else if (TREE_TYPE (t
) == long_unsigned_type_node
)
7596 name
= "complex long unsigned int";
7597 else if (TREE_TYPE (t
) == long_long_integer_type_node
)
7598 name
= "complex long long int";
7599 else if (TREE_TYPE (t
) == long_long_unsigned_type_node
)
7600 name
= "complex long long unsigned int";
7603 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
7604 get_identifier (name
), t
);
7608 return build_qualified_type (t
, TYPE_QUALS (component_type
));
7611 /* If TYPE is a real or complex floating-point type and the target
7612 does not directly support arithmetic on TYPE then return the wider
7613 type to be used for arithmetic on TYPE. Otherwise, return
7617 excess_precision_type (tree type
)
7619 /* The target can give two different responses to the question of
7620 which excess precision mode it would like depending on whether we
7621 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7623 enum excess_precision_type requested_type
7624 = (flag_excess_precision
== EXCESS_PRECISION_FAST
7625 ? EXCESS_PRECISION_TYPE_FAST
7626 : EXCESS_PRECISION_TYPE_STANDARD
);
7628 enum flt_eval_method target_flt_eval_method
7629 = targetm
.c
.excess_precision (requested_type
);
7631 /* The target should not ask for unpredictable float evaluation (though
7632 it might advertise that implicitly the evaluation is unpredictable,
7633 but we don't care about that here, it will have been reported
7634 elsewhere). If it does ask for unpredictable evaluation, we have
7635 nothing to do here. */
7636 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
7638 /* Nothing to do. The target has asked for all types we know about
7639 to be computed with their native precision and range. */
7640 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
7643 /* The target will promote this type in a target-dependent way, so excess
7644 precision ought to leave it alone. */
7645 if (targetm
.promoted_type (type
) != NULL_TREE
)
7648 machine_mode float16_type_mode
= (float16_type_node
7649 ? TYPE_MODE (float16_type_node
)
7651 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
7652 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
7654 switch (TREE_CODE (type
))
7658 machine_mode type_mode
= TYPE_MODE (type
);
7659 switch (target_flt_eval_method
)
7661 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7662 if (type_mode
== float16_type_mode
)
7663 return float_type_node
;
7665 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7666 if (type_mode
== float16_type_mode
7667 || type_mode
== float_type_mode
)
7668 return double_type_node
;
7670 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7671 if (type_mode
== float16_type_mode
7672 || type_mode
== float_type_mode
7673 || type_mode
== double_type_mode
)
7674 return long_double_type_node
;
7683 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
7685 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
7686 switch (target_flt_eval_method
)
7688 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7689 if (type_mode
== float16_type_mode
)
7690 return complex_float_type_node
;
7692 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7693 if (type_mode
== float16_type_mode
7694 || type_mode
== float_type_mode
)
7695 return complex_double_type_node
;
7697 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7698 if (type_mode
== float16_type_mode
7699 || type_mode
== float_type_mode
7700 || type_mode
== double_type_mode
)
7701 return complex_long_double_type_node
;
7715 /* Return OP, stripped of any conversions to wider types as much as is safe.
7716 Converting the value back to OP's type makes a value equivalent to OP.
7718 If FOR_TYPE is nonzero, we return a value which, if converted to
7719 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7721 OP must have integer, real or enumeral type. Pointers are not allowed!
7723 There are some cases where the obvious value we could return
7724 would regenerate to OP if converted to OP's type,
7725 but would not extend like OP to wider types.
7726 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7727 For example, if OP is (unsigned short)(signed char)-1,
7728 we avoid returning (signed char)-1 if FOR_TYPE is int,
7729 even though extending that to an unsigned short would regenerate OP,
7730 since the result of extending (signed char)-1 to (int)
7731 is different from (int) OP. */
7734 get_unwidened (tree op
, tree for_type
)
7736 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7737 tree type
= TREE_TYPE (op
);
7739 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
7741 = (for_type
!= 0 && for_type
!= type
7742 && final_prec
> TYPE_PRECISION (type
)
7743 && TYPE_UNSIGNED (type
));
7746 while (CONVERT_EXPR_P (op
))
7750 /* TYPE_PRECISION on vector types has different meaning
7751 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7752 so avoid them here. */
7753 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
7756 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
7757 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
7759 /* Truncations are many-one so cannot be removed.
7760 Unless we are later going to truncate down even farther. */
7762 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
7765 /* See what's inside this conversion. If we decide to strip it,
7767 op
= TREE_OPERAND (op
, 0);
7769 /* If we have not stripped any zero-extensions (uns is 0),
7770 we can strip any kind of extension.
7771 If we have previously stripped a zero-extension,
7772 only zero-extensions can safely be stripped.
7773 Any extension can be stripped if the bits it would produce
7774 are all going to be discarded later by truncating to FOR_TYPE. */
7778 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
7780 /* TYPE_UNSIGNED says whether this is a zero-extension.
7781 Let's avoid computing it if it does not affect WIN
7782 and if UNS will not be needed again. */
7784 || CONVERT_EXPR_P (op
))
7785 && TYPE_UNSIGNED (TREE_TYPE (op
)))
7793 /* If we finally reach a constant see if it fits in sth smaller and
7794 in that case convert it. */
7795 if (TREE_CODE (win
) == INTEGER_CST
)
7797 tree wtype
= TREE_TYPE (win
);
7798 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
7800 prec
= MAX (prec
, final_prec
);
7801 if (prec
< TYPE_PRECISION (wtype
))
7803 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
7804 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
7805 win
= fold_convert (t
, win
);
7812 /* Return OP or a simpler expression for a narrower value
7813 which can be sign-extended or zero-extended to give back OP.
7814 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7815 or 0 if the value should be sign-extended. */
7818 get_narrower (tree op
, int *unsignedp_ptr
)
7823 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
7825 if (TREE_CODE (op
) == COMPOUND_EXPR
)
7828 op
= TREE_OPERAND (op
, 1);
7829 while (TREE_CODE (op
) == COMPOUND_EXPR
);
7830 tree ret
= get_narrower (op
, unsignedp_ptr
);
7833 auto_vec
<tree
, 16> v
;
7835 for (op
= win
; TREE_CODE (op
) == COMPOUND_EXPR
;
7836 op
= TREE_OPERAND (op
, 1))
7838 FOR_EACH_VEC_ELT_REVERSE (v
, i
, op
)
7839 ret
= build2_loc (EXPR_LOCATION (op
), COMPOUND_EXPR
,
7840 TREE_TYPE (ret
), TREE_OPERAND (op
, 0),
7844 while (TREE_CODE (op
) == NOP_EXPR
)
7847 = (TYPE_PRECISION (TREE_TYPE (op
))
7848 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
7850 /* Truncations are many-one so cannot be removed. */
7854 /* See what's inside this conversion. If we decide to strip it,
7859 op
= TREE_OPERAND (op
, 0);
7860 /* An extension: the outermost one can be stripped,
7861 but remember whether it is zero or sign extension. */
7863 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7864 /* Otherwise, if a sign extension has been stripped,
7865 only sign extensions can now be stripped;
7866 if a zero extension has been stripped, only zero-extensions. */
7867 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
7871 else /* bitschange == 0 */
7873 /* A change in nominal type can always be stripped, but we must
7874 preserve the unsignedness. */
7876 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7878 op
= TREE_OPERAND (op
, 0);
7879 /* Keep trying to narrow, but don't assign op to win if it
7880 would turn an integral type into something else. */
7881 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
7888 if (TREE_CODE (op
) == COMPONENT_REF
7889 /* Since type_for_size always gives an integer type. */
7890 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
7891 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
7892 /* Ensure field is laid out already. */
7893 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
7894 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
7896 unsigned HOST_WIDE_INT innerprec
7897 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
7898 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
7899 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
7900 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
7902 /* We can get this structure field in a narrower type that fits it,
7903 but the resulting extension to its nominal type (a fullword type)
7904 must satisfy the same conditions as for other extensions.
7906 Do this only for fields that are aligned (not bit-fields),
7907 because when bit-field insns will be used there is no
7908 advantage in doing this. */
7910 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
7911 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
7912 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
7916 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
7917 win
= fold_convert (type
, op
);
7921 *unsignedp_ptr
= uns
;
7925 /* Return true if integer constant C has a value that is permissible
7926 for TYPE, an integral type. */
7929 int_fits_type_p (const_tree c
, const_tree type
)
7931 tree type_low_bound
, type_high_bound
;
7932 bool ok_for_low_bound
, ok_for_high_bound
;
7933 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
7935 /* Non-standard boolean types can have arbitrary precision but various
7936 transformations assume that they can only take values 0 and +/-1. */
7937 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7938 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
7941 type_low_bound
= TYPE_MIN_VALUE (type
);
7942 type_high_bound
= TYPE_MAX_VALUE (type
);
7944 /* If at least one bound of the type is a constant integer, we can check
7945 ourselves and maybe make a decision. If no such decision is possible, but
7946 this type is a subtype, try checking against that. Otherwise, use
7947 fits_to_tree_p, which checks against the precision.
7949 Compute the status for each possibly constant bound, and return if we see
7950 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
7951 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
7952 for "constant known to fit". */
7954 /* Check if c >= type_low_bound. */
7955 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
7957 if (tree_int_cst_lt (c
, type_low_bound
))
7959 ok_for_low_bound
= true;
7962 ok_for_low_bound
= false;
7964 /* Check if c <= type_high_bound. */
7965 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
7967 if (tree_int_cst_lt (type_high_bound
, c
))
7969 ok_for_high_bound
= true;
7972 ok_for_high_bound
= false;
7974 /* If the constant fits both bounds, the result is known. */
7975 if (ok_for_low_bound
&& ok_for_high_bound
)
7978 /* Perform some generic filtering which may allow making a decision
7979 even if the bounds are not constant. First, negative integers
7980 never fit in unsigned types, */
7981 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
7984 /* Second, narrower types always fit in wider ones. */
7985 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
7988 /* Third, unsigned integers with top bit set never fit signed types. */
7989 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
7991 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
7992 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
7994 /* When a tree_cst is converted to a wide-int, the precision
7995 is taken from the type. However, if the precision of the
7996 mode underneath the type is smaller than that, it is
7997 possible that the value will not fit. The test below
7998 fails if any bit is set between the sign bit of the
7999 underlying mode and the top bit of the type. */
8000 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8003 else if (wi::neg_p (wi::to_wide (c
)))
8007 /* If we haven't been able to decide at this point, there nothing more we
8008 can check ourselves here. Look at the base type if we have one and it
8009 has the same precision. */
8010 if (TREE_CODE (type
) == INTEGER_TYPE
8011 && TREE_TYPE (type
) != 0
8012 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8014 type
= TREE_TYPE (type
);
8018 /* Or to fits_to_tree_p, if nothing else. */
8019 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
8022 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8023 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8024 represented (assuming two's-complement arithmetic) within the bit
8025 precision of the type are returned instead. */
8028 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8030 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8031 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8032 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
8035 if (TYPE_UNSIGNED (type
))
8036 mpz_set_ui (min
, 0);
8039 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8040 wi::to_mpz (mn
, min
, SIGNED
);
8044 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8045 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8046 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
8049 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8050 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8054 /* Return true if VAR is an automatic variable. */
8057 auto_var_p (const_tree var
)
8059 return ((((VAR_P (var
) && ! DECL_EXTERNAL (var
))
8060 || TREE_CODE (var
) == PARM_DECL
)
8061 && ! TREE_STATIC (var
))
8062 || TREE_CODE (var
) == RESULT_DECL
);
8065 /* Return true if VAR is an automatic variable defined in function FN. */
8068 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8070 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8071 && (auto_var_p (var
)
8072 || TREE_CODE (var
) == LABEL_DECL
));
8075 /* Subprogram of following function. Called by walk_tree.
8077 Return *TP if it is an automatic variable or parameter of the
8078 function passed in as DATA. */
8081 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8083 tree fn
= (tree
) data
;
8088 else if (DECL_P (*tp
)
8089 && auto_var_in_fn_p (*tp
, fn
))
8095 /* Returns true if T is, contains, or refers to a type with variable
8096 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8097 arguments, but not the return type. If FN is nonzero, only return
8098 true if a modifier of the type or position of FN is a variable or
8099 parameter inside FN.
8101 This concept is more general than that of C99 'variably modified types':
8102 in C99, a struct type is never variably modified because a VLA may not
8103 appear as a structure member. However, in GNU C code like:
8105 struct S { int i[f()]; };
8107 is valid, and other languages may define similar constructs. */
8110 variably_modified_type_p (tree type
, tree fn
)
8114 /* Test if T is either variable (if FN is zero) or an expression containing
8115 a variable in FN. If TYPE isn't gimplified, return true also if
8116 gimplify_one_sizepos would gimplify the expression into a local
8118 #define RETURN_TRUE_IF_VAR(T) \
8119 do { tree _t = (T); \
8120 if (_t != NULL_TREE \
8121 && _t != error_mark_node \
8122 && !CONSTANT_CLASS_P (_t) \
8123 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8125 || (!TYPE_SIZES_GIMPLIFIED (type) \
8126 && (TREE_CODE (_t) != VAR_DECL \
8127 && !CONTAINS_PLACEHOLDER_P (_t))) \
8128 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8129 return true; } while (0)
8131 if (type
== error_mark_node
)
8134 /* If TYPE itself has variable size, it is variably modified. */
8135 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8136 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8138 switch (TREE_CODE (type
))
8141 case REFERENCE_TYPE
:
8143 /* Ada can have pointer types refering to themselves indirectly. */
8144 if (TREE_VISITED (type
))
8146 TREE_VISITED (type
) = true;
8147 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8149 TREE_VISITED (type
) = false;
8152 TREE_VISITED (type
) = false;
8157 /* If TYPE is a function type, it is variably modified if the
8158 return type is variably modified. */
8159 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8165 case FIXED_POINT_TYPE
:
8168 /* Scalar types are variably modified if their end points
8170 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8171 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8176 case QUAL_UNION_TYPE
:
8177 /* We can't see if any of the fields are variably-modified by the
8178 definition we normally use, since that would produce infinite
8179 recursion via pointers. */
8180 /* This is variably modified if some field's type is. */
8181 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8182 if (TREE_CODE (t
) == FIELD_DECL
)
8184 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8185 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8186 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8188 /* If the type is a qualified union, then the DECL_QUALIFIER
8189 of fields can also be an expression containing a variable. */
8190 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8191 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8193 /* If the field is a qualified union, then it's only a container
8194 for what's inside so we look into it. That's necessary in LTO
8195 mode because the sizes of the field tested above have been set
8196 to PLACEHOLDER_EXPRs by free_lang_data. */
8197 if (TREE_CODE (TREE_TYPE (t
)) == QUAL_UNION_TYPE
8198 && variably_modified_type_p (TREE_TYPE (t
), fn
))
8204 /* Do not call ourselves to avoid infinite recursion. This is
8205 variably modified if the element type is. */
8206 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8207 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8214 /* The current language may have other cases to check, but in general,
8215 all other types are not variably modified. */
8216 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8218 #undef RETURN_TRUE_IF_VAR
8221 /* Given a DECL or TYPE, return the scope in which it was declared, or
8222 NULL_TREE if there is no containing scope. */
8225 get_containing_scope (const_tree t
)
8227 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8230 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8233 get_ultimate_context (const_tree decl
)
8235 while (decl
&& TREE_CODE (decl
) != TRANSLATION_UNIT_DECL
)
8237 if (TREE_CODE (decl
) == BLOCK
)
8238 decl
= BLOCK_SUPERCONTEXT (decl
);
8240 decl
= get_containing_scope (decl
);
8245 /* Return the innermost context enclosing DECL that is
8246 a FUNCTION_DECL, or zero if none. */
8249 decl_function_context (const_tree decl
)
8253 if (TREE_CODE (decl
) == ERROR_MARK
)
8256 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8257 where we look up the function at runtime. Such functions always take
8258 a first argument of type 'pointer to real context'.
8260 C++ should really be fixed to use DECL_CONTEXT for the real context,
8261 and use something else for the "virtual context". */
8262 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VIRTUAL_P (decl
))
8265 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8267 context
= DECL_CONTEXT (decl
);
8269 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8271 if (TREE_CODE (context
) == BLOCK
)
8272 context
= BLOCK_SUPERCONTEXT (context
);
8274 context
= get_containing_scope (context
);
8280 /* Return the innermost context enclosing DECL that is
8281 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8282 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8285 decl_type_context (const_tree decl
)
8287 tree context
= DECL_CONTEXT (decl
);
8290 switch (TREE_CODE (context
))
8292 case NAMESPACE_DECL
:
8293 case TRANSLATION_UNIT_DECL
:
8298 case QUAL_UNION_TYPE
:
8303 context
= DECL_CONTEXT (context
);
8307 context
= BLOCK_SUPERCONTEXT (context
);
8317 /* CALL is a CALL_EXPR. Return the declaration for the function
8318 called, or NULL_TREE if the called function cannot be
8322 get_callee_fndecl (const_tree call
)
8326 if (call
== error_mark_node
)
8327 return error_mark_node
;
8329 /* It's invalid to call this function with anything but a
8331 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8333 /* The first operand to the CALL is the address of the function
8335 addr
= CALL_EXPR_FN (call
);
8337 /* If there is no function, return early. */
8338 if (addr
== NULL_TREE
)
8343 /* If this is a readonly function pointer, extract its initial value. */
8344 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8345 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8346 && DECL_INITIAL (addr
))
8347 addr
= DECL_INITIAL (addr
);
8349 /* If the address is just `&f' for some function `f', then we know
8350 that `f' is being called. */
8351 if (TREE_CODE (addr
) == ADDR_EXPR
8352 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8353 return TREE_OPERAND (addr
, 0);
8355 /* We couldn't figure out what was being called. */
8359 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8360 return the associated function code, otherwise return CFN_LAST. */
8363 get_call_combined_fn (const_tree call
)
8365 /* It's invalid to call this function with anything but a CALL_EXPR. */
8366 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8368 if (!CALL_EXPR_FN (call
))
8369 return as_combined_fn (CALL_EXPR_IFN (call
));
8371 tree fndecl
= get_callee_fndecl (call
);
8372 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
8373 return as_combined_fn (DECL_FUNCTION_CODE (fndecl
));
8378 /* Comparator of indices based on tree_node_counts. */
8381 tree_nodes_cmp (const void *p1
, const void *p2
)
8383 const unsigned *n1
= (const unsigned *)p1
;
8384 const unsigned *n2
= (const unsigned *)p2
;
8386 return tree_node_counts
[*n1
] - tree_node_counts
[*n2
];
8389 /* Comparator of indices based on tree_code_counts. */
8392 tree_codes_cmp (const void *p1
, const void *p2
)
8394 const unsigned *n1
= (const unsigned *)p1
;
8395 const unsigned *n2
= (const unsigned *)p2
;
8397 return tree_code_counts
[*n1
] - tree_code_counts
[*n2
];
8400 #define TREE_MEM_USAGE_SPACES 40
8402 /* Print debugging information about tree nodes generated during the compile,
8403 and any language-specific information. */
8406 dump_tree_statistics (void)
8408 if (GATHER_STATISTICS
)
8410 uint64_t total_nodes
, total_bytes
;
8411 fprintf (stderr
, "\nKind Nodes Bytes\n");
8412 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8413 total_nodes
= total_bytes
= 0;
8416 auto_vec
<unsigned> indices (all_kinds
);
8417 for (unsigned i
= 0; i
< all_kinds
; i
++)
8418 indices
.quick_push (i
);
8419 indices
.qsort (tree_nodes_cmp
);
8421 for (unsigned i
= 0; i
< (int) all_kinds
; i
++)
8423 unsigned j
= indices
[i
];
8424 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n",
8425 tree_node_kind_names
[j
], SIZE_AMOUNT (tree_node_counts
[j
]),
8426 SIZE_AMOUNT (tree_node_sizes
[j
]));
8427 total_nodes
+= tree_node_counts
[j
];
8428 total_bytes
+= tree_node_sizes
[j
];
8430 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8431 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n", "Total",
8432 SIZE_AMOUNT (total_nodes
), SIZE_AMOUNT (total_bytes
));
8433 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8437 fprintf (stderr
, "Code Nodes\n");
8438 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8440 auto_vec
<unsigned> indices (MAX_TREE_CODES
);
8441 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8442 indices
.quick_push (i
);
8443 indices
.qsort (tree_codes_cmp
);
8445 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8447 unsigned j
= indices
[i
];
8448 fprintf (stderr
, "%-32s %6" PRIu64
"%c\n",
8449 get_tree_code_name ((enum tree_code
) j
),
8450 SIZE_AMOUNT (tree_code_counts
[j
]));
8452 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8453 fprintf (stderr
, "\n");
8454 ssanames_print_statistics ();
8455 fprintf (stderr
, "\n");
8456 phinodes_print_statistics ();
8457 fprintf (stderr
, "\n");
8461 fprintf (stderr
, "(No per-node statistics)\n");
8463 print_type_hash_statistics ();
8464 print_debug_expr_statistics ();
8465 print_value_expr_statistics ();
8466 lang_hooks
.print_statistics ();
#define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"

/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the low BYTES bytes and process the value a nibble
     (4 bits) at a time, two nibbles per byte.  */
  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];

      chksum = (chksum << 4) ^ feedback;
    }

  return chksum;
}
8501 /* Generate a crc32 of a string. */
8504 crc32_string (unsigned chksum
, const char *string
)
8507 chksum
= crc32_byte (chksum
, *string
);
8512 /* P is a string that will be used in a symbol. Mask out any characters
8513 that are not valid in that context. */
8516 clean_symbol_name (char *p
)
8520 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8523 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8530 static GTY(()) unsigned anon_cnt
= 0; /* Saved for PCH. */
8532 /* Create a unique anonymous identifier. The identifier is still a
8533 valid assembly label. */
8539 #if !defined (NO_DOT_IN_LABEL)
8541 #elif !defined (NO_DOLLAR_IN_LABEL)
8549 int len
= snprintf (buf
, sizeof (buf
), fmt
, anon_cnt
++);
8550 gcc_checking_assert (len
< int (sizeof (buf
)));
8552 tree id
= get_identifier_with_length (buf
, len
);
8553 IDENTIFIER_ANON_P (id
) = true;
8558 /* Generate a name for a special-purpose function.
8559 The generated name may need to be unique across the whole link.
8560 Changes to this function may also require corresponding changes to
8561 xstrdup_mask_random.
8562 TYPE is some string to identify the purpose of this function to the
8563 linker or collect2; it must start with an uppercase letter,
8565 I - for constructors
8567 N - for C++ anonymous namespaces
8568 F - for DWARF unwind frame information. */
8571 get_file_function_name (const char *type
)
8577 /* If we already have a name we know to be unique, just use that. */
8578 if (first_global_object_name
)
8579 p
= q
= ASTRDUP (first_global_object_name
);
8580 /* If the target is handling the constructors/destructors, they
8581 will be local to this file and the name is only necessary for
8583 We also assign sub_I and sub_D sufixes to constructors called from
8584 the global static constructors. These are always local. */
8585 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
8586 || (startswith (type
, "sub_")
8587 && (type
[4] == 'I' || type
[4] == 'D')))
8589 const char *file
= main_input_filename
;
8591 file
= LOCATION_FILE (input_location
);
8592 /* Just use the file's basename, because the full pathname
8593 might be quite long. */
8594 p
= q
= ASTRDUP (lbasename (file
));
8598 /* Otherwise, the name must be unique across the entire link.
8599 We don't have anything that we know to be unique to this translation
8600 unit, so use what we do have and throw in some randomness. */
8602 const char *name
= weak_global_object_name
;
8603 const char *file
= main_input_filename
;
8608 file
= LOCATION_FILE (input_location
);
8610 len
= strlen (file
);
8611 q
= (char *) alloca (9 + 19 + len
+ 1);
8612 memcpy (q
, file
, len
+ 1);
8614 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
8615 crc32_string (0, name
), get_random_seed (false));
8620 clean_symbol_name (q
);
8621 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
8624 /* Set up the name of the file-level functions we may need.
8625 Use a global object (which is already required to be unique over
8626 the program) rather than the file name (which imposes extra
8628 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
8630 return get_identifier (buf
);
8633 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8635 /* Complain that the tree code of NODE does not match the expected 0
8636 terminated list of trailing codes. The trailing code list can be
8637 empty, for a more vague error message. FILE, LINE, and FUNCTION
8638 are of the caller. */
8641 tree_check_failed (const_tree node
, const char *file
,
8642 int line
, const char *function
, ...)
8646 unsigned length
= 0;
8647 enum tree_code code
;
8649 va_start (args
, function
);
8650 while ((code
= (enum tree_code
) va_arg (args
, int)))
8651 length
+= 4 + strlen (get_tree_code_name (code
));
8656 va_start (args
, function
);
8657 length
+= strlen ("expected ");
8658 buffer
= tmp
= (char *) alloca (length
);
8660 while ((code
= (enum tree_code
) va_arg (args
, int)))
8662 const char *prefix
= length
? " or " : "expected ";
8664 strcpy (tmp
+ length
, prefix
);
8665 length
+= strlen (prefix
);
8666 strcpy (tmp
+ length
, get_tree_code_name (code
));
8667 length
+= strlen (get_tree_code_name (code
));
8672 buffer
= "unexpected node";
8674 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8675 buffer
, get_tree_code_name (TREE_CODE (node
)),
8676 function
, trim_filename (file
), line
);
8679 /* Complain that the tree code of NODE does match the expected 0
8680 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8684 tree_not_check_failed (const_tree node
, const char *file
,
8685 int line
, const char *function
, ...)
8689 unsigned length
= 0;
8690 enum tree_code code
;
8692 va_start (args
, function
);
8693 while ((code
= (enum tree_code
) va_arg (args
, int)))
8694 length
+= 4 + strlen (get_tree_code_name (code
));
8696 va_start (args
, function
);
8697 buffer
= (char *) alloca (length
);
8699 while ((code
= (enum tree_code
) va_arg (args
, int)))
8703 strcpy (buffer
+ length
, " or ");
8706 strcpy (buffer
+ length
, get_tree_code_name (code
));
8707 length
+= strlen (get_tree_code_name (code
));
8711 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8712 buffer
, get_tree_code_name (TREE_CODE (node
)),
8713 function
, trim_filename (file
), line
);
8716 /* Similar to tree_check_failed, except that we check for a class of tree
8717 code, given in CL. */
8720 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8721 const char *file
, int line
, const char *function
)
8724 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8725 TREE_CODE_CLASS_STRING (cl
),
8726 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8727 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8730 /* Similar to tree_check_failed, except that instead of specifying a
8731 dozen codes, use the knowledge that they're all sequential. */
8734 tree_range_check_failed (const_tree node
, const char *file
, int line
,
8735 const char *function
, enum tree_code c1
,
8739 unsigned length
= 0;
8742 for (c
= c1
; c
<= c2
; ++c
)
8743 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
8745 length
+= strlen ("expected ");
8746 buffer
= (char *) alloca (length
);
8749 for (c
= c1
; c
<= c2
; ++c
)
8751 const char *prefix
= length
? " or " : "expected ";
8753 strcpy (buffer
+ length
, prefix
);
8754 length
+= strlen (prefix
);
8755 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
8756 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
8759 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8760 buffer
, get_tree_code_name (TREE_CODE (node
)),
8761 function
, trim_filename (file
), line
);
8765 /* Similar to tree_check_failed, except that we check that a tree does
8766 not have the specified code, given in CL. */
8769 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8770 const char *file
, int line
, const char *function
)
8773 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8774 TREE_CODE_CLASS_STRING (cl
),
8775 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8776 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8780 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8783 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
8784 const char *function
, enum omp_clause_code code
)
8786 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8788 omp_clause_code_name
[code
],
8789 get_tree_code_name (TREE_CODE (node
)),
8790 function
, trim_filename (file
), line
);
8794 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8797 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
8798 const char *function
, enum omp_clause_code c1
,
8799 enum omp_clause_code c2
)
8802 unsigned length
= 0;
8805 for (c
= c1
; c
<= c2
; ++c
)
8806 length
+= 4 + strlen (omp_clause_code_name
[c
]);
8808 length
+= strlen ("expected ");
8809 buffer
= (char *) alloca (length
);
8812 for (c
= c1
; c
<= c2
; ++c
)
8814 const char *prefix
= length
? " or " : "expected ";
8816 strcpy (buffer
+ length
, prefix
);
8817 length
+= strlen (prefix
);
8818 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
8819 length
+= strlen (omp_clause_code_name
[c
]);
8822 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8823 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
8824 function
, trim_filename (file
), line
);
8828 #undef DEFTREESTRUCT
8829 #define DEFTREESTRUCT(VAL, NAME) NAME,
8831 static const char *ts_enum_names
[] = {
8832 #include "treestruct.def"
8834 #undef DEFTREESTRUCT
8836 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8838 /* Similar to tree_class_check_failed, except that we check for
8839 whether CODE contains the tree structure identified by EN. */
8842 tree_contains_struct_check_failed (const_tree node
,
8843 const enum tree_node_structure_enum en
,
8844 const char *file
, int line
,
8845 const char *function
)
8848 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8850 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8854 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8855 (dynamically sized) vector. */
8858 tree_int_cst_elt_check_failed (int idx
, int len
, const char *file
, int line
,
8859 const char *function
)
8862 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8864 idx
+ 1, len
, function
, trim_filename (file
), line
);
8867 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8868 (dynamically sized) vector. */
8871 tree_vec_elt_check_failed (int idx
, int len
, const char *file
, int line
,
8872 const char *function
)
8875 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8876 idx
+ 1, len
, function
, trim_filename (file
), line
);
8879 /* Similar to above, except that the check is for the bounds of the operand
8880 vector of an expression node EXP. */
8883 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
8884 int line
, const char *function
)
8886 enum tree_code code
= TREE_CODE (exp
);
8888 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8889 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
8890 function
, trim_filename (file
), line
);
8893 /* Similar to above, except that the check is for the number of
8894 operands of an OMP_CLAUSE node. */
8897 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
8898 int line
, const char *function
)
8901 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8902 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
8903 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
8904 trim_filename (file
), line
);
8906 #endif /* ENABLE_TREE_CHECKING */
8908 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
8909 and mapped to the machine mode MODE. Initialize its fields and build
8910 the information necessary for debugging output. */
8913 make_vector_type (tree innertype
, poly_int64 nunits
, machine_mode mode
)
8916 tree mv_innertype
= TYPE_MAIN_VARIANT (innertype
);
8918 t
= make_node (VECTOR_TYPE
);
8919 TREE_TYPE (t
) = mv_innertype
;
8920 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
8921 SET_TYPE_MODE (t
, mode
);
8923 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype
) || in_lto_p
)
8924 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8925 else if ((TYPE_CANONICAL (mv_innertype
) != innertype
8926 || mode
!= VOIDmode
)
8927 && !VECTOR_BOOLEAN_TYPE_P (t
))
8929 = make_vector_type (TYPE_CANONICAL (mv_innertype
), nunits
, VOIDmode
);
8933 hashval_t hash
= type_hash_canon_hash (t
);
8934 t
= type_hash_canon (hash
, t
);
8936 /* We have built a main variant, based on the main variant of the
8937 inner type. Use it to build the variant we return. */
8938 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
8939 && TREE_TYPE (t
) != innertype
)
8940 return build_type_attribute_qual_variant (t
,
8941 TYPE_ATTRIBUTES (innertype
),
8942 TYPE_QUALS (innertype
));
8948 make_or_reuse_type (unsigned size
, int unsignedp
)
8952 if (size
== INT_TYPE_SIZE
)
8953 return unsignedp
? unsigned_type_node
: integer_type_node
;
8954 if (size
== CHAR_TYPE_SIZE
)
8955 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
8956 if (size
== SHORT_TYPE_SIZE
)
8957 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
8958 if (size
== LONG_TYPE_SIZE
)
8959 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
8960 if (size
== LONG_LONG_TYPE_SIZE
)
8961 return (unsignedp
? long_long_unsigned_type_node
8962 : long_long_integer_type_node
);
8964 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
8965 if (size
== int_n_data
[i
].bitsize
8966 && int_n_enabled_p
[i
])
8967 return (unsignedp
? int_n_trees
[i
].unsigned_type
8968 : int_n_trees
[i
].signed_type
);
8971 return make_unsigned_type (size
);
8973 return make_signed_type (size
);
8976 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
8979 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
8983 if (size
== SHORT_FRACT_TYPE_SIZE
)
8984 return unsignedp
? sat_unsigned_short_fract_type_node
8985 : sat_short_fract_type_node
;
8986 if (size
== FRACT_TYPE_SIZE
)
8987 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
8988 if (size
== LONG_FRACT_TYPE_SIZE
)
8989 return unsignedp
? sat_unsigned_long_fract_type_node
8990 : sat_long_fract_type_node
;
8991 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
8992 return unsignedp
? sat_unsigned_long_long_fract_type_node
8993 : sat_long_long_fract_type_node
;
8997 if (size
== SHORT_FRACT_TYPE_SIZE
)
8998 return unsignedp
? unsigned_short_fract_type_node
8999 : short_fract_type_node
;
9000 if (size
== FRACT_TYPE_SIZE
)
9001 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9002 if (size
== LONG_FRACT_TYPE_SIZE
)
9003 return unsignedp
? unsigned_long_fract_type_node
9004 : long_fract_type_node
;
9005 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9006 return unsignedp
? unsigned_long_long_fract_type_node
9007 : long_long_fract_type_node
;
9010 return make_fract_type (size
, unsignedp
, satp
);
9013 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9016 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9020 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9021 return unsignedp
? sat_unsigned_short_accum_type_node
9022 : sat_short_accum_type_node
;
9023 if (size
== ACCUM_TYPE_SIZE
)
9024 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9025 if (size
== LONG_ACCUM_TYPE_SIZE
)
9026 return unsignedp
? sat_unsigned_long_accum_type_node
9027 : sat_long_accum_type_node
;
9028 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9029 return unsignedp
? sat_unsigned_long_long_accum_type_node
9030 : sat_long_long_accum_type_node
;
9034 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9035 return unsignedp
? unsigned_short_accum_type_node
9036 : short_accum_type_node
;
9037 if (size
== ACCUM_TYPE_SIZE
)
9038 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9039 if (size
== LONG_ACCUM_TYPE_SIZE
)
9040 return unsignedp
? unsigned_long_accum_type_node
9041 : long_accum_type_node
;
9042 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9043 return unsignedp
? unsigned_long_long_accum_type_node
9044 : long_long_accum_type_node
;
9047 return make_accum_type (size
, unsignedp
, satp
);
9051 /* Create an atomic variant node for TYPE. This routine is called
9052 during initialization of data types to create the 5 basic atomic
9053 types. The generic build_variant_type function requires these to
9054 already be set up in order to function properly, so cannot be
9055 called from there. If ALIGN is non-zero, then ensure alignment is
9056 overridden to this value. */
9059 build_atomic_base (tree type
, unsigned int align
)
9063 /* Make sure its not already registered. */
9064 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9067 t
= build_variant_type_copy (type
);
9068 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9071 SET_TYPE_ALIGN (t
, align
);
9076 /* Information about the _FloatN and _FloatNx types. This must be in
9077 the same order as the corresponding TI_* enum values. */
9078 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
9090 /* Create nodes for all integer types (and error_mark_node) using the sizes
9091 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9094 build_common_tree_nodes (bool signed_char
)
9098 error_mark_node
= make_node (ERROR_MARK
);
9099 TREE_TYPE (error_mark_node
) = error_mark_node
;
9101 initialize_sizetypes ();
9103 /* Define both `signed char' and `unsigned char'. */
9104 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9105 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9106 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9107 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9109 /* Define `char', which is like either `signed char' or `unsigned char'
9110 but not the same as either. */
9113 ? make_signed_type (CHAR_TYPE_SIZE
)
9114 : make_unsigned_type (CHAR_TYPE_SIZE
));
9115 TYPE_STRING_FLAG (char_type_node
) = 1;
9117 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9118 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9119 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9120 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9121 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9122 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9123 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9124 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9126 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9128 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9129 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9131 if (int_n_enabled_p
[i
])
9133 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9134 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9138 /* Define a boolean type. This type only represents boolean values but
9139 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9140 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9141 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9142 TYPE_PRECISION (boolean_type_node
) = 1;
9143 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9145 /* Define what type to use for size_t. */
9146 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9147 size_type_node
= unsigned_type_node
;
9148 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9149 size_type_node
= long_unsigned_type_node
;
9150 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9151 size_type_node
= long_long_unsigned_type_node
;
9152 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9153 size_type_node
= short_unsigned_type_node
;
9158 size_type_node
= NULL_TREE
;
9159 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9160 if (int_n_enabled_p
[i
])
9162 char name
[50], altname
[50];
9163 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9164 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
9166 if (strcmp (name
, SIZE_TYPE
) == 0
9167 || strcmp (altname
, SIZE_TYPE
) == 0)
9169 size_type_node
= int_n_trees
[i
].unsigned_type
;
9172 if (size_type_node
== NULL_TREE
)
9176 /* Define what type to use for ptrdiff_t. */
9177 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
9178 ptrdiff_type_node
= integer_type_node
;
9179 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
9180 ptrdiff_type_node
= long_integer_type_node
;
9181 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
9182 ptrdiff_type_node
= long_long_integer_type_node
;
9183 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
9184 ptrdiff_type_node
= short_integer_type_node
;
9187 ptrdiff_type_node
= NULL_TREE
;
9188 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9189 if (int_n_enabled_p
[i
])
9191 char name
[50], altname
[50];
9192 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
9193 sprintf (altname
, "__int%d__", int_n_data
[i
].bitsize
);
9195 if (strcmp (name
, PTRDIFF_TYPE
) == 0
9196 || strcmp (altname
, PTRDIFF_TYPE
) == 0)
9197 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
9199 if (ptrdiff_type_node
== NULL_TREE
)
9203 /* Fill in the rest of the sized types. Reuse existing type nodes
9205 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9206 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9207 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9208 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9209 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9211 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9212 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9213 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9214 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9215 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9217 /* Don't call build_qualified type for atomics. That routine does
9218 special processing for atomics, and until they are initialized
9219 it's better not to make that call.
9221 Check to see if there is a target override for atomic types. */
9223 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9224 targetm
.atomic_align_for_mode (QImode
));
9225 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9226 targetm
.atomic_align_for_mode (HImode
));
9227 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9228 targetm
.atomic_align_for_mode (SImode
));
9229 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9230 targetm
.atomic_align_for_mode (DImode
));
9231 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9232 targetm
.atomic_align_for_mode (TImode
));
9234 access_public_node
= get_identifier ("public");
9235 access_protected_node
= get_identifier ("protected");
9236 access_private_node
= get_identifier ("private");
9238 /* Define these next since types below may used them. */
9239 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9240 integer_one_node
= build_int_cst (integer_type_node
, 1);
9241 integer_three_node
= build_int_cst (integer_type_node
, 3);
9242 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9244 size_zero_node
= size_int (0);
9245 size_one_node
= size_int (1);
9246 bitsize_zero_node
= bitsize_int (0);
9247 bitsize_one_node
= bitsize_int (1);
9248 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9250 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9251 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9253 void_type_node
= make_node (VOID_TYPE
);
9254 layout_type (void_type_node
);
9256 /* We are not going to have real types in C with less than byte alignment,
9257 so we might as well not have any types that claim to have it. */
9258 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
9259 TYPE_USER_ALIGN (void_type_node
) = 0;
9261 void_node
= make_node (VOID_CST
);
9262 TREE_TYPE (void_node
) = void_type_node
;
9264 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9265 layout_type (TREE_TYPE (null_pointer_node
));
9267 ptr_type_node
= build_pointer_type (void_type_node
);
9269 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9270 for (unsigned i
= 0;
9271 i
< sizeof (builtin_structptr_types
) / sizeof (builtin_structptr_type
);
9273 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
9275 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9277 float_type_node
= make_node (REAL_TYPE
);
9278 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9279 layout_type (float_type_node
);
9281 double_type_node
= make_node (REAL_TYPE
);
9282 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9283 layout_type (double_type_node
);
9285 long_double_type_node
= make_node (REAL_TYPE
);
9286 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9287 layout_type (long_double_type_node
);
9289 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9291 int n
= floatn_nx_types
[i
].n
;
9292 bool extended
= floatn_nx_types
[i
].extended
;
9293 scalar_float_mode mode
;
9294 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
9296 int precision
= GET_MODE_PRECISION (mode
);
9297 /* Work around the rs6000 KFmode having precision 113 not
9299 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
9300 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
9301 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
9303 gcc_assert (min_precision
== n
);
9304 if (precision
< min_precision
)
9305 precision
= min_precision
;
9306 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
9307 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
9308 layout_type (FLOATN_NX_TYPE_NODE (i
));
9309 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
9312 float_ptr_type_node
= build_pointer_type (float_type_node
);
9313 double_ptr_type_node
= build_pointer_type (double_type_node
);
9314 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9315 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9317 /* Fixed size integer types. */
9318 uint16_type_node
= make_or_reuse_type (16, 1);
9319 uint32_type_node
= make_or_reuse_type (32, 1);
9320 uint64_type_node
= make_or_reuse_type (64, 1);
9321 if (targetm
.scalar_mode_supported_p (TImode
))
9322 uint128_type_node
= make_or_reuse_type (128, 1);
9324 /* Decimal float types. */
9325 if (targetm
.decimal_float_supported_p ())
9327 dfloat32_type_node
= make_node (REAL_TYPE
);
9328 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9329 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9330 layout_type (dfloat32_type_node
);
9332 dfloat64_type_node
= make_node (REAL_TYPE
);
9333 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9334 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9335 layout_type (dfloat64_type_node
);
9337 dfloat128_type_node
= make_node (REAL_TYPE
);
9338 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9339 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9340 layout_type (dfloat128_type_node
);
9343 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
9344 complex_float_type_node
= build_complex_type (float_type_node
, true);
9345 complex_double_type_node
= build_complex_type (double_type_node
, true);
9346 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
9349 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9351 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
9352 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
9353 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
9356 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9357 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9358 sat_ ## KIND ## _type_node = \
9359 make_sat_signed_ ## KIND ## _type (SIZE); \
9360 sat_unsigned_ ## KIND ## _type_node = \
9361 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9362 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9363 unsigned_ ## KIND ## _type_node = \
9364 make_unsigned_ ## KIND ## _type (SIZE);
9366 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9367 sat_ ## WIDTH ## KIND ## _type_node = \
9368 make_sat_signed_ ## KIND ## _type (SIZE); \
9369 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9370 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9371 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9372 unsigned_ ## WIDTH ## KIND ## _type_node = \
9373 make_unsigned_ ## KIND ## _type (SIZE);
9375 /* Make fixed-point type nodes based on four different widths. */
9376 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9377 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9378 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9379 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9380 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9382 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9383 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9384 NAME ## _type_node = \
9385 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9386 u ## NAME ## _type_node = \
9387 make_or_reuse_unsigned_ ## KIND ## _type \
9388 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9389 sat_ ## NAME ## _type_node = \
9390 make_or_reuse_sat_signed_ ## KIND ## _type \
9391 (GET_MODE_BITSIZE (MODE ## mode)); \
9392 sat_u ## NAME ## _type_node = \
9393 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9394 (GET_MODE_BITSIZE (U ## MODE ## mode));
9396 /* Fixed-point type and mode nodes. */
9397 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9398 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9399 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9400 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9401 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9402 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9403 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9404 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9405 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9406 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9407 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9410 tree t
= targetm
.build_builtin_va_list ();
9412 /* Many back-ends define record types without setting TYPE_NAME.
9413 If we copied the record type here, we'd keep the original
9414 record type without a name. This breaks name mangling. So,
9415 don't copy record types and let c_common_nodes_and_builtins()
9416 declare the type to be __builtin_va_list. */
9417 if (TREE_CODE (t
) != RECORD_TYPE
)
9418 t
= build_variant_type_copy (t
);
9420 va_list_type_node
= t
;
9423 /* SCEV analyzer global shared trees. */
9424 chrec_dont_know
= make_node (SCEV_NOT_KNOWN
);
9425 TREE_TYPE (chrec_dont_know
) = void_type_node
;
9426 chrec_known
= make_node (SCEV_KNOWN
);
9427 TREE_TYPE (chrec_known
) = void_type_node
;
9430 /* Modify DECL for given flags.
9431 TM_PURE attribute is set only on types, so the function will modify
9432 DECL's type when ECF_TM_PURE is used. */
9435 set_call_expr_flags (tree decl
, int flags
)
9437 if (flags
& ECF_NOTHROW
)
9438 TREE_NOTHROW (decl
) = 1;
9439 if (flags
& ECF_CONST
)
9440 TREE_READONLY (decl
) = 1;
9441 if (flags
& ECF_PURE
)
9442 DECL_PURE_P (decl
) = 1;
9443 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9444 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9445 if (flags
& ECF_NOVOPS
)
9446 DECL_IS_NOVOPS (decl
) = 1;
9447 if (flags
& ECF_NORETURN
)
9448 TREE_THIS_VOLATILE (decl
) = 1;
9449 if (flags
& ECF_MALLOC
)
9450 DECL_IS_MALLOC (decl
) = 1;
9451 if (flags
& ECF_RETURNS_TWICE
)
9452 DECL_IS_RETURNS_TWICE (decl
) = 1;
9453 if (flags
& ECF_LEAF
)
9454 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9455 NULL
, DECL_ATTRIBUTES (decl
));
9456 if (flags
& ECF_COLD
)
9457 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("cold"),
9458 NULL
, DECL_ATTRIBUTES (decl
));
9459 if (flags
& ECF_RET1
)
9460 DECL_ATTRIBUTES (decl
)
9461 = tree_cons (get_identifier ("fn spec"),
9462 build_tree_list (NULL_TREE
, build_string (2, "1 ")),
9463 DECL_ATTRIBUTES (decl
));
9464 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9465 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9466 /* Looping const or pure is implied by noreturn.
9467 There is currently no way to declare looping const or looping pure alone. */
9468 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9469 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9473 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9476 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9477 const char *library_name
, int ecf_flags
)
9481 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9482 library_name
, NULL_TREE
);
9483 set_call_expr_flags (decl
, ecf_flags
);
9485 set_builtin_decl (code
, decl
, true);
9488 /* Call this function after instantiating all builtins that the language
9489 front end cares about. This will build the rest of the builtins
9490 and internal functions that are relied upon by the tree optimizers and
9494 build_common_builtin_nodes (void)
9499 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
9500 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
9502 ftype
= build_function_type (void_type_node
, void_list_node
);
9503 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9504 local_define_builtin ("__builtin_unreachable", ftype
,
9505 BUILT_IN_UNREACHABLE
,
9506 "__builtin_unreachable",
9507 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9508 | ECF_CONST
| ECF_COLD
);
9509 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
9510 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
9512 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
9515 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9516 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9518 ftype
= build_function_type_list (ptr_type_node
,
9519 ptr_type_node
, const_ptr_type_node
,
9520 size_type_node
, NULL_TREE
);
9522 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9523 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9524 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9525 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9526 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9527 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9530 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9532 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9533 const_ptr_type_node
, size_type_node
,
9535 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9536 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9539 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9541 ftype
= build_function_type_list (ptr_type_node
,
9542 ptr_type_node
, integer_type_node
,
9543 size_type_node
, NULL_TREE
);
9544 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9545 "memset", ECF_NOTHROW
| ECF_LEAF
);
9548 /* If we're checking the stack, `alloca' can throw. */
9549 const int alloca_flags
9550 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
9552 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9554 ftype
= build_function_type_list (ptr_type_node
,
9555 size_type_node
, NULL_TREE
);
9556 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9557 "alloca", alloca_flags
);
9560 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9561 size_type_node
, NULL_TREE
);
9562 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9563 BUILT_IN_ALLOCA_WITH_ALIGN
,
9564 "__builtin_alloca_with_align",
9567 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9568 size_type_node
, size_type_node
, NULL_TREE
);
9569 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
9570 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
9571 "__builtin_alloca_with_align_and_max",
9574 ftype
= build_function_type_list (void_type_node
,
9575 ptr_type_node
, ptr_type_node
,
9576 ptr_type_node
, NULL_TREE
);
9577 local_define_builtin ("__builtin_init_trampoline", ftype
,
9578 BUILT_IN_INIT_TRAMPOLINE
,
9579 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9580 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9581 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9582 "__builtin_init_heap_trampoline",
9583 ECF_NOTHROW
| ECF_LEAF
);
9584 local_define_builtin ("__builtin_init_descriptor", ftype
,
9585 BUILT_IN_INIT_DESCRIPTOR
,
9586 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
9588 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
9589 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
9590 BUILT_IN_ADJUST_TRAMPOLINE
,
9591 "__builtin_adjust_trampoline",
9592 ECF_CONST
| ECF_NOTHROW
);
9593 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
9594 BUILT_IN_ADJUST_DESCRIPTOR
,
9595 "__builtin_adjust_descriptor",
9596 ECF_CONST
| ECF_NOTHROW
);
9598 ftype
= build_function_type_list (void_type_node
,
9599 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9600 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE
))
9601 local_define_builtin ("__builtin___clear_cache", ftype
,
9602 BUILT_IN_CLEAR_CACHE
,
9606 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
9607 BUILT_IN_NONLOCAL_GOTO
,
9608 "__builtin_nonlocal_goto",
9609 ECF_NORETURN
| ECF_NOTHROW
);
9611 ftype
= build_function_type_list (void_type_node
,
9612 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9613 local_define_builtin ("__builtin_setjmp_setup", ftype
,
9614 BUILT_IN_SETJMP_SETUP
,
9615 "__builtin_setjmp_setup", ECF_NOTHROW
);
9617 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9618 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
9619 BUILT_IN_SETJMP_RECEIVER
,
9620 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
9622 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
9623 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
9624 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
9626 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9627 local_define_builtin ("__builtin_stack_restore", ftype
,
9628 BUILT_IN_STACK_RESTORE
,
9629 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
9631 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9632 const_ptr_type_node
, size_type_node
,
9634 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
9635 "__builtin_memcmp_eq",
9636 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9638 local_define_builtin ("__builtin_strncmp_eq", ftype
, BUILT_IN_STRNCMP_EQ
,
9639 "__builtin_strncmp_eq",
9640 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9642 local_define_builtin ("__builtin_strcmp_eq", ftype
, BUILT_IN_STRCMP_EQ
,
9643 "__builtin_strcmp_eq",
9644 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9646 /* If there's a possibility that we might use the ARM EABI, build the
9647 alternate __cxa_end_cleanup node used to resume from C++. */
9648 if (targetm
.arm_eabi_unwinder
)
9650 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
9651 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
9652 BUILT_IN_CXA_END_CLEANUP
,
9653 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
9656 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9657 local_define_builtin ("__builtin_unwind_resume", ftype
,
9658 BUILT_IN_UNWIND_RESUME
,
9659 ((targetm_common
.except_unwind_info (&global_options
)
9661 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9664 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
9666 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
9668 local_define_builtin ("__builtin_return_address", ftype
,
9669 BUILT_IN_RETURN_ADDRESS
,
9670 "__builtin_return_address",
9674 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
9675 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9677 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
9678 ptr_type_node
, NULL_TREE
);
9679 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
9680 local_define_builtin ("__cyg_profile_func_enter", ftype
,
9681 BUILT_IN_PROFILE_FUNC_ENTER
,
9682 "__cyg_profile_func_enter", 0);
9683 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9684 local_define_builtin ("__cyg_profile_func_exit", ftype
,
9685 BUILT_IN_PROFILE_FUNC_EXIT
,
9686 "__cyg_profile_func_exit", 0);
9689 /* The exception object and filter values from the runtime. The argument
9690 must be zero before exception lowering, i.e. from the front end. After
9691 exception lowering, it will be the region number for the exception
9692 landing pad. These functions are PURE instead of CONST to prevent
9693 them from being hoisted past the exception edge that will initialize
9694 its value in the landing pad. */
9695 ftype
= build_function_type_list (ptr_type_node
,
9696 integer_type_node
, NULL_TREE
);
9697 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
9698 /* Only use TM_PURE if we have TM language support. */
9699 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
9700 ecf_flags
|= ECF_TM_PURE
;
9701 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
9702 "__builtin_eh_pointer", ecf_flags
);
9704 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
9705 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
9706 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
9707 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9709 ftype
= build_function_type_list (void_type_node
,
9710 integer_type_node
, integer_type_node
,
9712 local_define_builtin ("__builtin_eh_copy_values", ftype
,
9713 BUILT_IN_EH_COPY_VALUES
,
9714 "__builtin_eh_copy_values", ECF_NOTHROW
);
9716 /* Complex multiplication and division. These are handled as builtins
9717 rather than optabs because emit_library_call_value doesn't support
9718 complex. Further, we can do slightly better with folding these
9719 beasties if the real and complex parts of the arguments are separate. */
9723 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
9725 char mode_name_buf
[4], *q
;
9727 enum built_in_function mcode
, dcode
;
9728 tree type
, inner_type
;
9729 const char *prefix
= "__";
9731 if (targetm
.libfunc_gnu_prefix
)
9734 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
9737 inner_type
= TREE_TYPE (type
);
9739 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
9740 inner_type
, inner_type
, NULL_TREE
);
9742 mcode
= ((enum built_in_function
)
9743 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9744 dcode
= ((enum built_in_function
)
9745 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9747 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
9751 /* For -ftrapping-math these should throw from a former
9752 -fnon-call-exception stmt. */
9753 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
9755 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
9756 built_in_names
[mcode
],
9757 ECF_CONST
| ECF_LEAF
);
9759 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
9761 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
9762 built_in_names
[dcode
],
9763 ECF_CONST
| ECF_LEAF
);
9767 init_internal_fns ();
9770 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9773 If we requested a pointer to a vector, build up the pointers that
9774 we stripped off while looking for the inner type. Similarly for
9775 return values from functions.
9777 The argument TYPE is the top of the chain, and BOTTOM is the
9778 new type which we will point to. */
9781 reconstruct_complex_type (tree type
, tree bottom
)
9785 if (TREE_CODE (type
) == POINTER_TYPE
)
9787 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9788 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
9789 TYPE_REF_CAN_ALIAS_ALL (type
));
9791 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
9793 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9794 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
9795 TYPE_REF_CAN_ALIAS_ALL (type
));
9797 else if (TREE_CODE (type
) == ARRAY_TYPE
)
9799 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9800 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
9802 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
9804 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9805 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
9807 else if (TREE_CODE (type
) == METHOD_TYPE
)
9809 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9810 /* The build_method_type_directly() routine prepends 'this' to argument list,
9811 so we must compensate by getting rid of it. */
9813 = build_method_type_directly
9814 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
9816 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
9818 else if (TREE_CODE (type
) == OFFSET_TYPE
)
9820 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9821 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
9826 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
9830 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
9833 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
9836 unsigned int bitsize
;
9838 switch (GET_MODE_CLASS (mode
))
9840 case MODE_VECTOR_BOOL
:
9841 case MODE_VECTOR_INT
:
9842 case MODE_VECTOR_FLOAT
:
9843 case MODE_VECTOR_FRACT
:
9844 case MODE_VECTOR_UFRACT
:
9845 case MODE_VECTOR_ACCUM
:
9846 case MODE_VECTOR_UACCUM
:
9847 nunits
= GET_MODE_NUNITS (mode
);
9851 /* Check that there are no leftover bits. */
9852 bitsize
= GET_MODE_BITSIZE (as_a
<scalar_int_mode
> (mode
));
9853 gcc_assert (bitsize
% TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
9854 nunits
= bitsize
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
9861 return make_vector_type (innertype
, nunits
, mode
);
9864 /* Similarly, but takes the inner type and number of units, which must be
9868 build_vector_type (tree innertype
, poly_int64 nunits
)
9870 return make_vector_type (innertype
, nunits
, VOIDmode
);
9873 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9876 build_truth_vector_type_for_mode (poly_uint64 nunits
, machine_mode mask_mode
)
9878 gcc_assert (mask_mode
!= BLKmode
);
9880 unsigned HOST_WIDE_INT esize
;
9881 if (VECTOR_MODE_P (mask_mode
))
9883 poly_uint64 vsize
= GET_MODE_BITSIZE (mask_mode
);
9884 esize
= vector_element_size (vsize
, nunits
);
9889 tree bool_type
= build_nonstandard_boolean_type (esize
);
9891 return make_vector_type (bool_type
, nunits
, mask_mode
);
9894 /* Build a vector type that holds one boolean result for each element of
9895 vector type VECTYPE. The public interface for this operation is
9899 build_truth_vector_type_for (tree vectype
)
9901 machine_mode vector_mode
= TYPE_MODE (vectype
);
9902 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
9904 machine_mode mask_mode
;
9905 if (VECTOR_MODE_P (vector_mode
)
9906 && targetm
.vectorize
.get_mask_mode (vector_mode
).exists (&mask_mode
))
9907 return build_truth_vector_type_for_mode (nunits
, mask_mode
);
9909 poly_uint64 vsize
= tree_to_poly_uint64 (TYPE_SIZE (vectype
));
9910 unsigned HOST_WIDE_INT esize
= vector_element_size (vsize
, nunits
);
9911 tree bool_type
= build_nonstandard_boolean_type (esize
);
9913 return make_vector_type (bool_type
, nunits
, VOIDmode
);
9916 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
9920 build_opaque_vector_type (tree innertype
, poly_int64 nunits
)
9922 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
9924 /* We always build the non-opaque variant before the opaque one,
9925 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
9926 cand
= TYPE_NEXT_VARIANT (t
);
9928 && TYPE_VECTOR_OPAQUE (cand
)
9929 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
9931 /* Othewise build a variant type and make sure to queue it after
9932 the non-opaque type. */
9933 cand
= build_distinct_type_copy (t
);
9934 TYPE_VECTOR_OPAQUE (cand
) = true;
9935 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
9936 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
9937 TYPE_NEXT_VARIANT (t
) = cand
;
9938 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
9942 /* Return the value of element I of VECTOR_CST T as a wide_int. */
9944 static poly_wide_int
9945 vector_cst_int_elt (const_tree t
, unsigned int i
)
9947 /* First handle elements that are directly encoded. */
9948 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
9949 if (i
< encoded_nelts
)
9950 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, i
));
9952 /* Identify the pattern that contains element I and work out the index of
9953 the last encoded element for that pattern. */
9954 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
9955 unsigned int pattern
= i
% npatterns
;
9956 unsigned int count
= i
/ npatterns
;
9957 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
9959 /* If there are no steps, the final encoded value is the right one. */
9960 if (!VECTOR_CST_STEPPED_P (t
))
9961 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, final_i
));
9963 /* Otherwise work out the value from the last two encoded elements. */
9964 tree v1
= VECTOR_CST_ENCODED_ELT (t
, final_i
- npatterns
);
9965 tree v2
= VECTOR_CST_ENCODED_ELT (t
, final_i
);
9966 poly_wide_int diff
= wi::to_poly_wide (v2
) - wi::to_poly_wide (v1
);
9967 return wi::to_poly_wide (v2
) + (count
- 2) * diff
;
9970 /* Return the value of element I of VECTOR_CST T. */
9973 vector_cst_elt (const_tree t
, unsigned int i
)
9975 /* First handle elements that are directly encoded. */
9976 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
9977 if (i
< encoded_nelts
)
9978 return VECTOR_CST_ENCODED_ELT (t
, i
);
9980 /* If there are no steps, the final encoded value is the right one. */
9981 if (!VECTOR_CST_STEPPED_P (t
))
9983 /* Identify the pattern that contains element I and work out the index of
9984 the last encoded element for that pattern. */
9985 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
9986 unsigned int pattern
= i
% npatterns
;
9987 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
9988 return VECTOR_CST_ENCODED_ELT (t
, final_i
);
9991 /* Otherwise work out the value from the last two encoded elements. */
9992 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t
)),
9993 vector_cst_int_elt (t
, i
));
9996 /* Given an initializer INIT, return TRUE if INIT is zero or some
9997 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
9998 null, set *NONZERO if and only if INIT is known not to be all
9999 zeros. The combination of return value of false and *NONZERO
10000 false implies that INIT may but need not be all zeros. Other
10001 combinations indicate definitive answers. */
10004 initializer_zerop (const_tree init
, bool *nonzero
/* = NULL */)
10010 /* Conservatively clear NONZERO and set it only if INIT is definitely
10016 unsigned HOST_WIDE_INT off
= 0;
10018 switch (TREE_CODE (init
))
10021 if (integer_zerop (init
))
10028 /* ??? Note that this is not correct for C4X float formats. There,
10029 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10030 negative exponent. */
10031 if (real_zerop (init
)
10032 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
)))
10039 if (fixed_zerop (init
))
10046 if (integer_zerop (init
)
10047 || (real_zerop (init
)
10048 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10049 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
)))))
10056 if (VECTOR_CST_NPATTERNS (init
) == 1
10057 && VECTOR_CST_DUPLICATE_P (init
)
10058 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)))
10066 if (TREE_CLOBBER_P (init
))
10069 unsigned HOST_WIDE_INT idx
;
10072 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10073 if (!initializer_zerop (elt
, nonzero
))
10081 tree arg
= TREE_OPERAND (init
, 0);
10082 if (TREE_CODE (arg
) != ADDR_EXPR
)
10084 tree offset
= TREE_OPERAND (init
, 1);
10085 if (TREE_CODE (offset
) != INTEGER_CST
10086 || !tree_fits_uhwi_p (offset
))
10088 off
= tree_to_uhwi (offset
);
10091 arg
= TREE_OPERAND (arg
, 0);
10092 if (TREE_CODE (arg
) != STRING_CST
)
10096 /* Fall through. */
10100 gcc_assert (off
<= INT_MAX
);
10103 int n
= TREE_STRING_LENGTH (init
);
10107 /* We need to loop through all elements to handle cases like
10108 "\0" and "\0foobar". */
10109 for (i
= 0; i
< n
; ++i
)
10110 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10124 /* Return true if EXPR is an initializer expression in which every element
10125 is a constant that is numerically equal to 0 or 1. The elements do not
10126 need to be equal to each other. */
10129 initializer_each_zero_or_onep (const_tree expr
)
10131 STRIP_ANY_LOCATION_WRAPPER (expr
);
10133 switch (TREE_CODE (expr
))
10136 return integer_zerop (expr
) || integer_onep (expr
);
10139 return real_zerop (expr
) || real_onep (expr
);
10143 unsigned HOST_WIDE_INT nelts
= vector_cst_encoded_nelts (expr
);
10144 if (VECTOR_CST_STEPPED_P (expr
)
10145 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
)).is_constant (&nelts
))
10148 for (unsigned int i
= 0; i
< nelts
; ++i
)
10150 tree elt
= vector_cst_elt (expr
, i
);
10151 if (!initializer_each_zero_or_onep (elt
))
10163 /* Check if vector VEC consists of all the equal elements and
10164 that the number of elements corresponds to the type of VEC.
10165 The function returns first element of the vector
10166 or NULL_TREE if the vector is not uniform. */
10168 uniform_vector_p (const_tree vec
)
10171 unsigned HOST_WIDE_INT i
, nelts
;
10173 if (vec
== NULL_TREE
)
10176 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10178 if (TREE_CODE (vec
) == VEC_DUPLICATE_EXPR
)
10179 return TREE_OPERAND (vec
, 0);
10181 else if (TREE_CODE (vec
) == VECTOR_CST
)
10183 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
10184 return VECTOR_CST_ENCODED_ELT (vec
, 0);
10188 else if (TREE_CODE (vec
) == CONSTRUCTOR
10189 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)).is_constant (&nelts
))
10191 first
= error_mark_node
;
10193 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10200 if (!operand_equal_p (first
, t
, 0))
10212 /* If the argument is INTEGER_CST, return it. If the argument is vector
10213 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10215 Look through location wrappers. */
10218 uniform_integer_cst_p (tree t
)
10220 STRIP_ANY_LOCATION_WRAPPER (t
);
10222 if (TREE_CODE (t
) == INTEGER_CST
)
10225 if (VECTOR_TYPE_P (TREE_TYPE (t
)))
10227 t
= uniform_vector_p (t
);
10228 if (t
&& TREE_CODE (t
) == INTEGER_CST
)
10235 /* If VECTOR_CST T has a single nonzero element, return the index of that
10236 element, otherwise return -1. */
10239 single_nonzero_element (const_tree t
)
10241 unsigned HOST_WIDE_INT nelts
;
10242 unsigned int repeat_nelts
;
10243 if (VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10244 repeat_nelts
= nelts
;
10245 else if (VECTOR_CST_NELTS_PER_PATTERN (t
) == 2)
10247 nelts
= vector_cst_encoded_nelts (t
);
10248 repeat_nelts
= VECTOR_CST_NPATTERNS (t
);
10254 for (unsigned int i
= 0; i
< nelts
; ++i
)
10256 tree elt
= vector_cst_elt (t
, i
);
10257 if (!integer_zerop (elt
) && !real_zerop (elt
))
10259 if (res
>= 0 || i
>= repeat_nelts
)
10267 /* Build an empty statement at location LOC. */
10270 build_empty_stmt (location_t loc
)
10272 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10273 SET_EXPR_LOCATION (t
, loc
);
10278 /* Build an OpenMP clause with code CODE. LOC is the location of the
10282 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10287 length
= omp_clause_num_ops
[code
];
10288 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10290 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10292 t
= (tree
) ggc_internal_alloc (size
);
10293 memset (t
, 0, size
);
10294 TREE_SET_CODE (t
, OMP_CLAUSE
);
10295 OMP_CLAUSE_SET_CODE (t
, code
);
10296 OMP_CLAUSE_LOCATION (t
) = loc
;
10301 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10302 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10303 Except for the CODE and operand count field, other storage for the
10304 object is initialized to zeros. */
10307 build_vl_exp (enum tree_code code
, int len MEM_STAT_DECL
)
10310 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10312 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10313 gcc_assert (len
>= 1);
10315 record_node_allocation_statistics (code
, length
);
10317 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10319 TREE_SET_CODE (t
, code
);
10321 /* Can't use TREE_OPERAND to store the length because if checking is
10322 enabled, it will try to check the length before we store it. :-P */
10323 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10328 /* Helper function for build_call_* functions; build a CALL_EXPR with
10329 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10330 the argument slots. */
10333 build_call_1 (tree return_type
, tree fn
, int nargs
)
10337 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10338 TREE_TYPE (t
) = return_type
;
10339 CALL_EXPR_FN (t
) = fn
;
10340 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10345 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10346 FN and a null static chain slot. NARGS is the number of call arguments
10347 which are specified as "..." arguments. */
10350 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10354 va_start (args
, nargs
);
10355 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10360 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10361 FN and a null static chain slot. NARGS is the number of call arguments
10362 which are specified as a va_list ARGS. */
10365 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10370 t
= build_call_1 (return_type
, fn
, nargs
);
10371 for (i
= 0; i
< nargs
; i
++)
10372 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10373 process_call_operands (t
);
10377 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10378 FN and a null static chain slot. NARGS is the number of call arguments
10379 which are specified as a tree array ARGS. */
10382 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10383 int nargs
, const tree
*args
)
10388 t
= build_call_1 (return_type
, fn
, nargs
);
10389 for (i
= 0; i
< nargs
; i
++)
10390 CALL_EXPR_ARG (t
, i
) = args
[i
];
10391 process_call_operands (t
);
10392 SET_EXPR_LOCATION (t
, loc
);
10396 /* Like build_call_array, but takes a vec. */
10399 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10404 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10405 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10406 CALL_EXPR_ARG (ret
, ix
) = t
;
10407 process_call_operands (ret
);
10411 /* Conveniently construct a function call expression. FNDECL names the
10412 function to be called and N arguments are passed in the array
10416 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10418 tree fntype
= TREE_TYPE (fndecl
);
10419 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10421 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10424 /* Conveniently construct a function call expression. FNDECL names the
10425 function to be called and the arguments are passed in the vector
10429 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10431 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10432 vec_safe_address (vec
));
10436 /* Conveniently construct a function call expression. FNDECL names the
10437 function to be called, N is the number of arguments, and the "..."
10438 parameters are the argument expressions. */
10441 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10444 tree
*argarray
= XALLOCAVEC (tree
, n
);
10448 for (i
= 0; i
< n
; i
++)
10449 argarray
[i
] = va_arg (ap
, tree
);
10451 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10454 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10455 varargs macros aren't supported by all bootstrap compilers. */
10458 build_call_expr (tree fndecl
, int n
, ...)
10461 tree
*argarray
= XALLOCAVEC (tree
, n
);
10465 for (i
= 0; i
< n
; i
++)
10466 argarray
[i
] = va_arg (ap
, tree
);
10468 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10471 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10472 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10473 It will get gimplified later into an ordinary internal function. */
10476 build_call_expr_internal_loc_array (location_t loc
, internal_fn ifn
,
10477 tree type
, int n
, const tree
*args
)
10479 tree t
= build_call_1 (type
, NULL_TREE
, n
);
10480 for (int i
= 0; i
< n
; ++i
)
10481 CALL_EXPR_ARG (t
, i
) = args
[i
];
10482 SET_EXPR_LOCATION (t
, loc
);
10483 CALL_EXPR_IFN (t
) = ifn
;
10484 process_call_operands (t
);
10488 /* Build internal call expression. This is just like CALL_EXPR, except
10489 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10490 internal function. */
10493 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10494 tree type
, int n
, ...)
10497 tree
*argarray
= XALLOCAVEC (tree
, n
);
10501 for (i
= 0; i
< n
; i
++)
10502 argarray
[i
] = va_arg (ap
, tree
);
10504 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10507 /* Return a function call to FN, if the target is guaranteed to support it,
10510 N is the number of arguments, passed in the "...", and TYPE is the
10511 type of the return value. */
10514 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
10518 tree
*argarray
= XALLOCAVEC (tree
, n
);
10522 for (i
= 0; i
< n
; i
++)
10523 argarray
[i
] = va_arg (ap
, tree
);
10525 if (internal_fn_p (fn
))
10527 internal_fn ifn
= as_internal_fn (fn
);
10528 if (direct_internal_fn_p (ifn
))
10530 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
10531 if (!direct_internal_fn_supported_p (ifn
, types
,
10532 OPTIMIZE_FOR_BOTH
))
10535 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10539 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
10542 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10546 /* Return a function call to the appropriate builtin alloca variant.
10548 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10549 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10550 bound for SIZE in case it is not a fixed value. */
10553 build_alloca_call_expr (tree size
, unsigned int align
, HOST_WIDE_INT max_size
)
10557 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
);
10559 build_call_expr (t
, 3, size
, size_int (align
), size_int (max_size
));
10561 else if (align
> 0)
10563 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10564 return build_call_expr (t
, 2, size
, size_int (align
));
10568 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA
);
10569 return build_call_expr (t
, 1, size
);
10573 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10574 if SIZE == -1) and return a tree node representing char* pointer to
10575 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10576 the STRING_CST value is the LEN bytes at STR (the representation
10577 of the string, which may be wide). Otherwise it's all zeros. */
10580 build_string_literal (unsigned len
, const char *str
/* = NULL */,
10581 tree eltype
/* = char_type_node */,
10582 unsigned HOST_WIDE_INT size
/* = -1 */)
10584 tree t
= build_string (len
, str
);
10585 /* Set the maximum valid index based on the string length or SIZE. */
10586 unsigned HOST_WIDE_INT maxidx
10587 = (size
== HOST_WIDE_INT_M1U
? len
: size
) - 1;
10589 tree index
= build_index_type (size_int (maxidx
));
10590 eltype
= build_type_variant (eltype
, 1, 0);
10591 tree type
= build_array_type (eltype
, index
);
10592 TREE_TYPE (t
) = type
;
10593 TREE_CONSTANT (t
) = 1;
10594 TREE_READONLY (t
) = 1;
10595 TREE_STATIC (t
) = 1;
10597 type
= build_pointer_type (eltype
);
10598 t
= build1 (ADDR_EXPR
, type
,
10599 build4 (ARRAY_REF
, eltype
,
10600 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10606 /* Return true if T (assumed to be a DECL) must be assigned a memory
10610 needs_to_live_in_memory (const_tree t
)
10612 return (TREE_ADDRESSABLE (t
)
10613 || is_global_var (t
)
10614 || (TREE_CODE (t
) == RESULT_DECL
10615 && !DECL_BY_REFERENCE (t
)
10616 && aggregate_value_p (t
, current_function_decl
)));
10619 /* Return value of a constant X and sign-extend it. */
10622 int_cst_value (const_tree x
)
10624 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10625 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10627 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10628 gcc_assert (cst_and_fits_in_hwi (x
));
10630 if (bits
< HOST_BITS_PER_WIDE_INT
)
10632 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10634 val
|= HOST_WIDE_INT_M1U
<< (bits
- 1) << 1;
10636 val
&= ~(HOST_WIDE_INT_M1U
<< (bits
- 1) << 1);
10642 /* If TYPE is an integral or pointer type, return an integer type with
10643 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10644 if TYPE is already an integer type of signedness UNSIGNEDP.
10645 If TYPE is a floating-point type, return an integer type with the same
10646 bitsize and with the signedness given by UNSIGNEDP; this is useful
10647 when doing bit-level operations on a floating-point value. */
10650 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10652 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
) == unsignedp
)
10655 if (TREE_CODE (type
) == VECTOR_TYPE
)
10657 tree inner
= TREE_TYPE (type
);
10658 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10661 if (inner
== inner2
)
10663 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10666 if (TREE_CODE (type
) == COMPLEX_TYPE
)
10668 tree inner
= TREE_TYPE (type
);
10669 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10672 if (inner
== inner2
)
10674 return build_complex_type (inner2
);
10678 if (INTEGRAL_TYPE_P (type
)
10679 || POINTER_TYPE_P (type
)
10680 || TREE_CODE (type
) == OFFSET_TYPE
)
10681 bits
= TYPE_PRECISION (type
);
10682 else if (TREE_CODE (type
) == REAL_TYPE
)
10683 bits
= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type
));
10687 return build_nonstandard_integer_type (bits
, unsignedp
);
10690 /* If TYPE is an integral or pointer type, return an integer type with
10691 the same precision which is unsigned, or itself if TYPE is already an
10692 unsigned integer type. If TYPE is a floating-point type, return an
10693 unsigned integer type with the same bitsize as TYPE. */
10696 unsigned_type_for (tree type
)
10698 return signed_or_unsigned_type_for (1, type
);
10701 /* If TYPE is an integral or pointer type, return an integer type with
10702 the same precision which is signed, or itself if TYPE is already a
10703 signed integer type. If TYPE is a floating-point type, return a
10704 signed integer type with the same bitsize as TYPE. */
10707 signed_type_for (tree type
)
10709 return signed_or_unsigned_type_for (0, type
);
10712 /* If TYPE is a vector type, return a signed integer vector type with the
10713 same width and number of subparts. Otherwise return boolean_type_node. */
10716 truth_type_for (tree type
)
10718 if (TREE_CODE (type
) == VECTOR_TYPE
)
10720 if (VECTOR_BOOLEAN_TYPE_P (type
))
10722 return build_truth_vector_type_for (type
);
10725 return boolean_type_node
;
10728 /* Returns the largest value obtainable by casting something in INNER type to
10732 upper_bound_in_type (tree outer
, tree inner
)
10734 unsigned int det
= 0;
10735 unsigned oprec
= TYPE_PRECISION (outer
);
10736 unsigned iprec
= TYPE_PRECISION (inner
);
10739 /* Compute a unique number for every combination. */
10740 det
|= (oprec
> iprec
) ? 4 : 0;
10741 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10742 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10744 /* Determine the exponent to use. */
10749 /* oprec <= iprec, outer: signed, inner: don't care. */
10754 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10758 /* oprec > iprec, outer: signed, inner: signed. */
10762 /* oprec > iprec, outer: signed, inner: unsigned. */
10766 /* oprec > iprec, outer: unsigned, inner: signed. */
10770 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10774 gcc_unreachable ();
10777 return wide_int_to_tree (outer
,
10778 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10781 /* Returns the smallest value obtainable by casting something in INNER type to
10785 lower_bound_in_type (tree outer
, tree inner
)
10787 unsigned oprec
= TYPE_PRECISION (outer
);
10788 unsigned iprec
= TYPE_PRECISION (inner
);
10790 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10792 if (TYPE_UNSIGNED (outer
)
10793 /* If we are widening something of an unsigned type, OUTER type
10794 contains all values of INNER type. In particular, both INNER
10795 and OUTER types have zero in common. */
10796 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10797 return build_int_cst (outer
, 0);
10800 /* If we are widening a signed type to another signed type, we
10801 want to obtain -2^^(iprec-1). If we are keeping the
10802 precision or narrowing to a signed type, we want to obtain
10804 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
10805 return wide_int_to_tree (outer
,
10806 wi::mask (prec
- 1, true,
10807 TYPE_PRECISION (outer
)));
10811 /* Return nonzero if two operands that are suitable for PHI nodes are
10812 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10813 SSA_NAME or invariant. Note that this is strictly an optimization.
10814 That is, callers of this function can directly call operand_equal_p
10815 and get the same result, only slower. */
10818 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
10822 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
10824 return operand_equal_p (arg0
, arg1
, 0);
10827 /* Returns number of zeros at the end of binary representation of X. */
10830 num_ending_zeros (const_tree x
)
10832 return build_int_cst (TREE_TYPE (x
), wi::ctz (wi::to_wide (x
)));
/* Recurse into NODE and propagate a non-null result to our caller;
   relies on RESULT, FUNC, DATA, PSET and LH being in scope.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
10845 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10846 be walked whenever a type is seen in the tree. Rest of operands and return
10847 value are as for walk_tree. */
10850 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
10851 hash_set
<tree
> *pset
, walk_tree_lh lh
)
10853 tree result
= NULL_TREE
;
10855 switch (TREE_CODE (type
))
10858 case REFERENCE_TYPE
:
10860 /* We have to worry about mutually recursive pointers. These can't
10861 be written in C. They can in Ada. It's pathological, but
10862 there's an ACATS test (c38102a) that checks it. Deal with this
10863 by checking if we're pointing to another pointer, that one
10864 points to another pointer, that one does too, and we have no htab.
10865 If so, get a hash table. We check three levels deep to avoid
10866 the cost of the hash table if we don't need one. */
10867 if (POINTER_TYPE_P (TREE_TYPE (type
))
10868 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
10869 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
10872 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
10883 WALK_SUBTREE (TREE_TYPE (type
));
10887 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
10889 /* Fall through. */
10891 case FUNCTION_TYPE
:
10892 WALK_SUBTREE (TREE_TYPE (type
));
10896 /* We never want to walk into default arguments. */
10897 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
10898 WALK_SUBTREE (TREE_VALUE (arg
));
10903 /* Don't follow this nodes's type if a pointer for fear that
10904 we'll have infinite recursion. If we have a PSET, then we
10907 || (!POINTER_TYPE_P (TREE_TYPE (type
))
10908 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
10909 WALK_SUBTREE (TREE_TYPE (type
));
10910 WALK_SUBTREE (TYPE_DOMAIN (type
));
10914 WALK_SUBTREE (TREE_TYPE (type
));
10915 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
10925 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10926 called with the DATA and the address of each sub-tree. If FUNC returns a
10927 non-NULL value, the traversal is stopped, and the value returned by FUNC
10928 is returned. If PSET is non-NULL it is used to record the nodes visited,
10929 and to avoid visiting a node more than once. */
10932 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
10933 hash_set
<tree
> *pset
, walk_tree_lh lh
)
10935 enum tree_code code
;
10939 #define WALK_SUBTREE_TAIL(NODE) \
10943 goto tail_recurse; \
10948 /* Skip empty subtrees. */
10952 /* Don't walk the same tree twice, if the user has requested
10953 that we avoid doing so. */
10954 if (pset
&& pset
->add (*tp
))
10957 /* Call the function. */
10959 result
= (*func
) (tp
, &walk_subtrees
, data
);
10961 /* If we found something, return it. */
10965 code
= TREE_CODE (*tp
);
10967 /* Even if we didn't, FUNC may have decided that there was nothing
10968 interesting below this point in the tree. */
10969 if (!walk_subtrees
)
10971 /* But we still need to check our siblings. */
10972 if (code
== TREE_LIST
)
10973 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
10974 else if (code
== OMP_CLAUSE
)
10975 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10982 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
10983 if (result
|| !walk_subtrees
)
10990 case IDENTIFIER_NODE
:
10996 case PLACEHOLDER_EXPR
:
11000 /* None of these have subtrees other than those already walked
11005 WALK_SUBTREE (TREE_VALUE (*tp
));
11006 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11011 int len
= TREE_VEC_LENGTH (*tp
);
11016 /* Walk all elements but the first. */
11018 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11020 /* Now walk the first one as a tail call. */
11021 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11026 unsigned len
= vector_cst_encoded_nelts (*tp
);
11029 /* Walk all elements but the first. */
11031 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp
, len
));
11032 /* Now walk the first one as a tail call. */
11033 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp
, 0));
11037 WALK_SUBTREE (TREE_REALPART (*tp
));
11038 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11042 unsigned HOST_WIDE_INT idx
;
11043 constructor_elt
*ce
;
11045 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11047 WALK_SUBTREE (ce
->value
);
11052 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11057 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11059 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11060 into declarations that are just mentioned, rather than
11061 declared; they don't really belong to this part of the tree.
11062 And, we can see cycles: the initializer for a declaration
11063 can refer to the declaration itself. */
11064 WALK_SUBTREE (DECL_INITIAL (decl
));
11065 WALK_SUBTREE (DECL_SIZE (decl
));
11066 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11068 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11071 case STATEMENT_LIST
:
11073 tree_stmt_iterator i
;
11074 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11075 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11080 switch (OMP_CLAUSE_CODE (*tp
))
11082 case OMP_CLAUSE_GANG
:
11083 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11086 case OMP_CLAUSE_ASYNC
:
11087 case OMP_CLAUSE_WAIT
:
11088 case OMP_CLAUSE_WORKER
:
11089 case OMP_CLAUSE_VECTOR
:
11090 case OMP_CLAUSE_NUM_GANGS
:
11091 case OMP_CLAUSE_NUM_WORKERS
:
11092 case OMP_CLAUSE_VECTOR_LENGTH
:
11093 case OMP_CLAUSE_PRIVATE
:
11094 case OMP_CLAUSE_SHARED
:
11095 case OMP_CLAUSE_FIRSTPRIVATE
:
11096 case OMP_CLAUSE_COPYIN
:
11097 case OMP_CLAUSE_COPYPRIVATE
:
11098 case OMP_CLAUSE_FINAL
:
11099 case OMP_CLAUSE_IF
:
11100 case OMP_CLAUSE_NUM_THREADS
:
11101 case OMP_CLAUSE_SCHEDULE
:
11102 case OMP_CLAUSE_UNIFORM
:
11103 case OMP_CLAUSE_DEPEND
:
11104 case OMP_CLAUSE_NONTEMPORAL
:
11105 case OMP_CLAUSE_NUM_TEAMS
:
11106 case OMP_CLAUSE_THREAD_LIMIT
:
11107 case OMP_CLAUSE_DEVICE
:
11108 case OMP_CLAUSE_DIST_SCHEDULE
:
11109 case OMP_CLAUSE_SAFELEN
:
11110 case OMP_CLAUSE_SIMDLEN
:
11111 case OMP_CLAUSE_ORDERED
:
11112 case OMP_CLAUSE_PRIORITY
:
11113 case OMP_CLAUSE_GRAINSIZE
:
11114 case OMP_CLAUSE_NUM_TASKS
:
11115 case OMP_CLAUSE_HINT
:
11116 case OMP_CLAUSE_TO_DECLARE
:
11117 case OMP_CLAUSE_LINK
:
11118 case OMP_CLAUSE_DETACH
:
11119 case OMP_CLAUSE_USE_DEVICE_PTR
:
11120 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11121 case OMP_CLAUSE_IS_DEVICE_PTR
:
11122 case OMP_CLAUSE_INCLUSIVE
:
11123 case OMP_CLAUSE_EXCLUSIVE
:
11124 case OMP_CLAUSE__LOOPTEMP_
:
11125 case OMP_CLAUSE__REDUCTEMP_
:
11126 case OMP_CLAUSE__CONDTEMP_
:
11127 case OMP_CLAUSE__SCANTEMP_
:
11128 case OMP_CLAUSE__SIMDUID_
:
11129 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11132 case OMP_CLAUSE_INDEPENDENT
:
11133 case OMP_CLAUSE_NOWAIT
:
11134 case OMP_CLAUSE_DEFAULT
:
11135 case OMP_CLAUSE_UNTIED
:
11136 case OMP_CLAUSE_MERGEABLE
:
11137 case OMP_CLAUSE_PROC_BIND
:
11138 case OMP_CLAUSE_DEVICE_TYPE
:
11139 case OMP_CLAUSE_INBRANCH
:
11140 case OMP_CLAUSE_NOTINBRANCH
:
11141 case OMP_CLAUSE_FOR
:
11142 case OMP_CLAUSE_PARALLEL
:
11143 case OMP_CLAUSE_SECTIONS
:
11144 case OMP_CLAUSE_TASKGROUP
:
11145 case OMP_CLAUSE_NOGROUP
:
11146 case OMP_CLAUSE_THREADS
:
11147 case OMP_CLAUSE_SIMD
:
11148 case OMP_CLAUSE_DEFAULTMAP
:
11149 case OMP_CLAUSE_ORDER
:
11150 case OMP_CLAUSE_BIND
:
11151 case OMP_CLAUSE_AUTO
:
11152 case OMP_CLAUSE_SEQ
:
11153 case OMP_CLAUSE_TILE
:
11154 case OMP_CLAUSE__SIMT_
:
11155 case OMP_CLAUSE_IF_PRESENT
:
11156 case OMP_CLAUSE_FINALIZE
:
11157 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11159 case OMP_CLAUSE_LASTPRIVATE
:
11160 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11161 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11162 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11164 case OMP_CLAUSE_COLLAPSE
:
11167 for (i
= 0; i
< 3; i
++)
11168 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11169 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11172 case OMP_CLAUSE_LINEAR
:
11173 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11174 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11175 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11176 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11178 case OMP_CLAUSE_ALIGNED
:
11179 case OMP_CLAUSE_ALLOCATE
:
11180 case OMP_CLAUSE_FROM
:
11181 case OMP_CLAUSE_TO
:
11182 case OMP_CLAUSE_MAP
:
11183 case OMP_CLAUSE__CACHE_
:
11184 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11185 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11186 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11188 case OMP_CLAUSE_REDUCTION
:
11189 case OMP_CLAUSE_TASK_REDUCTION
:
11190 case OMP_CLAUSE_IN_REDUCTION
:
11193 for (i
= 0; i
< 5; i
++)
11194 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11195 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11199 gcc_unreachable ();
11207 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11208 But, we only want to walk once. */
11209 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11210 for (i
= 0; i
< len
; ++i
)
11211 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11212 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11216 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11217 defining. We only want to walk into these fields of a type in this
11218 case and not in the general case of a mere reference to the type.
11220 The criterion is as follows: if the field can be an expression, it
11221 must be walked only here. This should be in keeping with the fields
11222 that are directly gimplified in gimplify_type_sizes in order for the
11223 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11224 variable-sized types.
11226 Note that DECLs get walked as part of processing the BIND_EXPR. */
11227 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11229 /* Call the function for the decl so e.g. copy_tree_body_r can
11230 replace it with the remapped one. */
11231 result
= (*func
) (&DECL_EXPR_DECL (*tp
), &walk_subtrees
, data
);
11232 if (result
|| !walk_subtrees
)
11235 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11236 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11239 /* Call the function for the type. See if it returns anything or
11240 doesn't want us to continue. If we are to continue, walk both
11241 the normal fields and those for the declaration case. */
11242 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11243 if (result
|| !walk_subtrees
)
11246 /* But do not walk a pointed-to type since it may itself need to
11247 be walked in the declaration case if it isn't anonymous. */
11248 if (!POINTER_TYPE_P (*type_p
))
11250 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11255 /* If this is a record type, also walk the fields. */
11256 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11260 for (field
= TYPE_FIELDS (*type_p
); field
;
11261 field
= DECL_CHAIN (field
))
11263 /* We'd like to look at the type of the field, but we can
11264 easily get infinite recursion. So assume it's pointed
11265 to elsewhere in the tree. Also, ignore things that
11267 if (TREE_CODE (field
) != FIELD_DECL
)
11270 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11271 WALK_SUBTREE (DECL_SIZE (field
));
11272 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11273 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11274 WALK_SUBTREE (DECL_QUALIFIER (field
));
11278 /* Same for scalar types. */
11279 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11280 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11281 || TREE_CODE (*type_p
) == INTEGER_TYPE
11282 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11283 || TREE_CODE (*type_p
) == REAL_TYPE
)
11285 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11286 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11289 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11290 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11295 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11299 /* Walk over all the sub-trees of this operand. */
11300 len
= TREE_OPERAND_LENGTH (*tp
);
11302 /* Go through the subtrees. We need to do this in forward order so
11303 that the scope of a FOR_EXPR is handled properly. */
11306 for (i
= 0; i
< len
- 1; ++i
)
11307 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11308 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11311 /* If this is a type, walk the needed fields in the type. */
11312 else if (TYPE_P (*tp
))
11313 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11317 /* We didn't find what we were looking for. */
11320 #undef WALK_SUBTREE_TAIL
11322 #undef WALK_SUBTREE
11324 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11327 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11332 hash_set
<tree
> pset
;
11333 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
11339 tree_block (tree t
)
11341 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11343 if (IS_EXPR_CODE_CLASS (c
))
11344 return LOCATION_BLOCK (t
->exp
.locus
);
11345 gcc_unreachable ();
11350 tree_set_block (tree t
, tree b
)
11352 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11354 if (IS_EXPR_CODE_CLASS (c
))
11356 t
->exp
.locus
= set_block (t
->exp
.locus
, b
);
11359 gcc_unreachable ();
11362 /* Create a nameless artificial label and put it in the current
11363 function context. The label has a location of LOC. Returns the
11364 newly created label. */
11367 create_artificial_label (location_t loc
)
11369 tree lab
= build_decl (loc
,
11370 LABEL_DECL
, NULL_TREE
, void_type_node
);
11372 DECL_ARTIFICIAL (lab
) = 1;
11373 DECL_IGNORED_P (lab
) = 1;
11374 DECL_CONTEXT (lab
) = current_function_decl
;
11378 /* Given a tree, try to return a useful variable name that we can use
11379 to prefix a temporary that is being assigned the value of the tree.
11380 I.E. given <temp> = &A, return A. */
11385 tree stripped_decl
;
11388 STRIP_NOPS (stripped_decl
);
11389 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11390 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11391 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11393 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11396 return IDENTIFIER_POINTER (name
);
11400 switch (TREE_CODE (stripped_decl
))
11403 return get_name (TREE_OPERAND (stripped_decl
, 0));
11410 /* Return true if TYPE has a variable argument list. */
11413 stdarg_p (const_tree fntype
)
11415 function_args_iterator args_iter
;
11416 tree n
= NULL_TREE
, t
;
11421 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11426 return n
!= NULL_TREE
&& n
!= void_type_node
;
11429 /* Return true if TYPE has a prototype. */
11432 prototype_p (const_tree fntype
)
11436 gcc_assert (fntype
!= NULL_TREE
);
11438 t
= TYPE_ARG_TYPES (fntype
);
11439 return (t
!= NULL_TREE
);
11442 /* If BLOCK is inlined from an __attribute__((__artificial__))
11443 routine, return pointer to location from where it has been
11446 block_nonartificial_location (tree block
)
11448 location_t
*ret
= NULL
;
11450 while (block
&& TREE_CODE (block
) == BLOCK
11451 && BLOCK_ABSTRACT_ORIGIN (block
))
11453 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11454 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11456 /* If AO is an artificial inline, point RET to the
11457 call site locus at which it has been inlined and continue
11458 the loop, in case AO's caller is also an artificial
11460 if (DECL_DECLARED_INLINE_P (ao
)
11461 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11462 ret
= &BLOCK_SOURCE_LOCATION (block
);
11466 else if (TREE_CODE (ao
) != BLOCK
)
11469 block
= BLOCK_SUPERCONTEXT (block
);
11475 /* If EXP is inlined from an __attribute__((__artificial__))
11476 function, return the location of the original call expression. */
11479 tree_nonartificial_location (tree exp
)
11481 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11486 return EXPR_LOCATION (exp
);
11489 /* Return the location into which EXP has been inlined. Analogous
11490 to tree_nonartificial_location() above but not limited to artificial
11491 functions declared inline. If SYSTEM_HEADER is true, return
11492 the macro expansion point of the location if it's in a system header */
11495 tree_inlined_location (tree exp
, bool system_header
/* = true */)
11497 location_t loc
= UNKNOWN_LOCATION
;
11499 tree block
= TREE_BLOCK (exp
);
11501 while (block
&& TREE_CODE (block
) == BLOCK
11502 && BLOCK_ABSTRACT_ORIGIN (block
))
11504 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11505 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11506 loc
= BLOCK_SOURCE_LOCATION (block
);
11507 else if (TREE_CODE (ao
) != BLOCK
)
11510 block
= BLOCK_SUPERCONTEXT (block
);
11513 if (loc
== UNKNOWN_LOCATION
)
11515 loc
= EXPR_LOCATION (exp
);
11517 /* Only consider macro expansion when the block traversal failed
11518 to find a location. Otherwise it's not relevant. */
11519 return expansion_point_location_if_in_system_header (loc
);
11525 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11528 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11531 cl_option_hasher::hash (tree x
)
11533 const_tree
const t
= x
;
11537 hashval_t hash
= 0;
11539 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11541 p
= (const char *)TREE_OPTIMIZATION (t
);
11542 len
= sizeof (struct cl_optimization
);
11545 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11546 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
11549 gcc_unreachable ();
11551 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11553 for (i
= 0; i
< len
; i
++)
11555 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11560 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11561 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11565 cl_option_hasher::equal (tree x
, tree y
)
11567 const_tree
const xt
= x
;
11568 const_tree
const yt
= y
;
11570 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11573 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11574 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt
),
11575 TREE_OPTIMIZATION (yt
));
11576 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11577 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
11578 TREE_TARGET_OPTION (yt
));
11580 gcc_unreachable ();
11583 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11586 build_optimization_node (struct gcc_options
*opts
,
11587 struct gcc_options
*opts_set
)
11591 /* Use the cache of optimization nodes. */
11593 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11596 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
11600 /* Insert this one into the hash table. */
11601 t
= cl_optimization_node
;
11604 /* Make a new node for next time round. */
11605 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11611 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11614 build_target_option_node (struct gcc_options
*opts
,
11615 struct gcc_options
*opts_set
)
11619 /* Use the cache of optimization nodes. */
11621 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11624 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
11628 /* Insert this one into the hash table. */
11629 t
= cl_target_option_node
;
11632 /* Make a new node for next time round. */
11633 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11639 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11640 so that they aren't saved during PCH writing. */
11643 prepare_target_option_nodes_for_pch (void)
11645 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
11646 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
11647 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
11648 TREE_TARGET_GLOBALS (*iter
) = NULL
;
11651 /* Determine the "ultimate origin" of a block. */
11654 block_ultimate_origin (const_tree block
)
11656 tree origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11658 if (origin
== NULL_TREE
)
11662 gcc_checking_assert ((DECL_P (origin
)
11663 && DECL_ORIGIN (origin
) == origin
)
11664 || BLOCK_ORIGIN (origin
) == origin
);
11669 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11673 tree_nop_conversion_p (const_tree outer_type
, const_tree inner_type
)
11675 /* Do not strip casts into or out of differing address spaces. */
11676 if (POINTER_TYPE_P (outer_type
)
11677 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type
)) != ADDR_SPACE_GENERIC
)
11679 if (!POINTER_TYPE_P (inner_type
)
11680 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
11681 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
))))
11684 else if (POINTER_TYPE_P (inner_type
)
11685 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)) != ADDR_SPACE_GENERIC
)
11687 /* We already know that outer_type is not a pointer with
11688 a non-generic address space. */
11692 /* Use precision rather then machine mode when we can, which gives
11693 the correct answer even for submode (bit-field) types. */
11694 if ((INTEGRAL_TYPE_P (outer_type
)
11695 || POINTER_TYPE_P (outer_type
)
11696 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11697 && (INTEGRAL_TYPE_P (inner_type
)
11698 || POINTER_TYPE_P (inner_type
)
11699 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11700 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11702 /* Otherwise fall back on comparing machine modes (e.g. for
11703 aggregate types, floats). */
11704 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11707 /* Return true iff conversion in EXP generates no instruction. Mark
11708 it inline so that we fully inline into the stripping functions even
11709 though we have two uses of this function. */
11712 tree_nop_conversion (const_tree exp
)
11714 tree outer_type
, inner_type
;
11716 if (location_wrapper_p (exp
))
11718 if (!CONVERT_EXPR_P (exp
)
11719 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11722 outer_type
= TREE_TYPE (exp
);
11723 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11724 if (!inner_type
|| inner_type
== error_mark_node
)
11727 return tree_nop_conversion_p (outer_type
, inner_type
);
11730 /* Return true iff conversion in EXP generates no instruction. Don't
11731 consider conversions changing the signedness. */
11734 tree_sign_nop_conversion (const_tree exp
)
11736 tree outer_type
, inner_type
;
11738 if (!tree_nop_conversion (exp
))
11741 outer_type
= TREE_TYPE (exp
);
11742 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11744 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11745 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11748 /* Strip conversions from EXP according to tree_nop_conversion and
11749 return the resulting expression. */
11752 tree_strip_nop_conversions (tree exp
)
11754 while (tree_nop_conversion (exp
))
11755 exp
= TREE_OPERAND (exp
, 0);
11759 /* Strip conversions from EXP according to tree_sign_nop_conversion
11760 and return the resulting expression. */
11763 tree_strip_sign_nop_conversions (tree exp
)
11765 while (tree_sign_nop_conversion (exp
))
11766 exp
= TREE_OPERAND (exp
, 0);
11770 /* Avoid any floating point extensions from EXP. */
11772 strip_float_extensions (tree exp
)
11774 tree sub
, expt
, subt
;
11776 /* For floating point constant look up the narrowest type that can hold
11777 it properly and handle it like (type)(narrowest_type)constant.
11778 This way we can optimize for instance a=a*2.0 where "a" is float
11779 but 2.0 is double constant. */
11780 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11782 REAL_VALUE_TYPE orig
;
11785 orig
= TREE_REAL_CST (exp
);
11786 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11787 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11788 type
= float_type_node
;
11789 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11790 > TYPE_PRECISION (double_type_node
)
11791 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11792 type
= double_type_node
;
11794 return build_real_truncate (type
, orig
);
11797 if (!CONVERT_EXPR_P (exp
))
11800 sub
= TREE_OPERAND (exp
, 0);
11801 subt
= TREE_TYPE (sub
);
11802 expt
= TREE_TYPE (exp
);
11804 if (!FLOAT_TYPE_P (subt
))
11807 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11810 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11813 return strip_float_extensions (sub
);
11816 /* Strip out all handled components that produce invariant
11820 strip_invariant_refs (const_tree op
)
11822 while (handled_component_p (op
))
11824 switch (TREE_CODE (op
))
11827 case ARRAY_RANGE_REF
:
11828 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
11829 || TREE_OPERAND (op
, 2) != NULL_TREE
11830 || TREE_OPERAND (op
, 3) != NULL_TREE
)
11834 case COMPONENT_REF
:
11835 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
11841 op
= TREE_OPERAND (op
, 0);
11847 static GTY(()) tree gcc_eh_personality_decl
;
11849 /* Return the GCC personality function decl. */
11852 lhd_gcc_personality (void)
11854 if (!gcc_eh_personality_decl
)
11855 gcc_eh_personality_decl
= build_personality_function ("gcc");
11856 return gcc_eh_personality_decl
;
11859 /* TARGET is a call target of GIMPLE call statement
11860 (obtained by gimple_call_fn). Return true if it is
11861 OBJ_TYPE_REF representing an virtual call of C++ method.
11862 (As opposed to OBJ_TYPE_REF representing objc calls
11863 through a cast where middle-end devirtualization machinery
11864 can't apply.) FOR_DUMP_P is true when being called from
11865 the dump routines. */
11868 virtual_method_call_p (const_tree target
, bool for_dump_p
)
11870 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
11872 tree t
= TREE_TYPE (target
);
11873 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
11875 if (TREE_CODE (t
) == FUNCTION_TYPE
)
11877 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
11878 /* If we do not have BINFO associated, it means that type was built
11879 without devirtualization enabled. Do not consider this a virtual
11881 if (!TYPE_BINFO (obj_type_ref_class (target
, for_dump_p
)))
11886 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11889 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
11892 tree base_binfo
, b
;
11894 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
11895 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
11896 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
11898 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
11903 /* Try to find a base info of BINFO that would have its field decl at offset
11904 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11905 found, return, otherwise return NULL_TREE. */
11908 get_binfo_at_offset (tree binfo
, poly_int64 offset
, tree expected_type
)
11910 tree type
= BINFO_TYPE (binfo
);
11914 HOST_WIDE_INT pos
, size
;
11918 if (types_same_for_odr (type
, expected_type
))
11920 if (maybe_lt (offset
, 0))
11923 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
11925 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
11928 pos
= int_bit_position (fld
);
11929 size
= tree_to_uhwi (DECL_SIZE (fld
));
11930 if (known_in_range_p (offset
, pos
, size
))
11933 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
11936 /* Offset 0 indicates the primary base, whose vtable contents are
11937 represented in the binfo for the derived class. */
11938 else if (maybe_ne (offset
, 0))
11940 tree found_binfo
= NULL
, base_binfo
;
11941 /* Offsets in BINFO are in bytes relative to the whole structure
11942 while POS is in bits relative to the containing field. */
11943 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
11946 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
11947 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
11948 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
11950 found_binfo
= base_binfo
;
11954 binfo
= found_binfo
;
11956 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
11960 type
= TREE_TYPE (fld
);
11965 /* Returns true if X is a typedef decl. */
11968 is_typedef_decl (const_tree x
)
11970 return (x
&& TREE_CODE (x
) == TYPE_DECL
11971 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
11974 /* Returns true iff TYPE is a type variant created for a typedef. */
11977 typedef_variant_p (const_tree type
)
11979 return is_typedef_decl (TYPE_NAME (type
));
11982 /* PR 84195: Replace control characters in "unescaped" with their
11983 escaped equivalents. Allow newlines if -fmessage-length has
11984 been set to a non-zero value. This is done here, rather than
11985 where the attribute is recorded as the message length can
11986 change between these two locations. */
11989 escaped_string::escape (const char *unescaped
)
11992 size_t i
, new_i
, len
;
11997 m_str
= const_cast<char *> (unescaped
);
12000 if (unescaped
== NULL
|| *unescaped
== 0)
12003 len
= strlen (unescaped
);
12007 for (i
= 0; i
< len
; i
++)
12009 char c
= unescaped
[i
];
12014 escaped
[new_i
++] = c
;
12018 if (c
!= '\n' || !pp_is_wrapping_line (global_dc
->printer
))
12020 if (escaped
== NULL
)
12022 /* We only allocate space for a new string if we
12023 actually encounter a control character that
12024 needs replacing. */
12025 escaped
= (char *) xmalloc (len
* 2 + 1);
12026 strncpy (escaped
, unescaped
, i
);
12030 escaped
[new_i
++] = '\\';
12034 case '\a': escaped
[new_i
++] = 'a'; break;
12035 case '\b': escaped
[new_i
++] = 'b'; break;
12036 case '\f': escaped
[new_i
++] = 'f'; break;
12037 case '\n': escaped
[new_i
++] = 'n'; break;
12038 case '\r': escaped
[new_i
++] = 'r'; break;
12039 case '\t': escaped
[new_i
++] = 't'; break;
12040 case '\v': escaped
[new_i
++] = 'v'; break;
12041 default: escaped
[new_i
++] = '?'; break;
12045 escaped
[new_i
++] = c
;
12050 escaped
[new_i
] = 0;
12056 /* Warn about a use of an identifier which was marked deprecated. Returns
12057 whether a warning was given. */
12060 warn_deprecated_use (tree node
, tree attr
)
12062 escaped_string msg
;
12064 if (node
== 0 || !warn_deprecated_decl
)
12070 attr
= DECL_ATTRIBUTES (node
);
12071 else if (TYPE_P (node
))
12073 tree decl
= TYPE_STUB_DECL (node
);
12075 attr
= lookup_attribute ("deprecated",
12076 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12081 attr
= lookup_attribute ("deprecated", attr
);
12084 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12089 auto_diagnostic_group d
;
12091 w
= warning (OPT_Wdeprecated_declarations
,
12092 "%qD is deprecated: %s", node
, (const char *) msg
);
12094 w
= warning (OPT_Wdeprecated_declarations
,
12095 "%qD is deprecated", node
);
12097 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12099 else if (TYPE_P (node
))
12101 tree what
= NULL_TREE
;
12102 tree decl
= TYPE_STUB_DECL (node
);
12104 if (TYPE_NAME (node
))
12106 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12107 what
= TYPE_NAME (node
);
12108 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12109 && DECL_NAME (TYPE_NAME (node
)))
12110 what
= DECL_NAME (TYPE_NAME (node
));
12113 auto_diagnostic_group d
;
12117 w
= warning (OPT_Wdeprecated_declarations
,
12118 "%qE is deprecated: %s", what
, (const char *) msg
);
12120 w
= warning (OPT_Wdeprecated_declarations
,
12121 "%qE is deprecated", what
);
12126 w
= warning (OPT_Wdeprecated_declarations
,
12127 "type is deprecated: %s", (const char *) msg
);
12129 w
= warning (OPT_Wdeprecated_declarations
,
12130 "type is deprecated");
12134 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12140 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12141 somewhere in it. */
12144 contains_bitfld_component_ref_p (const_tree ref
)
12146 while (handled_component_p (ref
))
12148 if (TREE_CODE (ref
) == COMPONENT_REF
12149 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12151 ref
= TREE_OPERAND (ref
, 0);
12157 /* Try to determine whether a TRY_CATCH expression can fall through.
12158 This is a subroutine of block_may_fallthru. */
12161 try_catch_may_fallthru (const_tree stmt
)
12163 tree_stmt_iterator i
;
12165 /* If the TRY block can fall through, the whole TRY_CATCH can
12167 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12170 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12171 switch (TREE_CODE (tsi_stmt (i
)))
12174 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12175 catch expression and a body. The whole TRY_CATCH may fall
12176 through iff any of the catch bodies falls through. */
12177 for (; !tsi_end_p (i
); tsi_next (&i
))
12179 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12184 case EH_FILTER_EXPR
:
12185 /* The exception filter expression only matters if there is an
12186 exception. If the exception does not match EH_FILTER_TYPES,
12187 we will execute EH_FILTER_FAILURE, and we will fall through
12188 if that falls through. If the exception does match
12189 EH_FILTER_TYPES, the stack unwinder will continue up the
12190 stack, so we will not fall through. We don't know whether we
12191 will throw an exception which matches EH_FILTER_TYPES or not,
12192 so we just ignore EH_FILTER_TYPES and assume that we might
12193 throw an exception which doesn't match. */
12194 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12197 /* This case represents statements to be executed when an
12198 exception occurs. Those statements are implicitly followed
12199 by a RESX statement to resume execution after the exception.
12200 So in this case the TRY_CATCH never falls through. */
12205 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12206 need not be 100% accurate; simply be conservative and return true if we
12207 don't know. This is used only to avoid stupidly generating extra code.
12208 If we're wrong, we'll just delete the extra code later. */
12211 block_may_fallthru (const_tree block
)
12213 /* This CONST_CAST is okay because expr_last returns its argument
12214 unmodified and we assign it to a const_tree. */
12215 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12217 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12221 /* Easy cases. If the last statement of the block implies
12222 control transfer, then we can't fall through. */
12226 /* If there is a default: label or case labels cover all possible
12227 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12228 to some case label in all cases and all we care is whether the
12229 SWITCH_BODY falls through. */
12230 if (SWITCH_ALL_CASES_P (stmt
))
12231 return block_may_fallthru (SWITCH_BODY (stmt
));
12235 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12237 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12240 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12242 case TRY_CATCH_EXPR
:
12243 return try_catch_may_fallthru (stmt
);
12245 case TRY_FINALLY_EXPR
:
12246 /* The finally clause is always executed after the try clause,
12247 so if it does not fall through, then the try-finally will not
12248 fall through. Otherwise, if the try clause does not fall
12249 through, then when the finally clause falls through it will
12250 resume execution wherever the try clause was going. So the
12251 whole try-finally will only fall through if both the try
12252 clause and the finally clause fall through. */
12253 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12254 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12257 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12260 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12261 stmt
= TREE_OPERAND (stmt
, 1);
12267 /* Functions that do not return do not fall through. */
12268 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12270 case CLEANUP_POINT_EXPR
:
12271 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12274 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12280 return lang_hooks
.block_may_fallthru (stmt
);
12284 /* True if we are using EH to handle cleanups. */
12285 static bool using_eh_for_cleanups_flag
= false;
12287 /* This routine is called from front ends to indicate eh should be used for
12290 using_eh_for_cleanups (void)
12292 using_eh_for_cleanups_flag
= true;
12295 /* Query whether EH is used for cleanups. */
12297 using_eh_for_cleanups_p (void)
12299 return using_eh_for_cleanups_flag
;
12302 /* Wrapper for tree_code_name to ensure that tree code is valid */
12304 get_tree_code_name (enum tree_code code
)
12306 const char *invalid
= "<invalid tree code>";
12308 /* The tree_code enum promotes to signed, but we could be getting
12309 invalid values, so force an unsigned comparison. */
12310 if (unsigned (code
) >= MAX_TREE_CODES
)
12312 if ((unsigned)code
== 0xa5a5)
12313 return "ggc_freed";
12317 return tree_code_name
[code
];
12320 /* Drops the TREE_OVERFLOW flag from T. */
12323 drop_tree_overflow (tree t
)
12325 gcc_checking_assert (TREE_OVERFLOW (t
));
12327 /* For tree codes with a sharing machinery re-build the result. */
12328 if (poly_int_tree_p (t
))
12329 return wide_int_to_tree (TREE_TYPE (t
), wi::to_poly_wide (t
));
12331 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12332 and canonicalize the result. */
12333 if (TREE_CODE (t
) == VECTOR_CST
)
12335 tree_vector_builder builder
;
12336 builder
.new_unary_operation (TREE_TYPE (t
), t
, true);
12337 unsigned int count
= builder
.encoded_nelts ();
12338 for (unsigned int i
= 0; i
< count
; ++i
)
12340 tree elt
= VECTOR_CST_ELT (t
, i
);
12341 if (TREE_OVERFLOW (elt
))
12342 elt
= drop_tree_overflow (elt
);
12343 builder
.quick_push (elt
);
12345 return builder
.build ();
12348 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12349 and drop the flag. */
12351 TREE_OVERFLOW (t
) = 0;
12353 /* For constants that contain nested constants, drop the flag
12354 from those as well. */
12355 if (TREE_CODE (t
) == COMPLEX_CST
)
12357 if (TREE_OVERFLOW (TREE_REALPART (t
)))
12358 TREE_REALPART (t
) = drop_tree_overflow (TREE_REALPART (t
));
12359 if (TREE_OVERFLOW (TREE_IMAGPART (t
)))
12360 TREE_IMAGPART (t
) = drop_tree_overflow (TREE_IMAGPART (t
));
12366 /* Given a memory reference expression T, return its base address.
12367 The base address of a memory reference expression is the main
12368 object being referenced. For instance, the base address for
12369 'array[i].fld[j]' is 'array'. You can think of this as stripping
12370 away the offset part from a memory address.
12372 This function calls handled_component_p to strip away all the inner
12373 parts of the memory reference until it reaches the base object. */
12376 get_base_address (tree t
)
12378 while (handled_component_p (t
))
12379 t
= TREE_OPERAND (t
, 0);
12381 if ((TREE_CODE (t
) == MEM_REF
12382 || TREE_CODE (t
) == TARGET_MEM_REF
)
12383 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12384 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12386 /* ??? Either the alias oracle or all callers need to properly deal
12387 with WITH_SIZE_EXPRs before we can look through those. */
12388 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12394 /* Return a tree of sizetype representing the size, in bytes, of the element
12395 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12398 array_ref_element_size (tree exp
)
12400 tree aligned_size
= TREE_OPERAND (exp
, 3);
12401 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12402 location_t loc
= EXPR_LOCATION (exp
);
12404 /* If a size was specified in the ARRAY_REF, it's the size measured
12405 in alignment units of the element type. So multiply by that value. */
12408 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12409 sizetype from another type of the same width and signedness. */
12410 if (TREE_TYPE (aligned_size
) != sizetype
)
12411 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
12412 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
12413 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
12416 /* Otherwise, take the size from that of the element type. Substitute
12417 any PLACEHOLDER_EXPR that we have. */
12419 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
12422 /* Return a tree representing the lower bound of the array mentioned in
12423 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12426 array_ref_low_bound (tree exp
)
12428 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12430 /* If a lower bound is specified in EXP, use it. */
12431 if (TREE_OPERAND (exp
, 2))
12432 return TREE_OPERAND (exp
, 2);
12434 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12435 substituting for a PLACEHOLDER_EXPR as needed. */
12436 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12437 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
12439 /* Otherwise, return a zero of the appropriate type. */
12440 tree idxtype
= TREE_TYPE (TREE_OPERAND (exp
, 1));
12441 return (idxtype
== error_mark_node
12442 ? integer_zero_node
: build_int_cst (idxtype
, 0));
12445 /* Return a tree representing the upper bound of the array mentioned in
12446 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12449 array_ref_up_bound (tree exp
)
12451 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12453 /* If there is a domain type and it has an upper bound, use it, substituting
12454 for a PLACEHOLDER_EXPR as needed. */
12455 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
12456 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
12458 /* Otherwise fail. */
12462 /* Returns true if REF is an array reference, component reference,
12463 or memory reference to an array at the end of a structure.
12464 If this is the case, the array may be allocated larger
12465 than its upper bound implies. */
12468 array_at_struct_end_p (tree ref
)
12472 if (TREE_CODE (ref
) == ARRAY_REF
12473 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12475 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
12476 ref
= TREE_OPERAND (ref
, 0);
12478 else if (TREE_CODE (ref
) == COMPONENT_REF
12479 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
12480 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
12481 else if (TREE_CODE (ref
) == MEM_REF
)
12483 tree arg
= TREE_OPERAND (ref
, 0);
12484 if (TREE_CODE (arg
) == ADDR_EXPR
)
12485 arg
= TREE_OPERAND (arg
, 0);
12486 tree argtype
= TREE_TYPE (arg
);
12487 if (TREE_CODE (argtype
) == RECORD_TYPE
)
12489 if (tree fld
= last_field (argtype
))
12491 atype
= TREE_TYPE (fld
);
12492 if (TREE_CODE (atype
) != ARRAY_TYPE
)
12494 if (VAR_P (arg
) && DECL_SIZE (fld
))
12506 if (TREE_CODE (ref
) == STRING_CST
)
12509 tree ref_to_array
= ref
;
12510 while (handled_component_p (ref
))
12512 /* If the reference chain contains a component reference to a
12513 non-union type and there follows another field the reference
12514 is not at the end of a structure. */
12515 if (TREE_CODE (ref
) == COMPONENT_REF
)
12517 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
12519 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12520 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12521 nextf
= DECL_CHAIN (nextf
);
12526 /* If we have a multi-dimensional array we do not consider
12527 a non-innermost dimension as flex array if the whole
12528 multi-dimensional array is at struct end.
12529 Same for an array of aggregates with a trailing array
12531 else if (TREE_CODE (ref
) == ARRAY_REF
)
12533 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12535 /* If we view an underlying object as sth else then what we
12536 gathered up to now is what we have to rely on. */
12537 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
12540 gcc_unreachable ();
12542 ref
= TREE_OPERAND (ref
, 0);
12545 /* The array now is at struct end. Treat flexible arrays as
12546 always subject to extend, even into just padding constrained by
12547 an underlying decl. */
12548 if (! TYPE_SIZE (atype
)
12549 || ! TYPE_DOMAIN (atype
)
12550 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12553 if (TREE_CODE (ref
) == MEM_REF
12554 && TREE_CODE (TREE_OPERAND (ref
, 0)) == ADDR_EXPR
)
12555 ref
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
12557 /* If the reference is based on a declared entity, the size of the array
12558 is constrained by its given domain. (Do not trust commons PR/69368). */
12560 && !(flag_unconstrained_commons
12561 && VAR_P (ref
) && DECL_COMMON (ref
))
12562 && DECL_SIZE_UNIT (ref
)
12563 && TREE_CODE (DECL_SIZE_UNIT (ref
)) == INTEGER_CST
)
12565 /* Check whether the array domain covers all of the available
12568 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype
))) != INTEGER_CST
12569 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
12570 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
)
12572 if (! get_addr_base_and_unit_offset (ref_to_array
, &offset
))
12575 /* If at least one extra element fits it is a flexarray. */
12576 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12577 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
)))
12579 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype
))),
12580 wi::to_offset (DECL_SIZE_UNIT (ref
)) - offset
))
12589 /* Return a tree representing the offset, in bytes, of the field referenced
12590 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12593 component_ref_field_offset (tree exp
)
12595 tree aligned_offset
= TREE_OPERAND (exp
, 2);
12596 tree field
= TREE_OPERAND (exp
, 1);
12597 location_t loc
= EXPR_LOCATION (exp
);
12599 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12600 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12602 if (aligned_offset
)
12604 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12605 sizetype from another type of the same width and signedness. */
12606 if (TREE_TYPE (aligned_offset
) != sizetype
)
12607 aligned_offset
= fold_convert_loc (loc
, sizetype
, aligned_offset
);
12608 return size_binop_loc (loc
, MULT_EXPR
, aligned_offset
,
12609 size_int (DECL_OFFSET_ALIGN (field
)
12613 /* Otherwise, take the offset from that of the field. Substitute
12614 any PLACEHOLDER_EXPR that we have. */
12616 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
12619 /* Given the initializer INIT, return the initializer for the field
12620 DECL if it exists, otherwise null. Used to obtain the initializer
12621 for a flexible array member and determine its size. */
12624 get_initializer_for (tree init
, tree decl
)
12628 tree fld
, fld_init
;
12629 unsigned HOST_WIDE_INT i
;
12630 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), i
, fld
, fld_init
)
12635 if (TREE_CODE (fld
) == CONSTRUCTOR
)
12637 fld_init
= get_initializer_for (fld_init
, decl
);
12646 /* Determines the size of the member referenced by the COMPONENT_REF
12647 REF, using its initializer expression if necessary in order to
12648 determine the size of an initialized flexible array member.
12649 If non-null, set *ARK when REF refers to an interior zero-length
12650 array or a trailing one-element array.
12651 Returns the size as sizetype (which might be zero for an object
12652 with an uninitialized flexible array member) or null if the size
12653 cannot be determined. */
12656 component_ref_size (tree ref
, special_array_member
*sam
/* = NULL */)
12658 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
12660 special_array_member sambuf
;
12663 *sam
= special_array_member::none
;
12665 /* The object/argument referenced by the COMPONENT_REF and its type. */
12666 tree arg
= TREE_OPERAND (ref
, 0);
12667 tree argtype
= TREE_TYPE (arg
);
12668 /* The referenced member. */
12669 tree member
= TREE_OPERAND (ref
, 1);
12671 tree memsize
= DECL_SIZE_UNIT (member
);
12674 tree memtype
= TREE_TYPE (member
);
12675 if (TREE_CODE (memtype
) != ARRAY_TYPE
)
12676 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12677 to the type of a class with a virtual base which doesn't
12678 reflect the size of the virtual's members (see pr97595).
12679 If that's the case fail for now and implement something
12680 more robust in the future. */
12681 return (tree_int_cst_equal (memsize
, TYPE_SIZE_UNIT (memtype
))
12682 ? memsize
: NULL_TREE
);
12684 bool trailing
= array_at_struct_end_p (ref
);
12685 bool zero_length
= integer_zerop (memsize
);
12686 if (!trailing
&& !zero_length
)
12687 /* MEMBER is either an interior array or is an array with
12688 more than one element. */
12694 *sam
= special_array_member::trail_0
;
12697 *sam
= special_array_member::int_0
;
12698 memsize
= NULL_TREE
;
12703 if (tree dom
= TYPE_DOMAIN (memtype
))
12704 if (tree min
= TYPE_MIN_VALUE (dom
))
12705 if (tree max
= TYPE_MAX_VALUE (dom
))
12706 if (TREE_CODE (min
) == INTEGER_CST
12707 && TREE_CODE (max
) == INTEGER_CST
)
12709 offset_int minidx
= wi::to_offset (min
);
12710 offset_int maxidx
= wi::to_offset (max
);
12711 offset_int neltsm1
= maxidx
- minidx
;
12713 /* MEMBER is an array with more than one element. */
12717 *sam
= special_array_member::trail_1
;
12720 /* For a reference to a zero- or one-element array member of a union
12721 use the size of the union instead of the size of the member. */
12722 if (TREE_CODE (argtype
) == UNION_TYPE
)
12723 memsize
= TYPE_SIZE_UNIT (argtype
);
12726 /* MEMBER is either a bona fide flexible array member, or a zero-length
12727 array member, or an array of length one treated as such. */
12729 /* If the reference is to a declared object and the member a true
12730 flexible array, try to determine its size from its initializer. */
12731 poly_int64 baseoff
= 0;
12732 tree base
= get_addr_base_and_unit_offset (ref
, &baseoff
);
12733 if (!base
|| !VAR_P (base
))
12735 if (*sam
!= special_array_member::int_0
)
12738 if (TREE_CODE (arg
) != COMPONENT_REF
)
12742 while (TREE_CODE (base
) == COMPONENT_REF
)
12743 base
= TREE_OPERAND (base
, 0);
12744 baseoff
= tree_to_poly_int64 (byte_position (TREE_OPERAND (ref
, 1)));
12747 /* BASE is the declared object of which MEMBER is either a member
12748 or that is cast to ARGTYPE (e.g., a char buffer used to store
12749 an ARGTYPE object). */
12750 tree basetype
= TREE_TYPE (base
);
12752 /* Determine the base type of the referenced object. If it's
12753 the same as ARGTYPE and MEMBER has a known size, return it. */
12754 tree bt
= basetype
;
12755 if (*sam
!= special_array_member::int_0
)
12756 while (TREE_CODE (bt
) == ARRAY_TYPE
)
12757 bt
= TREE_TYPE (bt
);
12758 bool typematch
= useless_type_conversion_p (argtype
, bt
);
12759 if (memsize
&& typematch
)
12762 memsize
= NULL_TREE
;
12765 /* MEMBER is a true flexible array member. Compute its size from
12766 the initializer of the BASE object if it has one. */
12767 if (tree init
= DECL_P (base
) ? DECL_INITIAL (base
) : NULL_TREE
)
12768 if (init
!= error_mark_node
)
12770 init
= get_initializer_for (init
, member
);
12773 memsize
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
12774 if (tree refsize
= TYPE_SIZE_UNIT (argtype
))
12776 /* Use the larger of the initializer size and the tail
12777 padding in the enclosing struct. */
12778 poly_int64 rsz
= tree_to_poly_int64 (refsize
);
12780 if (known_lt (tree_to_poly_int64 (memsize
), rsz
))
12781 memsize
= wide_int_to_tree (TREE_TYPE (memsize
), rsz
);
12793 && DECL_EXTERNAL (base
)
12795 && *sam
!= special_array_member::int_0
)
12796 /* The size of a flexible array member of an extern struct
12797 with no initializer cannot be determined (it's defined
12798 in another translation unit and can have an initializer
12799 with an arbitrary number of elements). */
12802 /* Use the size of the base struct or, for interior zero-length
12803 arrays, the size of the enclosing type. */
12804 memsize
= TYPE_SIZE_UNIT (bt
);
12806 else if (DECL_P (base
))
12807 /* Use the size of the BASE object (possibly an array of some
12808 other type such as char used to store the struct). */
12809 memsize
= DECL_SIZE_UNIT (base
);
12814 /* If the flexible array member has a known size use the greater
12815 of it and the tail padding in the enclosing struct.
12816 Otherwise, when the size of the flexible array member is unknown
12817 and the referenced object is not a struct, use the size of its
12818 type when known. This detects sizes of array buffers when cast
12819 to struct types with flexible array members. */
12822 poly_int64 memsz64
= memsize
? tree_to_poly_int64 (memsize
) : 0;
12823 if (known_lt (baseoff
, memsz64
))
12825 memsz64
-= baseoff
;
12826 return wide_int_to_tree (TREE_TYPE (memsize
), memsz64
);
12828 return size_zero_node
;
12831 /* Return "don't know" for an external non-array object since its
12832 flexible array member can be initialized to have any number of
12833 elements. Otherwise, return zero because the flexible array
12834 member has no elements. */
12835 return (DECL_P (base
)
12836 && DECL_EXTERNAL (base
)
12838 || TREE_CODE (basetype
) != ARRAY_TYPE
)
12839 ? NULL_TREE
: size_zero_node
);
12842 /* Return the machine mode of T. For vectors, returns the mode of the
12843 inner type. The main use case is to feed the result to HONOR_NANS,
12844 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12847 element_mode (const_tree t
)
12851 if (VECTOR_TYPE_P (t
) || TREE_CODE (t
) == COMPLEX_TYPE
)
12853 return TYPE_MODE (t
);
12856 /* Vector types need to re-check the target flags each time we report
12857 the machine mode. We need to do this because attribute target can
12858 change the result of vector_mode_supported_p and have_regs_of_mode
12859 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12860 change on a per-function basis. */
12861 /* ??? Possibly a better solution is to run through all the types
12862 referenced by a function and re-compute the TYPE_MODE once, rather
12863 than make the TYPE_MODE macro call a function. */
12866 vector_type_mode (const_tree t
)
12870 gcc_assert (TREE_CODE (t
) == VECTOR_TYPE
);
12872 mode
= t
->type_common
.mode
;
12873 if (VECTOR_MODE_P (mode
)
12874 && (!targetm
.vector_mode_supported_p (mode
)
12875 || !have_regs_of_mode
[mode
]))
12877 scalar_int_mode innermode
;
12879 /* For integers, try mapping it to a same-sized scalar mode. */
12880 if (is_int_mode (TREE_TYPE (t
)->type_common
.mode
, &innermode
))
12882 poly_int64 size
= (TYPE_VECTOR_SUBPARTS (t
)
12883 * GET_MODE_BITSIZE (innermode
));
12884 scalar_int_mode mode
;
12885 if (int_mode_for_size (size
, 0).exists (&mode
)
12886 && have_regs_of_mode
[mode
])
12896 /* Return the size in bits of each element of vector type TYPE. */
12899 vector_element_bits (const_tree type
)
12901 gcc_checking_assert (VECTOR_TYPE_P (type
));
12902 if (VECTOR_BOOLEAN_TYPE_P (type
))
12903 return TYPE_PRECISION (TREE_TYPE (type
));
12904 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type
)));
12907 /* Calculate the size in bits of each element of vector type TYPE
12908 and return the result as a tree of type bitsizetype. */
12911 vector_element_bits_tree (const_tree type
)
12913 gcc_checking_assert (VECTOR_TYPE_P (type
));
12914 if (VECTOR_BOOLEAN_TYPE_P (type
))
12915 return bitsize_int (vector_element_bits (type
));
12916 return TYPE_SIZE (TREE_TYPE (type
));
12919 /* Verify that basic properties of T match TV and thus T can be a variant of
12920 TV. TV should be the more specified variant (i.e. the main variant). */
12923 verify_type_variant (const_tree t
, tree tv
)
12925 /* Type variant can differ by:
12927 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12928 ENCODE_QUAL_ADDR_SPACE.
12929 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12930 in this case some values may not be set in the variant types
12931 (see TYPE_COMPLETE_P checks).
12932 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
12933 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
12934 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12935 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12936 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12937 this is necessary to make it possible to merge types form different TUs
12938 - arrays, pointers and references may have TREE_TYPE that is a variant
12939 of TREE_TYPE of their main variants.
12940 - aggregates may have new TYPE_FIELDS list that list variants of
12941 the main variant TYPE_FIELDS.
12942 - vector types may differ by TYPE_VECTOR_OPAQUE
12945 /* Convenience macro for matching individual fields. */
12946 #define verify_variant_match(flag) \
12948 if (flag (tv) != flag (t)) \
12950 error ("type variant differs by %s", #flag); \
12956 /* tree_base checks. */
12958 verify_variant_match (TREE_CODE
);
12959 /* FIXME: Ada builds non-artificial variants of artificial types. */
12961 if (TYPE_ARTIFICIAL (tv
))
12962 verify_variant_match (TYPE_ARTIFICIAL
);
12964 if (POINTER_TYPE_P (tv
))
12965 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
12966 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
12967 verify_variant_match (TYPE_UNSIGNED
);
12968 verify_variant_match (TYPE_PACKED
);
12969 if (TREE_CODE (t
) == REFERENCE_TYPE
)
12970 verify_variant_match (TYPE_REF_IS_RVALUE
);
12971 if (AGGREGATE_TYPE_P (t
))
12972 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
12974 verify_variant_match (TYPE_SATURATING
);
12975 /* FIXME: This check trigger during libstdc++ build. */
12977 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
))
12978 verify_variant_match (TYPE_FINAL_P
);
12981 /* tree_type_common checks. */
12983 if (COMPLETE_TYPE_P (t
))
12985 verify_variant_match (TYPE_MODE
);
12986 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
12987 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
12988 verify_variant_match (TYPE_SIZE
);
12989 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
12990 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
12991 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
12993 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
12994 TYPE_SIZE_UNIT (tv
), 0));
12995 error ("type variant has different %<TYPE_SIZE_UNIT%>");
12997 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
12998 debug_tree (TYPE_SIZE_UNIT (tv
));
12999 error ("type%'s %<TYPE_SIZE_UNIT%>");
13000 debug_tree (TYPE_SIZE_UNIT (t
));
13003 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13005 verify_variant_match (TYPE_PRECISION
);
13006 if (RECORD_OR_UNION_TYPE_P (t
))
13007 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13008 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13009 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
13010 /* During LTO we merge variant lists from diferent translation units
13011 that may differ BY TYPE_CONTEXT that in turn may point
13012 to TRANSLATION_UNIT_DECL.
13013 Ada also builds variants of types with different TYPE_CONTEXT. */
13015 if (!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
))
13016 verify_variant_match (TYPE_CONTEXT
);
13018 if (TREE_CODE (t
) == ARRAY_TYPE
|| TREE_CODE (t
) == INTEGER_TYPE
)
13019 verify_variant_match (TYPE_STRING_FLAG
);
13020 if (TREE_CODE (t
) == RECORD_TYPE
|| TREE_CODE (t
) == UNION_TYPE
)
13021 verify_variant_match (TYPE_CXX_ODR_P
);
13022 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13024 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13029 /* tree_type_non_common checks. */
13031 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13032 and dangle the pointer from time to time. */
13033 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13034 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13035 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13037 error ("type variant has different %<TYPE_VFIELD%>");
13041 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13042 || TREE_CODE (t
) == INTEGER_TYPE
13043 || TREE_CODE (t
) == BOOLEAN_TYPE
13044 || TREE_CODE (t
) == REAL_TYPE
13045 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13047 verify_variant_match (TYPE_MAX_VALUE
);
13048 verify_variant_match (TYPE_MIN_VALUE
);
13050 if (TREE_CODE (t
) == METHOD_TYPE
)
13051 verify_variant_match (TYPE_METHOD_BASETYPE
);
13052 if (TREE_CODE (t
) == OFFSET_TYPE
)
13053 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13054 if (TREE_CODE (t
) == ARRAY_TYPE
)
13055 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
13056 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13057 or even type's main variant. This is needed to make bootstrap pass
13058 and the bug seems new in GCC 5.
13059 C++ FE should be updated to make this consistent and we should check
13060 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13061 is a match with main variant.
13063 Also disable the check for Java for now because of parser hack that builds
13064 first an dummy BINFO and then sometimes replace it by real BINFO in some
13066 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13067 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
13068 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13069 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13070 at LTO time only. */
13071 && (in_lto_p
&& odr_type_p (t
)))
13073 error ("type variant has different %<TYPE_BINFO%>");
13075 error ("type variant%'s %<TYPE_BINFO%>");
13076 debug_tree (TYPE_BINFO (tv
));
13077 error ("type%'s %<TYPE_BINFO%>");
13078 debug_tree (TYPE_BINFO (t
));
13082 /* Check various uses of TYPE_VALUES_RAW. */
13083 if (TREE_CODE (t
) == ENUMERAL_TYPE
13084 && TYPE_VALUES (t
))
13085 verify_variant_match (TYPE_VALUES
);
13086 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13087 verify_variant_match (TYPE_DOMAIN
);
13088 /* Permit incomplete variants of complete type. While FEs may complete
13089 all variants, this does not happen for C++ templates in all cases. */
13090 else if (RECORD_OR_UNION_TYPE_P (t
)
13091 && COMPLETE_TYPE_P (t
)
13092 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
13096 /* Fortran builds qualified variants as new records with items of
13097 qualified type. Verify that they looks same. */
13098 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
13100 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13101 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
13102 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
13103 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
13104 /* FIXME: gfc_nonrestricted_type builds all types as variants
13105 with exception of pointer types. It deeply copies the type
13106 which means that we may end up with a variant type
13107 referring non-variant pointer. We may change it to
13108 produce types as variants, too, like
13109 objc_get_protocol_qualified_type does. */
13110 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
13111 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
13112 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
13116 error ("type variant has different %<TYPE_FIELDS%>");
13118 error ("first mismatch is field");
13120 error ("and field");
13125 else if ((TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
))
13126 verify_variant_match (TYPE_ARG_TYPES
);
13127 /* For C++ the qualified variant of array type is really an array type
13128 of qualified TREE_TYPE.
13129 objc builds variants of pointer where pointer to type is a variant, too
13130 in objc_get_protocol_qualified_type. */
13131 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
13132 && ((TREE_CODE (t
) != ARRAY_TYPE
13133 && !POINTER_TYPE_P (t
))
13134 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
13135 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
13137 error ("type variant has different %<TREE_TYPE%>");
13139 error ("type variant%'s %<TREE_TYPE%>");
13140 debug_tree (TREE_TYPE (tv
));
13141 error ("type%'s %<TREE_TYPE%>");
13142 debug_tree (TREE_TYPE (t
));
13145 if (type_with_alias_set_p (t
)
13146 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13148 error ("type is not compatible with its variant");
13150 error ("type variant%'s %<TREE_TYPE%>");
13151 debug_tree (TREE_TYPE (tv
));
13152 error ("type%'s %<TREE_TYPE%>");
13153 debug_tree (TREE_TYPE (t
));
13157 #undef verify_variant_match
13161 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13162 the middle-end types_compatible_p function. It needs to avoid
13163 claiming types are different for types that should be treated
13164 the same with respect to TBAA. Canonical types are also used
13165 for IL consistency checks via the useless_type_conversion_p
13166 predicate which does not handle all type kinds itself but falls
13167 back to pointer-comparison of TYPE_CANONICAL for aggregates
13170 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13171 type calculation because we need to allow inter-operability between signed
13172 and unsigned variants. */
13175 type_with_interoperable_signedness (const_tree type
)
13177 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13178 signed char and unsigned char. Similarly fortran FE builds
13179 C_SIZE_T as signed type, while C defines it unsigned. */
13181 return tree_code_for_canonical_type_merging (TREE_CODE (type
))
13183 && (TYPE_PRECISION (type
) == TYPE_PRECISION (signed_char_type_node
)
13184 || TYPE_PRECISION (type
) == TYPE_PRECISION (size_type_node
));
13187 /* Return true iff T1 and T2 are structurally identical for what
13189 This function is used both by lto.c canonical type merging and by the
13190 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13191 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13192 only for LTO because only in these cases TYPE_CANONICAL equivalence
13193 correspond to one defined by gimple_canonical_types_compatible_p. */
13196 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13197 bool trust_type_canonical
)
13199 /* Type variants should be same as the main variant. When not doing sanity
13200 checking to verify this fact, go to main variants and save some work. */
13201 if (trust_type_canonical
)
13203 t1
= TYPE_MAIN_VARIANT (t1
);
13204 t2
= TYPE_MAIN_VARIANT (t2
);
13207 /* Check first for the obvious case of pointer identity. */
13211 /* Check that we have two types to compare. */
13212 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13215 /* We consider complete types always compatible with incomplete type.
13216 This does not make sense for canonical type calculation and thus we
13217 need to ensure that we are never called on it.
13219 FIXME: For more correctness the function probably should have three modes
13220 1) mode assuming that types are complete mathcing their structure
13221 2) mode allowing incomplete types but producing equivalence classes
13222 and thus ignoring all info from complete types
13223 3) mode allowing incomplete types to match complete but checking
13224 compatibility between complete types.
13226 1 and 2 can be used for canonical type calculation. 3 is the real
13227 definition of type compatibility that can be used i.e. for warnings during
13228 declaration merging. */
13230 gcc_assert (!trust_type_canonical
13231 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13233 /* If the types have been previously registered and found equal
13236 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13237 && trust_type_canonical
)
13239 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13240 they are always NULL, but they are set to non-NULL for types
13241 constructed by build_pointer_type and variants. In this case the
13242 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13243 all pointers are considered equal. Be sure to not return false
13245 gcc_checking_assert (canonical_type_used_p (t1
)
13246 && canonical_type_used_p (t2
));
13247 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
13250 /* For types where we do ODR based TBAA the canonical type is always
13251 set correctly, so we know that types are different if their
13252 canonical types does not match. */
13253 if (trust_type_canonical
13254 && (odr_type_p (t1
) && odr_based_tbaa_p (t1
))
13255 != (odr_type_p (t2
) && odr_based_tbaa_p (t2
)))
13258 /* Can't be the same type if the types don't have the same code. */
13259 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13260 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13263 /* Qualifiers do not matter for canonical type comparison purposes. */
13265 /* Void types and nullptr types are always the same. */
13266 if (TREE_CODE (t1
) == VOID_TYPE
13267 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13270 /* Can't be the same type if they have different mode. */
13271 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13274 /* Non-aggregate types can be handled cheaply. */
13275 if (INTEGRAL_TYPE_P (t1
)
13276 || SCALAR_FLOAT_TYPE_P (t1
)
13277 || FIXED_POINT_TYPE_P (t1
)
13278 || TREE_CODE (t1
) == VECTOR_TYPE
13279 || TREE_CODE (t1
) == COMPLEX_TYPE
13280 || TREE_CODE (t1
) == OFFSET_TYPE
13281 || POINTER_TYPE_P (t1
))
13283 /* Can't be the same type if they have different recision. */
13284 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
13287 /* In some cases the signed and unsigned types are required to be
13289 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13290 && !type_with_interoperable_signedness (t1
))
13293 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13294 interoperable with "signed char". Unless all frontends are revisited
13295 to agree on these types, we must ignore the flag completely. */
13297 /* Fortran standard define C_PTR type that is compatible with every
13298 C pointer. For this reason we need to glob all pointers into one.
13299 Still pointers in different address spaces are not compatible. */
13300 if (POINTER_TYPE_P (t1
))
13302 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
13303 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
13307 /* Tail-recurse to components. */
13308 if (TREE_CODE (t1
) == VECTOR_TYPE
13309 || TREE_CODE (t1
) == COMPLEX_TYPE
)
13310 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
13312 trust_type_canonical
);
13317 /* Do type-specific comparisons. */
13318 switch (TREE_CODE (t1
))
13321 /* Array types are the same if the element types are the same and
13322 the number of elements are the same. */
13323 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13324 trust_type_canonical
)
13325 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
13326 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
13327 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
13331 tree i1
= TYPE_DOMAIN (t1
);
13332 tree i2
= TYPE_DOMAIN (t2
);
13334 /* For an incomplete external array, the type domain can be
13335 NULL_TREE. Check this condition also. */
13336 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
13338 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
13342 tree min1
= TYPE_MIN_VALUE (i1
);
13343 tree min2
= TYPE_MIN_VALUE (i2
);
13344 tree max1
= TYPE_MAX_VALUE (i1
);
13345 tree max2
= TYPE_MAX_VALUE (i2
);
13347 /* The minimum/maximum values have to be the same. */
13350 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
13351 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
13352 || operand_equal_p (min1
, min2
, 0))))
13355 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
13356 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
13357 || operand_equal_p (max1
, max2
, 0)))))
13365 case FUNCTION_TYPE
:
13366 /* Function types are the same if the return type and arguments types
13368 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13369 trust_type_canonical
))
13372 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
13376 tree parms1
, parms2
;
13378 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
13380 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
13382 if (!gimple_canonical_types_compatible_p
13383 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
13384 trust_type_canonical
))
13388 if (parms1
|| parms2
)
13396 case QUAL_UNION_TYPE
:
13400 /* Don't try to compare variants of an incomplete type, before
13401 TYPE_FIELDS has been copied around. */
13402 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
13406 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
13409 /* For aggregate types, all the fields must be the same. */
13410 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
13412 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13414 /* Skip non-fields and zero-sized fields. */
13415 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
13417 && integer_zerop (DECL_SIZE (f1
)))))
13418 f1
= TREE_CHAIN (f1
);
13419 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
13421 && integer_zerop (DECL_SIZE (f2
)))))
13422 f2
= TREE_CHAIN (f2
);
13425 /* The fields must have the same name, offset and type. */
13426 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
13427 || !gimple_compare_field_offset (f1
, f2
)
13428 || !gimple_canonical_types_compatible_p
13429 (TREE_TYPE (f1
), TREE_TYPE (f2
),
13430 trust_type_canonical
))
13434 /* If one aggregate has more fields than the other, they
13435 are not the same. */
13443 /* Consider all types with language specific trees in them mutually
13444 compatible. This is executed only from verify_type and false
13445 positives can be tolerated. */
13446 gcc_assert (!in_lto_p
);
13451 /* Verify type T. */
/* NOTE(review): this region was garbled by extraction -- source lines are
   split across physical lines, original line numbers are fused into the
   text, and closing braces plus most "return"/"if (!...)" guard lines were
   dropped.  Tokens below are preserved verbatim; only comments are added.  */
/* Debug checker for type node T: each check below emits error () and sets
   error_found; if anything failed, the function ends by dumping T and
   calling internal_error ("%qs failed", __func__).  */
13454 verify_type (const_tree t
)
/* Accumulates failures so all diagnostics print before aborting.  */
13456 bool error_found
= false;
/* TYPE_MAIN_VARIANT invariants: it must exist, be its own main variant,
   and (for non-main variants) pass verify_type_variant.  */
13457 tree mv
= TYPE_MAIN_VARIANT (t
);
13460 error ("main variant is not defined");
13461 error_found
= true;
13463 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
13465 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13467 error_found
= true;
13469 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
13470 error_found
= true;
/* TYPE_CANONICAL invariants (used by TBAA / useless-conversion checks).  */
13472 tree ct
= TYPE_CANONICAL (t
);
13475 else if (TYPE_CANONICAL (t
) != ct
)
13477 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13479 error_found
= true;
13481 /* Method and function types cannot be used to address memory and thus
13482 TYPE_CANONICAL really matters only for determining useless conversions.
13484 FIXME: C++ FE produce declarations of builtin functions that are not
13485 compatible with main variants. */
13486 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
13489 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13490 with variably sized arrays because their sizes possibly
13491 gimplified to different variables. */
13492 && !variably_modified_type_p (ct
, NULL
)
13493 && !gimple_canonical_types_compatible_p (t
, ct
, false)
13494 && COMPLETE_TYPE_P (t
))
13496 error ("%<TYPE_CANONICAL%> is not compatible");
13498 error_found
= true;
/* A type and its canonical type must share a machine mode.  */
13501 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
13502 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
13504 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13506 error_found
= true;
13508 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
13510 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13512 debug_tree (TYPE_MAIN_VARIANT (ct
));
13513 error_found
= true;
13517 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13518 if (RECORD_OR_UNION_TYPE_P (t
))
13520 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13521 and danagle the pointer from time to time. */
13522 if (TYPE_VFIELD (t
)
13523 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
13524 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
13526 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13527 debug_tree (TYPE_VFIELD (t
));
13528 error_found
= true;
13531 else if (TREE_CODE (t
) == POINTER_TYPE
)
13533 if (TYPE_NEXT_PTR_TO (t
)
13534 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
13536 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13537 debug_tree (TYPE_NEXT_PTR_TO (t
));
13538 error_found
= true;
13541 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
13543 if (TYPE_NEXT_REF_TO (t
)
13544 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
13546 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13547 debug_tree (TYPE_NEXT_REF_TO (t
));
13548 error_found
= true;
13551 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13552 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13554 /* FIXME: The following check should pass:
13555 useless_type_conversion_p (const_cast <tree> (t),
13556 TREE_TYPE (TYPE_MIN_VALUE (t))
13557 but does not for C sizetypes in LTO. */
13560 /* Check various uses of TYPE_MAXVAL_RAW. */
13561 if (RECORD_OR_UNION_TYPE_P (t
))
13563 if (!TYPE_BINFO (t
))
13565 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
13567 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13568 debug_tree (TYPE_BINFO (t
));
13569 error_found
= true;
13571 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
13573 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13574 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
13575 error_found
= true;
13578 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13580 if (TYPE_METHOD_BASETYPE (t
)
13581 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
13582 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
13584 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13585 debug_tree (TYPE_METHOD_BASETYPE (t
));
13586 error_found
= true;
13589 else if (TREE_CODE (t
) == OFFSET_TYPE
)
13591 if (TYPE_OFFSET_BASETYPE (t
)
13592 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
13593 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
13595 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13596 debug_tree (TYPE_OFFSET_BASETYPE (t
));
13597 error_found
= true;
13600 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13601 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13603 /* FIXME: The following check should pass:
13604 useless_type_conversion_p (const_cast <tree> (t),
13605 TREE_TYPE (TYPE_MAX_VALUE (t))
13606 but does not for C sizetypes in LTO. */
13608 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13610 if (TYPE_ARRAY_MAX_SIZE (t
)
13611 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
13613 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13614 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
13615 error_found
= true;
/* For all remaining type codes the raw max-value slot must be unused.  */
13618 else if (TYPE_MAX_VALUE_RAW (t
))
13620 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13621 debug_tree (TYPE_MAX_VALUE_RAW (t
));
13622 error_found
= true;
13625 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
13627 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13628 debug_tree (TYPE_LANG_SLOT_1 (t
));
13629 error_found
= true;
13632 /* Check various uses of TYPE_VALUES_RAW. */
13633 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
13634 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
13636 tree value
= TREE_VALUE (l
);
13637 tree name
= TREE_PURPOSE (l
);
13639 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13640 CONST_DECL of ENUMERAL TYPE. */
13641 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
13643 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13644 debug_tree (value
);
13646 error_found
= true;
13648 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
13649 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
13651 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13653 debug_tree (value
);
13655 error_found
= true;
13657 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
13659 error ("enum value name is not %<IDENTIFIER_NODE%>");
13660 debug_tree (value
);
13662 error_found
= true;
13665 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13667 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
13669 error ("array %<TYPE_DOMAIN%> is not integer type");
13670 debug_tree (TYPE_DOMAIN (t
));
13671 error_found
= true;
13674 else if (RECORD_OR_UNION_TYPE_P (t
))
13676 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
13678 error ("%<TYPE_FIELDS%> defined in incomplete type");
13679 error_found
= true;
/* Every entry on TYPE_FIELDS must be one of the decl kinds below.  */
13681 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
13683 /* TODO: verify properties of decls. */
13684 if (TREE_CODE (fld
) == FIELD_DECL
)
13686 else if (TREE_CODE (fld
) == TYPE_DECL
)
13688 else if (TREE_CODE (fld
) == CONST_DECL
)
13690 else if (VAR_P (fld
))
13692 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
13694 else if (TREE_CODE (fld
) == USING_DECL
)
13696 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
13700 error ("wrong tree in %<TYPE_FIELDS%> list");
13702 error_found
= true;
/* TYPE_CACHED_VALUES is only meaningful for the scalar type codes
   listed here; its flag and pointer must agree.  */
13706 else if (TREE_CODE (t
) == INTEGER_TYPE
13707 || TREE_CODE (t
) == BOOLEAN_TYPE
13708 || TREE_CODE (t
) == OFFSET_TYPE
13709 || TREE_CODE (t
) == REFERENCE_TYPE
13710 || TREE_CODE (t
) == NULLPTR_TYPE
13711 || TREE_CODE (t
) == POINTER_TYPE
)
13713 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
13715 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13717 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
13718 error_found
= true;
13720 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
13722 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13723 debug_tree (TYPE_CACHED_VALUES (t
));
13724 error_found
= true;
13726 /* Verify just enough of cache to ensure that no one copied it to new type.
13727 All copying should go by copy_node that should clear it. */
13728 else if (TYPE_CACHED_VALUES_P (t
))
13731 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
13732 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
13733 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
13735 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13736 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
13737 error_found
= true;
13742 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13743 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
13745 /* C++ FE uses TREE_PURPOSE to store initial values. */
13746 if (TREE_PURPOSE (l
) && in_lto_p
)
13748 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13750 error_found
= true;
13752 if (!TYPE_P (TREE_VALUE (l
)))
13754 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13756 error_found
= true;
13759 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
13761 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13762 debug_tree (TYPE_VALUES_RAW (t
));
13763 error_found
= true;
13765 if (TREE_CODE (t
) != INTEGER_TYPE
13766 && TREE_CODE (t
) != BOOLEAN_TYPE
13767 && TREE_CODE (t
) != OFFSET_TYPE
13768 && TREE_CODE (t
) != REFERENCE_TYPE
13769 && TREE_CODE (t
) != NULLPTR_TYPE
13770 && TREE_CODE (t
) != POINTER_TYPE
13771 && TYPE_CACHED_VALUES_P (t
))
13773 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13774 error_found
= true;
13777 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13778 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13780 if (TREE_CODE (t
) == METHOD_TYPE
13781 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
13783 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13784 error_found
= true;
/* Any recorded failure is fatal: dump the offending type and abort.  */
13789 debug_tree (const_cast <tree
> (t
));
13790 internal_error ("%qs failed", __func__
);
13795 /* Return 1 if ARG interpreted as signed in its precision is known to be
13796 always positive or 2 if ARG is known to be always negative, or 3 if
13797 ARG may be positive or negative. */
/* NOTE(review): extraction dropped several physical lines here (the
   "return 1/2/3" statements and the iteration-count guard on the SSA
   walk).  Tokens below are kept verbatim; only comments are added.  */
13800 get_range_pos_neg (tree arg
)
/* error_mark_node carries no range information -> "may be either" (3).  */
13802 if (arg
== error_mark_node
)
13805 int prec
= TYPE_PRECISION (TREE_TYPE (arg
));
/* Constants: sign-extend to the type's precision and inspect the sign.  */
13807 if (TREE_CODE (arg
) == INTEGER_CST
)
13809 wide_int w
= wi::sext (wi::to_wide (arg
), prec
);
/* Peel off narrowing-safe conversions so range info of the inner value
   can be used.  */
13815 while (CONVERT_EXPR_P (arg
)
13816 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
13817 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg
, 0))) <= prec
)
13819 arg
= TREE_OPERAND (arg
, 0);
13820 /* Narrower value zero extended into wider type
13821 will always result in positive values. */
13822 if (TYPE_UNSIGNED (TREE_TYPE (arg
))
13823 && TYPE_PRECISION (TREE_TYPE (arg
)) < prec
)
13825 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
/* Only SSA names carry recorded value ranges.  */
13830 if (TREE_CODE (arg
) != SSA_NAME
)
13832 wide_int arg_min
, arg_max
;
/* Walk through conversion definitions until a recorded VR_RANGE is
   found (or the walk gives up).  */
13833 while (get_range_info (arg
, &arg_min
, &arg_max
) != VR_RANGE
)
13835 gimple
*g
= SSA_NAME_DEF_STMT (arg
);
13836 if (is_gimple_assign (g
)
13837 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g
)))
13839 tree t
= gimple_assign_rhs1 (g
);
13840 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
13841 && TYPE_PRECISION (TREE_TYPE (t
)) <= prec
)
13843 if (TYPE_UNSIGNED (TREE_TYPE (t
))
13844 && TYPE_PRECISION (TREE_TYPE (t
)) < prec
)
13846 prec
= TYPE_PRECISION (TREE_TYPE (t
));
13855 if (TYPE_UNSIGNED (TREE_TYPE (arg
)))
13857 /* For unsigned values, the "positive" range comes
13858 below the "negative" range. */
13859 if (!wi::neg_p (wi::sext (arg_max
, prec
), SIGNED
))
13861 if (wi::neg_p (wi::sext (arg_min
, prec
), SIGNED
))
/* Signed case: positive iff the minimum is non-negative, negative iff
   the maximum is negative.  */
13866 if (!wi::neg_p (wi::sext (arg_min
, prec
), SIGNED
))
13868 if (wi::neg_p (wi::sext (arg_max
, prec
), SIGNED
))
13877 /* Return true if ARG is marked with the nonnull attribute in the
13878 current function signature. */
/* NOTE(review): extraction dropped the "return true;"/"return false;"
   lines after each guard and parts of the argument-position loop; tokens
   below are preserved verbatim.  Relies on the global CFUN.  */
13881 nonnull_arg_p (const_tree arg
)
13883 tree t
, attrs
, fntype
;
13884 unsigned HOST_WIDE_INT arg_num
;
/* Only meaningful for pointer-ish PARM_DECLs.  */
13886 gcc_assert (TREE_CODE (arg
) == PARM_DECL
13887 && (POINTER_TYPE_P (TREE_TYPE (arg
))
13888 || TREE_CODE (TREE_TYPE (arg
)) == OFFSET_TYPE
));
13890 /* The static chain decl is always non null. */
13891 if (arg
== cfun
->static_chain_decl
)
13894 /* THIS argument of method is always non-NULL. */
13895 if (TREE_CODE (TREE_TYPE (cfun
->decl
)) == METHOD_TYPE
13896 && arg
== DECL_ARGUMENTS (cfun
->decl
)
13897 && flag_delete_null_pointer_checks
)
13900 /* Values passed by reference are always non-NULL. */
13901 if (TREE_CODE (TREE_TYPE (arg
)) == REFERENCE_TYPE
13902 && flag_delete_null_pointer_checks
)
/* Scan the function type's attribute list for "nonnull".  */
13905 fntype
= TREE_TYPE (cfun
->decl
);
13906 for (attrs
= TYPE_ATTRIBUTES (fntype
); attrs
; attrs
= TREE_CHAIN (attrs
))
13908 attrs
= lookup_attribute ("nonnull", attrs
);
13910 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13911 if (attrs
== NULL_TREE
)
13914 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13915 if (TREE_VALUE (attrs
) == NULL_TREE
)
13918 /* Get the position number for ARG in the function signature. */
13919 for (arg_num
= 1, t
= DECL_ARGUMENTS (cfun
->decl
);
13921 t
= DECL_CHAIN (t
), arg_num
++)
13927 gcc_assert (t
== arg
);
13929 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13930 for (t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
13932 if (compare_tree_int (TREE_VALUE (t
), arg_num
) == 0)
13940 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13944 set_block (location_t loc
, tree block
)
13946 location_t pure_loc
= get_pure_location (loc
);
13947 source_range src_range
= get_range_from_loc (line_table
, loc
);
13948 return COMBINE_LOCATION_DATA (line_table
, pure_loc
, src_range
, block
);
13952 set_source_range (tree expr
, location_t start
, location_t finish
)
13954 source_range src_range
;
13955 src_range
.m_start
= start
;
13956 src_range
.m_finish
= finish
;
13957 return set_source_range (expr
, src_range
);
13961 set_source_range (tree expr
, source_range src_range
)
13963 if (!EXPR_P (expr
))
13964 return UNKNOWN_LOCATION
;
13966 location_t pure_loc
= get_pure_location (EXPR_LOCATION (expr
));
13967 location_t adhoc
= COMBINE_LOCATION_DATA (line_table
,
13971 SET_EXPR_LOCATION (expr
, adhoc
);
13975 /* Return EXPR, potentially wrapped with a node expression LOC,
13976 if !CAN_HAVE_LOCATION_P (expr).
13978 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
13979 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
13981 Wrapper nodes can be identified using location_wrapper_p. */
/* NOTE(review): extraction dropped the early "return expr;" lines after
   each guard and the "tree_code code" declaration line; tokens below are
   preserved verbatim, with only comments added.  */
13984 maybe_wrap_with_location (tree expr
, location_t loc
)
/* No location to record -> nothing to wrap.  */
13988 if (loc
== UNKNOWN_LOCATION
)
/* EXPR can carry the location itself -> no wrapper needed.  */
13990 if (CAN_HAVE_LOCATION_P (expr
))
13992 /* We should only be adding wrappers for constants and for decls,
13993 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
13994 gcc_assert (CONSTANT_CLASS_P (expr
)
13996 || EXCEPTIONAL_CLASS_P (expr
));
13998 /* For now, don't add wrappers to exceptional tree nodes, to minimize
13999 any impact of the wrapper nodes. */
14000 if (EXCEPTIONAL_CLASS_P (expr
))
14003 /* Compiler-generated temporary variables don't need a wrapper. */
14004 if (DECL_P (expr
) && DECL_ARTIFICIAL (expr
) && DECL_IGNORED_P (expr
))
14007 /* If any auto_suppress_location_wrappers are active, don't create
14009 if (suppress_location_wrappers
> 0)
/* Pick the wrapper code: NON_LVALUE_EXPR for non-string constants and
   non-static CONST_DECLs, VIEW_CONVERT_EXPR otherwise.  */
14013 = (((CONSTANT_CLASS_P (expr
) && TREE_CODE (expr
) != STRING_CST
)
14014 || (TREE_CODE (expr
) == CONST_DECL
&& !TREE_STATIC (expr
)))
14015 ? NON_LVALUE_EXPR
: VIEW_CONVERT_EXPR
);
14016 tree wrapper
= build1_loc (loc
, code
, TREE_TYPE (expr
), expr
);
14017 /* Mark this node as being a wrapper. */
14018 EXPR_LOCATION_WRAPPER_P (wrapper
) = 1;
/* While positive, maybe_wrap_with_location (above) returns EXPR without
   creating a wrapper node.  Presumably incremented/decremented by
   auto_suppress_location_wrappers RAII objects -- confirm in tree.h.  */
14022 int suppress_location_wrappers
;
14024 /* Return the name of combined function FN, for debugging purposes. */
14027 combined_fn_name (combined_fn fn
)
14029 if (builtin_fn_p (fn
))
14031 tree fndecl
= builtin_decl_explicit (as_builtin_fn (fn
));
14032 return IDENTIFIER_POINTER (DECL_NAME (fndecl
));
14035 return internal_fn_name (as_internal_fn (fn
));
14038 /* Return a bitmap with a bit set corresponding to each argument in
14039 a function call type FNTYPE declared with attribute nonnull,
14040 or null if none of the function's argument are nonnull. The caller
14041 must free the bitmap. */
/* NOTE(review): extraction dropped the "return NULL;"/"break;" lines
   after the guards below; tokens are preserved verbatim.  Bit 0
   corresponds to the first argument (the "this" pointer for methods).  */
14044 get_nonnull_args (const_tree fntype
)
14046 if (fntype
== NULL_TREE
)
/* Lazily allocated: stays NULL unless some argument is nonnull.  */
14049 bitmap argmap
= NULL
;
14050 if (TREE_CODE (fntype
) == METHOD_TYPE
)
14052 /* The this pointer in C++ non-static member functions is
14053 implicitly nonnull whether or not it's declared as such. */
14054 argmap
= BITMAP_ALLOC (NULL
);
14055 bitmap_set_bit (argmap
, 0);
14058 tree attrs
= TYPE_ATTRIBUTES (fntype
);
14062 /* A function declaration can specify multiple attribute nonnull,
14063 each with zero or more arguments. The loop below creates a bitmap
14064 representing a union of all the arguments. An empty (but non-null)
14065 bitmap means that all arguments have been declaraed nonnull. */
14066 for ( ; attrs
; attrs
= TREE_CHAIN (attrs
))
14068 attrs
= lookup_attribute ("nonnull", attrs
);
14073 argmap
= BITMAP_ALLOC (NULL
);
/* Argument-less "nonnull" means every pointer argument is nonnull.  */
14075 if (!TREE_VALUE (attrs
))
14077 /* Clear the bitmap in case a previous attribute nonnull
14078 set it and this one overrides it for all arguments. */
14079 bitmap_clear (argmap
);
14083 /* Iterate over the indices of the format arguments declared nonnull
14084 and set a bit for each. */
14085 for (tree idx
= TREE_VALUE (attrs
); idx
; idx
= TREE_CHAIN (idx
))
/* Attribute indices are 1-based; the bitmap is 0-based.  */
14087 unsigned int val
= TREE_INT_CST_LOW (TREE_VALUE (idx
)) - 1;
14088 bitmap_set_bit (argmap
, val
);
14095 /* Returns true if TYPE is a type where it and all of its subobjects
14096 (recursively) are of structure, union, or array type. */
14099 is_empty_type (const_tree type
)
14101 if (RECORD_OR_UNION_TYPE_P (type
))
14103 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
14104 if (TREE_CODE (field
) == FIELD_DECL
14105 && !DECL_PADDING_P (field
)
14106 && !is_empty_type (TREE_TYPE (field
)))
14110 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14111 return (integer_minus_onep (array_type_nelts (type
))
14112 || TYPE_DOMAIN (type
) == NULL_TREE
14113 || is_empty_type (TREE_TYPE (type
)));
14117 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14118 that shouldn't be passed via stack. */
14121 default_is_empty_record (const_tree type
)
14123 if (!abi_version_at_least (12))
14126 if (type
== error_mark_node
)
14129 if (TREE_ADDRESSABLE (type
))
14132 return is_empty_type (TYPE_MAIN_VARIANT (type
));
14135 /* Determine whether TYPE is a structure with a flexible array member,
14136 or a union containing such a structure (possibly recursively). */
/* NOTE(review): extraction dropped the switch's case labels (presumably
   RECORD_TYPE for the first loop and UNION_TYPE/QUAL_UNION_TYPE for the
   second -- confirm against upstream) and the return statements; tokens
   below are preserved verbatim.  */
14139 flexible_array_type_p (const_tree type
)
14142 switch (TREE_CODE (type
))
/* Record case: find the LAST field and test whether it is an
   incomplete array with a domain but no max value (a C99 FAM).  */
14146 for (x
= TYPE_FIELDS (type
); x
!= NULL_TREE
; x
= DECL_CHAIN (x
))
14147 if (TREE_CODE (x
) == FIELD_DECL
)
14149 if (last
== NULL_TREE
)
14151 if (TREE_CODE (TREE_TYPE (last
)) == ARRAY_TYPE
14152 && TYPE_SIZE (TREE_TYPE (last
)) == NULL_TREE
14153 && TYPE_DOMAIN (TREE_TYPE (last
)) != NULL_TREE
14154 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last
))) == NULL_TREE
)
/* Union case: any member whose type is (recursively) flexible makes
   the union flexible.  */
14158 for (x
= TYPE_FIELDS (type
); x
!= NULL_TREE
; x
= DECL_CHAIN (x
))
14160 if (TREE_CODE (x
) == FIELD_DECL
14161 && flexible_array_type_p (TREE_TYPE (x
)))
14170 /* Like int_size_in_bytes, but handle empty records specially. */
14173 arg_int_size_in_bytes (const_tree type
)
14175 return TYPE_EMPTY_P (type
) ? 0 : int_size_in_bytes (type
);
14178 /* Like size_in_bytes, but handle empty records specially. */
14181 arg_size_in_bytes (const_tree type
)
14183 return TYPE_EMPTY_P (type
) ? size_zero_node
: size_in_bytes (type
);
14186 /* Return true if an expression with CODE has to have the same result type as
14187 its first operand. */
/* NOTE(review): extraction dropped the switch statement, the earlier case
   labels (original lines 14192-14202) and the true/false returns; only the
   division/modulo case labels survive below, preserved verbatim.  */
14190 expr_type_first_operand_type_p (tree_code code
)
/* All integer division and modulo variants yield their first operand's
   type.  */
14203 case TRUNC_DIV_EXPR
:
14204 case CEIL_DIV_EXPR
:
14205 case FLOOR_DIV_EXPR
:
14206 case ROUND_DIV_EXPR
:
14207 case TRUNC_MOD_EXPR
:
14208 case CEIL_MOD_EXPR
:
14209 case FLOOR_MOD_EXPR
:
14210 case ROUND_MOD_EXPR
:
14212 case EXACT_DIV_EXPR
:
14230 /* Return a typenode for the "standard" C type with a given name. */
14232 get_typenode_from_name (const char *name
)
14234 if (name
== NULL
|| *name
== '\0')
14237 if (strcmp (name
, "char") == 0)
14238 return char_type_node
;
14239 if (strcmp (name
, "unsigned char") == 0)
14240 return unsigned_char_type_node
;
14241 if (strcmp (name
, "signed char") == 0)
14242 return signed_char_type_node
;
14244 if (strcmp (name
, "short int") == 0)
14245 return short_integer_type_node
;
14246 if (strcmp (name
, "short unsigned int") == 0)
14247 return short_unsigned_type_node
;
14249 if (strcmp (name
, "int") == 0)
14250 return integer_type_node
;
14251 if (strcmp (name
, "unsigned int") == 0)
14252 return unsigned_type_node
;
14254 if (strcmp (name
, "long int") == 0)
14255 return long_integer_type_node
;
14256 if (strcmp (name
, "long unsigned int") == 0)
14257 return long_unsigned_type_node
;
14259 if (strcmp (name
, "long long int") == 0)
14260 return long_long_integer_type_node
;
14261 if (strcmp (name
, "long long unsigned int") == 0)
14262 return long_long_unsigned_type_node
;
14264 gcc_unreachable ();
14267 /* List of pointer types used to declare builtins before we have seen their
14270 Keep the size up to date in tree.h ! */
/* Each entry pairs a lazily-created struct-pointer type node with the
   generic pointer node used as a fallback, plus the struct tag name.  */
14271 const builtin_structptr_type builtin_structptr_types
[6] =
14273 { fileptr_type_node
, ptr_type_node
, "FILE" },
14274 { const_tm_ptr_type_node
, const_ptr_type_node
, "tm" },
14275 { fenv_t_ptr_type_node
, ptr_type_node
, "fenv_t" },
14276 { const_fenv_t_ptr_type_node
, const_ptr_type_node
, "fenv_t" },
14277 { fexcept_t_ptr_type_node
, ptr_type_node
, "fexcept_t" },
14278 { const_fexcept_t_ptr_type_node
, const_ptr_type_node
, "fexcept_t" }
14281 /* Return the maximum object size. */
14284 max_object_size (void)
14286 /* To do: Make this a configurable parameter. */
14287 return TYPE_MAX_VALUE (ptrdiff_type_node
);
14290 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14291 parameter default to false and that weeds out error_mark_node. */
14294 verify_type_context (location_t loc
, type_context_kind context
,
14295 const_tree type
, bool silent_p
)
14297 if (type
== error_mark_node
)
14300 gcc_assert (TYPE_P (type
));
14301 return (!targetm
.verify_type_context
14302 || targetm
.verify_type_context (loc
, context
, type
, silent_p
));
14305 /* Return that NEW_ASM and DELETE_ASM name a valid pair of new and
14306 delete operators. */
/* NOTE(review): extraction dropped the "return false;"/"return true;"
   lines between the checks below; tokens are preserved verbatim.  The
   checks follow the Itanium C++ ABI mangling of operator new/delete
   (_Znw*/_Zna* vs _Zdl*/_Zda*).  */
14309 valid_new_delete_pair_p (tree new_asm
, tree delete_asm
)
14311 const char *new_name
= IDENTIFIER_POINTER (new_asm
);
14312 const char *delete_name
= IDENTIFIER_POINTER (delete_asm
);
14313 unsigned int new_len
= IDENTIFIER_LENGTH (new_asm
);
14314 unsigned int delete_len
= IDENTIFIER_LENGTH (delete_asm
);
/* Shortest possible mangled names: _Znwj and _ZdlPv.  */
14316 if (new_len
< 5 || delete_len
< 6)
/* Skip up to two platform-specific leading underscores on each name.  */
14318 if (new_name
[0] == '_')
14319 ++new_name
, --new_len
;
14320 if (new_name
[0] == '_')
14321 ++new_name
, --new_len
;
14322 if (delete_name
[0] == '_')
14323 ++delete_name
, --delete_len
;
14324 if (delete_name
[0] == '_')
14325 ++delete_name
, --delete_len
;
14326 if (new_len
< 4 || delete_len
< 5)
14328 /* *_len is now just the length after initial underscores. */
14329 if (new_name
[0] != 'Z' || new_name
[1] != 'n')
14331 if (delete_name
[0] != 'Z' || delete_name
[1] != 'd')
14333 /* _Znw must match _Zdl, _Zna must match _Zda. */
14334 if ((new_name
[2] != 'w' || delete_name
[2] != 'l')
14335 && (new_name
[2] != 'a' || delete_name
[2] != 'a'))
14337 /* 'j', 'm' and 'y' correspond to size_t. */
14338 if (new_name
[3] != 'j' && new_name
[3] != 'm' && new_name
[3] != 'y')
14340 if (delete_name
[3] != 'P' || delete_name
[4] != 'v')
/* Plain (possibly nothrow) forms of operator new.  */
14343 || (new_len
== 18 && !memcmp (new_name
+ 4, "RKSt9nothrow_t", 14)))
14345 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14346 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14347 if (delete_len
== 5)
14349 if (delete_len
== 6 && delete_name
[5] == new_name
[3])
14351 if (delete_len
== 19 && !memcmp (delete_name
+ 5, "RKSt9nothrow_t", 14))
/* Aligned (possibly nothrow) forms of operator new.  */
14354 else if ((new_len
== 19 && !memcmp (new_name
+ 4, "St11align_val_t", 15))
14356 && !memcmp (new_name
+ 4, "St11align_val_tRKSt9nothrow_t", 29)))
14358 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14359 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or or
14360 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14361 if (delete_len
== 20 && !memcmp (delete_name
+ 5, "St11align_val_t", 15))
14363 if (delete_len
== 21
14364 && delete_name
[5] == new_name
[3]
14365 && !memcmp (delete_name
+ 6, "St11align_val_t", 15))
14367 if (delete_len
== 34
14368 && !memcmp (delete_name
+ 5, "St11align_val_tRKSt9nothrow_t", 29))
14376 namespace selftest
{
14378 /* Selftests for tree. */
14380 /* Verify that integer constants are sane. */
14383 test_integer_constants ()
14385 ASSERT_TRUE (integer_type_node
!= NULL
);
14386 ASSERT_TRUE (build_int_cst (integer_type_node
, 0) != NULL
);
14388 tree type
= integer_type_node
;
14390 tree zero
= build_zero_cst (type
);
14391 ASSERT_EQ (INTEGER_CST
, TREE_CODE (zero
));
14392 ASSERT_EQ (type
, TREE_TYPE (zero
));
14394 tree one
= build_int_cst (type
, 1);
14395 ASSERT_EQ (INTEGER_CST
, TREE_CODE (one
));
14396 ASSERT_EQ (type
, TREE_TYPE (zero
));
14399 /* Verify identifiers. */
14402 test_identifiers ()
14404 tree identifier
= get_identifier ("foo");
14405 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier
));
14406 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier
));
14409 /* Verify LABEL_DECL. */
14414 tree identifier
= get_identifier ("err");
14415 tree label_decl
= build_decl (UNKNOWN_LOCATION
, LABEL_DECL
,
14416 identifier
, void_type_node
);
14417 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl
));
14418 ASSERT_FALSE (FORCED_LABEL (label_decl
));
14421 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14422 are given by VALS. */
14425 build_vector (tree type
, vec
<tree
> vals MEM_STAT_DECL
)
14427 gcc_assert (known_eq (vals
.length (), TYPE_VECTOR_SUBPARTS (type
)));
14428 tree_vector_builder
builder (type
, vals
.length (), 1);
14429 builder
.splice (vals
);
14430 return builder
.build ();
14433 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14436 check_vector_cst (vec
<tree
> expected
, tree actual
)
14438 ASSERT_KNOWN_EQ (expected
.length (),
14439 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
14440 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
14441 ASSERT_EQ (wi::to_wide (expected
[i
]),
14442 wi::to_wide (vector_cst_elt (actual
, i
)));
14445 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14446 and that its elements match EXPECTED. */
14449 check_vector_cst_duplicate (vec
<tree
> expected
, tree actual
,
14450 unsigned int npatterns
)
14452 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14453 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14454 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
14455 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
14456 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14457 check_vector_cst (expected
, actual
);
14460 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14461 and NPATTERNS background elements, and that its elements match
14465 check_vector_cst_fill (vec
<tree
> expected
, tree actual
,
14466 unsigned int npatterns
)
14468 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14469 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14470 ASSERT_EQ (2 * npatterns
, vector_cst_encoded_nelts (actual
));
14471 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14472 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14473 check_vector_cst (expected
, actual
);
14476 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14477 and that its elements match EXPECTED. */
14480 check_vector_cst_stepped (vec
<tree
> expected
, tree actual
,
14481 unsigned int npatterns
)
14483 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14484 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14485 ASSERT_EQ (3 * npatterns
, vector_cst_encoded_nelts (actual
));
14486 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14487 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual
));
14488 check_vector_cst (expected
, actual
);
/* Test the creation of VECTOR_CSTs.  Each sub-test fills an 8-element
   vector of unsigned 16-bit integers, builds a VECTOR_CST from it, and
   checks that build_vector chose the expected compressed encoding
   (duplicate, fill or stepped) with the expected number of patterns.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  /* 16-bit unsigned elements, so arithmetic wraps at 65536.  */
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      /* Even positions step down by 4, odd positions step up by 6.  */
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place.  */

static void
check_strip_nops (tree node, tree expected)
{
  /* NODE is a by-value copy, so the caller's tree is unaffected by the
     in-place modification.  */
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}
/* Verify location wrappers: that maybe_wrap_with_location wraps the
   right kinds of node, that the wrappers record the given location, and
   that they can be stripped again.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* Wrapping NULL_TREE is a no-op.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  /* STRING_CST wrappers use VIEW_CONVERT_EXPR rather than NON_LVALUE_EXPR.  */
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));

  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results: each constant is tested both directly and via a
   wr_* location wrapper around it, and the two must agree.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  /* Integer constants 0, 1 and -1, plus wrapped versions.  */
  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  /* The same values as floats, plus wrapped versions.  */
  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants with the given real part and a zero imaginary
     part (no wrapped versions; wrappers aren't created for these here).  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  /* 1 + 0i counts as one.  */
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  /* -1 in two's complement is all-ones.  */
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  /* -1 + 0i counts as minus one.  */
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  Unlike integer_onep, 1 + 0i does NOT
     qualify: each part must be one.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  /* Real constants are never "integer nonzero".  */
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop, which accepts both integer and real zeros.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  /* Complex values have no sign, so they are never "nonnegative".  */
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
/* Check that string escaping works correctly, both with unlimited
   message length and with -fmessage-length set to a small value (where
   newlines are preserved rather than escaped).  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  /* NOTE(review): the extracted copy appears to be missing a line here
     (a gap in the original line numbering); upstream GCC calls
     test_labels () at this point — confirm against the full file.  */
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15023 } // namespace selftest
15025 #endif /* CHECKING_P */
15027 #include "gt-tree.h"