1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
64 #include "stringpool.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
79 /* Names of tree components.
80 Used for printing out the tree and error messages. */
81 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
82 #define END_OF_BASE_TREE_CODES "@dummy",
84 static const char *const tree_code_name
[] = {
85 #include "all-tree.def"
89 #undef END_OF_BASE_TREE_CODES
91 /* Each tree code class has an associated string representation.
92 These must correspond to the tree_code_class entries. */
94 const char *const tree_code_class_strings
[] =
109 /* obstack.[ch] explicitly declined to prototype this. */
110 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
112 /* Statistics-gathering stuff. */
114 static uint64_t tree_code_counts
[MAX_TREE_CODES
];
115 uint64_t tree_node_counts
[(int) all_kinds
];
116 uint64_t tree_node_sizes
[(int) all_kinds
];
118 /* Keep in sync with tree.h:enum tree_node_kind. */
119 static const char * const tree_node_kind_names
[] = {
138 /* Unique id for next decl created. */
139 static GTY(()) int next_decl_uid
;
140 /* Unique id for next type created. */
141 static GTY(()) unsigned next_type_uid
= 1;
142 /* Unique id for next debug decl created. Use negative numbers,
143 to catch erroneous uses. */
144 static GTY(()) int next_debug_decl_uid
;
146 /* Since we cannot rehash a type after it is in the table, we have to
147 keep the hash code. */
149 struct GTY((for_user
)) type_hash
{
154 /* Initial size of the hash table (rounded to next prime). */
155 #define TYPE_HASH_INITIAL_SIZE 1000
157 struct type_cache_hasher
: ggc_cache_ptr_hash
<type_hash
>
159 static hashval_t
hash (type_hash
*t
) { return t
->hash
; }
160 static bool equal (type_hash
*a
, type_hash
*b
);
163 keep_cache_entry (type_hash
*&t
)
165 return ggc_marked_p (t
->type
);
169 /* Now here is the hash table. When recording a type, it is added to
170 the slot whose index is the hash code. Note that the hash table is
171 used for several kinds of types (function types, array types and
172 array index range types, for now). While all these live in the
173 same table, they are completely independent, and the hash code is
174 computed differently for each of these. */
176 static GTY ((cache
)) hash_table
<type_cache_hasher
> *type_hash_table
;
178 /* Hash table and temporary node for larger integer const values. */
179 static GTY (()) tree int_cst_node
;
181 struct int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
183 static hashval_t
hash (tree t
);
184 static bool equal (tree x
, tree y
);
187 static GTY ((cache
)) hash_table
<int_cst_hasher
> *int_cst_hash_table
;
189 /* Class and variable for making sure that there is a single POLY_INT_CST
190 for a given value. */
191 struct poly_int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
193 typedef std::pair
<tree
, const poly_wide_int
*> compare_type
;
194 static hashval_t
hash (tree t
);
195 static bool equal (tree x
, const compare_type
&y
);
198 static GTY ((cache
)) hash_table
<poly_int_cst_hasher
> *poly_int_cst_hash_table
;
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatably. */
205 static GTY (()) tree cl_optimization_node
;
206 static GTY (()) tree cl_target_option_node
;
208 struct cl_option_hasher
: ggc_cache_ptr_hash
<tree_node
>
210 static hashval_t
hash (tree t
);
211 static bool equal (tree x
, tree y
);
214 static GTY ((cache
)) hash_table
<cl_option_hasher
> *cl_option_hash_table
;
216 /* General tree->tree mapping structure for use in hash tables. */
220 hash_table
<tree_decl_map_cache_hasher
> *debug_expr_for_decl
;
223 hash_table
<tree_decl_map_cache_hasher
> *value_expr_for_decl
;
226 hash_table
<tree_vec_map_cache_hasher
> *debug_args_for_decl
;
228 static void set_type_quals (tree
, int);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
233 tree global_trees
[TI_MAX
];
234 tree integer_types
[itk_none
];
236 bool int_n_enabled_p
[NUM_INT_N_ENTS
];
237 struct int_n_trees_t int_n_trees
[NUM_INT_N_ENTS
];
239 bool tree_contains_struct
[MAX_TREE_CODES
][64];
/* Number of operands for each OMP clause.  Indexed by enum
   omp_clause_code; must be kept in sync with tree-core.h.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_ENTER  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_HAS_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_DOACROSS  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  2, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
333 const char * const omp_clause_code_name
[] =
424 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
425 clause names, but for use in diagnostics etc. would like to use the "user"
429 user_omp_clause_code_name (tree clause
, bool oacc
)
431 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
432 distinguish clauses as seen by the user. See also where front ends do
433 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
434 if (oacc
&& OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
)
435 switch (OMP_CLAUSE_MAP_KIND (clause
))
437 case GOMP_MAP_FORCE_ALLOC
:
438 case GOMP_MAP_ALLOC
: return "create";
439 case GOMP_MAP_FORCE_TO
:
440 case GOMP_MAP_TO
: return "copyin";
441 case GOMP_MAP_FORCE_FROM
:
442 case GOMP_MAP_FROM
: return "copyout";
443 case GOMP_MAP_FORCE_TOFROM
:
444 case GOMP_MAP_TOFROM
: return "copy";
445 case GOMP_MAP_RELEASE
: return "delete";
446 case GOMP_MAP_FORCE_PRESENT
: return "present";
447 case GOMP_MAP_ATTACH
: return "attach";
448 case GOMP_MAP_FORCE_DETACH
:
449 case GOMP_MAP_DETACH
: return "detach";
450 case GOMP_MAP_DEVICE_RESIDENT
: return "device_resident";
451 case GOMP_MAP_LINK
: return "link";
452 case GOMP_MAP_FORCE_DEVICEPTR
: return "deviceptr";
456 return omp_clause_code_name
[OMP_CLAUSE_CODE (clause
)];
460 /* Return the tree node structure used by tree code CODE. */
462 static inline enum tree_node_structure_enum
463 tree_node_structure_for_code (enum tree_code code
)
465 switch (TREE_CODE_CLASS (code
))
467 case tcc_declaration
:
470 case CONST_DECL
: return TS_CONST_DECL
;
471 case DEBUG_EXPR_DECL
: return TS_DECL_WRTL
;
472 case FIELD_DECL
: return TS_FIELD_DECL
;
473 case FUNCTION_DECL
: return TS_FUNCTION_DECL
;
474 case LABEL_DECL
: return TS_LABEL_DECL
;
475 case PARM_DECL
: return TS_PARM_DECL
;
476 case RESULT_DECL
: return TS_RESULT_DECL
;
477 case TRANSLATION_UNIT_DECL
: return TS_TRANSLATION_UNIT_DECL
;
478 case TYPE_DECL
: return TS_TYPE_DECL
;
479 case VAR_DECL
: return TS_VAR_DECL
;
480 default: return TS_DECL_NON_COMMON
;
483 case tcc_type
: return TS_TYPE_NON_COMMON
;
491 case tcc_vl_exp
: return TS_EXP
;
493 default: /* tcc_constant and tcc_exceptional */
499 /* tcc_constant cases. */
500 case COMPLEX_CST
: return TS_COMPLEX
;
501 case FIXED_CST
: return TS_FIXED_CST
;
502 case INTEGER_CST
: return TS_INT_CST
;
503 case POLY_INT_CST
: return TS_POLY_INT_CST
;
504 case REAL_CST
: return TS_REAL_CST
;
505 case STRING_CST
: return TS_STRING
;
506 case VECTOR_CST
: return TS_VECTOR
;
507 case VOID_CST
: return TS_TYPED
;
509 /* tcc_exceptional cases. */
510 case BLOCK
: return TS_BLOCK
;
511 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
512 case ERROR_MARK
: return TS_COMMON
;
513 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
514 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
515 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
516 case PLACEHOLDER_EXPR
: return TS_COMMON
;
517 case SSA_NAME
: return TS_SSA_NAME
;
518 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
519 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
520 case TREE_BINFO
: return TS_BINFO
;
521 case TREE_LIST
: return TS_LIST
;
522 case TREE_VEC
: return TS_VEC
;
530 /* Initialize tree_contains_struct to describe the hierarchy of tree
534 initialize_tree_contains_struct (void)
538 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
541 enum tree_node_structure_enum ts_code
;
543 code
= (enum tree_code
) i
;
544 ts_code
= tree_node_structure_for_code (code
);
546 /* Mark the TS structure itself. */
547 tree_contains_struct
[code
][ts_code
] = 1;
549 /* Mark all the structures that TS is derived from. */
554 case TS_OPTIMIZATION
:
555 case TS_TARGET_OPTION
:
561 case TS_POLY_INT_CST
:
570 case TS_STATEMENT_LIST
:
571 MARK_TS_TYPED (code
);
575 case TS_DECL_MINIMAL
:
581 MARK_TS_COMMON (code
);
584 case TS_TYPE_WITH_LANG_SPECIFIC
:
585 MARK_TS_TYPE_COMMON (code
);
588 case TS_TYPE_NON_COMMON
:
589 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
593 MARK_TS_DECL_MINIMAL (code
);
598 MARK_TS_DECL_COMMON (code
);
601 case TS_DECL_NON_COMMON
:
602 MARK_TS_DECL_WITH_VIS (code
);
605 case TS_DECL_WITH_VIS
:
609 MARK_TS_DECL_WRTL (code
);
613 MARK_TS_DECL_COMMON (code
);
617 MARK_TS_DECL_WITH_VIS (code
);
621 case TS_FUNCTION_DECL
:
622 MARK_TS_DECL_NON_COMMON (code
);
625 case TS_TRANSLATION_UNIT_DECL
:
626 MARK_TS_DECL_COMMON (code
);
634 /* Basic consistency checks for attributes used in fold. */
635 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
636 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
637 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
638 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
639 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
640 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
641 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
642 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
643 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
644 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
645 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
646 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
647 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
648 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
649 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
650 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
651 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
652 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
653 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
654 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
655 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
656 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
657 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
658 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
659 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
660 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
661 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
662 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
663 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
664 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
665 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
666 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
667 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
668 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
669 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
670 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
671 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
672 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
673 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
674 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
683 /* Initialize the hash table of types. */
685 = hash_table
<type_cache_hasher
>::create_ggc (TYPE_HASH_INITIAL_SIZE
);
688 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
691 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
693 int_cst_hash_table
= hash_table
<int_cst_hasher
>::create_ggc (1024);
695 poly_int_cst_hash_table
= hash_table
<poly_int_cst_hasher
>::create_ggc (64);
697 int_cst_node
= make_int_cst (1, 1);
699 cl_option_hash_table
= hash_table
<cl_option_hasher
>::create_ggc (64);
701 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
702 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
704 /* Initialize the tree_contains_struct array. */
705 initialize_tree_contains_struct ();
706 lang_hooks
.init_ts ();
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
714 decl_assembler_name (tree decl
)
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
717 lang_hooks
.set_decl_assembler_name (decl
);
718 return DECL_ASSEMBLER_NAME_RAW (decl
);
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
726 overwrite_decl_assembler_name (tree decl
, tree name
)
728 if (DECL_ASSEMBLER_NAME_RAW (decl
) != name
)
729 lang_hooks
.overwrite_decl_assembler_name (decl
, name
);
732 /* Return true if DECL may need an assembler name to be set. */
735 need_assembler_name_p (tree decl
)
737 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
738 Rule merging. This makes type_odr_p to return true on those types during
739 LTO and by comparing the mangled name, we can say what types are intended
740 to be equivalent across compilation unit.
742 We do not store names of type_in_anonymous_namespace_p.
744 Record, union and enumeration type have linkage that allows use
745 to check type_in_anonymous_namespace_p. We do not mangle compound types
746 that always can be compared structurally.
748 Similarly for builtin types, we compare properties of their main variant.
749 A special case are integer types where mangling do make differences
750 between char/signed char/unsigned char etc. Storing name for these makes
751 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
752 See cp/mangle.cc:write_builtin_type for details. */
754 if (TREE_CODE (decl
) == TYPE_DECL
)
757 && decl
== TYPE_NAME (TREE_TYPE (decl
))
758 && TYPE_MAIN_VARIANT (TREE_TYPE (decl
)) == TREE_TYPE (decl
)
759 && !TYPE_ARTIFICIAL (TREE_TYPE (decl
))
760 && ((TREE_CODE (TREE_TYPE (decl
)) != RECORD_TYPE
761 && TREE_CODE (TREE_TYPE (decl
)) != UNION_TYPE
)
762 || TYPE_CXX_ODR_P (TREE_TYPE (decl
)))
763 && (type_with_linkage_p (TREE_TYPE (decl
))
764 || TREE_CODE (TREE_TYPE (decl
)) == INTEGER_TYPE
)
765 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
766 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
769 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
770 if (!VAR_OR_FUNCTION_DECL_P (decl
))
773 /* If DECL already has its assembler name set, it does not need a
775 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
776 || DECL_ASSEMBLER_NAME_SET_P (decl
))
779 /* Abstract decls do not need an assembler name. */
780 if (DECL_ABSTRACT_P (decl
))
783 /* For VAR_DECLs, only static, public and external symbols need an
786 && !TREE_STATIC (decl
)
787 && !TREE_PUBLIC (decl
)
788 && !DECL_EXTERNAL (decl
))
791 if (TREE_CODE (decl
) == FUNCTION_DECL
)
793 /* Do not set assembler name on builtins. Allow RTL expansion to
794 decide whether to expand inline or via a regular call. */
795 if (fndecl_built_in_p (decl
)
796 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
799 /* Functions represented in the callgraph need an assembler name. */
800 if (cgraph_node::get (decl
) != NULL
)
803 /* Unused and not public functions don't need an assembler name. */
804 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
811 /* If T needs an assembler name, have one created for it. */
814 assign_assembler_name_if_needed (tree t
)
816 if (need_assembler_name_p (t
))
818 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
819 diagnostics that use input_location to show locus
820 information. The problem here is that, at this point,
821 input_location is generally anchored to the end of the file
822 (since the parser is long gone), so we don't have a good
823 position to pin it to.
825 To alleviate this problem, this uses the location of T's
826 declaration. Examples of this are
827 testsuite/g++.dg/template/cond2.C and
828 testsuite/g++.dg/template/pr35240.C. */
829 location_t saved_location
= input_location
;
830 input_location
= DECL_SOURCE_LOCATION (t
);
832 decl_assembler_name (t
);
834 input_location
= saved_location
;
838 /* When the target supports COMDAT groups, this indicates which group the
839 DECL is associated with. This can be either an IDENTIFIER_NODE or a
840 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
842 decl_comdat_group (const_tree node
)
844 struct symtab_node
*snode
= symtab_node::get (node
);
847 return snode
->get_comdat_group ();
850 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
852 decl_comdat_group_id (const_tree node
)
854 struct symtab_node
*snode
= symtab_node::get (node
);
857 return snode
->get_comdat_group_id ();
860 /* When the target supports named section, return its name as IDENTIFIER_NODE
861 or NULL if it is in no section. */
863 decl_section_name (const_tree node
)
865 struct symtab_node
*snode
= symtab_node::get (node
);
868 return snode
->get_section ();
871 /* Set section name of NODE to VALUE (that is expected to be
874 set_decl_section_name (tree node
, const char *value
)
876 struct symtab_node
*snode
;
880 snode
= symtab_node::get (node
);
884 else if (VAR_P (node
))
885 snode
= varpool_node::get_create (node
);
887 snode
= cgraph_node::get_create (node
);
888 snode
->set_section (value
);
891 /* Set section name of NODE to match the section name of OTHER.
893 set_decl_section_name (decl, other) is equivalent to
894 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
897 set_decl_section_name (tree decl
, const_tree other
)
899 struct symtab_node
*other_node
= symtab_node::get (other
);
902 struct symtab_node
*decl_node
;
904 decl_node
= varpool_node::get_create (decl
);
906 decl_node
= cgraph_node::get_create (decl
);
907 decl_node
->set_section (*other_node
);
911 struct symtab_node
*decl_node
= symtab_node::get (decl
);
914 decl_node
->set_section (NULL
);
918 /* Return TLS model of a variable NODE. */
920 decl_tls_model (const_tree node
)
922 struct varpool_node
*snode
= varpool_node::get (node
);
924 return TLS_MODEL_NONE
;
925 return snode
->tls_model
;
928 /* Set TLS model of variable NODE to MODEL. */
930 set_decl_tls_model (tree node
, enum tls_model model
)
932 struct varpool_node
*vnode
;
934 if (model
== TLS_MODEL_NONE
)
936 vnode
= varpool_node::get (node
);
941 vnode
= varpool_node::get_create (node
);
942 vnode
->tls_model
= model
;
945 /* Compute the number of bytes occupied by a tree with code CODE.
946 This function cannot be used for nodes that have variable sizes,
947 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
949 tree_code_size (enum tree_code code
)
951 switch (TREE_CODE_CLASS (code
))
953 case tcc_declaration
: /* A decl node */
956 case FIELD_DECL
: return sizeof (tree_field_decl
);
957 case PARM_DECL
: return sizeof (tree_parm_decl
);
958 case VAR_DECL
: return sizeof (tree_var_decl
);
959 case LABEL_DECL
: return sizeof (tree_label_decl
);
960 case RESULT_DECL
: return sizeof (tree_result_decl
);
961 case CONST_DECL
: return sizeof (tree_const_decl
);
962 case TYPE_DECL
: return sizeof (tree_type_decl
);
963 case FUNCTION_DECL
: return sizeof (tree_function_decl
);
964 case DEBUG_EXPR_DECL
: return sizeof (tree_decl_with_rtl
);
965 case TRANSLATION_UNIT_DECL
: return sizeof (tree_translation_unit_decl
);
968 case NAMELIST_DECL
: return sizeof (tree_decl_non_common
);
970 gcc_checking_assert (code
>= NUM_TREE_CODES
);
971 return lang_hooks
.tree_size (code
);
974 case tcc_type
: /* a type node */
986 case FIXED_POINT_TYPE
:
992 case QUAL_UNION_TYPE
:
996 case LANG_TYPE
: return sizeof (tree_type_non_common
);
998 gcc_checking_assert (code
>= NUM_TREE_CODES
);
999 return lang_hooks
.tree_size (code
);
1002 case tcc_reference
: /* a reference */
1003 case tcc_expression
: /* an expression */
1004 case tcc_statement
: /* an expression with side effects */
1005 case tcc_comparison
: /* a comparison expression */
1006 case tcc_unary
: /* a unary arithmetic expression */
1007 case tcc_binary
: /* a binary arithmetic expression */
1008 return (sizeof (struct tree_exp
)
1009 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
1011 case tcc_constant
: /* a constant */
1014 case VOID_CST
: return sizeof (tree_typed
);
1015 case INTEGER_CST
: gcc_unreachable ();
1016 case POLY_INT_CST
: return sizeof (tree_poly_int_cst
);
1017 case REAL_CST
: return sizeof (tree_real_cst
);
1018 case FIXED_CST
: return sizeof (tree_fixed_cst
);
1019 case COMPLEX_CST
: return sizeof (tree_complex
);
1020 case VECTOR_CST
: gcc_unreachable ();
1021 case STRING_CST
: gcc_unreachable ();
1023 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1024 return lang_hooks
.tree_size (code
);
1027 case tcc_exceptional
: /* something random, like an identifier. */
1030 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
1031 case TREE_LIST
: return sizeof (tree_list
);
1034 case PLACEHOLDER_EXPR
: return sizeof (tree_common
);
1036 case TREE_VEC
: gcc_unreachable ();
1037 case OMP_CLAUSE
: gcc_unreachable ();
1039 case SSA_NAME
: return sizeof (tree_ssa_name
);
1041 case STATEMENT_LIST
: return sizeof (tree_statement_list
);
1042 case BLOCK
: return sizeof (struct tree_block
);
1043 case CONSTRUCTOR
: return sizeof (tree_constructor
);
1044 case OPTIMIZATION_NODE
: return sizeof (tree_optimization_option
);
1045 case TARGET_OPTION_NODE
: return sizeof (tree_target_option
);
1048 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1049 return lang_hooks
.tree_size (code
);
1057 /* Compute the number of bytes occupied by NODE. This routine only
1058 looks at TREE_CODE, except for those nodes that have variable sizes. */
1060 tree_size (const_tree node
)
1062 const enum tree_code code
= TREE_CODE (node
);
1066 return (sizeof (struct tree_int_cst
)
1067 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
1070 return (offsetof (struct tree_binfo
, base_binfos
)
1072 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
1075 return (sizeof (struct tree_vec
)
1076 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
1079 return (sizeof (struct tree_vector
)
1080 + (vector_cst_encoded_nelts (node
) - 1) * sizeof (tree
));
1083 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
1086 return (sizeof (struct tree_omp_clause
)
1087 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
1091 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
1092 return (sizeof (struct tree_exp
)
1093 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
1095 return tree_code_size (code
);
1099 /* Return tree node kind based on tree CODE. */
1101 static tree_node_kind
1102 get_stats_node_kind (enum tree_code code
)
1104 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1108 case tcc_declaration
: /* A decl node */
1110 case tcc_type
: /* a type node */
1112 case tcc_statement
: /* an expression with side effects */
1114 case tcc_reference
: /* a reference */
1116 case tcc_expression
: /* an expression */
1117 case tcc_comparison
: /* a comparison expression */
1118 case tcc_unary
: /* a unary arithmetic expression */
1119 case tcc_binary
: /* a binary arithmetic expression */
1121 case tcc_constant
: /* a constant */
1123 case tcc_exceptional
: /* something random, like an identifier. */
1126 case IDENTIFIER_NODE
:
1133 return ssa_name_kind
;
1139 return omp_clause_kind
;
1151 /* Record interesting allocation statistics for a tree node with CODE
1155 record_node_allocation_statistics (enum tree_code code
, size_t length
)
1157 if (!GATHER_STATISTICS
)
1160 tree_node_kind kind
= get_stats_node_kind (code
);
1162 tree_code_counts
[(int) code
]++;
1163 tree_node_counts
[(int) kind
]++;
1164 tree_node_sizes
[(int) kind
] += length
;
1167 /* Allocate and return a new UID from the DECL_UID namespace. */
1170 allocate_decl_uid (void)
1172 return next_decl_uid
++;
1175 /* Return a newly allocated node of code CODE. For decl and type
1176 nodes, some other fields are initialized. The rest of the node is
1177 initialized to zero. This function cannot be used for TREE_VEC,
1178 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1181 Achoo! I got a code in the node. */
1184 make_node (enum tree_code code MEM_STAT_DECL
)
1187 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1188 size_t length
= tree_code_size (code
);
1190 record_node_allocation_statistics (code
, length
);
1192 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1193 TREE_SET_CODE (t
, code
);
1198 if (code
!= DEBUG_BEGIN_STMT
)
1199 TREE_SIDE_EFFECTS (t
) = 1;
1202 case tcc_declaration
:
1203 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
1205 if (code
== FUNCTION_DECL
)
1207 SET_DECL_ALIGN (t
, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
));
1208 SET_DECL_MODE (t
, FUNCTION_MODE
);
1211 SET_DECL_ALIGN (t
, 1);
1213 DECL_SOURCE_LOCATION (t
) = input_location
;
1214 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
1215 DECL_UID (t
) = --next_debug_decl_uid
;
1218 DECL_UID (t
) = allocate_decl_uid ();
1219 SET_DECL_PT_UID (t
, -1);
1221 if (TREE_CODE (t
) == LABEL_DECL
)
1222 LABEL_DECL_UID (t
) = -1;
1227 TYPE_UID (t
) = next_type_uid
++;
1228 SET_TYPE_ALIGN (t
, BITS_PER_UNIT
);
1229 TYPE_USER_ALIGN (t
) = 0;
1230 TYPE_MAIN_VARIANT (t
) = t
;
1231 TYPE_CANONICAL (t
) = t
;
1233 /* Default to no attributes for type, but let target change that. */
1234 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
1235 targetm
.set_default_type_attributes (t
);
1237 /* We have not yet computed the alias set for this type. */
1238 TYPE_ALIAS_SET (t
) = -1;
1242 TREE_CONSTANT (t
) = 1;
1245 case tcc_expression
:
1251 case PREDECREMENT_EXPR
:
1252 case PREINCREMENT_EXPR
:
1253 case POSTDECREMENT_EXPR
:
1254 case POSTINCREMENT_EXPR
:
1255 /* All of these have side-effects, no matter what their
1257 TREE_SIDE_EFFECTS (t
) = 1;
1265 case tcc_exceptional
:
1268 case TARGET_OPTION_NODE
:
1269 TREE_TARGET_OPTION(t
)
1270 = ggc_cleared_alloc
<struct cl_target_option
> ();
1273 case OPTIMIZATION_NODE
:
1274 TREE_OPTIMIZATION (t
)
1275 = ggc_cleared_alloc
<struct cl_optimization
> ();
1284 /* Other classes need no special treatment. */
1291 /* Free tree node. */
1294 free_node (tree node
)
1296 enum tree_code code
= TREE_CODE (node
);
1297 if (GATHER_STATISTICS
)
1299 enum tree_node_kind kind
= get_stats_node_kind (code
);
1301 gcc_checking_assert (tree_code_counts
[(int) TREE_CODE (node
)] != 0);
1302 gcc_checking_assert (tree_node_counts
[(int) kind
] != 0);
1303 gcc_checking_assert (tree_node_sizes
[(int) kind
] >= tree_size (node
));
1305 tree_code_counts
[(int) TREE_CODE (node
)]--;
1306 tree_node_counts
[(int) kind
]--;
1307 tree_node_sizes
[(int) kind
] -= tree_size (node
);
1309 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
1310 vec_free (CONSTRUCTOR_ELTS (node
));
1311 else if (code
== BLOCK
)
1312 vec_free (BLOCK_NONLOCALIZED_VARS (node
));
1313 else if (code
== TREE_BINFO
)
1314 vec_free (BINFO_BASE_ACCESSES (node
));
1315 else if (code
== OPTIMIZATION_NODE
)
1316 cl_optimization_option_free (TREE_OPTIMIZATION (node
));
1317 else if (code
== TARGET_OPTION_NODE
)
1318 cl_target_option_free (TREE_TARGET_OPTION (node
));
1322 /* Return a new node with the same contents as NODE except that its
1323 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1326 copy_node (tree node MEM_STAT_DECL
)
1329 enum tree_code code
= TREE_CODE (node
);
1332 gcc_assert (code
!= STATEMENT_LIST
);
1334 length
= tree_size (node
);
1335 record_node_allocation_statistics (code
, length
);
1336 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1337 memcpy (t
, node
, length
);
1339 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1341 TREE_ASM_WRITTEN (t
) = 0;
1342 TREE_VISITED (t
) = 0;
1344 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1346 if (code
== DEBUG_EXPR_DECL
)
1347 DECL_UID (t
) = --next_debug_decl_uid
;
1350 DECL_UID (t
) = allocate_decl_uid ();
1351 if (DECL_PT_UID_SET_P (node
))
1352 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1354 if ((TREE_CODE (node
) == PARM_DECL
|| VAR_P (node
))
1355 && DECL_HAS_VALUE_EXPR_P (node
))
1357 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1358 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1360 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1363 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1364 t
->decl_with_vis
.symtab_node
= NULL
;
1366 if (VAR_P (node
) && DECL_HAS_INIT_PRIORITY_P (node
))
1368 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1369 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1371 if (TREE_CODE (node
) == FUNCTION_DECL
)
1373 DECL_STRUCT_FUNCTION (t
) = NULL
;
1374 t
->decl_with_vis
.symtab_node
= NULL
;
1377 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1379 TYPE_UID (t
) = next_type_uid
++;
1380 /* The following is so that the debug code for
1381 the copy is different from the original type.
1382 The two statements usually duplicate each other
1383 (because they clear fields of the same union),
1384 but the optimizer should catch that. */
1385 TYPE_SYMTAB_ADDRESS (t
) = 0;
1386 TYPE_SYMTAB_DIE (t
) = 0;
1388 /* Do not copy the values cache. */
1389 if (TYPE_CACHED_VALUES_P (t
))
1391 TYPE_CACHED_VALUES_P (t
) = 0;
1392 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1395 else if (code
== TARGET_OPTION_NODE
)
1397 TREE_TARGET_OPTION (t
) = ggc_alloc
<struct cl_target_option
>();
1398 memcpy (TREE_TARGET_OPTION (t
), TREE_TARGET_OPTION (node
),
1399 sizeof (struct cl_target_option
));
1401 else if (code
== OPTIMIZATION_NODE
)
1403 TREE_OPTIMIZATION (t
) = ggc_alloc
<struct cl_optimization
>();
1404 memcpy (TREE_OPTIMIZATION (t
), TREE_OPTIMIZATION (node
),
1405 sizeof (struct cl_optimization
));
1411 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1412 For example, this can copy a list made of TREE_LIST nodes. */
1415 copy_list (tree list
)
1423 head
= prev
= copy_node (list
);
1424 next
= TREE_CHAIN (list
);
1427 TREE_CHAIN (prev
) = copy_node (next
);
1428 prev
= TREE_CHAIN (prev
);
1429 next
= TREE_CHAIN (next
);
1435 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1436 INTEGER_CST with value CST and type TYPE. */
1439 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1441 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1442 /* We need extra HWIs if CST is an unsigned integer with its
1444 if (TYPE_UNSIGNED (type
) && wi::neg_p (cst
))
1445 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1446 return cst
.get_len ();
1449 /* Return a new INTEGER_CST with value CST and type TYPE. */
1452 build_new_int_cst (tree type
, const wide_int
&cst
)
1454 unsigned int len
= cst
.get_len ();
1455 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1456 tree nt
= make_int_cst (len
, ext_len
);
1461 TREE_INT_CST_ELT (nt
, ext_len
)
1462 = zext_hwi (-1, cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1463 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1464 TREE_INT_CST_ELT (nt
, i
) = -1;
1466 else if (TYPE_UNSIGNED (type
)
1467 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1470 TREE_INT_CST_ELT (nt
, len
)
1471 = zext_hwi (cst
.elt (len
),
1472 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1475 for (unsigned int i
= 0; i
< len
; i
++)
1476 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1477 TREE_TYPE (nt
) = type
;
1481 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1484 build_new_poly_int_cst (tree type
, tree (&coeffs
)[NUM_POLY_INT_COEFFS
]
1487 size_t length
= sizeof (struct tree_poly_int_cst
);
1488 record_node_allocation_statistics (POLY_INT_CST
, length
);
1490 tree t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1492 TREE_SET_CODE (t
, POLY_INT_CST
);
1493 TREE_CONSTANT (t
) = 1;
1494 TREE_TYPE (t
) = type
;
1495 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1496 POLY_INT_CST_COEFF (t
, i
) = coeffs
[i
];
1500 /* Create a constant tree that contains CST sign-extended to TYPE. */
1503 build_int_cst (tree type
, poly_int64 cst
)
1505 /* Support legacy code. */
1507 type
= integer_type_node
;
1509 return wide_int_to_tree (type
, wi::shwi (cst
, TYPE_PRECISION (type
)));
1512 /* Create a constant tree that contains CST zero-extended to TYPE. */
1515 build_int_cstu (tree type
, poly_uint64 cst
)
1517 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1520 /* Create a constant tree that contains CST sign-extended to TYPE. */
1523 build_int_cst_type (tree type
, poly_int64 cst
)
1526 return wide_int_to_tree (type
, wi::shwi (cst
, TYPE_PRECISION (type
)));
1529 /* Constructs tree in type TYPE from with value given by CST. Signedness
1530 of CST is assumed to be the same as the signedness of TYPE. */
1533 double_int_to_tree (tree type
, double_int cst
)
1535 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1538 /* We force the wide_int CST to the range of the type TYPE by sign or
1539 zero extending it. OVERFLOWABLE indicates if we are interested in
1540 overflow of the value, when >0 we are only interested in signed
1541 overflow, for <0 we are interested in any overflow. OVERFLOWED
1542 indicates whether overflow has already occurred. CONST_OVERFLOWED
1543 indicates whether constant overflow has already occurred. We force
1544 T's value to be within range of T's type (by setting to 0 or 1 all
1545 the bits outside the type's range). We set TREE_OVERFLOWED if,
1546 OVERFLOWED is nonzero,
1547 or OVERFLOWABLE is >0 and signed overflow occurs
1548 or OVERFLOWABLE is <0 and any overflow occurs
1549 We return a new tree node for the extended wide_int. The node
1550 is shared if no overflow flags are set. */
1554 force_fit_type (tree type
, const poly_wide_int_ref
&cst
,
1555 int overflowable
, bool overflowed
)
1557 signop sign
= TYPE_SIGN (type
);
1559 /* If we need to set overflow flags, return a new unshared node. */
1560 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1564 || (overflowable
> 0 && sign
== SIGNED
))
1566 poly_wide_int tmp
= poly_wide_int::from (cst
, TYPE_PRECISION (type
),
1569 if (tmp
.is_constant ())
1570 t
= build_new_int_cst (type
, tmp
.coeffs
[0]);
1573 tree coeffs
[NUM_POLY_INT_COEFFS
];
1574 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1576 coeffs
[i
] = build_new_int_cst (type
, tmp
.coeffs
[i
]);
1577 TREE_OVERFLOW (coeffs
[i
]) = 1;
1579 t
= build_new_poly_int_cst (type
, coeffs
);
1581 TREE_OVERFLOW (t
) = 1;
1586 /* Else build a shared node. */
1587 return wide_int_to_tree (type
, cst
);
1590 /* These are the hash table functions for the hash table of INTEGER_CST
1591 nodes of a sizetype. */
1593 /* Return the hash code X, an INTEGER_CST. */
1596 int_cst_hasher::hash (tree x
)
1598 const_tree
const t
= x
;
1599 hashval_t code
= TYPE_UID (TREE_TYPE (t
));
1602 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1603 code
= iterative_hash_host_wide_int (TREE_INT_CST_ELT(t
, i
), code
);
1608 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1609 is the same as that given by *Y, which is the same. */
1612 int_cst_hasher::equal (tree x
, tree y
)
1614 const_tree
const xt
= x
;
1615 const_tree
const yt
= y
;
1617 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1618 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1619 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1622 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1623 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1629 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1630 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1631 number of slots that can be cached for the type. */
1634 cache_wide_int_in_type_cache (tree type
, const wide_int
&cst
,
1635 int slot
, int max_slots
)
1637 gcc_checking_assert (slot
>= 0);
1638 /* Initialize cache. */
1639 if (!TYPE_CACHED_VALUES_P (type
))
1641 TYPE_CACHED_VALUES_P (type
) = 1;
1642 TYPE_CACHED_VALUES (type
) = make_tree_vec (max_slots
);
1644 tree t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), slot
);
1647 /* Create a new shared int. */
1648 t
= build_new_int_cst (type
, cst
);
1649 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), slot
) = t
;
1654 /* Create an INT_CST node of TYPE and value CST.
1655 The returned node is always shared. For small integers we use a
1656 per-type vector cache, for larger ones we use a single hash table.
1657 The value is extended from its precision according to the sign of
1658 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1659 the upper bits and ensures that hashing and value equality based
1660 upon the underlying HOST_WIDE_INTs works without masking. */
1663 wide_int_to_tree_1 (tree type
, const wide_int_ref
&pcst
)
1670 unsigned int prec
= TYPE_PRECISION (type
);
1671 signop sgn
= TYPE_SIGN (type
);
1673 /* Verify that everything is canonical. */
1674 int l
= pcst
.get_len ();
1677 if (pcst
.elt (l
- 1) == 0)
1678 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1679 if (pcst
.elt (l
- 1) == HOST_WIDE_INT_M1
)
1680 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1683 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1684 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1686 enum tree_code code
= TREE_CODE (type
);
1687 if (code
== POINTER_TYPE
|| code
== REFERENCE_TYPE
)
1689 /* Cache NULL pointer and zero bounds. */
1692 /* Cache upper bounds of pointers. */
1693 else if (cst
== wi::max_value (prec
, sgn
))
1695 /* Cache 1 which is used for a non-zero range. */
1701 t
= cache_wide_int_in_type_cache (type
, cst
, ix
, 3);
1702 /* Make sure no one is clobbering the shared constant. */
1703 gcc_checking_assert (TREE_TYPE (t
) == type
1704 && cst
== wi::to_wide (t
));
1710 /* We just need to store a single HOST_WIDE_INT. */
1712 if (TYPE_UNSIGNED (type
))
1713 hwi
= cst
.to_uhwi ();
1715 hwi
= cst
.to_shwi ();
1720 gcc_assert (hwi
== 0);
1724 case REFERENCE_TYPE
:
1725 /* Ignore pointers, as they were already handled above. */
1729 /* Cache false or true. */
1731 if (IN_RANGE (hwi
, 0, 1))
1737 if (TYPE_SIGN (type
) == UNSIGNED
)
1740 limit
= param_integer_share_limit
;
1741 if (IN_RANGE (hwi
, 0, param_integer_share_limit
- 1))
1746 /* Cache [-1, N). */
1747 limit
= param_integer_share_limit
+ 1;
1748 if (IN_RANGE (hwi
, -1, param_integer_share_limit
- 1))
1762 t
= cache_wide_int_in_type_cache (type
, cst
, ix
, limit
);
1763 /* Make sure no one is clobbering the shared constant. */
1764 gcc_checking_assert (TREE_TYPE (t
) == type
1765 && TREE_INT_CST_NUNITS (t
) == 1
1766 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1767 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1768 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1773 /* Use the cache of larger shared ints, using int_cst_node as
1776 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1777 TREE_TYPE (int_cst_node
) = type
;
1779 tree
*slot
= int_cst_hash_table
->find_slot (int_cst_node
, INSERT
);
1783 /* Insert this one into the hash table. */
1786 /* Make a new node for next time round. */
1787 int_cst_node
= make_int_cst (1, 1);
1793 /* The value either hashes properly or we drop it on the floor
1794 for the gc to take care of. There will not be enough of them
1797 tree nt
= build_new_int_cst (type
, cst
);
1798 tree
*slot
= int_cst_hash_table
->find_slot (nt
, INSERT
);
1802 /* Insert this one into the hash table. */
1814 poly_int_cst_hasher::hash (tree t
)
1816 inchash::hash hstate
;
1818 hstate
.add_int (TYPE_UID (TREE_TYPE (t
)));
1819 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1820 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
1822 return hstate
.end ();
1826 poly_int_cst_hasher::equal (tree x
, const compare_type
&y
)
1828 if (TREE_TYPE (x
) != y
.first
)
1830 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1831 if (wi::to_wide (POLY_INT_CST_COEFF (x
, i
)) != y
.second
->coeffs
[i
])
1836 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1837 The elements must also have type TYPE. */
1840 build_poly_int_cst (tree type
, const poly_wide_int_ref
&values
)
1842 unsigned int prec
= TYPE_PRECISION (type
);
1843 gcc_assert (prec
<= values
.coeffs
[0].get_precision ());
1844 poly_wide_int c
= poly_wide_int::from (values
, prec
, SIGNED
);
1847 h
.add_int (TYPE_UID (type
));
1848 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1849 h
.add_wide_int (c
.coeffs
[i
]);
1850 poly_int_cst_hasher::compare_type
comp (type
, &c
);
1851 tree
*slot
= poly_int_cst_hash_table
->find_slot_with_hash (comp
, h
.end (),
1853 if (*slot
== NULL_TREE
)
1855 tree coeffs
[NUM_POLY_INT_COEFFS
];
1856 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1857 coeffs
[i
] = wide_int_to_tree_1 (type
, c
.coeffs
[i
]);
1858 *slot
= build_new_poly_int_cst (type
, coeffs
);
1863 /* Create a constant tree with value VALUE in type TYPE. */
1866 wide_int_to_tree (tree type
, const poly_wide_int_ref
&value
)
1868 if (value
.is_constant ())
1869 return wide_int_to_tree_1 (type
, value
.coeffs
[0]);
1870 return build_poly_int_cst (type
, value
);
1873 /* Insert INTEGER_CST T into a cache of integer constants. And return
1874 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1875 is false, and T falls into the type's 'smaller values' range, there
1876 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1877 or the value is large, should an existing entry exist, it is
1878 returned (rather than inserting T). */
1881 cache_integer_cst (tree t
, bool might_duplicate ATTRIBUTE_UNUSED
)
1883 tree type
= TREE_TYPE (t
);
1886 int prec
= TYPE_PRECISION (type
);
1888 gcc_assert (!TREE_OVERFLOW (t
));
1890 /* The caching indices here must match those in
1891 wide_int_to_type_1. */
1892 switch (TREE_CODE (type
))
1895 gcc_checking_assert (integer_zerop (t
));
1899 case REFERENCE_TYPE
:
1901 if (integer_zerop (t
))
1903 else if (integer_onep (t
))
1912 /* Cache false or true. */
1914 if (wi::ltu_p (wi::to_wide (t
), 2))
1915 ix
= TREE_INT_CST_ELT (t
, 0);
1920 if (TYPE_UNSIGNED (type
))
1923 limit
= param_integer_share_limit
;
1925 /* This is a little hokie, but if the prec is smaller than
1926 what is necessary to hold param_integer_share_limit, then the
1927 obvious test will not get the correct answer. */
1928 if (prec
< HOST_BITS_PER_WIDE_INT
)
1930 if (tree_to_uhwi (t
)
1931 < (unsigned HOST_WIDE_INT
) param_integer_share_limit
)
1932 ix
= tree_to_uhwi (t
);
1934 else if (wi::ltu_p (wi::to_wide (t
), param_integer_share_limit
))
1935 ix
= tree_to_uhwi (t
);
1940 limit
= param_integer_share_limit
+ 1;
1942 if (integer_minus_onep (t
))
1944 else if (!wi::neg_p (wi::to_wide (t
)))
1946 if (prec
< HOST_BITS_PER_WIDE_INT
)
1948 if (tree_to_shwi (t
) < param_integer_share_limit
)
1949 ix
= tree_to_shwi (t
) + 1;
1951 else if (wi::ltu_p (wi::to_wide (t
), param_integer_share_limit
))
1952 ix
= tree_to_shwi (t
) + 1;
1958 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1968 /* Look for it in the type's vector of small shared ints. */
1969 if (!TYPE_CACHED_VALUES_P (type
))
1971 TYPE_CACHED_VALUES_P (type
) = 1;
1972 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1975 if (tree r
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
))
1977 gcc_checking_assert (might_duplicate
);
1981 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1985 /* Use the cache of larger shared ints. */
1986 tree
*slot
= int_cst_hash_table
->find_slot (t
, INSERT
);
1989 /* If there is already an entry for the number verify it's the
1991 gcc_checking_assert (wi::to_wide (tree (r
)) == wi::to_wide (t
));
1992 /* And return the cached value. */
1996 /* Otherwise insert this one into the hash table. */
2004 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2005 and the rest are zeros. */
2008 build_low_bits_mask (tree type
, unsigned bits
)
2010 gcc_assert (bits
<= TYPE_PRECISION (type
));
2012 return wide_int_to_tree (type
, wi::mask (bits
, false,
2013 TYPE_PRECISION (type
)));
2016 /* Checks that X is integer constant that can be expressed in (unsigned)
2017 HOST_WIDE_INT without loss of precision. */
2020 cst_and_fits_in_hwi (const_tree x
)
2022 return (TREE_CODE (x
) == INTEGER_CST
2023 && (tree_fits_shwi_p (x
) || tree_fits_uhwi_p (x
)));
2026 /* Build a newly constructed VECTOR_CST with the given values of
2027 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2030 make_vector (unsigned log2_npatterns
,
2031 unsigned int nelts_per_pattern MEM_STAT_DECL
)
2033 gcc_assert (IN_RANGE (nelts_per_pattern
, 1, 3));
2035 unsigned npatterns
= 1 << log2_npatterns
;
2036 unsigned encoded_nelts
= npatterns
* nelts_per_pattern
;
2037 unsigned length
= (sizeof (struct tree_vector
)
2038 + (encoded_nelts
- 1) * sizeof (tree
));
2040 record_node_allocation_statistics (VECTOR_CST
, length
);
2042 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2044 TREE_SET_CODE (t
, VECTOR_CST
);
2045 TREE_CONSTANT (t
) = 1;
2046 VECTOR_CST_LOG2_NPATTERNS (t
) = log2_npatterns
;
2047 VECTOR_CST_NELTS_PER_PATTERN (t
) = nelts_per_pattern
;
2052 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2053 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2056 build_vector_from_ctor (tree type
, const vec
<constructor_elt
, va_gc
> *v
)
2058 if (vec_safe_length (v
) == 0)
2059 return build_zero_cst (type
);
2061 unsigned HOST_WIDE_INT idx
, nelts
;
2064 /* We can't construct a VECTOR_CST for a variable number of elements. */
2065 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
2066 tree_vector_builder
vec (type
, nelts
, 1);
2067 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
2069 if (TREE_CODE (value
) == VECTOR_CST
)
2071 /* If NELTS is constant then this must be too. */
2072 unsigned int sub_nelts
= VECTOR_CST_NELTS (value
).to_constant ();
2073 for (unsigned i
= 0; i
< sub_nelts
; ++i
)
2074 vec
.quick_push (VECTOR_CST_ELT (value
, i
));
2077 vec
.quick_push (value
);
2079 while (vec
.length () < nelts
)
2080 vec
.quick_push (build_zero_cst (TREE_TYPE (type
)));
2082 return vec
.build ();
2085 /* Build a vector of type VECTYPE where all the elements are SCs. */
2087 build_vector_from_val (tree vectype
, tree sc
)
2089 unsigned HOST_WIDE_INT i
, nunits
;
2091 if (sc
== error_mark_node
)
2094 /* Verify that the vector type is suitable for SC. Note that there
2095 is some inconsistency in the type-system with respect to restrict
2096 qualifications of pointers. Vector types always have a main-variant
2097 element type and the qualification is applied to the vector-type.
2098 So TREE_TYPE (vector-type) does not return a properly qualified
2099 vector element-type. */
2100 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
2101 TREE_TYPE (vectype
)));
2103 if (CONSTANT_CLASS_P (sc
))
2105 tree_vector_builder
v (vectype
, 1, 1);
2109 else if (!TYPE_VECTOR_SUBPARTS (vectype
).is_constant (&nunits
))
2110 return fold_build1 (VEC_DUPLICATE_EXPR
, vectype
, sc
);
2113 vec
<constructor_elt
, va_gc
> *v
;
2114 vec_alloc (v
, nunits
);
2115 for (i
= 0; i
< nunits
; ++i
)
2116 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
2117 return build_constructor (vectype
, v
);
2121 /* If TYPE is not a vector type, just return SC, otherwise return
2122 build_vector_from_val (TYPE, SC). */
2125 build_uniform_cst (tree type
, tree sc
)
2127 if (!VECTOR_TYPE_P (type
))
2130 return build_vector_from_val (type
, sc
);
2133 /* Build a vector series of type TYPE in which element I has the value
2134 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2135 and a VEC_SERIES_EXPR otherwise. */
2138 build_vec_series (tree type
, tree base
, tree step
)
2140 if (integer_zerop (step
))
2141 return build_vector_from_val (type
, base
);
2142 if (TREE_CODE (base
) == INTEGER_CST
&& TREE_CODE (step
) == INTEGER_CST
)
2144 tree_vector_builder
builder (type
, 1, 3);
2145 tree elt1
= wide_int_to_tree (TREE_TYPE (base
),
2146 wi::to_wide (base
) + wi::to_wide (step
));
2147 tree elt2
= wide_int_to_tree (TREE_TYPE (base
),
2148 wi::to_wide (elt1
) + wi::to_wide (step
));
2149 builder
.quick_push (base
);
2150 builder
.quick_push (elt1
);
2151 builder
.quick_push (elt2
);
2152 return builder
.build ();
2154 return build2 (VEC_SERIES_EXPR
, type
, base
, step
);
2157 /* Return a vector with the same number of units and number of bits
2158 as VEC_TYPE, but in which the elements are a linear series of unsigned
2159 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2162 build_index_vector (tree vec_type
, poly_uint64 base
, poly_uint64 step
)
2164 tree index_vec_type
= vec_type
;
2165 tree index_elt_type
= TREE_TYPE (vec_type
);
2166 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vec_type
);
2167 if (!INTEGRAL_TYPE_P (index_elt_type
) || !TYPE_UNSIGNED (index_elt_type
))
2169 index_elt_type
= build_nonstandard_integer_type
2170 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type
)), true);
2171 index_vec_type
= build_vector_type (index_elt_type
, nunits
);
2174 tree_vector_builder
v (index_vec_type
, 1, 3);
2175 for (unsigned int i
= 0; i
< 3; ++i
)
2176 v
.quick_push (build_int_cstu (index_elt_type
, base
+ i
* step
));
2180 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2181 elements are A and the rest are B. */
2184 build_vector_a_then_b (tree vec_type
, unsigned int num_a
, tree a
, tree b
)
2186 gcc_assert (known_le (num_a
, TYPE_VECTOR_SUBPARTS (vec_type
)));
2187 unsigned int count
= constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type
));
2188 /* Optimize the constant case. */
2189 if ((count
& 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type
).is_constant ())
2191 tree_vector_builder
builder (vec_type
, count
, 2);
2192 for (unsigned int i
= 0; i
< count
* 2; ++i
)
2193 builder
.quick_push (i
< num_a
? a
: b
);
2194 return builder
.build ();
2197 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2198 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2201 recompute_constructor_flags (tree c
)
2205 bool constant_p
= true;
2206 bool side_effects_p
= false;
2207 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2209 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2211 /* Mostly ctors will have elts that don't have side-effects, so
2212 the usual case is to scan all the elements. Hence a single
2213 loop for both const and side effects, rather than one loop
2214 each (with early outs). */
2215 if (!TREE_CONSTANT (val
))
2217 if (TREE_SIDE_EFFECTS (val
))
2218 side_effects_p
= true;
2221 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
2222 TREE_CONSTANT (c
) = constant_p
;
2225 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2229 verify_constructor_flags (tree c
)
2233 bool constant_p
= TREE_CONSTANT (c
);
2234 bool side_effects_p
= TREE_SIDE_EFFECTS (c
);
2235 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2237 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2239 if (constant_p
&& !TREE_CONSTANT (val
))
2240 internal_error ("non-constant element in constant CONSTRUCTOR");
2241 if (!side_effects_p
&& TREE_SIDE_EFFECTS (val
))
2242 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2246 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2247 are in the vec pointed to by VALS. */
2249 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals MEM_STAT_DECL
)
2251 tree c
= make_node (CONSTRUCTOR PASS_MEM_STAT
);
2253 TREE_TYPE (c
) = type
;
2254 CONSTRUCTOR_ELTS (c
) = vals
;
2256 recompute_constructor_flags (c
);
2261 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2264 build_constructor_single (tree type
, tree index
, tree value
)
2266 vec
<constructor_elt
, va_gc
> *v
;
2267 constructor_elt elt
= {index
, value
};
2270 v
->quick_push (elt
);
2272 return build_constructor (type
, v
);
2276 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2277 are in a list pointed to by VALS. */
2279 build_constructor_from_list (tree type
, tree vals
)
2282 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2286 vec_alloc (v
, list_length (vals
));
2287 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
2288 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
2291 return build_constructor (type
, v
);
2294 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2295 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2296 fields in the constructor remain null. */
2299 build_constructor_from_vec (tree type
, const vec
<tree
, va_gc
> *vals
)
2301 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2304 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, t
);
2306 return build_constructor (type
, v
);
2309 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2310 of elements, provided as index/value pairs. */
2313 build_constructor_va (tree type
, int nelts
, ...)
2315 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2318 va_start (p
, nelts
);
2319 vec_alloc (v
, nelts
);
2322 tree index
= va_arg (p
, tree
);
2323 tree value
= va_arg (p
, tree
);
2324 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
2327 return build_constructor (type
, v
);
2330 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2333 build_clobber (tree type
, enum clobber_kind kind
)
2335 tree clobber
= build_constructor (type
, NULL
);
2336 TREE_THIS_VOLATILE (clobber
) = true;
2337 CLOBBER_KIND (clobber
) = kind
;
2341 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2344 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
2347 FIXED_VALUE_TYPE
*fp
;
2349 v
= make_node (FIXED_CST
);
2350 fp
= ggc_alloc
<fixed_value
> ();
2351 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
2353 TREE_TYPE (v
) = type
;
2354 TREE_FIXED_CST_PTR (v
) = fp
;
2358 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2361 build_real (tree type
, REAL_VALUE_TYPE d
)
2366 /* dconst{0,1,2,m1,half} are used in various places in
2367 the middle-end and optimizers, allow them here
2368 even for decimal floating point types as an exception
2369 by converting them to decimal. */
2370 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type
))
2371 && (d
.cl
== rvc_normal
|| d
.cl
== rvc_zero
)
2374 if (memcmp (&d
, &dconst1
, sizeof (d
)) == 0)
2375 decimal_real_from_string (&d
, "1");
2376 else if (memcmp (&d
, &dconst2
, sizeof (d
)) == 0)
2377 decimal_real_from_string (&d
, "2");
2378 else if (memcmp (&d
, &dconstm1
, sizeof (d
)) == 0)
2379 decimal_real_from_string (&d
, "-1");
2380 else if (memcmp (&d
, &dconsthalf
, sizeof (d
)) == 0)
2381 decimal_real_from_string (&d
, "0.5");
2382 else if (memcmp (&d
, &dconst0
, sizeof (d
)) == 0)
2384 /* Make sure to give zero the minimum quantum exponent for
2385 the type (which corresponds to all bits zero). */
2386 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
2388 sprintf (buf
, "0e%d", fmt
->emin
- fmt
->p
);
2389 decimal_real_from_string (&d
, buf
);
2395 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2396 Consider doing it via real_convert now. */
2398 v
= make_node (REAL_CST
);
2399 TREE_TYPE (v
) = type
;
2400 memcpy (TREE_REAL_CST_PTR (v
), &d
, sizeof (REAL_VALUE_TYPE
));
2401 TREE_OVERFLOW (v
) = overflow
;
2405 /* Like build_real, but first truncate D to the type. */
2408 build_real_truncate (tree type
, REAL_VALUE_TYPE d
)
2410 return build_real (type
, real_value_truncate (TYPE_MODE (type
), d
));
2413 /* Return a new REAL_CST node whose type is TYPE
2414 and whose value is the integer value of the INTEGER_CST node I. */
2417 real_value_from_int_cst (const_tree type
, const_tree i
)
2421 /* Clear all bits of the real value type so that we can later do
2422 bitwise comparisons to see if two values are the same. */
2423 memset (&d
, 0, sizeof d
);
2425 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, wi::to_wide (i
),
2426 TYPE_SIGN (TREE_TYPE (i
)));
2430 /* Given a tree representing an integer constant I, return a tree
2431 representing the same value as a floating-point constant of type TYPE. */
2434 build_real_from_int_cst (tree type
, const_tree i
)
2437 int overflow
= TREE_OVERFLOW (i
);
2439 v
= build_real (type
, real_value_from_int_cst (type
, i
));
2441 TREE_OVERFLOW (v
) |= overflow
;
2445 /* Return a new REAL_CST node whose type is TYPE
2446 and whose value is the integer value I which has sign SGN. */
2449 build_real_from_wide (tree type
, const wide_int_ref
&i
, signop sgn
)
2453 /* Clear all bits of the real value type so that we can later do
2454 bitwise comparisons to see if two values are the same. */
2455 memset (&d
, 0, sizeof d
);
2457 real_from_integer (&d
, TYPE_MODE (type
), i
, sgn
);
2458 return build_real (type
, d
);
2461 /* Return a newly constructed STRING_CST node whose value is the LEN
2462 characters at STR when STR is nonnull, or all zeros otherwise.
2463 Note that for a C string literal, LEN should include the trailing NUL.
2464 The TREE_TYPE is not initialized. */
2467 build_string (unsigned len
, const char *str
/*= NULL */)
2469 /* Do not waste bytes provided by padding of struct tree_string. */
2470 unsigned size
= len
+ offsetof (struct tree_string
, str
) + 1;
2472 record_node_allocation_statistics (STRING_CST
, size
);
2474 tree s
= (tree
) ggc_internal_alloc (size
);
2476 memset (s
, 0, sizeof (struct tree_typed
));
2477 TREE_SET_CODE (s
, STRING_CST
);
2478 TREE_CONSTANT (s
) = 1;
2479 TREE_STRING_LENGTH (s
) = len
;
2481 memcpy (s
->string
.str
, str
, len
);
2483 memset (s
->string
.str
, 0, len
);
2484 s
->string
.str
[len
] = '\0';
2489 /* Return a newly constructed COMPLEX_CST node whose value is
2490 specified by the real and imaginary parts REAL and IMAG.
2491 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2492 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2495 build_complex (tree type
, tree real
, tree imag
)
2497 gcc_assert (CONSTANT_CLASS_P (real
));
2498 gcc_assert (CONSTANT_CLASS_P (imag
));
2500 tree t
= make_node (COMPLEX_CST
);
2502 TREE_REALPART (t
) = real
;
2503 TREE_IMAGPART (t
) = imag
;
2504 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
2505 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
2509 /* Build a complex (inf +- 0i), such as for the result of cproj.
2510 TYPE is the complex tree type of the result. If NEG is true, the
2511 imaginary zero is negative. */
2514 build_complex_inf (tree type
, bool neg
)
2516 REAL_VALUE_TYPE rzero
= dconst0
;
2519 return build_complex (type
, build_real (TREE_TYPE (type
), dconstinf
),
2520 build_real (TREE_TYPE (type
), rzero
));
2523 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2524 element is set to 1. In particular, this is 1 + i for complex types. */
2527 build_each_one_cst (tree type
)
2529 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2531 tree scalar
= build_one_cst (TREE_TYPE (type
));
2532 return build_complex (type
, scalar
, scalar
);
2535 return build_one_cst (type
);
2538 /* Return a constant of arithmetic type TYPE which is the
2539 multiplicative identity of the set TYPE. */
2542 build_one_cst (tree type
)
2544 switch (TREE_CODE (type
))
2546 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2547 case POINTER_TYPE
: case REFERENCE_TYPE
:
2549 return build_int_cst (type
, 1);
2552 return build_real (type
, dconst1
);
2554 case FIXED_POINT_TYPE
:
2555 /* We can only generate 1 for accum types. */
2556 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2557 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
2561 tree scalar
= build_one_cst (TREE_TYPE (type
));
2563 return build_vector_from_val (type
, scalar
);
2567 return build_complex (type
,
2568 build_one_cst (TREE_TYPE (type
)),
2569 build_zero_cst (TREE_TYPE (type
)));
2576 /* Return an integer of type TYPE containing all 1's in as much precision as
2577 it contains, or a complex or vector whose subparts are such integers. */
2580 build_all_ones_cst (tree type
)
2582 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2584 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
2585 return build_complex (type
, scalar
, scalar
);
2588 return build_minus_one_cst (type
);
2591 /* Return a constant of arithmetic type TYPE which is the
2592 opposite of the multiplicative identity of the set TYPE. */
2595 build_minus_one_cst (tree type
)
2597 switch (TREE_CODE (type
))
2599 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2600 case POINTER_TYPE
: case REFERENCE_TYPE
:
2602 return build_int_cst (type
, -1);
2605 return build_real (type
, dconstm1
);
2607 case FIXED_POINT_TYPE
:
2608 /* We can only generate 1 for accum types. */
2609 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2610 return build_fixed (type
,
2611 fixed_from_double_int (double_int_minus_one
,
2612 SCALAR_TYPE_MODE (type
)));
2616 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
2618 return build_vector_from_val (type
, scalar
);
2622 return build_complex (type
,
2623 build_minus_one_cst (TREE_TYPE (type
)),
2624 build_zero_cst (TREE_TYPE (type
)));
2631 /* Build 0 constant of type TYPE. This is used by constructor folding
2632 and thus the constant should be represented in memory by
2636 build_zero_cst (tree type
)
2638 switch (TREE_CODE (type
))
2640 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2641 case POINTER_TYPE
: case REFERENCE_TYPE
:
2642 case OFFSET_TYPE
: case NULLPTR_TYPE
:
2643 return build_int_cst (type
, 0);
2646 return build_real (type
, dconst0
);
2648 case FIXED_POINT_TYPE
:
2649 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
2653 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2655 return build_vector_from_val (type
, scalar
);
2660 tree zero
= build_zero_cst (TREE_TYPE (type
));
2662 return build_complex (type
, zero
, zero
);
2666 if (!AGGREGATE_TYPE_P (type
))
2667 return fold_convert (type
, integer_zero_node
);
2668 return build_constructor (type
, NULL
);
2673 /* Build a BINFO with LEN language slots. */
2676 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL
)
2679 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2680 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2682 record_node_allocation_statistics (TREE_BINFO
, length
);
2684 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2686 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2688 TREE_SET_CODE (t
, TREE_BINFO
);
2690 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2695 /* Create a CASE_LABEL_EXPR tree node and return it. */
2698 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2700 tree t
= make_node (CASE_LABEL_EXPR
);
2702 TREE_TYPE (t
) = void_type_node
;
2703 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2705 CASE_LOW (t
) = low_value
;
2706 CASE_HIGH (t
) = high_value
;
2707 CASE_LABEL (t
) = label_decl
;
2708 CASE_CHAIN (t
) = NULL_TREE
;
2713 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2714 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2715 The latter determines the length of the HOST_WIDE_INT vector. */
2718 make_int_cst (int len
, int ext_len MEM_STAT_DECL
)
2721 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2722 + sizeof (struct tree_int_cst
));
2725 record_node_allocation_statistics (INTEGER_CST
, length
);
2727 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2729 TREE_SET_CODE (t
, INTEGER_CST
);
2730 TREE_INT_CST_NUNITS (t
) = len
;
2731 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2732 /* to_offset can only be applied to trees that are offset_int-sized
2733 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2734 must be exactly the precision of offset_int and so LEN is correct. */
2735 if (ext_len
<= OFFSET_INT_ELTS
)
2736 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2738 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2740 TREE_CONSTANT (t
) = 1;
2745 /* Build a newly constructed TREE_VEC node of length LEN. */
2748 make_tree_vec (int len MEM_STAT_DECL
)
2751 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2753 record_node_allocation_statistics (TREE_VEC
, length
);
2755 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2757 TREE_SET_CODE (t
, TREE_VEC
);
2758 TREE_VEC_LENGTH (t
) = len
;
2763 /* Grow a TREE_VEC node to new length LEN. */
2766 grow_tree_vec (tree v
, int len MEM_STAT_DECL
)
2768 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2770 int oldlen
= TREE_VEC_LENGTH (v
);
2771 gcc_assert (len
> oldlen
);
2773 size_t oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2774 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2776 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2778 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2780 TREE_VEC_LENGTH (v
) = len
;
2785 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2786 fixed, and scalar, complex or vector. */
2789 zerop (const_tree expr
)
2791 return (integer_zerop (expr
)
2792 || real_zerop (expr
)
2793 || fixed_zerop (expr
));
2796 /* Return 1 if EXPR is the integer constant zero or a complex constant
2797 of zero, or a location wrapper for such a constant. */
2800 integer_zerop (const_tree expr
)
2802 STRIP_ANY_LOCATION_WRAPPER (expr
);
2804 switch (TREE_CODE (expr
))
2807 return wi::to_wide (expr
) == 0;
2809 return (integer_zerop (TREE_REALPART (expr
))
2810 && integer_zerop (TREE_IMAGPART (expr
)));
2812 return (VECTOR_CST_NPATTERNS (expr
) == 1
2813 && VECTOR_CST_DUPLICATE_P (expr
)
2814 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2820 /* Return 1 if EXPR is the integer constant one or the corresponding
2821 complex constant, or a location wrapper for such a constant. */
2824 integer_onep (const_tree expr
)
2826 STRIP_ANY_LOCATION_WRAPPER (expr
);
2828 switch (TREE_CODE (expr
))
2831 return wi::eq_p (wi::to_widest (expr
), 1);
2833 return (integer_onep (TREE_REALPART (expr
))
2834 && integer_zerop (TREE_IMAGPART (expr
)));
2836 return (VECTOR_CST_NPATTERNS (expr
) == 1
2837 && VECTOR_CST_DUPLICATE_P (expr
)
2838 && integer_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2844 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2845 return 1 if every piece is the integer constant one.
2846 Also return 1 for location wrappers for such a constant. */
2849 integer_each_onep (const_tree expr
)
2851 STRIP_ANY_LOCATION_WRAPPER (expr
);
2853 if (TREE_CODE (expr
) == COMPLEX_CST
)
2854 return (integer_onep (TREE_REALPART (expr
))
2855 && integer_onep (TREE_IMAGPART (expr
)));
2857 return integer_onep (expr
);
2860 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2861 it contains, or a complex or vector whose subparts are such integers,
2862 or a location wrapper for such a constant. */
2865 integer_all_onesp (const_tree expr
)
2867 STRIP_ANY_LOCATION_WRAPPER (expr
);
2869 if (TREE_CODE (expr
) == COMPLEX_CST
2870 && integer_all_onesp (TREE_REALPART (expr
))
2871 && integer_all_onesp (TREE_IMAGPART (expr
)))
2874 else if (TREE_CODE (expr
) == VECTOR_CST
)
2875 return (VECTOR_CST_NPATTERNS (expr
) == 1
2876 && VECTOR_CST_DUPLICATE_P (expr
)
2877 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2879 else if (TREE_CODE (expr
) != INTEGER_CST
)
2882 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
)
2883 == wi::to_wide (expr
));
2886 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2887 for such a constant. */
2890 integer_minus_onep (const_tree expr
)
2892 STRIP_ANY_LOCATION_WRAPPER (expr
);
2894 if (TREE_CODE (expr
) == COMPLEX_CST
)
2895 return (integer_all_onesp (TREE_REALPART (expr
))
2896 && integer_zerop (TREE_IMAGPART (expr
)));
2898 return integer_all_onesp (expr
);
2901 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2902 one bit on), or a location wrapper for such a constant. */
2905 integer_pow2p (const_tree expr
)
2907 STRIP_ANY_LOCATION_WRAPPER (expr
);
2909 if (TREE_CODE (expr
) == COMPLEX_CST
2910 && integer_pow2p (TREE_REALPART (expr
))
2911 && integer_zerop (TREE_IMAGPART (expr
)))
2914 if (TREE_CODE (expr
) != INTEGER_CST
)
2917 return wi::popcount (wi::to_wide (expr
)) == 1;
2920 /* Return 1 if EXPR is an integer constant other than zero or a
2921 complex constant other than zero, or a location wrapper for such a
2925 integer_nonzerop (const_tree expr
)
2927 STRIP_ANY_LOCATION_WRAPPER (expr
);
2929 return ((TREE_CODE (expr
) == INTEGER_CST
2930 && wi::to_wide (expr
) != 0)
2931 || (TREE_CODE (expr
) == COMPLEX_CST
2932 && (integer_nonzerop (TREE_REALPART (expr
))
2933 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2936 /* Return 1 if EXPR is the integer constant one. For vector,
2937 return 1 if every piece is the integer constant minus one
2938 (representing the value TRUE).
2939 Also return 1 for location wrappers for such a constant. */
2942 integer_truep (const_tree expr
)
2944 STRIP_ANY_LOCATION_WRAPPER (expr
);
2946 if (TREE_CODE (expr
) == VECTOR_CST
)
2947 return integer_all_onesp (expr
);
2948 return integer_onep (expr
);
2951 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2952 for such a constant. */
2955 fixed_zerop (const_tree expr
)
2957 STRIP_ANY_LOCATION_WRAPPER (expr
);
2959 return (TREE_CODE (expr
) == FIXED_CST
2960 && TREE_FIXED_CST (expr
).data
.is_zero ());
2963 /* Return the power of two represented by a tree node known to be a
2967 tree_log2 (const_tree expr
)
2969 if (TREE_CODE (expr
) == COMPLEX_CST
)
2970 return tree_log2 (TREE_REALPART (expr
));
2972 return wi::exact_log2 (wi::to_wide (expr
));
2975 /* Similar, but return the largest integer Y such that 2 ** Y is less
2976 than or equal to EXPR. */
2979 tree_floor_log2 (const_tree expr
)
2981 if (TREE_CODE (expr
) == COMPLEX_CST
)
2982 return tree_log2 (TREE_REALPART (expr
));
2984 return wi::floor_log2 (wi::to_wide (expr
));
2987 /* Return number of known trailing zero bits in EXPR, or, if the value of
2988 EXPR is known to be zero, the precision of it's type. */
2991 tree_ctz (const_tree expr
)
2993 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2994 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2997 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2998 switch (TREE_CODE (expr
))
3001 ret1
= wi::ctz (wi::to_wide (expr
));
3002 return MIN (ret1
, prec
);
3004 ret1
= wi::ctz (get_nonzero_bits (expr
));
3005 return MIN (ret1
, prec
);
3012 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3015 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3016 return MIN (ret1
, ret2
);
3017 case POINTER_PLUS_EXPR
:
3018 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3019 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3020 /* Second operand is sizetype, which could be in theory
3021 wider than pointer's precision. Make sure we never
3022 return more than prec. */
3023 ret2
= MIN (ret2
, prec
);
3024 return MIN (ret1
, ret2
);
3026 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3027 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3028 return MAX (ret1
, ret2
);
3030 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3031 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3032 return MIN (ret1
+ ret2
, prec
);
3034 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3035 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3036 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3038 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3039 return MIN (ret1
+ ret2
, prec
);
3043 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3044 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3046 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3047 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3052 case TRUNC_DIV_EXPR
:
3054 case FLOOR_DIV_EXPR
:
3055 case ROUND_DIV_EXPR
:
3056 case EXACT_DIV_EXPR
:
3057 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
3058 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
3060 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
3063 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3071 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3072 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
3074 return MIN (ret1
, prec
);
3076 return tree_ctz (TREE_OPERAND (expr
, 0));
3078 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
3081 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
3082 return MIN (ret1
, ret2
);
3084 return tree_ctz (TREE_OPERAND (expr
, 1));
3086 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
3087 if (ret1
> BITS_PER_UNIT
)
3089 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
3090 return MIN (ret1
, prec
);
3098 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3099 decimal float constants, so don't return 1 for them.
3100 Also return 1 for location wrappers around such a constant. */
3103 real_zerop (const_tree expr
)
3105 STRIP_ANY_LOCATION_WRAPPER (expr
);
3107 switch (TREE_CODE (expr
))
3110 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
3111 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3113 return real_zerop (TREE_REALPART (expr
))
3114 && real_zerop (TREE_IMAGPART (expr
));
3117 /* Don't simply check for a duplicate because the predicate
3118 accepts both +0.0 and -0.0. */
3119 unsigned count
= vector_cst_encoded_nelts (expr
);
3120 for (unsigned int i
= 0; i
< count
; ++i
)
3121 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3130 /* Return 1 if EXPR is the real constant one in real or complex form.
3131 Trailing zeroes matter for decimal float constants, so don't return
3133 Also return 1 for location wrappers around such a constant. */
3136 real_onep (const_tree expr
)
3138 STRIP_ANY_LOCATION_WRAPPER (expr
);
3140 switch (TREE_CODE (expr
))
3143 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
3144 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3146 return real_onep (TREE_REALPART (expr
))
3147 && real_zerop (TREE_IMAGPART (expr
));
3149 return (VECTOR_CST_NPATTERNS (expr
) == 1
3150 && VECTOR_CST_DUPLICATE_P (expr
)
3151 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3157 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3158 matter for decimal float constants, so don't return 1 for them.
3159 Also return 1 for location wrappers around such a constant. */
3162 real_minus_onep (const_tree expr
)
3164 STRIP_ANY_LOCATION_WRAPPER (expr
);
3166 switch (TREE_CODE (expr
))
3169 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
3170 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3172 return real_minus_onep (TREE_REALPART (expr
))
3173 && real_zerop (TREE_IMAGPART (expr
));
3175 return (VECTOR_CST_NPATTERNS (expr
) == 1
3176 && VECTOR_CST_DUPLICATE_P (expr
)
3177 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3183 /* Return true if T could be a floating point zero. */
3186 real_maybe_zerop (const_tree expr
)
3188 switch (TREE_CODE (expr
))
3191 /* Can't use real_zerop here, as it always returns false for decimal
3192 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3193 either, as decimal zeros are rvc_normal. */
3194 return real_equal (&TREE_REAL_CST (expr
), &dconst0
);
3196 return (real_maybe_zerop (TREE_REALPART (expr
))
3197 || real_maybe_zerop (TREE_IMAGPART (expr
)));
3200 unsigned count
= vector_cst_encoded_nelts (expr
);
3201 for (unsigned int i
= 0; i
< count
; ++i
)
3202 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3207 /* Perhaps for SSA_NAMEs we could query frange. */
3212 /* Nonzero if EXP is a constant or a cast of a constant. */
3215 really_constant_p (const_tree exp
)
3217 /* This is not quite the same as STRIP_NOPS. It does more. */
3218 while (CONVERT_EXPR_P (exp
)
3219 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3220 exp
= TREE_OPERAND (exp
, 0);
3221 return TREE_CONSTANT (exp
);
3224 /* Return true if T holds a polynomial pointer difference, storing it in
3225 *VALUE if so. A true return means that T's precision is no greater
3226 than 64 bits, which is the largest address space we support, so *VALUE
3227 never loses precision. However, the signedness of the result does
3228 not necessarily match the signedness of T: sometimes an unsigned type
3229 like sizetype is used to encode a value that is actually negative. */
3232 ptrdiff_tree_p (const_tree t
, poly_int64_pod
*value
)
3236 if (TREE_CODE (t
) == INTEGER_CST
)
3238 if (!cst_and_fits_in_hwi (t
))
3240 *value
= int_cst_value (t
);
3243 if (POLY_INT_CST_P (t
))
3245 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3246 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t
, i
)))
3248 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3249 value
->coeffs
[i
] = int_cst_value (POLY_INT_CST_COEFF (t
, i
));
3256 tree_to_poly_int64 (const_tree t
)
3258 gcc_assert (tree_fits_poly_int64_p (t
));
3259 if (POLY_INT_CST_P (t
))
3260 return poly_int_cst_value (t
).force_shwi ();
3261 return TREE_INT_CST_LOW (t
);
3265 tree_to_poly_uint64 (const_tree t
)
3267 gcc_assert (tree_fits_poly_uint64_p (t
));
3268 if (POLY_INT_CST_P (t
))
3269 return poly_int_cst_value (t
).force_uhwi ();
3270 return TREE_INT_CST_LOW (t
);
3273 /* Return first list element whose TREE_VALUE is ELEM.
3274 Return 0 if ELEM is not in LIST. */
3277 value_member (tree elem
, tree list
)
3281 if (elem
== TREE_VALUE (list
))
3283 list
= TREE_CHAIN (list
);
3288 /* Return first list element whose TREE_PURPOSE is ELEM.
3289 Return 0 if ELEM is not in LIST. */
3292 purpose_member (const_tree elem
, tree list
)
3296 if (elem
== TREE_PURPOSE (list
))
3298 list
= TREE_CHAIN (list
);
3303 /* Return true if ELEM is in V. */
3306 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
3310 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
3316 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3320 chain_index (int idx
, tree chain
)
3322 for (; chain
&& idx
> 0; --idx
)
3323 chain
= TREE_CHAIN (chain
);
3327 /* Return nonzero if ELEM is part of the chain CHAIN. */
3330 chain_member (const_tree elem
, const_tree chain
)
3336 chain
= DECL_CHAIN (chain
);
3342 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3343 We expect a null pointer to mark the end of the chain.
3344 This is the Lisp primitive `length'. */
3347 list_length (const_tree t
)
3350 #ifdef ENABLE_TREE_CHECKING
3358 #ifdef ENABLE_TREE_CHECKING
3361 gcc_assert (p
!= q
);
3369 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3370 UNION_TYPE TYPE, or NULL_TREE if none. */
3373 first_field (const_tree type
)
3375 tree t
= TYPE_FIELDS (type
);
3376 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
3381 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3382 UNION_TYPE TYPE, or NULL_TREE if none. */
3385 last_field (const_tree type
)
3387 tree last
= NULL_TREE
;
3389 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= TREE_CHAIN (fld
))
3391 if (TREE_CODE (fld
) != FIELD_DECL
)
3400 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3401 by modifying the last node in chain 1 to point to chain 2.
3402 This is the Lisp primitive `nconc'. */
3405 chainon (tree op1
, tree op2
)
3414 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
3416 TREE_CHAIN (t1
) = op2
;
3418 #ifdef ENABLE_TREE_CHECKING
3421 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
3422 gcc_assert (t2
!= t1
);
3429 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3432 tree_last (tree chain
)
3436 while ((next
= TREE_CHAIN (chain
)))
3441 /* Reverse the order of elements in the chain T,
3442 and return the new head of the chain (old last element). */
3447 tree prev
= 0, decl
, next
;
3448 for (decl
= t
; decl
; decl
= next
)
3450 /* We shouldn't be using this function to reverse BLOCK chains; we
3451 have blocks_nreverse for that. */
3452 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
3453 next
= TREE_CHAIN (decl
);
3454 TREE_CHAIN (decl
) = prev
;
3460 /* Return a newly created TREE_LIST node whose
3461 purpose and value fields are PARM and VALUE. */
3464 build_tree_list (tree parm
, tree value MEM_STAT_DECL
)
3466 tree t
= make_node (TREE_LIST PASS_MEM_STAT
);
3467 TREE_PURPOSE (t
) = parm
;
3468 TREE_VALUE (t
) = value
;
3472 /* Build a chain of TREE_LIST nodes from a vector. */
3475 build_tree_list_vec (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
3477 tree ret
= NULL_TREE
;
3481 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
3483 *pp
= build_tree_list (NULL
, t PASS_MEM_STAT
);
3484 pp
= &TREE_CHAIN (*pp
);
3489 /* Return a newly created TREE_LIST node whose
3490 purpose and value fields are PURPOSE and VALUE
3491 and whose TREE_CHAIN is CHAIN. */
3494 tree_cons (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
3498 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
3499 memset (node
, 0, sizeof (struct tree_common
));
3501 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
3503 TREE_SET_CODE (node
, TREE_LIST
);
3504 TREE_CHAIN (node
) = chain
;
3505 TREE_PURPOSE (node
) = purpose
;
3506 TREE_VALUE (node
) = value
;
3510 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3514 ctor_to_vec (tree ctor
)
3516 vec
<tree
, va_gc
> *vec
;
3517 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
3521 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
3522 vec
->quick_push (val
);
3527 /* Return the size nominally occupied by an object of type TYPE
3528 when it resides in memory. The value is measured in units of bytes,
3529 and its data type is that normally used for type sizes
3530 (which is the first type created by make_signed_type or
3531 make_unsigned_type). */
3534 size_in_bytes_loc (location_t loc
, const_tree type
)
3538 if (type
== error_mark_node
)
3539 return integer_zero_node
;
3541 type
= TYPE_MAIN_VARIANT (type
);
3542 t
= TYPE_SIZE_UNIT (type
);
3546 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
3547 return size_zero_node
;
3553 /* Return the size of TYPE (in bytes) as a wide integer
3554 or return -1 if the size can vary or is larger than an integer. */
3557 int_size_in_bytes (const_tree type
)
3561 if (type
== error_mark_node
)
3564 type
= TYPE_MAIN_VARIANT (type
);
3565 t
= TYPE_SIZE_UNIT (type
);
3567 if (t
&& tree_fits_uhwi_p (t
))
3568 return TREE_INT_CST_LOW (t
);
3573 /* Return the maximum size of TYPE (in bytes) as a wide integer
3574 or return -1 if the size can vary or is larger than an integer. */
3577 max_int_size_in_bytes (const_tree type
)
3579 HOST_WIDE_INT size
= -1;
3582 /* If this is an array type, check for a possible MAX_SIZE attached. */
3584 if (TREE_CODE (type
) == ARRAY_TYPE
)
3586 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3588 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3589 size
= tree_to_uhwi (size_tree
);
3592 /* If we still haven't been able to get a size, see if the language
3593 can compute a maximum size. */
3597 size_tree
= lang_hooks
.types
.max_size (type
);
3599 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3600 size
= tree_to_uhwi (size_tree
);
3606 /* Return the bit position of FIELD, in bits from the start of the record.
3607 This is a tree of type bitsizetype. */
3610 bit_position (const_tree field
)
3612 return bit_from_pos (DECL_FIELD_OFFSET (field
),
3613 DECL_FIELD_BIT_OFFSET (field
));
3616 /* Return the byte position of FIELD, in bytes from the start of the record.
3617 This is a tree of type sizetype. */
3620 byte_position (const_tree field
)
3622 return byte_from_pos (DECL_FIELD_OFFSET (field
),
3623 DECL_FIELD_BIT_OFFSET (field
));
3626 /* Likewise, but return as an integer. It must be representable in
3627 that way (since it could be a signed value, we don't have the
3628 option of returning -1 like int_size_in_byte can. */
3631 int_byte_position (const_tree field
)
3633 return tree_to_shwi (byte_position (field
));
3636 /* Return, as a tree node, the number of elements for TYPE (which is an
3637 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3640 array_type_nelts (const_tree type
)
3642 tree index_type
, min
, max
;
3644 /* If they did it with unspecified bounds, then we should have already
3645 given an error about it before we got here. */
3646 if (! TYPE_DOMAIN (type
))
3647 return error_mark_node
;
3649 index_type
= TYPE_DOMAIN (type
);
3650 min
= TYPE_MIN_VALUE (index_type
);
3651 max
= TYPE_MAX_VALUE (index_type
);
3653 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3656 /* zero sized arrays are represented from C FE as complete types with
3657 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3658 them as min 0, max -1. */
3659 if (COMPLETE_TYPE_P (type
)
3660 && integer_zerop (TYPE_SIZE (type
))
3661 && integer_zerop (min
))
3662 return build_int_cst (TREE_TYPE (min
), -1);
3664 return error_mark_node
;
3667 return (integer_zerop (min
)
3669 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3672 /* If arg is static -- a reference to an object in static storage -- then
3673 return the object. This is not the same as the C meaning of `static'.
3674 If arg isn't static, return NULL. */
3679 switch (TREE_CODE (arg
))
3682 /* Nested functions are static, even though taking their address will
3683 involve a trampoline as we unnest the nested function and create
3684 the trampoline on the tree level. */
3688 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3689 && ! DECL_THREAD_LOCAL_P (arg
)
3690 && ! DECL_DLLIMPORT_P (arg
)
3694 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3698 return TREE_STATIC (arg
) ? arg
: NULL
;
3705 /* If the thing being referenced is not a field, then it is
3706 something language specific. */
3707 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3709 /* If we are referencing a bitfield, we can't evaluate an
3710 ADDR_EXPR at compile time and so it isn't a constant. */
3711 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3714 return staticp (TREE_OPERAND (arg
, 0));
3720 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3723 case ARRAY_RANGE_REF
:
3724 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3725 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3726 return staticp (TREE_OPERAND (arg
, 0));
3730 case COMPOUND_LITERAL_EXPR
:
3731 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3741 /* Return whether OP is a DECL whose address is function-invariant. */
3744 decl_address_invariant_p (const_tree op
)
3746 /* The conditions below are slightly less strict than the one in
3749 switch (TREE_CODE (op
))
3758 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3759 || DECL_THREAD_LOCAL_P (op
)
3760 || DECL_CONTEXT (op
) == current_function_decl
3761 || decl_function_context (op
) == current_function_decl
)
3766 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3767 || decl_function_context (op
) == current_function_decl
)
3778 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3781 decl_address_ip_invariant_p (const_tree op
)
3783 /* The conditions below are slightly less strict than the one in
3786 switch (TREE_CODE (op
))
3794 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3795 && !DECL_DLLIMPORT_P (op
))
3796 || DECL_THREAD_LOCAL_P (op
))
3801 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3813 /* Return true if T is function-invariant (internal function, does
3814 not handle arithmetic; that's handled in skip_simple_arithmetic and
3815 tree_invariant_p). */
3818 tree_invariant_p_1 (tree t
)
3822 if (TREE_CONSTANT (t
)
3823 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3826 switch (TREE_CODE (t
))
3832 op
= TREE_OPERAND (t
, 0);
3833 while (handled_component_p (op
))
3835 switch (TREE_CODE (op
))
3838 case ARRAY_RANGE_REF
:
3839 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3840 || TREE_OPERAND (op
, 2) != NULL_TREE
3841 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3846 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3852 op
= TREE_OPERAND (op
, 0);
3855 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3864 /* Return true if T is function-invariant. */
3867 tree_invariant_p (tree t
)
3869 tree inner
= skip_simple_arithmetic (t
);
3870 return tree_invariant_p_1 (inner
);
3873 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3874 Do this to any expression which may be used in more than one place,
3875 but must be evaluated only once.
3877 Normally, expand_expr would reevaluate the expression each time.
3878 Calling save_expr produces something that is evaluated and recorded
3879 the first time expand_expr is called on it. Subsequent calls to
3880 expand_expr just reuse the recorded value.
3882 The call to expand_expr that generates code that actually computes
3883 the value is the first call *at compile time*. Subsequent calls
3884 *at compile time* generate code to use the saved value.
3885 This produces correct result provided that *at run time* control
3886 always flows through the insns made by the first expand_expr
3887 before reaching the other places where the save_expr was evaluated.
3888 You, the caller of save_expr, must make sure this is so.
3890 Constants, and certain read-only nodes, are returned with no
3891 SAVE_EXPR because that is safe. Expressions containing placeholders
3892 are not touched; see tree.def for an explanation of what these
3896 save_expr (tree expr
)
3900 /* If the tree evaluates to a constant, then we don't want to hide that
3901 fact (i.e. this allows further folding, and direct checks for constants).
3902 However, a read-only object that has side effects cannot be bypassed.
3903 Since it is no problem to reevaluate literals, we just return the
3905 inner
= skip_simple_arithmetic (expr
);
3906 if (TREE_CODE (inner
) == ERROR_MARK
)
3909 if (tree_invariant_p_1 (inner
))
3912 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3913 it means that the size or offset of some field of an object depends on
3914 the value within another field.
3916 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3917 and some variable since it would then need to be both evaluated once and
3918 evaluated more than once. Front-ends must assure this case cannot
3919 happen by surrounding any such subexpressions in their own SAVE_EXPR
3920 and forcing evaluation at the proper time. */
3921 if (contains_placeholder_p (inner
))
3924 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3926 /* This expression might be placed ahead of a jump to ensure that the
3927 value was computed on both sides of the jump. So make sure it isn't
3928 eliminated as dead. */
3929 TREE_SIDE_EFFECTS (expr
) = 1;
3933 /* Look inside EXPR into any simple arithmetic operations. Return the
3934 outermost non-arithmetic or non-invariant node. */
3937 skip_simple_arithmetic (tree expr
)
3939 /* We don't care about whether this can be used as an lvalue in this
3941 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3942 expr
= TREE_OPERAND (expr
, 0);
3944 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3945 a constant, it will be more efficient to not make another SAVE_EXPR since
3946 it will allow better simplification and GCSE will be able to merge the
3947 computations if they actually occur. */
3950 if (UNARY_CLASS_P (expr
))
3951 expr
= TREE_OPERAND (expr
, 0);
3952 else if (BINARY_CLASS_P (expr
))
3954 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3955 expr
= TREE_OPERAND (expr
, 0);
3956 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3957 expr
= TREE_OPERAND (expr
, 1);
3968 /* Look inside EXPR into simple arithmetic operations involving constants.
3969 Return the outermost non-arithmetic or non-constant node. */
3972 skip_simple_constant_arithmetic (tree expr
)
3974 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3975 expr
= TREE_OPERAND (expr
, 0);
3979 if (UNARY_CLASS_P (expr
))
3980 expr
= TREE_OPERAND (expr
, 0);
3981 else if (BINARY_CLASS_P (expr
))
3983 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3984 expr
= TREE_OPERAND (expr
, 0);
3985 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3986 expr
= TREE_OPERAND (expr
, 1);
3997 /* Return which tree structure is used by T. */
3999 enum tree_node_structure_enum
4000 tree_node_structure (const_tree t
)
4002 const enum tree_code code
= TREE_CODE (t
);
4003 return tree_node_structure_for_code (code
);
4006 /* Set various status flags when building a CALL_EXPR object T. */
4009 process_call_operands (tree t
)
4011 bool side_effects
= TREE_SIDE_EFFECTS (t
);
4012 bool read_only
= false;
4013 int i
= call_expr_flags (t
);
4015 /* Calls have side-effects, except those to const or pure functions. */
4016 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
4017 side_effects
= true;
4018 /* Propagate TREE_READONLY of arguments for const functions. */
4022 if (!side_effects
|| read_only
)
4023 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
4025 tree op
= TREE_OPERAND (t
, i
);
4026 if (op
&& TREE_SIDE_EFFECTS (op
))
4027 side_effects
= true;
4028 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
4032 TREE_SIDE_EFFECTS (t
) = side_effects
;
4033 TREE_READONLY (t
) = read_only
;
4036 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4037 size or offset that depends on a field within a record. */
4040 contains_placeholder_p (const_tree exp
)
4042 enum tree_code code
;
4047 code
= TREE_CODE (exp
);
4048 if (code
== PLACEHOLDER_EXPR
)
4051 switch (TREE_CODE_CLASS (code
))
4054 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4055 position computations since they will be converted into a
4056 WITH_RECORD_EXPR involving the reference, which will assume
4057 here will be valid. */
4058 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4060 case tcc_exceptional
:
4061 if (code
== TREE_LIST
)
4062 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
4063 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
4068 case tcc_comparison
:
4069 case tcc_expression
:
4073 /* Ignoring the first operand isn't quite right, but works best. */
4074 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
4077 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4078 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
4079 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
4082 /* The save_expr function never wraps anything containing
4083 a PLACEHOLDER_EXPR. */
4090 switch (TREE_CODE_LENGTH (code
))
4093 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4095 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4096 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
4107 const_call_expr_arg_iterator iter
;
4108 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
4109 if (CONTAINS_PLACEHOLDER_P (arg
))
4123 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4124 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4128 type_contains_placeholder_1 (const_tree type
)
4130 /* If the size contains a placeholder or the parent type (component type in
4131 the case of arrays) type involves a placeholder, this type does. */
4132 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
4133 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
4134 || (!POINTER_TYPE_P (type
)
4136 && type_contains_placeholder_p (TREE_TYPE (type
))))
4139 /* Now do type-specific checks. Note that the last part of the check above
4140 greatly limits what we have to do below. */
4141 switch (TREE_CODE (type
))
4150 case REFERENCE_TYPE
:
4159 case FIXED_POINT_TYPE
:
4160 /* Here we just check the bounds. */
4161 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
4162 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
4165 /* We have already checked the component type above, so just check
4166 the domain type. Flexible array members have a null domain. */
4167 return TYPE_DOMAIN (type
) ?
4168 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
4172 case QUAL_UNION_TYPE
:
4176 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4177 if (TREE_CODE (field
) == FIELD_DECL
4178 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
4179 || (TREE_CODE (type
) == QUAL_UNION_TYPE
4180 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
4181 || type_contains_placeholder_p (TREE_TYPE (field
))))
4192 /* Wrapper around above function used to cache its result. */
4195 type_contains_placeholder_p (tree type
)
4199 /* If the contains_placeholder_bits field has been initialized,
4200 then we know the answer. */
4201 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
4202 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
4204 /* Indicate that we've seen this type node, and the answer is false.
4205 This is what we want to return if we run into recursion via fields. */
4206 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
4208 /* Compute the real value. */
4209 result
= type_contains_placeholder_1 (type
);
4211 /* Store the real value. */
4212 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
4217 /* Push tree EXP onto vector QUEUE if it is not already present. */
4220 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
4225 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
4226 if (simple_cst_equal (iter
, exp
) == 1)
4230 queue
->safe_push (exp
);
4233 /* Given a tree EXP, find all occurrences of references to fields
4234 in a PLACEHOLDER_EXPR and place them in vector REFS without
4235 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4236 we assume here that EXP contains only arithmetic expressions
4237 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4241 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
4243 enum tree_code code
= TREE_CODE (exp
);
4247 /* We handle TREE_LIST and COMPONENT_REF separately. */
4248 if (code
== TREE_LIST
)
4250 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
4251 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
4253 else if (code
== COMPONENT_REF
)
4255 for (inner
= TREE_OPERAND (exp
, 0);
4256 REFERENCE_CLASS_P (inner
);
4257 inner
= TREE_OPERAND (inner
, 0))
4260 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4261 push_without_duplicates (exp
, refs
);
4263 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
4266 switch (TREE_CODE_CLASS (code
))
4271 case tcc_declaration
:
4272 /* Variables allocated to static storage can stay. */
4273 if (!TREE_STATIC (exp
))
4274 push_without_duplicates (exp
, refs
);
4277 case tcc_expression
:
4278 /* This is the pattern built in ada/make_aligning_type. */
4279 if (code
== ADDR_EXPR
4280 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4282 push_without_duplicates (exp
, refs
);
4288 case tcc_exceptional
:
4291 case tcc_comparison
:
4293 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4294 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4298 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4299 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4307 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4308 return a tree with all occurrences of references to F in a
4309 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4310 CONST_DECLs. Note that we assume here that EXP contains only
4311 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4312 occurring only in their argument list. */
4315 substitute_in_expr (tree exp
, tree f
, tree r
)
4317 enum tree_code code
= TREE_CODE (exp
);
4318 tree op0
, op1
, op2
, op3
;
4321 /* We handle TREE_LIST and COMPONENT_REF separately. */
4322 if (code
== TREE_LIST
)
4324 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4325 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4326 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4329 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4331 else if (code
== COMPONENT_REF
)
4335 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4336 and it is the right field, replace it with R. */
4337 for (inner
= TREE_OPERAND (exp
, 0);
4338 REFERENCE_CLASS_P (inner
);
4339 inner
= TREE_OPERAND (inner
, 0))
4343 op1
= TREE_OPERAND (exp
, 1);
4345 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4348 /* If this expression hasn't been completed let, leave it alone. */
4349 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4352 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4353 if (op0
== TREE_OPERAND (exp
, 0))
4357 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4360 switch (TREE_CODE_CLASS (code
))
4365 case tcc_declaration
:
4371 case tcc_expression
:
4377 case tcc_exceptional
:
4380 case tcc_comparison
:
4382 switch (TREE_CODE_LENGTH (code
))
4388 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4389 if (op0
== TREE_OPERAND (exp
, 0))
4392 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4396 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4397 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4399 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4402 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4406 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4407 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4408 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4410 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4411 && op2
== TREE_OPERAND (exp
, 2))
4414 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4418 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4419 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4420 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4421 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4423 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4424 && op2
== TREE_OPERAND (exp
, 2)
4425 && op3
== TREE_OPERAND (exp
, 3))
4429 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4441 new_tree
= NULL_TREE
;
4443 /* If we are trying to replace F with a constant or with another
4444 instance of one of the arguments of the call, inline back
4445 functions which do nothing else than computing a value from
4446 the arguments they are passed. This makes it possible to
4447 fold partially or entirely the replacement expression. */
4448 if (code
== CALL_EXPR
)
4450 bool maybe_inline
= false;
4451 if (CONSTANT_CLASS_P (r
))
4452 maybe_inline
= true;
4454 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4455 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4457 maybe_inline
= true;
4462 tree t
= maybe_inline_call_in_expr (exp
);
4464 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4468 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4470 tree op
= TREE_OPERAND (exp
, i
);
4471 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4475 new_tree
= copy_node (exp
);
4476 TREE_OPERAND (new_tree
, i
) = new_op
;
4482 new_tree
= fold (new_tree
);
4483 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4484 process_call_operands (new_tree
);
4495 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4497 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4498 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4503 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4504 for it within OBJ, a tree that is an object or a chain of references. */
4507 substitute_placeholder_in_expr (tree exp
, tree obj
)
4509 enum tree_code code
= TREE_CODE (exp
);
4510 tree op0
, op1
, op2
, op3
;
4513 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4514 in the chain of OBJ. */
4515 if (code
== PLACEHOLDER_EXPR
)
4517 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4520 for (elt
= obj
; elt
!= 0;
4521 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4522 || TREE_CODE (elt
) == COND_EXPR
)
4523 ? TREE_OPERAND (elt
, 1)
4524 : (REFERENCE_CLASS_P (elt
)
4525 || UNARY_CLASS_P (elt
)
4526 || BINARY_CLASS_P (elt
)
4527 || VL_EXP_CLASS_P (elt
)
4528 || EXPRESSION_CLASS_P (elt
))
4529 ? TREE_OPERAND (elt
, 0) : 0))
4530 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4533 for (elt
= obj
; elt
!= 0;
4534 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4535 || TREE_CODE (elt
) == COND_EXPR
)
4536 ? TREE_OPERAND (elt
, 1)
4537 : (REFERENCE_CLASS_P (elt
)
4538 || UNARY_CLASS_P (elt
)
4539 || BINARY_CLASS_P (elt
)
4540 || VL_EXP_CLASS_P (elt
)
4541 || EXPRESSION_CLASS_P (elt
))
4542 ? TREE_OPERAND (elt
, 0) : 0))
4543 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4544 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4546 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4548 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4549 survives until RTL generation, there will be an error. */
4553 /* TREE_LIST is special because we need to look at TREE_VALUE
4554 and TREE_CHAIN, not TREE_OPERANDS. */
4555 else if (code
== TREE_LIST
)
4557 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4558 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4559 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4562 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4565 switch (TREE_CODE_CLASS (code
))
4568 case tcc_declaration
:
4571 case tcc_exceptional
:
4574 case tcc_comparison
:
4575 case tcc_expression
:
4578 switch (TREE_CODE_LENGTH (code
))
4584 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4585 if (op0
== TREE_OPERAND (exp
, 0))
4588 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4592 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4593 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4595 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4598 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4602 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4603 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4604 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4606 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4607 && op2
== TREE_OPERAND (exp
, 2))
4610 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4614 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4615 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4616 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4617 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4619 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4620 && op2
== TREE_OPERAND (exp
, 2)
4621 && op3
== TREE_OPERAND (exp
, 3))
4625 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4637 new_tree
= NULL_TREE
;
4639 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4641 tree op
= TREE_OPERAND (exp
, i
);
4642 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4646 new_tree
= copy_node (exp
);
4647 TREE_OPERAND (new_tree
, i
) = new_op
;
4653 new_tree
= fold (new_tree
);
4654 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4655 process_call_operands (new_tree
);
4666 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4668 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4669 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4675 /* Subroutine of stabilize_reference; this is called for subtrees of
4676 references. Any expression with side-effects must be put in a SAVE_EXPR
4677 to ensure that it is only evaluated once.
4679 We don't put SAVE_EXPR nodes around everything, because assigning very
4680 simple expressions to temporaries causes us to miss good opportunities
4681 for optimizations. Among other things, the opportunity to fold in the
4682 addition of a constant into an addressing mode often gets lost, e.g.
4683 "y[i+1] += x;". In general, we take the approach that we should not make
4684 an assignment unless we are forced into it - i.e., that any non-side effect
4685 operator should be allowed, and that cse should take care of coalescing
4686 multiple utterances of the same expression should that prove fruitful. */
4689 stabilize_reference_1 (tree e
)
4692 enum tree_code code
= TREE_CODE (e
);
4694 /* We cannot ignore const expressions because it might be a reference
4695 to a const array but whose index contains side-effects. But we can
4696 ignore things that are actual constant or that already have been
4697 handled by this function. */
4699 if (tree_invariant_p (e
))
4702 switch (TREE_CODE_CLASS (code
))
4704 case tcc_exceptional
:
4705 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4706 have side-effects. */
4707 if (code
== STATEMENT_LIST
)
4708 return save_expr (e
);
4711 case tcc_declaration
:
4712 case tcc_comparison
:
4714 case tcc_expression
:
4717 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4718 so that it will only be evaluated once. */
4719 /* The reference (r) and comparison (<) classes could be handled as
4720 below, but it is generally faster to only evaluate them once. */
4721 if (TREE_SIDE_EFFECTS (e
))
4722 return save_expr (e
);
4726 /* Constants need no processing. In fact, we should never reach
4731 /* Division is slow and tends to be compiled with jumps,
4732 especially the division by powers of 2 that is often
4733 found inside of an array reference. So do it just once. */
4734 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4735 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4736 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4737 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4738 return save_expr (e
);
4739 /* Recursively stabilize each operand. */
4740 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4741 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4745 /* Recursively stabilize each operand. */
4746 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4753 TREE_TYPE (result
) = TREE_TYPE (e
);
4754 TREE_READONLY (result
) = TREE_READONLY (e
);
4755 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4756 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4761 /* Stabilize a reference so that we can use it any number of times
4762 without causing its operands to be evaluated more than once.
4763 Returns the stabilized reference. This works by means of save_expr,
4764 so see the caveats in the comments about save_expr.
4766 Also allows conversion expressions whose operands are references.
4767 Any other kind of expression is returned unchanged. */
4770 stabilize_reference (tree ref
)
4773 enum tree_code code
= TREE_CODE (ref
);
4780 /* No action is needed in this case. */
4785 case FIX_TRUNC_EXPR
:
4786 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4790 result
= build_nt (INDIRECT_REF
,
4791 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4795 result
= build_nt (COMPONENT_REF
,
4796 stabilize_reference (TREE_OPERAND (ref
, 0)),
4797 TREE_OPERAND (ref
, 1), NULL_TREE
);
4801 result
= build_nt (BIT_FIELD_REF
,
4802 stabilize_reference (TREE_OPERAND (ref
, 0)),
4803 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4804 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
4808 result
= build_nt (ARRAY_REF
,
4809 stabilize_reference (TREE_OPERAND (ref
, 0)),
4810 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4811 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4814 case ARRAY_RANGE_REF
:
4815 result
= build_nt (ARRAY_RANGE_REF
,
4816 stabilize_reference (TREE_OPERAND (ref
, 0)),
4817 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4818 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4822 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4823 it wouldn't be ignored. This matters when dealing with
4825 return stabilize_reference_1 (ref
);
4827 /* If arg isn't a kind of lvalue we recognize, make no change.
4828 Caller should recognize the error for an invalid lvalue. */
4833 return error_mark_node
;
4836 TREE_TYPE (result
) = TREE_TYPE (ref
);
4837 TREE_READONLY (result
) = TREE_READONLY (ref
);
4838 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4839 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4840 protected_set_expr_location (result
, EXPR_LOCATION (ref
));
4845 /* Low-level constructors for expressions. */
4847 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4848 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4851 recompute_tree_invariant_for_addr_expr (tree t
)
4854 bool tc
= true, se
= false;
4856 gcc_assert (TREE_CODE (t
) == ADDR_EXPR
);
4858 /* We started out assuming this address is both invariant and constant, but
4859 does not have side effects. Now go down any handled components and see if
4860 any of them involve offsets that are either non-constant or non-invariant.
4861 Also check for side-effects.
4863 ??? Note that this code makes no attempt to deal with the case where
4864 taking the address of something causes a copy due to misalignment. */
4866 #define UPDATE_FLAGS(NODE) \
4867 do { tree _node = (NODE); \
4868 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4869 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4871 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4872 node
= TREE_OPERAND (node
, 0))
4874 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4875 array reference (probably made temporarily by the G++ front end),
4876 so ignore all the operands. */
4877 if ((TREE_CODE (node
) == ARRAY_REF
4878 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4879 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4881 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4882 if (TREE_OPERAND (node
, 2))
4883 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4884 if (TREE_OPERAND (node
, 3))
4885 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4887 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4888 FIELD_DECL, apparently. The G++ front end can put something else
4889 there, at least temporarily. */
4890 else if (TREE_CODE (node
) == COMPONENT_REF
4891 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4893 if (TREE_OPERAND (node
, 2))
4894 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4898 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4900 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4901 the address, since &(*a)->b is a form of addition. If it's a constant, the
4902 address is constant too. If it's a decl, its address is constant if the
4903 decl is static. Everything else is not constant and, furthermore,
4904 taking the address of a volatile variable is not volatile. */
4905 if (TREE_CODE (node
) == INDIRECT_REF
4906 || TREE_CODE (node
) == MEM_REF
)
4907 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4908 else if (CONSTANT_CLASS_P (node
))
4910 else if (DECL_P (node
))
4911 tc
&= (staticp (node
) != NULL_TREE
);
4915 se
|= TREE_SIDE_EFFECTS (node
);
4919 TREE_CONSTANT (t
) = tc
;
4920 TREE_SIDE_EFFECTS (t
) = se
;
4924 /* Build an expression of code CODE, data type TYPE, and operands as
4925 specified. Expressions and reference nodes can be created this way.
4926 Constants, decls, types and misc nodes cannot be.
4928 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4929 enough for all extant tree codes. */
4932 build0 (enum tree_code code
, tree tt MEM_STAT_DECL
)
4936 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4938 t
= make_node (code PASS_MEM_STAT
);
4945 build1 (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4947 int length
= sizeof (struct tree_exp
);
4950 record_node_allocation_statistics (code
, length
);
4952 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4954 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4956 memset (t
, 0, sizeof (struct tree_common
));
4958 TREE_SET_CODE (t
, code
);
4960 TREE_TYPE (t
) = type
;
4961 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4962 TREE_OPERAND (t
, 0) = node
;
4963 if (node
&& !TYPE_P (node
))
4965 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4966 TREE_READONLY (t
) = TREE_READONLY (node
);
4969 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4971 if (code
!= DEBUG_BEGIN_STMT
)
4972 TREE_SIDE_EFFECTS (t
) = 1;
4977 /* All of these have side-effects, no matter what their
4979 TREE_SIDE_EFFECTS (t
) = 1;
4980 TREE_READONLY (t
) = 0;
4984 /* Whether a dereference is readonly has nothing to do with whether
4985 its operand is readonly. */
4986 TREE_READONLY (t
) = 0;
4991 recompute_tree_invariant_for_addr_expr (t
);
4995 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4996 && node
&& !TYPE_P (node
)
4997 && TREE_CONSTANT (node
))
4998 TREE_CONSTANT (t
) = 1;
4999 if (TREE_CODE_CLASS (code
) == tcc_reference
5000 && node
&& TREE_THIS_VOLATILE (node
))
5001 TREE_THIS_VOLATILE (t
) = 1;
5008 #define PROCESS_ARG(N) \
5010 TREE_OPERAND (t, N) = arg##N; \
5011 if (arg##N &&!TYPE_P (arg##N)) \
5013 if (TREE_SIDE_EFFECTS (arg##N)) \
5015 if (!TREE_READONLY (arg##N) \
5016 && !CONSTANT_CLASS_P (arg##N)) \
5017 (void) (read_only = 0); \
5018 if (!TREE_CONSTANT (arg##N)) \
5019 (void) (constant = 0); \
5024 build2 (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
5026 bool constant
, read_only
, side_effects
, div_by_zero
;
5029 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
5031 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
5032 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
5033 /* When sizetype precision doesn't match that of pointers
5034 we need to be able to build explicit extensions or truncations
5035 of the offset argument. */
5036 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
5037 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
5038 && TREE_CODE (arg1
) == INTEGER_CST
);
5040 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
5041 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
5042 && ptrofftype_p (TREE_TYPE (arg1
)));
5044 t
= make_node (code PASS_MEM_STAT
);
5047 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5048 result based on those same flags for the arguments. But if the
5049 arguments aren't really even `tree' expressions, we shouldn't be trying
5052 /* Expressions without side effects may be constant if their
5053 arguments are as well. */
5054 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
5055 || TREE_CODE_CLASS (code
) == tcc_binary
);
5057 side_effects
= TREE_SIDE_EFFECTS (t
);
5061 case TRUNC_DIV_EXPR
:
5063 case FLOOR_DIV_EXPR
:
5064 case ROUND_DIV_EXPR
:
5065 case EXACT_DIV_EXPR
:
5067 case FLOOR_MOD_EXPR
:
5068 case ROUND_MOD_EXPR
:
5069 case TRUNC_MOD_EXPR
:
5070 div_by_zero
= integer_zerop (arg1
);
5073 div_by_zero
= false;
5079 TREE_SIDE_EFFECTS (t
) = side_effects
;
5080 if (code
== MEM_REF
)
5082 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
5084 tree o
= TREE_OPERAND (arg0
, 0);
5085 TREE_READONLY (t
) = TREE_READONLY (o
);
5086 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
5091 TREE_READONLY (t
) = read_only
;
5092 /* Don't mark X / 0 as constant. */
5093 TREE_CONSTANT (t
) = constant
&& !div_by_zero
;
5094 TREE_THIS_VOLATILE (t
)
5095 = (TREE_CODE_CLASS (code
) == tcc_reference
5096 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5104 build3 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5105 tree arg2 MEM_STAT_DECL
)
5107 bool constant
, read_only
, side_effects
;
5110 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
5111 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5113 t
= make_node (code PASS_MEM_STAT
);
5118 /* As a special exception, if COND_EXPR has NULL branches, we
5119 assume that it is a gimple statement and always consider
5120 it to have side effects. */
5121 if (code
== COND_EXPR
5122 && tt
== void_type_node
5123 && arg1
== NULL_TREE
5124 && arg2
== NULL_TREE
)
5125 side_effects
= true;
5127 side_effects
= TREE_SIDE_EFFECTS (t
);
5133 if (code
== COND_EXPR
)
5134 TREE_READONLY (t
) = read_only
;
5136 TREE_SIDE_EFFECTS (t
) = side_effects
;
5137 TREE_THIS_VOLATILE (t
)
5138 = (TREE_CODE_CLASS (code
) == tcc_reference
5139 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5145 build4 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5146 tree arg2
, tree arg3 MEM_STAT_DECL
)
5148 bool constant
, read_only
, side_effects
;
5151 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
5153 t
= make_node (code PASS_MEM_STAT
);
5156 side_effects
= TREE_SIDE_EFFECTS (t
);
5163 TREE_SIDE_EFFECTS (t
) = side_effects
;
5164 TREE_THIS_VOLATILE (t
)
5165 = (TREE_CODE_CLASS (code
) == tcc_reference
5166 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5172 build5 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
5173 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
5175 bool constant
, read_only
, side_effects
;
5178 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
5180 t
= make_node (code PASS_MEM_STAT
);
5183 side_effects
= TREE_SIDE_EFFECTS (t
);
5191 TREE_SIDE_EFFECTS (t
) = side_effects
;
5192 if (code
== TARGET_MEM_REF
)
5194 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
5196 tree o
= TREE_OPERAND (arg0
, 0);
5197 TREE_READONLY (t
) = TREE_READONLY (o
);
5198 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
5202 TREE_THIS_VOLATILE (t
)
5203 = (TREE_CODE_CLASS (code
) == tcc_reference
5204 && arg0
&& TREE_THIS_VOLATILE (arg0
));
5209 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
5210 on the pointer PTR. */
5213 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
5215 poly_int64 offset
= 0;
5216 tree ptype
= TREE_TYPE (ptr
);
5218 /* For convenience allow addresses that collapse to a simple base
5220 if (TREE_CODE (ptr
) == ADDR_EXPR
5221 && (handled_component_p (TREE_OPERAND (ptr
, 0))
5222 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
5224 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
5226 if (TREE_CODE (ptr
) == MEM_REF
)
5228 offset
+= mem_ref_offset (ptr
).force_shwi ();
5229 ptr
= TREE_OPERAND (ptr
, 0);
5232 ptr
= build_fold_addr_expr (ptr
);
5233 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
5235 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
5236 ptr
, build_int_cst (ptype
, offset
));
5237 SET_EXPR_LOCATION (tem
, loc
);
5241 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5244 mem_ref_offset (const_tree t
)
5246 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t
, 1)),
5250 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5251 offsetted by OFFSET units. */
5254 build_invariant_address (tree type
, tree base
, poly_int64 offset
)
5256 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
5257 build_fold_addr_expr (base
),
5258 build_int_cst (ptr_type_node
, offset
));
5259 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
5260 recompute_tree_invariant_for_addr_expr (addr
);
5264 /* Similar except don't specify the TREE_TYPE
5265 and leave the TREE_SIDE_EFFECTS as 0.
5266 It is permissible for arguments to be null,
5267 or even garbage if their values do not matter. */
5270 build_nt (enum tree_code code
, ...)
5277 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5281 t
= make_node (code
);
5282 length
= TREE_CODE_LENGTH (code
);
5284 for (i
= 0; i
< length
; i
++)
5285 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
5291 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5295 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
5300 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
5301 CALL_EXPR_FN (ret
) = fn
;
5302 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
5303 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
5304 CALL_EXPR_ARG (ret
, ix
) = t
;
5308 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5310 We do NOT enter this node in any sort of symbol table.
5312 LOC is the location of the decl.
5314 layout_decl is used to set up the decl's storage layout.
5315 Other slots are initialized to 0 or null pointers. */
5318 build_decl (location_t loc
, enum tree_code code
, tree name
,
5319 tree type MEM_STAT_DECL
)
5323 t
= make_node (code PASS_MEM_STAT
);
5324 DECL_SOURCE_LOCATION (t
) = loc
;
5326 /* if (type == error_mark_node)
5327 type = integer_type_node; */
5328 /* That is not done, deliberately, so that having error_mark_node
5329 as the type can suppress useless errors in the use of this variable. */
5331 DECL_NAME (t
) = name
;
5332 TREE_TYPE (t
) = type
;
5334 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
5340 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5343 build_debug_expr_decl (tree type
)
5345 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5346 DECL_ARTIFICIAL (vexpr
) = 1;
5347 TREE_TYPE (vexpr
) = type
;
5348 SET_DECL_MODE (vexpr
, TYPE_MODE (type
));
5352 /* Builds and returns function declaration with NAME and TYPE. */
5355 build_fn_decl (const char *name
, tree type
)
5357 tree id
= get_identifier (name
);
5358 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
5360 DECL_EXTERNAL (decl
) = 1;
5361 TREE_PUBLIC (decl
) = 1;
5362 DECL_ARTIFICIAL (decl
) = 1;
5363 TREE_NOTHROW (decl
) = 1;
/* List of every TRANSLATION_UNIT_DECL created so far; appended to by
   build_translation_unit_decl.  NOTE(review): upstream declares this
   static and GTY(())-marked — confirm those tokens were lost in
   extraction rather than intentionally removed.  */
vec
<tree
, va_gc
> *all_translation_units
;
5370 /* Builds a new translation-unit decl with name NAME, queues it in the
5371 global list of translation-unit decls and returns it. */
5374 build_translation_unit_decl (tree name
)
5376 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
5378 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
5379 vec_safe_push (all_translation_units
, tu
);
5384 /* BLOCK nodes are used to represent the structure of binding contours
5385 and declarations, once those contours have been exited and their contents
5386 compiled. This information is used for outputting debugging info. */
5389 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
5391 tree block
= make_node (BLOCK
);
5393 BLOCK_VARS (block
) = vars
;
5394 BLOCK_SUBBLOCKS (block
) = subblocks
;
5395 BLOCK_SUPERCONTEXT (block
) = supercontext
;
5396 BLOCK_CHAIN (block
) = chain
;
5401 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5403 LOC is the location to use in tree T. */
5406 protected_set_expr_location (tree t
, location_t loc
)
5408 if (CAN_HAVE_LOCATION_P (t
))
5409 SET_EXPR_LOCATION (t
, loc
);
5410 else if (t
&& TREE_CODE (t
) == STATEMENT_LIST
)
5412 t
= expr_single (t
);
5413 if (t
&& CAN_HAVE_LOCATION_P (t
))
5414 SET_EXPR_LOCATION (t
, loc
);
5418 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5419 UNKNOWN_LOCATION. */
5422 protected_set_expr_location_if_unset (tree t
, location_t loc
)
5424 t
= expr_single (t
);
5425 if (t
&& !EXPR_HAS_LOCATION (t
))
5426 protected_set_expr_location (t
, loc
);
5429 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5430 of the various TYPE_QUAL values. */
5433 set_type_quals (tree type
, int type_quals
)
5435 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
5436 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
5437 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
5438 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
5439 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
5442 /* Returns true iff CAND and BASE have equivalent language-specific
5446 check_lang_type (const_tree cand
, const_tree base
)
5448 if (lang_hooks
.types
.type_hash_eq
== NULL
)
5450 /* type_hash_eq currently only applies to these types. */
5451 if (TREE_CODE (cand
) != FUNCTION_TYPE
5452 && TREE_CODE (cand
) != METHOD_TYPE
)
5454 return lang_hooks
.types
.type_hash_eq (cand
, base
);
5457 /* This function checks to see if TYPE matches the size one of the built-in
5458 atomic types, and returns that core atomic type. */
5461 find_atomic_core_type (const_tree type
)
5463 tree base_atomic_type
;
5465 /* Only handle complete types. */
5466 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
5469 switch (tree_to_uhwi (TYPE_SIZE (type
)))
5472 base_atomic_type
= atomicQI_type_node
;
5476 base_atomic_type
= atomicHI_type_node
;
5480 base_atomic_type
= atomicSI_type_node
;
5484 base_atomic_type
= atomicDI_type_node
;
5488 base_atomic_type
= atomicTI_type_node
;
5492 base_atomic_type
= NULL_TREE
;
5495 return base_atomic_type
;
5498 /* Returns true iff unqualified CAND and BASE are equivalent. */
5501 check_base_type (const_tree cand
, const_tree base
)
5503 if (TYPE_NAME (cand
) != TYPE_NAME (base
)
5504 /* Apparently this is needed for Objective-C. */
5505 || TYPE_CONTEXT (cand
) != TYPE_CONTEXT (base
)
5506 || !attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5507 TYPE_ATTRIBUTES (base
)))
5509 /* Check alignment. */
5510 if (TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
5511 && TYPE_USER_ALIGN (cand
) == TYPE_USER_ALIGN (base
))
5513 /* Atomic types increase minimal alignment. We must to do so as well
5514 or we get duplicated canonical types. See PR88686. */
5515 if ((TYPE_QUALS (cand
) & TYPE_QUAL_ATOMIC
))
5517 /* See if this object can map to a basic atomic type. */
5518 tree atomic_type
= find_atomic_core_type (cand
);
5519 if (atomic_type
&& TYPE_ALIGN (atomic_type
) == TYPE_ALIGN (cand
))
5525 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5528 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
5530 return (TYPE_QUALS (cand
) == type_quals
5531 && check_base_type (cand
, base
)
5532 && check_lang_type (cand
, base
));
5535 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5538 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
5540 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
5541 && TYPE_NAME (cand
) == TYPE_NAME (base
)
5542 /* Apparently this is needed for Objective-C. */
5543 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
5544 /* Check alignment. */
5545 && TYPE_ALIGN (cand
) == align
5546 /* Check this is a user-aligned type as build_aligned_type
5548 && TYPE_USER_ALIGN (cand
)
5549 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5550 TYPE_ATTRIBUTES (base
))
5551 && check_lang_type (cand
, base
));
5554 /* Return a version of the TYPE, qualified as indicated by the
5555 TYPE_QUALS, if one exists. If no qualified version exists yet,
5556 return NULL_TREE. */
5559 get_qualified_type (tree type
, int type_quals
)
5561 if (TYPE_QUALS (type
) == type_quals
)
5564 tree mv
= TYPE_MAIN_VARIANT (type
);
5565 if (check_qualified_type (mv
, type
, type_quals
))
5568 /* Search the chain of variants to see if there is already one there just
5569 like the one we need to have. If so, use that existing one. We must
5570 preserve the TYPE_NAME, since there is code that depends on this. */
5571 for (tree
*tp
= &TYPE_NEXT_VARIANT (mv
); *tp
; tp
= &TYPE_NEXT_VARIANT (*tp
))
5572 if (check_qualified_type (*tp
, type
, type_quals
))
5574 /* Put the found variant at the head of the variant list so
5575 frequently searched variants get found faster. The C++ FE
5576 benefits greatly from this. */
5578 *tp
= TYPE_NEXT_VARIANT (t
);
5579 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (mv
);
5580 TYPE_NEXT_VARIANT (mv
) = t
;
5587 /* Like get_qualified_type, but creates the type if it does not
5588 exist. This function never returns NULL_TREE. */
5591 build_qualified_type (tree type
, int type_quals MEM_STAT_DECL
)
5595 /* See if we already have the appropriate qualified variant. */
5596 t
= get_qualified_type (type
, type_quals
);
5598 /* If not, build it. */
5601 t
= build_variant_type_copy (type PASS_MEM_STAT
);
5602 set_type_quals (t
, type_quals
);
5604 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
5606 /* See if this object can map to a basic atomic type. */
5607 tree atomic_type
= find_atomic_core_type (type
);
5610 /* Ensure the alignment of this type is compatible with
5611 the required alignment of the atomic type. */
5612 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
5613 SET_TYPE_ALIGN (t
, TYPE_ALIGN (atomic_type
));
5617 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5618 /* Propagate structural equality. */
5619 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5620 else if (TYPE_CANONICAL (type
) != type
)
5621 /* Build the underlying canonical type, since it is different
5624 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
5625 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
5628 /* T is its own canonical type. */
5629 TYPE_CANONICAL (t
) = t
;
5636 /* Create a variant of type T with alignment ALIGN. */
5639 build_aligned_type (tree type
, unsigned int align
)
5643 if (TYPE_PACKED (type
)
5644 || TYPE_ALIGN (type
) == align
)
5647 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
5648 if (check_aligned_type (t
, type
, align
))
5651 t
= build_variant_type_copy (type
);
5652 SET_TYPE_ALIGN (t
, align
);
5653 TYPE_USER_ALIGN (t
) = 1;
5658 /* Create a new distinct copy of TYPE. The new type is made its own
5659 MAIN_VARIANT. If TYPE requires structural equality checks, the
5660 resulting type requires structural equality checks; otherwise, its
5661 TYPE_CANONICAL points to itself. */
5664 build_distinct_type_copy (tree type MEM_STAT_DECL
)
5666 tree t
= copy_node (type PASS_MEM_STAT
);
5668 TYPE_POINTER_TO (t
) = 0;
5669 TYPE_REFERENCE_TO (t
) = 0;
5671 /* Set the canonical type either to a new equivalence class, or
5672 propagate the need for structural equality checks. */
5673 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5674 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5676 TYPE_CANONICAL (t
) = t
;
5678 /* Make it its own variant. */
5679 TYPE_MAIN_VARIANT (t
) = t
;
5680 TYPE_NEXT_VARIANT (t
) = 0;
5682 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5683 whose TREE_TYPE is not t. This can also happen in the Ada
5684 frontend when using subtypes. */
5689 /* Create a new variant of TYPE, equivalent but distinct. This is so
5690 the caller can modify it. TYPE_CANONICAL for the return type will
5691 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5692 are considered equal by the language itself (or that both types
5693 require structural equality checks). */
5696 build_variant_type_copy (tree type MEM_STAT_DECL
)
5698 tree t
, m
= TYPE_MAIN_VARIANT (type
);
5700 t
= build_distinct_type_copy (type PASS_MEM_STAT
);
5702 /* Since we're building a variant, assume that it is a non-semantic
5703 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5704 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
5705 /* Type variants have no alias set defined. */
5706 TYPE_ALIAS_SET (t
) = -1;
5708 /* Add the new type to the chain of variants of TYPE. */
5709 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
5710 TYPE_NEXT_VARIANT (m
) = t
;
5711 TYPE_MAIN_VARIANT (t
) = m
;
5716 /* Return true if the from tree in both tree maps are equal. */
5719 tree_map_base_eq (const void *va
, const void *vb
)
5721 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
5722 *const b
= (const struct tree_map_base
*) vb
;
5723 return (a
->from
== b
->from
);
5726 /* Hash a from tree in a tree_base_map. */
5729 tree_map_base_hash (const void *item
)
5731 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
5734 /* Return true if this tree map structure is marked for garbage collection
5735 purposes. We simply return true if the from tree is marked, so that this
5736 structure goes away when the from tree goes away. */
5739 tree_map_base_marked_p (const void *p
)
5741 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
5744 /* Hash a from tree in a tree_map. */
5747 tree_map_hash (const void *item
)
5749 return (((const struct tree_map
*) item
)->hash
);
5752 /* Hash a from tree in a tree_decl_map. */
5755 tree_decl_map_hash (const void *item
)
5757 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
5760 /* Return the initialization priority for DECL. */
5763 decl_init_priority_lookup (tree decl
)
5765 symtab_node
*snode
= symtab_node::get (decl
);
5768 return DEFAULT_INIT_PRIORITY
;
5770 snode
->get_init_priority ();
5773 /* Return the finalization priority for DECL. */
5776 decl_fini_priority_lookup (tree decl
)
5778 cgraph_node
*node
= cgraph_node::get (decl
);
5781 return DEFAULT_INIT_PRIORITY
;
5783 node
->get_fini_priority ();
5786 /* Set the initialization priority for DECL to PRIORITY. */
5789 decl_init_priority_insert (tree decl
, priority_type priority
)
5791 struct symtab_node
*snode
;
5793 if (priority
== DEFAULT_INIT_PRIORITY
)
5795 snode
= symtab_node::get (decl
);
5799 else if (VAR_P (decl
))
5800 snode
= varpool_node::get_create (decl
);
5802 snode
= cgraph_node::get_create (decl
);
5803 snode
->set_init_priority (priority
);
5806 /* Set the finalization priority for DECL to PRIORITY. */
5809 decl_fini_priority_insert (tree decl
, priority_type priority
)
5811 struct cgraph_node
*node
;
5813 if (priority
== DEFAULT_INIT_PRIORITY
)
5815 node
= cgraph_node::get (decl
);
5820 node
= cgraph_node::get_create (decl
);
5821 node
->set_fini_priority (priority
);
5824 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5827 print_debug_expr_statistics (void)
5829 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5830 (long) debug_expr_for_decl
->size (),
5831 (long) debug_expr_for_decl
->elements (),
5832 debug_expr_for_decl
->collisions ());
5835 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5838 print_value_expr_statistics (void)
5840 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5841 (long) value_expr_for_decl
->size (),
5842 (long) value_expr_for_decl
->elements (),
5843 value_expr_for_decl
->collisions ());
5846 /* Lookup a debug expression for FROM, and return it if we find one. */
5849 decl_debug_expr_lookup (tree from
)
5851 struct tree_decl_map
*h
, in
;
5852 in
.base
.from
= from
;
5854 h
= debug_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5860 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5863 decl_debug_expr_insert (tree from
, tree to
)
5865 struct tree_decl_map
*h
;
5867 h
= ggc_alloc
<tree_decl_map
> ();
5868 h
->base
.from
= from
;
5870 *debug_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5873 /* Lookup a value expression for FROM, and return it if we find one. */
5876 decl_value_expr_lookup (tree from
)
5878 struct tree_decl_map
*h
, in
;
5879 in
.base
.from
= from
;
5881 h
= value_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5887 /* Insert a mapping FROM->TO in the value expression hashtable. */
5890 decl_value_expr_insert (tree from
, tree to
)
5892 struct tree_decl_map
*h
;
5894 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5895 to
= protected_set_expr_location_unshare (to
, UNKNOWN_LOCATION
);
5897 h
= ggc_alloc
<tree_decl_map
> ();
5898 h
->base
.from
= from
;
5900 *value_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5903 /* Lookup a vector of debug arguments for FROM, and return it if we
5907 decl_debug_args_lookup (tree from
)
5909 struct tree_vec_map
*h
, in
;
5911 if (!DECL_HAS_DEBUG_ARGS_P (from
))
5913 gcc_checking_assert (debug_args_for_decl
!= NULL
);
5914 in
.base
.from
= from
;
5915 h
= debug_args_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5921 /* Insert a mapping FROM->empty vector of debug arguments in the value
5922 expression hashtable. */
5925 decl_debug_args_insert (tree from
)
5927 struct tree_vec_map
*h
;
5930 if (DECL_HAS_DEBUG_ARGS_P (from
))
5931 return decl_debug_args_lookup (from
);
5932 if (debug_args_for_decl
== NULL
)
5933 debug_args_for_decl
= hash_table
<tree_vec_map_cache_hasher
>::create_ggc (64);
5934 h
= ggc_alloc
<tree_vec_map
> ();
5935 h
->base
.from
= from
;
5937 loc
= debug_args_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
);
5939 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
5943 /* Hashing of types so that we don't make duplicates.
5944 The entry point is `type_hash_canon'. */
5946 /* Generate the default hash code for TYPE. This is designed for
5947 speed, rather than maximum entropy. */
5950 type_hash_canon_hash (tree type
)
5952 inchash::hash hstate
;
5954 hstate
.add_int (TREE_CODE (type
));
5956 if (TREE_TYPE (type
))
5957 hstate
.add_object (TYPE_HASH (TREE_TYPE (type
)));
5959 for (tree t
= TYPE_ATTRIBUTES (type
); t
; t
= TREE_CHAIN (t
))
5960 /* Just the identifier is adequate to distinguish. */
5961 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t
)));
5963 switch (TREE_CODE (type
))
5966 hstate
.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type
)));
5969 for (tree t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
5970 if (TREE_VALUE (t
) != error_mark_node
)
5971 hstate
.add_object (TYPE_HASH (TREE_VALUE (t
)));
5975 hstate
.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type
)));
5980 if (TYPE_DOMAIN (type
))
5981 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (type
)));
5982 if (!AGGREGATE_TYPE_P (TREE_TYPE (type
)))
5984 unsigned typeless
= TYPE_TYPELESS_STORAGE (type
);
5985 hstate
.add_object (typeless
);
5992 tree t
= TYPE_MAX_VALUE (type
);
5994 t
= TYPE_MIN_VALUE (type
);
5995 for (int i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
5996 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
6001 case FIXED_POINT_TYPE
:
6003 unsigned prec
= TYPE_PRECISION (type
);
6004 hstate
.add_object (prec
);
6009 hstate
.add_poly_int (TYPE_VECTOR_SUBPARTS (type
));
6016 return hstate
.end ();
6019 /* These are the Hashtable callback functions. */
6021 /* Returns true iff the types are equivalent. */
6024 type_cache_hasher::equal (type_hash
*a
, type_hash
*b
)
6026 /* First test the things that are the same for all types. */
6027 if (a
->hash
!= b
->hash
6028 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6029 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6030 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6031 TYPE_ATTRIBUTES (b
->type
))
6032 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6033 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6036 /* Be careful about comparing arrays before and after the element type
6037 has been completed; don't compare TYPE_ALIGN unless both types are
6039 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6040 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6041 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6044 switch (TREE_CODE (a
->type
))
6050 case REFERENCE_TYPE
:
6055 return known_eq (TYPE_VECTOR_SUBPARTS (a
->type
),
6056 TYPE_VECTOR_SUBPARTS (b
->type
));
6059 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6060 && !(TYPE_VALUES (a
->type
)
6061 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6062 && TYPE_VALUES (b
->type
)
6063 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6064 && type_list_equal (TYPE_VALUES (a
->type
),
6065 TYPE_VALUES (b
->type
))))
6073 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6075 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6076 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6077 TYPE_MAX_VALUE (b
->type
)))
6078 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6079 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6080 TYPE_MIN_VALUE (b
->type
))));
6082 case FIXED_POINT_TYPE
:
6083 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6086 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6089 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6090 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6091 || (TYPE_ARG_TYPES (a
->type
)
6092 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6093 && TYPE_ARG_TYPES (b
->type
)
6094 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6095 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6096 TYPE_ARG_TYPES (b
->type
)))))
6100 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6101 where the flag should be inherited from the element type
6102 and can change after ARRAY_TYPEs are created; on non-aggregates
6103 compare it and hash it, scalars will never have that flag set
6104 and we need to differentiate between arrays created by different
6105 front-ends or middle-end created arrays. */
6106 return (TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
)
6107 && (AGGREGATE_TYPE_P (TREE_TYPE (a
->type
))
6108 || (TYPE_TYPELESS_STORAGE (a
->type
)
6109 == TYPE_TYPELESS_STORAGE (b
->type
))));
6113 case QUAL_UNION_TYPE
:
6114 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6115 || (TYPE_FIELDS (a
->type
)
6116 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6117 && TYPE_FIELDS (b
->type
)
6118 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6119 && type_list_equal (TYPE_FIELDS (a
->type
),
6120 TYPE_FIELDS (b
->type
))));
6123 if ((TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6124 && (TYPE_NO_NAMED_ARGS_STDARG_P (a
->type
)
6125 == TYPE_NO_NAMED_ARGS_STDARG_P (b
->type
)))
6126 || (TYPE_ARG_TYPES (a
->type
)
6127 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6128 && TYPE_ARG_TYPES (b
->type
)
6129 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6130 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6131 TYPE_ARG_TYPES (b
->type
))))
6139 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6140 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6145 /* Given TYPE, and HASHCODE its hash code, return the canonical
6146 object for an identical type if one already exists.
6147 Otherwise, return TYPE, and record it as the canonical object.
6149 To use this function, first create a type of the sort you want.
6150 Then compute its hash code from the fields of the type that
6151 make it different from other similar types.
6152 Then call this function and use the value. */
6155 type_hash_canon (unsigned int hashcode
, tree type
)
6160 /* The hash table only contains main variants, so ensure that's what we're
6162 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6164 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6165 must call that routine before comparing TYPE_ALIGNs. */
6171 loc
= type_hash_table
->find_slot_with_hash (&in
, hashcode
, INSERT
);
6174 tree t1
= ((type_hash
*) *loc
)->type
;
6175 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
6177 if (TYPE_UID (type
) + 1 == next_type_uid
)
6179 /* Free also min/max values and the cache for integer
6180 types. This can't be done in free_node, as LTO frees
6181 those on its own. */
6182 if (TREE_CODE (type
) == INTEGER_TYPE
)
6184 if (TYPE_MIN_VALUE (type
)
6185 && TREE_TYPE (TYPE_MIN_VALUE (type
)) == type
)
6187 /* Zero is always in TYPE_CACHED_VALUES. */
6188 if (! TYPE_UNSIGNED (type
))
6189 int_cst_hash_table
->remove_elt (TYPE_MIN_VALUE (type
));
6190 ggc_free (TYPE_MIN_VALUE (type
));
6192 if (TYPE_MAX_VALUE (type
)
6193 && TREE_TYPE (TYPE_MAX_VALUE (type
)) == type
)
6195 int_cst_hash_table
->remove_elt (TYPE_MAX_VALUE (type
));
6196 ggc_free (TYPE_MAX_VALUE (type
));
6198 if (TYPE_CACHED_VALUES_P (type
))
6199 ggc_free (TYPE_CACHED_VALUES (type
));
6206 struct type_hash
*h
;
6208 h
= ggc_alloc
<type_hash
> ();
6218 print_type_hash_statistics (void)
6220 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6221 (long) type_hash_table
->size (),
6222 (long) type_hash_table
->elements (),
6223 type_hash_table
->collisions ());
6226 /* Given two lists of types
6227 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6228 return 1 if the lists contain the same types in the same order.
6229 Also, the TREE_PURPOSEs must match. */
6232 type_list_equal (const_tree l1
, const_tree l2
)
6236 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6237 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6238 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6239 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6240 && (TREE_TYPE (TREE_PURPOSE (t1
))
6241 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6247 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6248 given by TYPE. If the argument list accepts variable arguments,
6249 then this function counts only the ordinary arguments. */
6252 type_num_arguments (const_tree fntype
)
6256 for (tree t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
6257 /* If the function does not take a variable number of arguments,
6258 the last element in the list will have type `void'. */
6259 if (VOID_TYPE_P (TREE_VALUE (t
)))
6267 /* Return the type of the function TYPE's argument ARGNO if known.
6268 For vararg function's where ARGNO refers to one of the variadic
6269 arguments return null. Otherwise, return a void_type_node for
6270 out-of-bounds ARGNO. */
6273 type_argument_type (const_tree fntype
, unsigned argno
)
6275 /* Treat zero the same as an out-of-bounds argument number. */
6277 return void_type_node
;
6279 function_args_iterator iter
;
6283 FOREACH_FUNCTION_ARGS (fntype
, argtype
, iter
)
6285 /* A vararg function's argument list ends in a null. Otherwise,
6286 an ordinary function's argument list ends with void. Return
6287 null if ARGNO refers to a vararg argument, void_type_node if
6288 it's out of bounds, and the formal argument type otherwise. */
6292 if (i
== argno
|| VOID_TYPE_P (argtype
))
6301 /* Nonzero if integer constants T1 and T2
6302 represent the same constant value. */
6305 tree_int_cst_equal (const_tree t1
, const_tree t2
)
6310 if (t1
== 0 || t2
== 0)
6313 STRIP_ANY_LOCATION_WRAPPER (t1
);
6314 STRIP_ANY_LOCATION_WRAPPER (t2
);
6316 if (TREE_CODE (t1
) == INTEGER_CST
6317 && TREE_CODE (t2
) == INTEGER_CST
6318 && wi::to_widest (t1
) == wi::to_widest (t2
))
6324 /* Return true if T is an INTEGER_CST whose numerical value (extended
6325 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6328 tree_fits_shwi_p (const_tree t
)
6330 return (t
!= NULL_TREE
6331 && TREE_CODE (t
) == INTEGER_CST
6332 && wi::fits_shwi_p (wi::to_widest (t
)));
6335 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6336 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6339 tree_fits_poly_int64_p (const_tree t
)
6343 if (POLY_INT_CST_P (t
))
6345 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6346 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t
, i
))))
6350 return (TREE_CODE (t
) == INTEGER_CST
6351 && wi::fits_shwi_p (wi::to_widest (t
)));
6354 /* Return true if T is an INTEGER_CST whose numerical value (extended
6355 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6358 tree_fits_uhwi_p (const_tree t
)
6360 return (t
!= NULL_TREE
6361 && TREE_CODE (t
) == INTEGER_CST
6362 && wi::fits_uhwi_p (wi::to_widest (t
)));
6365 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6366 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6369 tree_fits_poly_uint64_p (const_tree t
)
6373 if (POLY_INT_CST_P (t
))
6375 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6376 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t
, i
))))
6380 return (TREE_CODE (t
) == INTEGER_CST
6381 && wi::fits_uhwi_p (wi::to_widest (t
)));
6384 /* T is an INTEGER_CST whose numerical value (extended according to
6385 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6389 tree_to_shwi (const_tree t
)
6391 gcc_assert (tree_fits_shwi_p (t
));
6392 return TREE_INT_CST_LOW (t
);
6395 /* T is an INTEGER_CST whose numerical value (extended according to
6396 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6399 unsigned HOST_WIDE_INT
6400 tree_to_uhwi (const_tree t
)
6402 gcc_assert (tree_fits_uhwi_p (t
));
6403 return TREE_INT_CST_LOW (t
);
6406 /* Return the most significant (sign) bit of T. */
6409 tree_int_cst_sign_bit (const_tree t
)
6411 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
6413 return wi::extract_uhwi (wi::to_wide (t
), bitno
, 1);
6416 /* Return an indication of the sign of the integer constant T.
6417 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6418 Note that -1 will never be returned if T's type is unsigned. */
6421 tree_int_cst_sgn (const_tree t
)
6423 if (wi::to_wide (t
) == 0)
6425 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
6427 else if (wi::neg_p (wi::to_wide (t
)))
6433 /* Return the minimum number of bits needed to represent VALUE in a
6434 signed or unsigned type, UNSIGNEDP says which. */
6437 tree_int_cst_min_precision (tree value
, signop sgn
)
6439 /* If the value is negative, compute its negative minus 1. The latter
6440 adjustment is because the absolute value of the largest negative value
6441 is one larger than the largest positive value. This is equivalent to
6442 a bit-wise negation, so use that operation instead. */
6444 if (tree_int_cst_sgn (value
) < 0)
6445 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
6447 /* Return the number of bits needed, taking into account the fact
6448 that we need one more bit for a signed than unsigned type.
6449 If value is 0 or -1, the minimum precision is 1 no matter
6450 whether unsignedp is true or false. */
6452 if (integer_zerop (value
))
6455 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
6458 /* Return truthvalue of whether T1 is the same tree structure as T2.
6459 Return 1 if they are the same.
6460 Return 0 if they are understandably different.
6461 Return -1 if either contains tree structure not understood by
6465 simple_cst_equal (const_tree t1
, const_tree t2
)
6467 enum tree_code code1
, code2
;
6473 if (t1
== 0 || t2
== 0)
6476 /* For location wrappers to be the same, they must be at the same
6477 source location (and wrap the same thing). */
6478 if (location_wrapper_p (t1
) && location_wrapper_p (t2
))
6480 if (EXPR_LOCATION (t1
) != EXPR_LOCATION (t2
))
6482 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6485 code1
= TREE_CODE (t1
);
6486 code2
= TREE_CODE (t2
);
6488 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
6490 if (CONVERT_EXPR_CODE_P (code2
)
6491 || code2
== NON_LVALUE_EXPR
)
6492 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6494 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
6497 else if (CONVERT_EXPR_CODE_P (code2
)
6498 || code2
== NON_LVALUE_EXPR
)
6499 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
6507 return wi::to_widest (t1
) == wi::to_widest (t2
);
6510 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
6513 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
6516 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
6517 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
6518 TREE_STRING_LENGTH (t1
)));
6522 unsigned HOST_WIDE_INT idx
;
6523 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
6524 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
6526 if (vec_safe_length (v1
) != vec_safe_length (v2
))
6529 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
6530 /* ??? Should we handle also fields here? */
6531 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
6537 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6540 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
6543 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
6546 const_tree arg1
, arg2
;
6547 const_call_expr_arg_iterator iter1
, iter2
;
6548 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
6549 arg2
= first_const_call_expr_arg (t2
, &iter2
);
6551 arg1
= next_const_call_expr_arg (&iter1
),
6552 arg2
= next_const_call_expr_arg (&iter2
))
6554 cmp
= simple_cst_equal (arg1
, arg2
);
6558 return arg1
== arg2
;
6562 /* Special case: if either target is an unallocated VAR_DECL,
6563 it means that it's going to be unified with whatever the
6564 TARGET_EXPR is really supposed to initialize, so treat it
6565 as being equivalent to anything. */
6566 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
6567 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
6568 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
6569 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
6570 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
6571 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
6574 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6579 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
6581 case WITH_CLEANUP_EXPR
:
6582 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6586 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
6589 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
6590 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6601 if (POLY_INT_CST_P (t1
))
6602 /* A false return means maybe_ne rather than known_ne. */
6603 return known_eq (poly_widest_int::from (poly_int_cst_value (t1
),
6604 TYPE_SIGN (TREE_TYPE (t1
))),
6605 poly_widest_int::from (poly_int_cst_value (t2
),
6606 TYPE_SIGN (TREE_TYPE (t2
))));
6610 /* This general rule works for most tree codes. All exceptions should be
6611 handled above. If this is a language-specific tree code, we can't
6612 trust what might be in the operand, so say we don't know
6614 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
6617 switch (TREE_CODE_CLASS (code1
))
6621 case tcc_comparison
:
6622 case tcc_expression
:
6626 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
6628 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
6640 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6641 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6642 than U, respectively. */
6645 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
6647 if (tree_int_cst_sgn (t
) < 0)
6649 else if (!tree_fits_uhwi_p (t
))
6651 else if (TREE_INT_CST_LOW (t
) == u
)
6653 else if (TREE_INT_CST_LOW (t
) < u
)
6659 /* Return true if SIZE represents a constant size that is in bounds of
6660 what the middle-end and the backend accepts (covering not more than
6661 half of the address-space).
6662 When PERR is non-null, set *PERR on failure to the description of
6663 why SIZE is not valid. */
6666 valid_constant_size_p (const_tree size
, cst_size_error
*perr
/* = NULL */)
6668 if (POLY_INT_CST_P (size
))
6670 if (TREE_OVERFLOW (size
))
6672 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
6673 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size
, i
)))
6678 cst_size_error error
;
6682 if (TREE_CODE (size
) != INTEGER_CST
)
6684 *perr
= cst_size_not_constant
;
6688 if (TREE_OVERFLOW_P (size
))
6690 *perr
= cst_size_overflow
;
6694 if (tree_int_cst_sgn (size
) < 0)
6696 *perr
= cst_size_negative
;
6699 if (!tree_fits_uhwi_p (size
)
6700 || (wi::to_widest (TYPE_MAX_VALUE (sizetype
))
6701 < wi::to_widest (size
) * 2))
6703 *perr
= cst_size_too_big
;
6710 /* Return the precision of the type, or for a complex or vector type the
6711 precision of the type of its elements. */
6714 element_precision (const_tree type
)
6717 type
= TREE_TYPE (type
);
6718 enum tree_code code
= TREE_CODE (type
);
6719 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
6720 type
= TREE_TYPE (type
);
6722 return TYPE_PRECISION (type
);
6725 /* Return true if CODE represents an associative tree code. Otherwise
6728 associative_tree_code (enum tree_code code
)
6747 /* Return true if CODE represents a commutative tree code. Otherwise
6750 commutative_tree_code (enum tree_code code
)
6756 case MULT_HIGHPART_EXPR
:
6764 case UNORDERED_EXPR
:
6768 case TRUTH_AND_EXPR
:
6769 case TRUTH_XOR_EXPR
:
6771 case WIDEN_MULT_EXPR
:
6772 case VEC_WIDEN_MULT_HI_EXPR
:
6773 case VEC_WIDEN_MULT_LO_EXPR
:
6774 case VEC_WIDEN_MULT_EVEN_EXPR
:
6775 case VEC_WIDEN_MULT_ODD_EXPR
:
6784 /* Return true if CODE represents a ternary tree code for which the
6785 first two operands are commutative. Otherwise return false. */
6787 commutative_ternary_tree_code (enum tree_code code
)
6791 case WIDEN_MULT_PLUS_EXPR
:
6792 case WIDEN_MULT_MINUS_EXPR
:
6802 /* Returns true if CODE can overflow. */
6805 operation_can_overflow (enum tree_code code
)
6813 /* Can overflow in various ways. */
6815 case TRUNC_DIV_EXPR
:
6816 case EXACT_DIV_EXPR
:
6817 case FLOOR_DIV_EXPR
:
6819 /* For INT_MIN / -1. */
6826 /* These operators cannot overflow. */
6831 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6832 ftrapv doesn't generate trapping insns for CODE. */
6835 operation_no_trapping_overflow (tree type
, enum tree_code code
)
6837 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
6839 /* We don't generate instructions that trap on overflow for complex or vector
6841 if (!INTEGRAL_TYPE_P (type
))
6844 if (!TYPE_OVERFLOW_TRAPS (type
))
6854 /* These operators can overflow, and -ftrapv generates trapping code for
6857 case TRUNC_DIV_EXPR
:
6858 case EXACT_DIV_EXPR
:
6859 case FLOOR_DIV_EXPR
:
6862 /* These operators can overflow, but -ftrapv does not generate trapping
6866 /* These operators cannot overflow. */
6871 /* Constructors for pointer, array and function types.
6872 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6873 constructed by language-dependent code, not here.) */
6875 /* Construct, lay out and return the type of pointers to TO_TYPE with
6876 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6877 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6878 indicate this type can reference all of memory. If such a type has
6879 already been constructed, reuse it. */
6882 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
6886 bool could_alias
= can_alias_all
;
6888 if (to_type
== error_mark_node
)
6889 return error_mark_node
;
6891 if (mode
== VOIDmode
)
6893 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6894 mode
= targetm
.addr_space
.pointer_mode (as
);
6897 /* If the pointed-to type has the may_alias attribute set, force
6898 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6899 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6900 can_alias_all
= true;
6902 /* In some cases, languages will have things that aren't a POINTER_TYPE
6903 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6904 In that case, return that type without regard to the rest of our
6907 ??? This is a kludge, but consistent with the way this function has
6908 always operated and there doesn't seem to be a good way to avoid this
6910 if (TYPE_POINTER_TO (to_type
) != 0
6911 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
6912 return TYPE_POINTER_TO (to_type
);
6914 /* First, if we already have a type for pointers to TO_TYPE and it's
6915 the proper mode, use it. */
6916 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
6917 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6920 t
= make_node (POINTER_TYPE
);
6922 TREE_TYPE (t
) = to_type
;
6923 SET_TYPE_MODE (t
, mode
);
6924 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6925 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
6926 TYPE_POINTER_TO (to_type
) = t
;
6928 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6929 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6930 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6931 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6933 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
6936 /* Lay out the type. This function has many callers that are concerned
6937 with expression-construction, and this simplifies them all. */
6943 /* By default build pointers in ptr_mode. */
6946 build_pointer_type (tree to_type
)
6948 return build_pointer_type_for_mode (to_type
, VOIDmode
, false);
6951 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6954 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
6958 bool could_alias
= can_alias_all
;
6960 if (to_type
== error_mark_node
)
6961 return error_mark_node
;
6963 if (mode
== VOIDmode
)
6965 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6966 mode
= targetm
.addr_space
.pointer_mode (as
);
6969 /* If the pointed-to type has the may_alias attribute set, force
6970 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6971 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6972 can_alias_all
= true;
6974 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6975 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6976 In that case, return that type without regard to the rest of our
6979 ??? This is a kludge, but consistent with the way this function has
6980 always operated and there doesn't seem to be a good way to avoid this
6982 if (TYPE_REFERENCE_TO (to_type
) != 0
6983 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
6984 return TYPE_REFERENCE_TO (to_type
);
6986 /* First, if we already have a type for pointers to TO_TYPE and it's
6987 the proper mode, use it. */
6988 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
6989 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6992 t
= make_node (REFERENCE_TYPE
);
6994 TREE_TYPE (t
) = to_type
;
6995 SET_TYPE_MODE (t
, mode
);
6996 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6997 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
6998 TYPE_REFERENCE_TO (to_type
) = t
;
7000 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7001 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7002 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7003 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7005 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7014 /* Build the node for the type of references-to-TO_TYPE by default
7018 build_reference_type (tree to_type
)
7020 return build_reference_type_for_mode (to_type
, VOIDmode
, false);
7023 #define MAX_INT_CACHED_PREC \
7024 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7025 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7028 clear_nonstandard_integer_type_cache (void)
7030 for (size_t i
= 0 ; i
< 2 * MAX_INT_CACHED_PREC
+ 2 ; i
++)
7032 nonstandard_integer_type_cache
[i
] = NULL
;
7036 /* Builds a signed or unsigned integer type of precision PRECISION.
7037 Used for C bitfields whose precision does not match that of
7038 built-in target types. */
7040 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7046 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7048 if (precision
<= MAX_INT_CACHED_PREC
)
7050 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7055 itype
= make_node (INTEGER_TYPE
);
7056 TYPE_PRECISION (itype
) = precision
;
7059 fixup_unsigned_type (itype
);
7061 fixup_signed_type (itype
);
7063 inchash::hash hstate
;
7064 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
7065 ret
= type_hash_canon (hstate
.end (), itype
);
7066 if (precision
<= MAX_INT_CACHED_PREC
)
7067 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7072 #define MAX_BOOL_CACHED_PREC \
7073 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7074 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
7076 /* Builds a boolean type of precision PRECISION.
7077 Used for boolean vectors to choose proper vector element size. */
7079 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
7083 if (precision
<= MAX_BOOL_CACHED_PREC
)
7085 type
= nonstandard_boolean_type_cache
[precision
];
7090 type
= make_node (BOOLEAN_TYPE
);
7091 TYPE_PRECISION (type
) = precision
;
7092 fixup_signed_type (type
);
7094 if (precision
<= MAX_INT_CACHED_PREC
)
7095 nonstandard_boolean_type_cache
[precision
] = type
;
7100 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7101 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7102 is true, reuse such a type that has already been constructed. */
7105 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7107 tree itype
= make_node (INTEGER_TYPE
);
7109 TREE_TYPE (itype
) = type
;
7111 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7112 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7114 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7115 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7116 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7117 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7118 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
7119 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7120 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
7125 if ((TYPE_MIN_VALUE (itype
)
7126 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7127 || (TYPE_MAX_VALUE (itype
)
7128 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7130 /* Since we cannot reliably merge this type, we need to compare it using
7131 structural equality checks. */
7132 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7136 hashval_t hash
= type_hash_canon_hash (itype
);
7137 itype
= type_hash_canon (hash
, itype
);
7142 /* Wrapper around build_range_type_1 with SHARED set to true. */
7145 build_range_type (tree type
, tree lowval
, tree highval
)
7147 return build_range_type_1 (type
, lowval
, highval
, true);
7150 /* Wrapper around build_range_type_1 with SHARED set to false. */
7153 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7155 return build_range_type_1 (type
, lowval
, highval
, false);
7158 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7159 MAXVAL should be the maximum value in the domain
7160 (one less than the length of the array).
7162 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7163 We don't enforce this limit, that is up to caller (e.g. language front end).
7164 The limit exists because the result is a signed type and we don't handle
7165 sizes that use more than one HOST_WIDE_INT. */
7168 build_index_type (tree maxval
)
7170 return build_range_type (sizetype
, size_zero_node
, maxval
);
7173 /* Return true if the debug information for TYPE, a subtype, should be emitted
7174 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7175 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7176 debug info and doesn't reflect the source code. */
7179 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7181 tree base_type
= TREE_TYPE (type
), low
, high
;
7183 /* Subrange types have a base type which is an integral type. */
7184 if (!INTEGRAL_TYPE_P (base_type
))
7187 /* Get the real bounds of the subtype. */
7188 if (lang_hooks
.types
.get_subrange_bounds
)
7189 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7192 low
= TYPE_MIN_VALUE (type
);
7193 high
= TYPE_MAX_VALUE (type
);
7196 /* If the type and its base type have the same representation and the same
7197 name, then the type is not a subrange but a copy of the base type. */
7198 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7199 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7200 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7201 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7202 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7203 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7213 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7214 and number of elements specified by the range of values of INDEX_TYPE.
7215 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7216 If SHARED is true, reuse such a type that has already been constructed.
7217 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7220 build_array_type_1 (tree elt_type
, tree index_type
, bool typeless_storage
,
7221 bool shared
, bool set_canonical
)
7225 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7227 error ("arrays of functions are not meaningful");
7228 elt_type
= integer_type_node
;
7231 t
= make_node (ARRAY_TYPE
);
7232 TREE_TYPE (t
) = elt_type
;
7233 TYPE_DOMAIN (t
) = index_type
;
7234 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7235 TYPE_TYPELESS_STORAGE (t
) = typeless_storage
;
7240 hashval_t hash
= type_hash_canon_hash (t
);
7241 t
= type_hash_canon (hash
, t
);
7244 if (TYPE_CANONICAL (t
) == t
&& set_canonical
)
7246 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7247 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
7249 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7250 else if (TYPE_CANONICAL (elt_type
) != elt_type
7251 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7253 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7255 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7256 typeless_storage
, shared
, set_canonical
);
7262 /* Wrapper around build_array_type_1 with SHARED set to true. */
7265 build_array_type (tree elt_type
, tree index_type
, bool typeless_storage
)
7268 build_array_type_1 (elt_type
, index_type
, typeless_storage
, true, true);
7271 /* Wrapper around build_array_type_1 with SHARED set to false. */
7274 build_nonshared_array_type (tree elt_type
, tree index_type
)
7276 return build_array_type_1 (elt_type
, index_type
, false, false, true);
7279 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7283 build_array_type_nelts (tree elt_type
, poly_uint64 nelts
)
7285 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7288 /* Recursively examines the array elements of TYPE, until a non-array
7289 element type is found. */
7292 strip_array_types (tree type
)
7294 while (TREE_CODE (type
) == ARRAY_TYPE
)
7295 type
= TREE_TYPE (type
);
7300 /* Computes the canonical argument types from the argument type list
7303 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7304 on entry to this function, or if any of the ARGTYPES are
7307 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7308 true on entry to this function, or if any of the ARGTYPES are
7311 Returns a canonical argument list, which may be ARGTYPES when the
7312 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7313 true) or would not differ from ARGTYPES. */
7316 maybe_canonicalize_argtypes (tree argtypes
,
7317 bool *any_structural_p
,
7318 bool *any_noncanonical_p
)
7321 bool any_noncanonical_argtypes_p
= false;
7323 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7325 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7326 /* Fail gracefully by stating that the type is structural. */
7327 *any_structural_p
= true;
7328 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7329 *any_structural_p
= true;
7330 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7331 || TREE_PURPOSE (arg
))
7332 /* If the argument has a default argument, we consider it
7333 non-canonical even though the type itself is canonical.
7334 That way, different variants of function and method types
7335 with default arguments will all point to the variant with
7336 no defaults as their canonical type. */
7337 any_noncanonical_argtypes_p
= true;
7340 if (*any_structural_p
)
7343 if (any_noncanonical_argtypes_p
)
7345 /* Build the canonical list of argument types. */
7346 tree canon_argtypes
= NULL_TREE
;
7347 bool is_void
= false;
7349 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7351 if (arg
== void_list_node
)
7354 canon_argtypes
= tree_cons (NULL_TREE
,
7355 TYPE_CANONICAL (TREE_VALUE (arg
)),
7359 canon_argtypes
= nreverse (canon_argtypes
);
7361 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7363 /* There is a non-canonical type. */
7364 *any_noncanonical_p
= true;
7365 return canon_argtypes
;
7368 /* The canonical argument types are the same as ARGTYPES. */
7372 /* Construct, lay out and return
7373 the type of functions returning type VALUE_TYPE
7374 given arguments of types ARG_TYPES.
7375 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7376 are data type nodes for the arguments of the function.
7377 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7378 variable-arguments function with (...) prototype (no named arguments).
7379 If such a type has already been constructed, reuse it. */
7382 build_function_type (tree value_type
, tree arg_types
,
7383 bool no_named_args_stdarg_p
)
7386 inchash::hash hstate
;
7387 bool any_structural_p
, any_noncanonical_p
;
7388 tree canon_argtypes
;
7390 gcc_assert (arg_types
!= error_mark_node
);
7392 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
7394 error ("function return type cannot be function");
7395 value_type
= integer_type_node
;
7398 /* Make a node of the sort we want. */
7399 t
= make_node (FUNCTION_TYPE
);
7400 TREE_TYPE (t
) = value_type
;
7401 TYPE_ARG_TYPES (t
) = arg_types
;
7402 if (no_named_args_stdarg_p
)
7404 gcc_assert (arg_types
== NULL_TREE
);
7405 TYPE_NO_NAMED_ARGS_STDARG_P (t
) = 1;
7408 /* If we already have such a type, use the old one. */
7409 hashval_t hash
= type_hash_canon_hash (t
);
7410 t
= type_hash_canon (hash
, t
);
7412 /* Set up the canonical type. */
7413 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
7414 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
7415 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
7417 &any_noncanonical_p
);
7418 if (any_structural_p
)
7419 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7420 else if (any_noncanonical_p
)
7421 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
7424 if (!COMPLETE_TYPE_P (t
))
7429 /* Build a function type. The RETURN_TYPE is the type returned by the
7430 function. If VAARGS is set, no void_type_node is appended to the
7431 list. ARGP must be always be terminated be a NULL_TREE. */
7434 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
7438 t
= va_arg (argp
, tree
);
7439 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
7440 args
= tree_cons (NULL_TREE
, t
, args
);
7445 if (args
!= NULL_TREE
)
7446 args
= nreverse (args
);
7447 gcc_assert (last
!= void_list_node
);
7449 else if (args
== NULL_TREE
)
7450 args
= void_list_node
;
7454 args
= nreverse (args
);
7455 TREE_CHAIN (last
) = void_list_node
;
7457 args
= build_function_type (return_type
, args
, vaargs
&& args
== NULL_TREE
);
7462 /* Build a function type. The RETURN_TYPE is the type returned by the
7463 function. If additional arguments are provided, they are
7464 additional argument types. The list of argument types must always
7465 be terminated by NULL_TREE. */
7468 build_function_type_list (tree return_type
, ...)
7473 va_start (p
, return_type
);
7474 args
= build_function_type_list_1 (false, return_type
, p
);
7479 /* Build a variable argument function type. The RETURN_TYPE is the
7480 type returned by the function. If additional arguments are provided,
7481 they are additional argument types. The list of argument types must
7482 always be terminated by NULL_TREE. */
7485 build_varargs_function_type_list (tree return_type
, ...)
7490 va_start (p
, return_type
);
7491 args
= build_function_type_list_1 (true, return_type
, p
);
7497 /* Build a function type. RETURN_TYPE is the type returned by the
7498 function; VAARGS indicates whether the function takes varargs. The
7499 function takes N named arguments, the types of which are provided in
7503 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
7507 tree t
= vaargs
? NULL_TREE
: void_list_node
;
7509 for (i
= n
- 1; i
>= 0; i
--)
7510 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
7512 return build_function_type (return_type
, t
, vaargs
&& n
== 0);
7515 /* Build a function type. RETURN_TYPE is the type returned by the
7516 function. The function takes N named arguments, the types of which
7517 are provided in ARG_TYPES. */
7520 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7522 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
7525 /* Build a variable argument function type. RETURN_TYPE is the type
7526 returned by the function. The function takes N named arguments, the
7527 types of which are provided in ARG_TYPES. */
7530 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7532 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
7535 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7536 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7537 for the method. An implicit additional parameter (of type
7538 pointer-to-BASETYPE) is added to the ARGTYPES. */
7541 build_method_type_directly (tree basetype
,
7547 bool any_structural_p
, any_noncanonical_p
;
7548 tree canon_argtypes
;
7550 /* Make a node of the sort we want. */
7551 t
= make_node (METHOD_TYPE
);
7553 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7554 TREE_TYPE (t
) = rettype
;
7555 ptype
= build_pointer_type (basetype
);
7557 /* The actual arglist for this function includes a "hidden" argument
7558 which is "this". Put it into the list of argument types. */
7559 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
7560 TYPE_ARG_TYPES (t
) = argtypes
;
7562 /* If we already have such a type, use the old one. */
7563 hashval_t hash
= type_hash_canon_hash (t
);
7564 t
= type_hash_canon (hash
, t
);
7566 /* Set up the canonical type. */
7568 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7569 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
7571 = (TYPE_CANONICAL (basetype
) != basetype
7572 || TYPE_CANONICAL (rettype
) != rettype
);
7573 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
7575 &any_noncanonical_p
);
7576 if (any_structural_p
)
7577 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7578 else if (any_noncanonical_p
)
7580 = build_method_type_directly (TYPE_CANONICAL (basetype
),
7581 TYPE_CANONICAL (rettype
),
7583 if (!COMPLETE_TYPE_P (t
))
7589 /* Construct, lay out and return the type of methods belonging to class
7590 BASETYPE and whose arguments and values are described by TYPE.
7591 If that type exists already, reuse it.
7592 TYPE must be a FUNCTION_TYPE node. */
7595 build_method_type (tree basetype
, tree type
)
7597 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
7599 return build_method_type_directly (basetype
,
7601 TYPE_ARG_TYPES (type
));
7604 /* Construct, lay out and return the type of offsets to a value
7605 of type TYPE, within an object of type BASETYPE.
7606 If a suitable offset type exists already, reuse it. */
7609 build_offset_type (tree basetype
, tree type
)
7613 /* Make a node of the sort we want. */
7614 t
= make_node (OFFSET_TYPE
);
7616 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7617 TREE_TYPE (t
) = type
;
7619 /* If we already have such a type, use the old one. */
7620 hashval_t hash
= type_hash_canon_hash (t
);
7621 t
= type_hash_canon (hash
, t
);
7623 if (!COMPLETE_TYPE_P (t
))
7626 if (TYPE_CANONICAL (t
) == t
)
7628 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7629 || TYPE_STRUCTURAL_EQUALITY_P (type
))
7630 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7631 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
7632 || TYPE_CANONICAL (type
) != type
)
7634 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
7635 TYPE_CANONICAL (type
));
7641 /* Create a complex type whose components are COMPONENT_TYPE.
7643 If NAMED is true, the type is given a TYPE_NAME. We do not always
7644 do so because this creates a DECL node and thus make the DECL_UIDs
7645 dependent on the type canonicalization hashtable, which is GC-ed,
7646 so the DECL_UIDs would not be stable wrt garbage collection. */
7649 build_complex_type (tree component_type
, bool named
)
7651 gcc_assert (INTEGRAL_TYPE_P (component_type
)
7652 || SCALAR_FLOAT_TYPE_P (component_type
)
7653 || FIXED_POINT_TYPE_P (component_type
));
7655 /* Make a node of the sort we want. */
7656 tree probe
= make_node (COMPLEX_TYPE
);
7658 TREE_TYPE (probe
) = TYPE_MAIN_VARIANT (component_type
);
7660 /* If we already have such a type, use the old one. */
7661 hashval_t hash
= type_hash_canon_hash (probe
);
7662 tree t
= type_hash_canon (hash
, probe
);
7666 /* We created a new type. The hash insertion will have laid
7667 out the type. We need to check the canonicalization and
7668 maybe set the name. */
7669 gcc_checking_assert (COMPLETE_TYPE_P (t
)
7671 && TYPE_CANONICAL (t
) == t
);
7673 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t
)))
7674 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7675 else if (TYPE_CANONICAL (TREE_TYPE (t
)) != TREE_TYPE (t
))
7677 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t
)), named
);
7679 /* We need to create a name, since complex is a fundamental type. */
7682 const char *name
= NULL
;
7684 if (TREE_TYPE (t
) == char_type_node
)
7685 name
= "complex char";
7686 else if (TREE_TYPE (t
) == signed_char_type_node
)
7687 name
= "complex signed char";
7688 else if (TREE_TYPE (t
) == unsigned_char_type_node
)
7689 name
= "complex unsigned char";
7690 else if (TREE_TYPE (t
) == short_integer_type_node
)
7691 name
= "complex short int";
7692 else if (TREE_TYPE (t
) == short_unsigned_type_node
)
7693 name
= "complex short unsigned int";
7694 else if (TREE_TYPE (t
) == integer_type_node
)
7695 name
= "complex int";
7696 else if (TREE_TYPE (t
) == unsigned_type_node
)
7697 name
= "complex unsigned int";
7698 else if (TREE_TYPE (t
) == long_integer_type_node
)
7699 name
= "complex long int";
7700 else if (TREE_TYPE (t
) == long_unsigned_type_node
)
7701 name
= "complex long unsigned int";
7702 else if (TREE_TYPE (t
) == long_long_integer_type_node
)
7703 name
= "complex long long int";
7704 else if (TREE_TYPE (t
) == long_long_unsigned_type_node
)
7705 name
= "complex long long unsigned int";
7708 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
7709 get_identifier (name
), t
);
7713 return build_qualified_type (t
, TYPE_QUALS (component_type
));
7716 /* If TYPE is a real or complex floating-point type and the target
7717 does not directly support arithmetic on TYPE then return the wider
7718 type to be used for arithmetic on TYPE. Otherwise, return
7722 excess_precision_type (tree type
)
7724 /* The target can give two different responses to the question of
7725 which excess precision mode it would like depending on whether we
7726 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7728 enum excess_precision_type requested_type
7729 = (flag_excess_precision
== EXCESS_PRECISION_FAST
7730 ? EXCESS_PRECISION_TYPE_FAST
7731 : (flag_excess_precision
== EXCESS_PRECISION_FLOAT16
7732 ? EXCESS_PRECISION_TYPE_FLOAT16
: EXCESS_PRECISION_TYPE_STANDARD
));
7734 enum flt_eval_method target_flt_eval_method
7735 = targetm
.c
.excess_precision (requested_type
);
7737 /* The target should not ask for unpredictable float evaluation (though
7738 it might advertise that implicitly the evaluation is unpredictable,
7739 but we don't care about that here, it will have been reported
7740 elsewhere). If it does ask for unpredictable evaluation, we have
7741 nothing to do here. */
7742 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
7744 /* Nothing to do. The target has asked for all types we know about
7745 to be computed with their native precision and range. */
7746 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
7749 /* The target will promote this type in a target-dependent way, so excess
7750 precision ought to leave it alone. */
7751 if (targetm
.promoted_type (type
) != NULL_TREE
)
7754 machine_mode float16_type_mode
= (float16_type_node
7755 ? TYPE_MODE (float16_type_node
)
7757 machine_mode bfloat16_type_mode
= (bfloat16_type_node
7758 ? TYPE_MODE (bfloat16_type_node
)
7760 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
7761 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
7763 switch (TREE_CODE (type
))
7767 machine_mode type_mode
= TYPE_MODE (type
);
7768 switch (target_flt_eval_method
)
7770 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7771 if (type_mode
== float16_type_mode
7772 || type_mode
== bfloat16_type_mode
)
7773 return float_type_node
;
7775 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7776 if (type_mode
== float16_type_mode
7777 || type_mode
== bfloat16_type_mode
7778 || type_mode
== float_type_mode
)
7779 return double_type_node
;
7781 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7782 if (type_mode
== float16_type_mode
7783 || type_mode
== bfloat16_type_mode
7784 || type_mode
== float_type_mode
7785 || type_mode
== double_type_mode
)
7786 return long_double_type_node
;
7795 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
7797 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
7798 switch (target_flt_eval_method
)
7800 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7801 if (type_mode
== float16_type_mode
7802 || type_mode
== bfloat16_type_mode
)
7803 return complex_float_type_node
;
7805 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7806 if (type_mode
== float16_type_mode
7807 || type_mode
== bfloat16_type_mode
7808 || type_mode
== float_type_mode
)
7809 return complex_double_type_node
;
7811 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7812 if (type_mode
== float16_type_mode
7813 || type_mode
== bfloat16_type_mode
7814 || type_mode
== float_type_mode
7815 || type_mode
== double_type_mode
)
7816 return complex_long_double_type_node
;
7830 /* Return OP, stripped of any conversions to wider types as much as is safe.
7831 Converting the value back to OP's type makes a value equivalent to OP.
7833 If FOR_TYPE is nonzero, we return a value which, if converted to
7834 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7836 OP must have integer, real or enumeral type. Pointers are not allowed!
7838 There are some cases where the obvious value we could return
7839 would regenerate to OP if converted to OP's type,
7840 but would not extend like OP to wider types.
7841 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7842 For example, if OP is (unsigned short)(signed char)-1,
7843 we avoid returning (signed char)-1 if FOR_TYPE is int,
7844 even though extending that to an unsigned short would regenerate OP,
7845 since the result of extending (signed char)-1 to (int)
7846 is different from (int) OP. */
7849 get_unwidened (tree op
, tree for_type
)
7851 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7852 tree type
= TREE_TYPE (op
);
7854 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
7856 = (for_type
!= 0 && for_type
!= type
7857 && final_prec
> TYPE_PRECISION (type
)
7858 && TYPE_UNSIGNED (type
));
7861 while (CONVERT_EXPR_P (op
))
7865 /* TYPE_PRECISION on vector types has different meaning
7866 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7867 so avoid them here. */
7868 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
7871 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
7872 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
7874 /* Truncations are many-one so cannot be removed.
7875 Unless we are later going to truncate down even farther. */
7877 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
7880 /* See what's inside this conversion. If we decide to strip it,
7882 op
= TREE_OPERAND (op
, 0);
7884 /* If we have not stripped any zero-extensions (uns is 0),
7885 we can strip any kind of extension.
7886 If we have previously stripped a zero-extension,
7887 only zero-extensions can safely be stripped.
7888 Any extension can be stripped if the bits it would produce
7889 are all going to be discarded later by truncating to FOR_TYPE. */
7893 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
7895 /* TYPE_UNSIGNED says whether this is a zero-extension.
7896 Let's avoid computing it if it does not affect WIN
7897 and if UNS will not be needed again. */
7899 || CONVERT_EXPR_P (op
))
7900 && TYPE_UNSIGNED (TREE_TYPE (op
)))
7908 /* If we finally reach a constant see if it fits in sth smaller and
7909 in that case convert it. */
7910 if (TREE_CODE (win
) == INTEGER_CST
)
7912 tree wtype
= TREE_TYPE (win
);
7913 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
7915 prec
= MAX (prec
, final_prec
);
7916 if (prec
< TYPE_PRECISION (wtype
))
7918 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
7919 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
7920 win
= fold_convert (t
, win
);
7927 /* Return OP or a simpler expression for a narrower value
7928 which can be sign-extended or zero-extended to give back OP.
7929 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7930 or 0 if the value should be sign-extended. */
7933 get_narrower (tree op
, int *unsignedp_ptr
)
7938 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
7940 if (TREE_CODE (op
) == COMPOUND_EXPR
)
7943 op
= TREE_OPERAND (op
, 1);
7944 while (TREE_CODE (op
) == COMPOUND_EXPR
);
7945 tree ret
= get_narrower (op
, unsignedp_ptr
);
7948 auto_vec
<tree
, 16> v
;
7950 for (op
= win
; TREE_CODE (op
) == COMPOUND_EXPR
;
7951 op
= TREE_OPERAND (op
, 1))
7953 FOR_EACH_VEC_ELT_REVERSE (v
, i
, op
)
7954 ret
= build2_loc (EXPR_LOCATION (op
), COMPOUND_EXPR
,
7955 TREE_TYPE (ret
), TREE_OPERAND (op
, 0),
7959 while (TREE_CODE (op
) == NOP_EXPR
)
7962 = (TYPE_PRECISION (TREE_TYPE (op
))
7963 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
7965 /* Truncations are many-one so cannot be removed. */
7969 /* See what's inside this conversion. If we decide to strip it,
7974 op
= TREE_OPERAND (op
, 0);
7975 /* An extension: the outermost one can be stripped,
7976 but remember whether it is zero or sign extension. */
7978 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7979 /* Otherwise, if a sign extension has been stripped,
7980 only sign extensions can now be stripped;
7981 if a zero extension has been stripped, only zero-extensions. */
7982 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
7986 else /* bitschange == 0 */
7988 /* A change in nominal type can always be stripped, but we must
7989 preserve the unsignedness. */
7991 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7993 op
= TREE_OPERAND (op
, 0);
7994 /* Keep trying to narrow, but don't assign op to win if it
7995 would turn an integral type into something else. */
7996 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8003 if (TREE_CODE (op
) == COMPONENT_REF
8004 /* Since type_for_size always gives an integer type. */
8005 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8006 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8007 /* Ensure field is laid out already. */
8008 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8009 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8011 unsigned HOST_WIDE_INT innerprec
8012 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8013 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8014 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8015 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8017 /* We can get this structure field in a narrower type that fits it,
8018 but the resulting extension to its nominal type (a fullword type)
8019 must satisfy the same conditions as for other extensions.
8021 Do this only for fields that are aligned (not bit-fields),
8022 because when bit-field insns will be used there is no
8023 advantage in doing this. */
8025 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8026 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8027 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8031 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8032 win
= fold_convert (type
, op
);
8036 *unsignedp_ptr
= uns
;
8040 /* Return true if integer constant C has a value that is permissible
8041 for TYPE, an integral type. */
8044 int_fits_type_p (const_tree c
, const_tree type
)
8046 tree type_low_bound
, type_high_bound
;
8047 bool ok_for_low_bound
, ok_for_high_bound
;
8048 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8050 /* Non-standard boolean types can have arbitrary precision but various
8051 transformations assume that they can only take values 0 and +/-1. */
8052 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8053 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
8056 type_low_bound
= TYPE_MIN_VALUE (type
);
8057 type_high_bound
= TYPE_MAX_VALUE (type
);
8059 /* If at least one bound of the type is a constant integer, we can check
8060 ourselves and maybe make a decision. If no such decision is possible, but
8061 this type is a subtype, try checking against that. Otherwise, use
8062 fits_to_tree_p, which checks against the precision.
8064 Compute the status for each possibly constant bound, and return if we see
8065 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8066 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8067 for "constant known to fit". */
8069 /* Check if c >= type_low_bound. */
8070 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8072 if (tree_int_cst_lt (c
, type_low_bound
))
8074 ok_for_low_bound
= true;
8077 ok_for_low_bound
= false;
8079 /* Check if c <= type_high_bound. */
8080 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8082 if (tree_int_cst_lt (type_high_bound
, c
))
8084 ok_for_high_bound
= true;
8087 ok_for_high_bound
= false;
8089 /* If the constant fits both bounds, the result is known. */
8090 if (ok_for_low_bound
&& ok_for_high_bound
)
8093 /* Perform some generic filtering which may allow making a decision
8094 even if the bounds are not constant. First, negative integers
8095 never fit in unsigned types, */
8096 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
8099 /* Second, narrower types always fit in wider ones. */
8100 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8103 /* Third, unsigned integers with top bit set never fit signed types. */
8104 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8106 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
8107 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8109 /* When a tree_cst is converted to a wide-int, the precision
8110 is taken from the type. However, if the precision of the
8111 mode underneath the type is smaller than that, it is
8112 possible that the value will not fit. The test below
8113 fails if any bit is set between the sign bit of the
8114 underlying mode and the top bit of the type. */
8115 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8118 else if (wi::neg_p (wi::to_wide (c
)))
8122 /* If we haven't been able to decide at this point, there nothing more we
8123 can check ourselves here. Look at the base type if we have one and it
8124 has the same precision. */
8125 if (TREE_CODE (type
) == INTEGER_TYPE
8126 && TREE_TYPE (type
) != 0
8127 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8129 type
= TREE_TYPE (type
);
8133 /* Or to fits_to_tree_p, if nothing else. */
8134 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
8137 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8138 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8139 represented (assuming two's-complement arithmetic) within the bit
8140 precision of the type are returned instead. */
8143 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8145 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8146 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8147 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
8150 if (TYPE_UNSIGNED (type
))
8151 mpz_set_ui (min
, 0);
8154 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8155 wi::to_mpz (mn
, min
, SIGNED
);
8159 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8160 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8161 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
8164 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8165 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8169 /* Return true if VAR is an automatic variable. */
8172 auto_var_p (const_tree var
)
8174 return ((((VAR_P (var
) && ! DECL_EXTERNAL (var
))
8175 || TREE_CODE (var
) == PARM_DECL
)
8176 && ! TREE_STATIC (var
))
8177 || TREE_CODE (var
) == RESULT_DECL
);
8180 /* Return true if VAR is an automatic variable defined in function FN. */
8183 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8185 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8186 && (auto_var_p (var
)
8187 || TREE_CODE (var
) == LABEL_DECL
));
8190 /* Subprogram of following function. Called by walk_tree.
8192 Return *TP if it is an automatic variable or parameter of the
8193 function passed in as DATA. */
8196 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8198 tree fn
= (tree
) data
;
8203 else if (DECL_P (*tp
)
8204 && auto_var_in_fn_p (*tp
, fn
))
8210 /* Returns true if T is, contains, or refers to a type with variable
8211 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8212 arguments, but not the return type. If FN is nonzero, only return
8213 true if a modifier of the type or position of FN is a variable or
8214 parameter inside FN.
8216 This concept is more general than that of C99 'variably modified types':
8217 in C99, a struct type is never variably modified because a VLA may not
8218 appear as a structure member. However, in GNU C code like:
8220 struct S { int i[f()]; };
8222 is valid, and other languages may define similar constructs. */
8225 variably_modified_type_p (tree type
, tree fn
)
8229 /* Test if T is either variable (if FN is zero) or an expression containing
8230 a variable in FN. If TYPE isn't gimplified, return true also if
8231 gimplify_one_sizepos would gimplify the expression into a local
8233 #define RETURN_TRUE_IF_VAR(T) \
8234 do { tree _t = (T); \
8235 if (_t != NULL_TREE \
8236 && _t != error_mark_node \
8237 && !CONSTANT_CLASS_P (_t) \
8238 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8240 || (!TYPE_SIZES_GIMPLIFIED (type) \
8241 && (TREE_CODE (_t) != VAR_DECL \
8242 && !CONTAINS_PLACEHOLDER_P (_t))) \
8243 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8244 return true; } while (0)
8246 if (type
== error_mark_node
)
8249 /* If TYPE itself has variable size, it is variably modified. */
8250 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8251 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8253 switch (TREE_CODE (type
))
8256 case REFERENCE_TYPE
:
8258 /* Ada can have pointer types refering to themselves indirectly. */
8259 if (TREE_VISITED (type
))
8261 TREE_VISITED (type
) = true;
8262 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8264 TREE_VISITED (type
) = false;
8267 TREE_VISITED (type
) = false;
8272 /* If TYPE is a function type, it is variably modified if the
8273 return type is variably modified. */
8274 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8280 case FIXED_POINT_TYPE
:
8283 /* Scalar types are variably modified if their end points
8285 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8286 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8291 case QUAL_UNION_TYPE
:
8292 /* We can't see if any of the fields are variably-modified by the
8293 definition we normally use, since that would produce infinite
8294 recursion via pointers. */
8295 /* This is variably modified if some field's type is. */
8296 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8297 if (TREE_CODE (t
) == FIELD_DECL
)
8299 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8300 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8301 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8303 /* If the type is a qualified union, then the DECL_QUALIFIER
8304 of fields can also be an expression containing a variable. */
8305 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8306 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8308 /* If the field is a qualified union, then it's only a container
8309 for what's inside so we look into it. That's necessary in LTO
8310 mode because the sizes of the field tested above have been set
8311 to PLACEHOLDER_EXPRs by free_lang_data. */
8312 if (TREE_CODE (TREE_TYPE (t
)) == QUAL_UNION_TYPE
8313 && variably_modified_type_p (TREE_TYPE (t
), fn
))
8319 /* Do not call ourselves to avoid infinite recursion. This is
8320 variably modified if the element type is. */
8321 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8322 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8329 /* The current language may have other cases to check, but in general,
8330 all other types are not variably modified. */
8331 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8333 #undef RETURN_TRUE_IF_VAR
8336 /* Given a DECL or TYPE, return the scope in which it was declared, or
8337 NULL_TREE if there is no containing scope. */
8340 get_containing_scope (const_tree t
)
8342 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8345 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8348 get_ultimate_context (const_tree decl
)
8350 while (decl
&& TREE_CODE (decl
) != TRANSLATION_UNIT_DECL
)
8352 if (TREE_CODE (decl
) == BLOCK
)
8353 decl
= BLOCK_SUPERCONTEXT (decl
);
8355 decl
= get_containing_scope (decl
);
8360 /* Return the innermost context enclosing DECL that is
8361 a FUNCTION_DECL, or zero if none. */
8364 decl_function_context (const_tree decl
)
8368 if (TREE_CODE (decl
) == ERROR_MARK
)
8371 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8372 where we look up the function at runtime. Such functions always take
8373 a first argument of type 'pointer to real context'.
8375 C++ should really be fixed to use DECL_CONTEXT for the real context,
8376 and use something else for the "virtual context". */
8377 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VIRTUAL_P (decl
))
8380 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8382 context
= DECL_CONTEXT (decl
);
8384 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8386 if (TREE_CODE (context
) == BLOCK
)
8387 context
= BLOCK_SUPERCONTEXT (context
);
8389 context
= get_containing_scope (context
);
8395 /* Return the innermost context enclosing DECL that is
8396 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8397 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8400 decl_type_context (const_tree decl
)
8402 tree context
= DECL_CONTEXT (decl
);
8405 switch (TREE_CODE (context
))
8407 case NAMESPACE_DECL
:
8408 case TRANSLATION_UNIT_DECL
:
8413 case QUAL_UNION_TYPE
:
8418 context
= DECL_CONTEXT (context
);
8422 context
= BLOCK_SUPERCONTEXT (context
);
8432 /* CALL is a CALL_EXPR. Return the declaration for the function
8433 called, or NULL_TREE if the called function cannot be
8437 get_callee_fndecl (const_tree call
)
8441 if (call
== error_mark_node
)
8442 return error_mark_node
;
8444 /* It's invalid to call this function with anything but a
8446 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8448 /* The first operand to the CALL is the address of the function
8450 addr
= CALL_EXPR_FN (call
);
8452 /* If there is no function, return early. */
8453 if (addr
== NULL_TREE
)
8458 /* If this is a readonly function pointer, extract its initial value. */
8459 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8460 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8461 && DECL_INITIAL (addr
))
8462 addr
= DECL_INITIAL (addr
);
8464 /* If the address is just `&f' for some function `f', then we know
8465 that `f' is being called. */
8466 if (TREE_CODE (addr
) == ADDR_EXPR
8467 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8468 return TREE_OPERAND (addr
, 0);
8470 /* We couldn't figure out what was being called. */
8474 /* Return true when STMTs arguments and return value match those of FNDECL,
8475 a decl of a builtin function. */
8478 tree_builtin_call_types_compatible_p (const_tree call
, tree fndecl
)
8480 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl
) != NOT_BUILT_IN
);
8482 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
8483 if (tree decl
= builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl
)))
8486 bool gimple_form
= (cfun
&& (cfun
->curr_properties
& PROP_gimple
)) != 0;
8488 ? !useless_type_conversion_p (TREE_TYPE (call
),
8489 TREE_TYPE (TREE_TYPE (fndecl
)))
8490 : (TYPE_MAIN_VARIANT (TREE_TYPE (call
))
8491 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl
)))))
8494 tree targs
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
8495 unsigned nargs
= call_expr_nargs (call
);
8496 for (unsigned i
= 0; i
< nargs
; ++i
, targs
= TREE_CHAIN (targs
))
8498 /* Variadic args follow. */
8501 tree arg
= CALL_EXPR_ARG (call
, i
);
8502 tree type
= TREE_VALUE (targs
);
8504 ? !useless_type_conversion_p (type
, TREE_TYPE (arg
))
8505 : TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (TREE_TYPE (arg
)))
8507 /* For pointer arguments be more forgiving, e.g. due to
8508 FILE * vs. fileptr_type_node, or say char * vs. const char *
8511 && POINTER_TYPE_P (type
)
8512 && POINTER_TYPE_P (TREE_TYPE (arg
))
8513 && tree_nop_conversion_p (type
, TREE_TYPE (arg
)))
8515 /* char/short integral arguments are promoted to int
8516 by several frontends if targetm.calls.promote_prototypes
8517 is true. Allow such promotion too. */
8518 if (INTEGRAL_TYPE_P (type
)
8519 && TYPE_PRECISION (type
) < TYPE_PRECISION (integer_type_node
)
8520 && INTEGRAL_TYPE_P (TREE_TYPE (arg
))
8521 && !TYPE_UNSIGNED (TREE_TYPE (arg
))
8522 && targetm
.calls
.promote_prototypes (TREE_TYPE (fndecl
))
8524 ? useless_type_conversion_p (integer_type_node
,
8526 : tree_nop_conversion_p (integer_type_node
,
8532 if (targs
&& !VOID_TYPE_P (TREE_VALUE (targs
)))
8537 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8538 return the associated function code, otherwise return CFN_LAST. */
8541 get_call_combined_fn (const_tree call
)
8543 /* It's invalid to call this function with anything but a CALL_EXPR. */
8544 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8546 if (!CALL_EXPR_FN (call
))
8547 return as_combined_fn (CALL_EXPR_IFN (call
));
8549 tree fndecl
= get_callee_fndecl (call
);
8551 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
)
8552 && tree_builtin_call_types_compatible_p (call
, fndecl
))
8553 return as_combined_fn (DECL_FUNCTION_CODE (fndecl
));
8558 /* Comparator of indices based on tree_node_counts. */
8561 tree_nodes_cmp (const void *p1
, const void *p2
)
8563 const unsigned *n1
= (const unsigned *)p1
;
8564 const unsigned *n2
= (const unsigned *)p2
;
8566 return tree_node_counts
[*n1
] - tree_node_counts
[*n2
];
8569 /* Comparator of indices based on tree_code_counts. */
8572 tree_codes_cmp (const void *p1
, const void *p2
)
8574 const unsigned *n1
= (const unsigned *)p1
;
8575 const unsigned *n2
= (const unsigned *)p2
;
8577 return tree_code_counts
[*n1
] - tree_code_counts
[*n2
];
8580 #define TREE_MEM_USAGE_SPACES 40
8582 /* Print debugging information about tree nodes generated during the compile,
8583 and any language-specific information. */
8586 dump_tree_statistics (void)
8588 if (GATHER_STATISTICS
)
8590 uint64_t total_nodes
, total_bytes
;
8591 fprintf (stderr
, "\nKind Nodes Bytes\n");
8592 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8593 total_nodes
= total_bytes
= 0;
8596 auto_vec
<unsigned> indices (all_kinds
);
8597 for (unsigned i
= 0; i
< all_kinds
; i
++)
8598 indices
.quick_push (i
);
8599 indices
.qsort (tree_nodes_cmp
);
8601 for (unsigned i
= 0; i
< (int) all_kinds
; i
++)
8603 unsigned j
= indices
[i
];
8604 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n",
8605 tree_node_kind_names
[j
], SIZE_AMOUNT (tree_node_counts
[j
]),
8606 SIZE_AMOUNT (tree_node_sizes
[j
]));
8607 total_nodes
+= tree_node_counts
[j
];
8608 total_bytes
+= tree_node_sizes
[j
];
8610 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8611 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n", "Total",
8612 SIZE_AMOUNT (total_nodes
), SIZE_AMOUNT (total_bytes
));
8613 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8617 fprintf (stderr
, "Code Nodes\n");
8618 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8620 auto_vec
<unsigned> indices (MAX_TREE_CODES
);
8621 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8622 indices
.quick_push (i
);
8623 indices
.qsort (tree_codes_cmp
);
8625 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8627 unsigned j
= indices
[i
];
8628 fprintf (stderr
, "%-32s %6" PRIu64
"%c\n",
8629 get_tree_code_name ((enum tree_code
) j
),
8630 SIZE_AMOUNT (tree_code_counts
[j
]));
8632 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8633 fprintf (stderr
, "\n");
8634 ssanames_print_statistics ();
8635 fprintf (stderr
, "\n");
8636 phinodes_print_statistics ();
8637 fprintf (stderr
, "\n");
8641 fprintf (stderr
, "(No per-node statistics)\n");
8643 print_type_hash_statistics ();
8644 print_debug_expr_statistics ();
8645 print_value_expr_statistics ();
8646 lang_hooks
.print_statistics ();
8649 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes, then fold one nibble per
     iteration (two nibbles per byte) into the running checksum.  */
  value <<= (32 - bytes * 8);
  unsigned nibbles = bytes * 2;
  while (nibbles--)
    {
      unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
      chksum = (chksum << 4) ^ feedback;
      value <<= 4;
    }

  return chksum;
}
8681 /* Generate a crc32 of a string. */
8684 crc32_string (unsigned chksum
, const char *string
)
8687 chksum
= crc32_byte (chksum
, *string
);
8692 /* P is a string that will be used in a symbol. Mask out any characters
8693 that are not valid in that context. */
8696 clean_symbol_name (char *p
)
8700 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8703 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8710 static GTY(()) unsigned anon_cnt
= 0; /* Saved for PCH. */
8712 /* Create a unique anonymous identifier. The identifier is still a
8713 valid assembly label. */
8719 #if !defined (NO_DOT_IN_LABEL)
8721 #elif !defined (NO_DOLLAR_IN_LABEL)
8729 int len
= snprintf (buf
, sizeof (buf
), fmt
, anon_cnt
++);
8730 gcc_checking_assert (len
< int (sizeof (buf
)));
8732 tree id
= get_identifier_with_length (buf
, len
);
8733 IDENTIFIER_ANON_P (id
) = true;
8738 /* Generate a name for a special-purpose function.
8739 The generated name may need to be unique across the whole link.
8740 Changes to this function may also require corresponding changes to
8741 xstrdup_mask_random.
8742 TYPE is some string to identify the purpose of this function to the
8743 linker or collect2; it must start with an uppercase letter,
8745 I - for constructors
8747 N - for C++ anonymous namespaces
8748 F - for DWARF unwind frame information. */
8751 get_file_function_name (const char *type
)
8757 /* If we already have a name we know to be unique, just use that. */
8758 if (first_global_object_name
)
8759 p
= q
= ASTRDUP (first_global_object_name
);
8760 /* If the target is handling the constructors/destructors, they
8761 will be local to this file and the name is only necessary for
8763 We also assign sub_I and sub_D sufixes to constructors called from
8764 the global static constructors. These are always local. */
8765 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
8766 || (startswith (type
, "sub_")
8767 && (type
[4] == 'I' || type
[4] == 'D')))
8769 const char *file
= main_input_filename
;
8771 file
= LOCATION_FILE (input_location
);
8772 /* Just use the file's basename, because the full pathname
8773 might be quite long. */
8774 p
= q
= ASTRDUP (lbasename (file
));
8778 /* Otherwise, the name must be unique across the entire link.
8779 We don't have anything that we know to be unique to this translation
8780 unit, so use what we do have and throw in some randomness. */
8782 const char *name
= weak_global_object_name
;
8783 const char *file
= main_input_filename
;
8788 file
= LOCATION_FILE (input_location
);
8790 len
= strlen (file
);
8791 q
= (char *) alloca (9 + 19 + len
+ 1);
8792 memcpy (q
, file
, len
+ 1);
8794 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
8795 crc32_string (0, name
), get_random_seed (false));
8800 clean_symbol_name (q
);
8801 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
8804 /* Set up the name of the file-level functions we may need.
8805 Use a global object (which is already required to be unique over
8806 the program) rather than the file name (which imposes extra
8808 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
8810 return get_identifier (buf
);
8813 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8815 /* Complain that the tree code of NODE does not match the expected 0
8816 terminated list of trailing codes. The trailing code list can be
8817 empty, for a more vague error message. FILE, LINE, and FUNCTION
8818 are of the caller. */
8821 tree_check_failed (const_tree node
, const char *file
,
8822 int line
, const char *function
, ...)
8826 unsigned length
= 0;
8827 enum tree_code code
;
8829 va_start (args
, function
);
8830 while ((code
= (enum tree_code
) va_arg (args
, int)))
8831 length
+= 4 + strlen (get_tree_code_name (code
));
8836 va_start (args
, function
);
8837 length
+= strlen ("expected ");
8838 buffer
= tmp
= (char *) alloca (length
);
8840 while ((code
= (enum tree_code
) va_arg (args
, int)))
8842 const char *prefix
= length
? " or " : "expected ";
8844 strcpy (tmp
+ length
, prefix
);
8845 length
+= strlen (prefix
);
8846 strcpy (tmp
+ length
, get_tree_code_name (code
));
8847 length
+= strlen (get_tree_code_name (code
));
8852 buffer
= "unexpected node";
8854 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8855 buffer
, get_tree_code_name (TREE_CODE (node
)),
8856 function
, trim_filename (file
), line
);
8859 /* Complain that the tree code of NODE does match the expected 0
8860 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8864 tree_not_check_failed (const_tree node
, const char *file
,
8865 int line
, const char *function
, ...)
8869 unsigned length
= 0;
8870 enum tree_code code
;
8872 va_start (args
, function
);
8873 while ((code
= (enum tree_code
) va_arg (args
, int)))
8874 length
+= 4 + strlen (get_tree_code_name (code
));
8876 va_start (args
, function
);
8877 buffer
= (char *) alloca (length
);
8879 while ((code
= (enum tree_code
) va_arg (args
, int)))
8883 strcpy (buffer
+ length
, " or ");
8886 strcpy (buffer
+ length
, get_tree_code_name (code
));
8887 length
+= strlen (get_tree_code_name (code
));
8891 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8892 buffer
, get_tree_code_name (TREE_CODE (node
)),
8893 function
, trim_filename (file
), line
);
8896 /* Similar to tree_check_failed, except that we check for a class of tree
8897 code, given in CL. */
8900 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8901 const char *file
, int line
, const char *function
)
8904 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8905 TREE_CODE_CLASS_STRING (cl
),
8906 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8907 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8910 /* Similar to tree_check_failed, except that instead of specifying a
8911 dozen codes, use the knowledge that they're all sequential. */
8914 tree_range_check_failed (const_tree node
, const char *file
, int line
,
8915 const char *function
, enum tree_code c1
,
8919 unsigned length
= 0;
8922 for (c
= c1
; c
<= c2
; ++c
)
8923 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
8925 length
+= strlen ("expected ");
8926 buffer
= (char *) alloca (length
);
8929 for (c
= c1
; c
<= c2
; ++c
)
8931 const char *prefix
= length
? " or " : "expected ";
8933 strcpy (buffer
+ length
, prefix
);
8934 length
+= strlen (prefix
);
8935 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
8936 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
8939 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8940 buffer
, get_tree_code_name (TREE_CODE (node
)),
8941 function
, trim_filename (file
), line
);
8945 /* Similar to tree_check_failed, except that we check that a tree does
8946 not have the specified code, given in CL. */
8949 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8950 const char *file
, int line
, const char *function
)
8953 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8954 TREE_CODE_CLASS_STRING (cl
),
8955 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8956 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8960 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8963 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
8964 const char *function
, enum omp_clause_code code
)
8966 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8968 omp_clause_code_name
[code
],
8969 get_tree_code_name (TREE_CODE (node
)),
8970 function
, trim_filename (file
), line
);
8974 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8977 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
8978 const char *function
, enum omp_clause_code c1
,
8979 enum omp_clause_code c2
)
8982 unsigned length
= 0;
8985 for (c
= c1
; c
<= c2
; ++c
)
8986 length
+= 4 + strlen (omp_clause_code_name
[c
]);
8988 length
+= strlen ("expected ");
8989 buffer
= (char *) alloca (length
);
8992 for (c
= c1
; c
<= c2
; ++c
)
8994 const char *prefix
= length
? " or " : "expected ";
8996 strcpy (buffer
+ length
, prefix
);
8997 length
+= strlen (prefix
);
8998 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
8999 length
+= strlen (omp_clause_code_name
[c
]);
9002 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9003 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9004 function
, trim_filename (file
), line
);
9008 #undef DEFTREESTRUCT
9009 #define DEFTREESTRUCT(VAL, NAME) NAME,
9011 static const char *ts_enum_names
[] = {
9012 #include "treestruct.def"
9014 #undef DEFTREESTRUCT
9016 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9018 /* Similar to tree_class_check_failed, except that we check for
9019 whether CODE contains the tree structure identified by EN. */
9022 tree_contains_struct_check_failed (const_tree node
,
9023 const enum tree_node_structure_enum en
,
9024 const char *file
, int line
,
9025 const char *function
)
9028 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9030 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) vector.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9059 /* Similar to above, except that the check is for the bounds of the operand
9060 vector of an expression node EXP. */
9063 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9064 int line
, const char *function
)
9066 enum tree_code code
= TREE_CODE (exp
);
9068 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9069 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9070 function
, trim_filename (file
), line
);
9073 /* Similar to above, except that the check is for the number of
9074 operands of an OMP_CLAUSE node. */
9077 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9078 int line
, const char *function
)
9081 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9082 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9083 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9084 trim_filename (file
), line
);
9086 #endif /* ENABLE_TREE_CHECKING */
9088 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9089 and mapped to the machine mode MODE. Initialize its fields and build
9090 the information necessary for debugging output. */
9093 make_vector_type (tree innertype
, poly_int64 nunits
, machine_mode mode
)
9096 tree mv_innertype
= TYPE_MAIN_VARIANT (innertype
);
9098 t
= make_node (VECTOR_TYPE
);
9099 TREE_TYPE (t
) = mv_innertype
;
9100 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9101 SET_TYPE_MODE (t
, mode
);
9103 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype
) || in_lto_p
)
9104 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9105 else if ((TYPE_CANONICAL (mv_innertype
) != innertype
9106 || mode
!= VOIDmode
)
9107 && !VECTOR_BOOLEAN_TYPE_P (t
))
9109 = make_vector_type (TYPE_CANONICAL (mv_innertype
), nunits
, VOIDmode
);
9113 hashval_t hash
= type_hash_canon_hash (t
);
9114 t
= type_hash_canon (hash
, t
);
9116 /* We have built a main variant, based on the main variant of the
9117 inner type. Use it to build the variant we return. */
9118 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9119 && TREE_TYPE (t
) != innertype
)
9120 return build_type_attribute_qual_variant (t
,
9121 TYPE_ATTRIBUTES (innertype
),
9122 TYPE_QUALS (innertype
));
9128 make_or_reuse_type (unsigned size
, int unsignedp
)
9132 if (size
== INT_TYPE_SIZE
)
9133 return unsignedp
? unsigned_type_node
: integer_type_node
;
9134 if (size
== CHAR_TYPE_SIZE
)
9135 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9136 if (size
== SHORT_TYPE_SIZE
)
9137 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9138 if (size
== LONG_TYPE_SIZE
)
9139 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9140 if (size
== LONG_LONG_TYPE_SIZE
)
9141 return (unsignedp
? long_long_unsigned_type_node
9142 : long_long_integer_type_node
);
9144 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9145 if (size
== int_n_data
[i
].bitsize
9146 && int_n_enabled_p
[i
])
9147 return (unsignedp
? int_n_trees
[i
].unsigned_type
9148 : int_n_trees
[i
].signed_type
);
9151 return make_unsigned_type (size
);
9153 return make_signed_type (size
);
9156 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9159 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9163 if (size
== SHORT_FRACT_TYPE_SIZE
)
9164 return unsignedp
? sat_unsigned_short_fract_type_node
9165 : sat_short_fract_type_node
;
9166 if (size
== FRACT_TYPE_SIZE
)
9167 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9168 if (size
== LONG_FRACT_TYPE_SIZE
)
9169 return unsignedp
? sat_unsigned_long_fract_type_node
9170 : sat_long_fract_type_node
;
9171 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9172 return unsignedp
? sat_unsigned_long_long_fract_type_node
9173 : sat_long_long_fract_type_node
;
9177 if (size
== SHORT_FRACT_TYPE_SIZE
)
9178 return unsignedp
? unsigned_short_fract_type_node
9179 : short_fract_type_node
;
9180 if (size
== FRACT_TYPE_SIZE
)
9181 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9182 if (size
== LONG_FRACT_TYPE_SIZE
)
9183 return unsignedp
? unsigned_long_fract_type_node
9184 : long_fract_type_node
;
9185 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9186 return unsignedp
? unsigned_long_long_fract_type_node
9187 : long_long_fract_type_node
;
9190 return make_fract_type (size
, unsignedp
, satp
);
9193 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9196 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9200 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9201 return unsignedp
? sat_unsigned_short_accum_type_node
9202 : sat_short_accum_type_node
;
9203 if (size
== ACCUM_TYPE_SIZE
)
9204 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9205 if (size
== LONG_ACCUM_TYPE_SIZE
)
9206 return unsignedp
? sat_unsigned_long_accum_type_node
9207 : sat_long_accum_type_node
;
9208 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9209 return unsignedp
? sat_unsigned_long_long_accum_type_node
9210 : sat_long_long_accum_type_node
;
9214 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9215 return unsignedp
? unsigned_short_accum_type_node
9216 : short_accum_type_node
;
9217 if (size
== ACCUM_TYPE_SIZE
)
9218 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9219 if (size
== LONG_ACCUM_TYPE_SIZE
)
9220 return unsignedp
? unsigned_long_accum_type_node
9221 : long_accum_type_node
;
9222 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9223 return unsignedp
? unsigned_long_long_accum_type_node
9224 : long_long_accum_type_node
;
9227 return make_accum_type (size
, unsignedp
, satp
);
9231 /* Create an atomic variant node for TYPE. This routine is called
9232 during initialization of data types to create the 5 basic atomic
9233 types. The generic build_variant_type function requires these to
9234 already be set up in order to function properly, so cannot be
9235 called from there. If ALIGN is non-zero, then ensure alignment is
9236 overridden to this value. */
9239 build_atomic_base (tree type
, unsigned int align
)
9243 /* Make sure its not already registered. */
9244 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9247 t
= build_variant_type_copy (type
);
9248 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9251 SET_TYPE_ALIGN (t
, align
);
9256 /* Information about the _FloatN and _FloatNx types. This must be in
9257 the same order as the corresponding TI_* enum values. */
9258 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
9270 /* Create nodes for all integer types (and error_mark_node) using the sizes
9271 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9274 build_common_tree_nodes (bool signed_char
)
9278 error_mark_node
= make_node (ERROR_MARK
);
9279 TREE_TYPE (error_mark_node
) = error_mark_node
;
9281 initialize_sizetypes ();
9283 /* Define both `signed char' and `unsigned char'. */
9284 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9285 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9286 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9287 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9289 /* Define `char', which is like either `signed char' or `unsigned char'
9290 but not the same as either. */
9293 ? make_signed_type (CHAR_TYPE_SIZE
)
9294 : make_unsigned_type (CHAR_TYPE_SIZE
));
9295 TYPE_STRING_FLAG (char_type_node
) = 1;
9297 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9298 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9299 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9300 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9301 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9302 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9303 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9304 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9306 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9308 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9309 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9311 if (int_n_enabled_p
[i
])
9313 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9314 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9318 /* Define a boolean type. This type only represents boolean values but
9319 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9320 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9321 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9322 TYPE_PRECISION (boolean_type_node
) = 1;
9323 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9325 /* Define what type to use for size_t. */
9326 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9327 size_type_node
= unsigned_type_node
;
9328 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9329 size_type_node
= long_unsigned_type_node
;
9330 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9331 size_type_node
= long_long_unsigned_type_node
;
9332 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9333 size_type_node
= short_unsigned_type_node
;
9338 size_type_node
= NULL_TREE
;
9339 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9340 if (int_n_enabled_p
[i
])
9342 char name
[50], altname
[50];
9343 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9344 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
9346 if (strcmp (name
, SIZE_TYPE
) == 0
9347 || strcmp (altname
, SIZE_TYPE
) == 0)
9349 size_type_node
= int_n_trees
[i
].unsigned_type
;
9352 if (size_type_node
== NULL_TREE
)
9356 /* Define what type to use for ptrdiff_t. */
9357 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
9358 ptrdiff_type_node
= integer_type_node
;
9359 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
9360 ptrdiff_type_node
= long_integer_type_node
;
9361 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
9362 ptrdiff_type_node
= long_long_integer_type_node
;
9363 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
9364 ptrdiff_type_node
= short_integer_type_node
;
9367 ptrdiff_type_node
= NULL_TREE
;
9368 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9369 if (int_n_enabled_p
[i
])
9371 char name
[50], altname
[50];
9372 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
9373 sprintf (altname
, "__int%d__", int_n_data
[i
].bitsize
);
9375 if (strcmp (name
, PTRDIFF_TYPE
) == 0
9376 || strcmp (altname
, PTRDIFF_TYPE
) == 0)
9377 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
9379 if (ptrdiff_type_node
== NULL_TREE
)
9383 /* Fill in the rest of the sized types. Reuse existing type nodes
9385 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9386 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9387 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9388 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9389 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9391 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9392 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9393 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9394 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9395 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9397 /* Don't call build_qualified type for atomics. That routine does
9398 special processing for atomics, and until they are initialized
9399 it's better not to make that call.
9401 Check to see if there is a target override for atomic types. */
9403 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9404 targetm
.atomic_align_for_mode (QImode
));
9405 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9406 targetm
.atomic_align_for_mode (HImode
));
9407 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9408 targetm
.atomic_align_for_mode (SImode
));
9409 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9410 targetm
.atomic_align_for_mode (DImode
));
9411 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9412 targetm
.atomic_align_for_mode (TImode
));
9414 access_public_node
= get_identifier ("public");
9415 access_protected_node
= get_identifier ("protected");
9416 access_private_node
= get_identifier ("private");
9418 /* Define these next since types below may used them. */
9419 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9420 integer_one_node
= build_int_cst (integer_type_node
, 1);
9421 integer_three_node
= build_int_cst (integer_type_node
, 3);
9422 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9424 size_zero_node
= size_int (0);
9425 size_one_node
= size_int (1);
9426 bitsize_zero_node
= bitsize_int (0);
9427 bitsize_one_node
= bitsize_int (1);
9428 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9430 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9431 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9433 void_type_node
= make_node (VOID_TYPE
);
9434 layout_type (void_type_node
);
9436 /* We are not going to have real types in C with less than byte alignment,
9437 so we might as well not have any types that claim to have it. */
9438 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
9439 TYPE_USER_ALIGN (void_type_node
) = 0;
9441 void_node
= make_node (VOID_CST
);
9442 TREE_TYPE (void_node
) = void_type_node
;
9444 void_list_node
= build_tree_list (NULL_TREE
, void_type_node
);
9446 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9447 layout_type (TREE_TYPE (null_pointer_node
));
9449 ptr_type_node
= build_pointer_type (void_type_node
);
9451 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9452 for (unsigned i
= 0; i
< ARRAY_SIZE (builtin_structptr_types
); ++i
)
9453 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
9455 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9457 float_type_node
= make_node (REAL_TYPE
);
9458 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9459 layout_type (float_type_node
);
9461 double_type_node
= make_node (REAL_TYPE
);
9462 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9463 layout_type (double_type_node
);
9465 long_double_type_node
= make_node (REAL_TYPE
);
9466 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9467 layout_type (long_double_type_node
);
9469 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9471 int n
= floatn_nx_types
[i
].n
;
9472 bool extended
= floatn_nx_types
[i
].extended
;
9473 scalar_float_mode mode
;
9474 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
9476 int precision
= GET_MODE_PRECISION (mode
);
9477 /* Work around the rs6000 KFmode having precision 113 not
9479 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
9480 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
9481 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
9483 gcc_assert (min_precision
== n
);
9484 if (precision
< min_precision
)
9485 precision
= min_precision
;
9486 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
9487 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
9488 layout_type (FLOATN_NX_TYPE_NODE (i
));
9489 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
9491 float128t_type_node
= float128_type_node
;
9493 if (REAL_MODE_FORMAT (BFmode
) == &arm_bfloat_half_format
9494 && targetm
.scalar_mode_supported_p (BFmode
)
9495 && targetm
.libgcc_floating_mode_supported_p (BFmode
))
9497 bfloat16_type_node
= make_node (REAL_TYPE
);
9498 TYPE_PRECISION (bfloat16_type_node
) = GET_MODE_PRECISION (BFmode
);
9499 layout_type (bfloat16_type_node
);
9500 SET_TYPE_MODE (bfloat16_type_node
, BFmode
);
9504 float_ptr_type_node
= build_pointer_type (float_type_node
);
9505 double_ptr_type_node
= build_pointer_type (double_type_node
);
9506 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9507 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9509 /* Fixed size integer types. */
9510 uint16_type_node
= make_or_reuse_type (16, 1);
9511 uint32_type_node
= make_or_reuse_type (32, 1);
9512 uint64_type_node
= make_or_reuse_type (64, 1);
9513 if (targetm
.scalar_mode_supported_p (TImode
))
9514 uint128_type_node
= make_or_reuse_type (128, 1);
9516 /* Decimal float types. */
9517 if (targetm
.decimal_float_supported_p ())
9519 dfloat32_type_node
= make_node (REAL_TYPE
);
9520 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9521 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9522 layout_type (dfloat32_type_node
);
9524 dfloat64_type_node
= make_node (REAL_TYPE
);
9525 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9526 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9527 layout_type (dfloat64_type_node
);
9529 dfloat128_type_node
= make_node (REAL_TYPE
);
9530 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9531 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9532 layout_type (dfloat128_type_node
);
9535 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
9536 complex_float_type_node
= build_complex_type (float_type_node
, true);
9537 complex_double_type_node
= build_complex_type (double_type_node
, true);
9538 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
9541 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9543 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
9544 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
9545 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
9548 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9549 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9550 sat_ ## KIND ## _type_node = \
9551 make_sat_signed_ ## KIND ## _type (SIZE); \
9552 sat_unsigned_ ## KIND ## _type_node = \
9553 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9554 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9555 unsigned_ ## KIND ## _type_node = \
9556 make_unsigned_ ## KIND ## _type (SIZE);
9558 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9559 sat_ ## WIDTH ## KIND ## _type_node = \
9560 make_sat_signed_ ## KIND ## _type (SIZE); \
9561 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9562 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9563 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9564 unsigned_ ## WIDTH ## KIND ## _type_node = \
9565 make_unsigned_ ## KIND ## _type (SIZE);
9567 /* Make fixed-point type nodes based on four different widths. */
9568 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9569 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9570 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9571 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9572 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9574 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9575 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9576 NAME ## _type_node = \
9577 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9578 u ## NAME ## _type_node = \
9579 make_or_reuse_unsigned_ ## KIND ## _type \
9580 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9581 sat_ ## NAME ## _type_node = \
9582 make_or_reuse_sat_signed_ ## KIND ## _type \
9583 (GET_MODE_BITSIZE (MODE ## mode)); \
9584 sat_u ## NAME ## _type_node = \
9585 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9586 (GET_MODE_BITSIZE (U ## MODE ## mode));
9588 /* Fixed-point type and mode nodes. */
9589 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9590 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9591 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9592 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9593 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9594 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9595 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9596 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9597 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9598 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9599 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9602 tree t
= targetm
.build_builtin_va_list ();
9604 /* Many back-ends define record types without setting TYPE_NAME.
9605 If we copied the record type here, we'd keep the original
9606 record type without a name. This breaks name mangling. So,
9607 don't copy record types and let c_common_nodes_and_builtins()
9608 declare the type to be __builtin_va_list. */
9609 if (TREE_CODE (t
) != RECORD_TYPE
)
9610 t
= build_variant_type_copy (t
);
9612 va_list_type_node
= t
;
9615 /* SCEV analyzer global shared trees. */
9616 chrec_dont_know
= make_node (SCEV_NOT_KNOWN
);
9617 TREE_TYPE (chrec_dont_know
) = void_type_node
;
9618 chrec_known
= make_node (SCEV_KNOWN
);
9619 TREE_TYPE (chrec_known
) = void_type_node
;
9622 /* Modify DECL for given flags.
9623 TM_PURE attribute is set only on types, so the function will modify
9624 DECL's type when ECF_TM_PURE is used. */
9627 set_call_expr_flags (tree decl
, int flags
)
9629 if (flags
& ECF_NOTHROW
)
9630 TREE_NOTHROW (decl
) = 1;
9631 if (flags
& ECF_CONST
)
9632 TREE_READONLY (decl
) = 1;
9633 if (flags
& ECF_PURE
)
9634 DECL_PURE_P (decl
) = 1;
9635 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9636 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9637 if (flags
& ECF_NOVOPS
)
9638 DECL_IS_NOVOPS (decl
) = 1;
9639 if (flags
& ECF_NORETURN
)
9640 TREE_THIS_VOLATILE (decl
) = 1;
9641 if (flags
& ECF_MALLOC
)
9642 DECL_IS_MALLOC (decl
) = 1;
9643 if (flags
& ECF_RETURNS_TWICE
)
9644 DECL_IS_RETURNS_TWICE (decl
) = 1;
9645 if (flags
& ECF_LEAF
)
9646 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9647 NULL
, DECL_ATTRIBUTES (decl
));
9648 if (flags
& ECF_COLD
)
9649 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("cold"),
9650 NULL
, DECL_ATTRIBUTES (decl
));
9651 if (flags
& ECF_RET1
)
9652 DECL_ATTRIBUTES (decl
)
9653 = tree_cons (get_identifier ("fn spec"),
9654 build_tree_list (NULL_TREE
, build_string (2, "1 ")),
9655 DECL_ATTRIBUTES (decl
));
9656 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9657 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9658 /* Looping const or pure is implied by noreturn.
9659 There is currently no way to declare looping const or looping pure alone. */
9660 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9661 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9665 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9668 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9669 const char *library_name
, int ecf_flags
)
9673 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9674 library_name
, NULL_TREE
);
9675 set_call_expr_flags (decl
, ecf_flags
);
9677 set_builtin_decl (code
, decl
, true);
9680 /* Call this function after instantiating all builtins that the language
9681 front end cares about. This will build the rest of the builtins
9682 and internal functions that are relied upon by the tree optimizers and
9686 build_common_builtin_nodes (void)
9691 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING
))
9693 ftype
= build_function_type_list (void_type_node
,
9698 local_define_builtin ("__builtin_clear_padding", ftype
,
9699 BUILT_IN_CLEAR_PADDING
,
9700 "__builtin_clear_padding",
9701 ECF_LEAF
| ECF_NOTHROW
);
9704 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
9705 || !builtin_decl_explicit_p (BUILT_IN_TRAP
)
9706 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
9708 ftype
= build_function_type (void_type_node
, void_list_node
);
9709 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9710 local_define_builtin ("__builtin_unreachable", ftype
,
9711 BUILT_IN_UNREACHABLE
,
9712 "__builtin_unreachable",
9713 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9714 | ECF_CONST
| ECF_COLD
);
9715 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
9716 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
9718 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
9719 if (!builtin_decl_explicit_p (BUILT_IN_TRAP
))
9720 local_define_builtin ("__builtin_trap", ftype
, BUILT_IN_TRAP
,
9722 ECF_NORETURN
| ECF_NOTHROW
| ECF_LEAF
| ECF_COLD
);
9725 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9726 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9728 ftype
= build_function_type_list (ptr_type_node
,
9729 ptr_type_node
, const_ptr_type_node
,
9730 size_type_node
, NULL_TREE
);
9732 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9733 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9734 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9735 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9736 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9737 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9740 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9742 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9743 const_ptr_type_node
, size_type_node
,
9745 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9746 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9749 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9751 ftype
= build_function_type_list (ptr_type_node
,
9752 ptr_type_node
, integer_type_node
,
9753 size_type_node
, NULL_TREE
);
9754 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9755 "memset", ECF_NOTHROW
| ECF_LEAF
);
9758 /* If we're checking the stack, `alloca' can throw. */
9759 const int alloca_flags
9760 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
9762 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9764 ftype
= build_function_type_list (ptr_type_node
,
9765 size_type_node
, NULL_TREE
);
9766 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9767 "alloca", alloca_flags
);
9770 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9771 size_type_node
, NULL_TREE
);
9772 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9773 BUILT_IN_ALLOCA_WITH_ALIGN
,
9774 "__builtin_alloca_with_align",
9777 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9778 size_type_node
, size_type_node
, NULL_TREE
);
9779 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
9780 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
9781 "__builtin_alloca_with_align_and_max",
9784 ftype
= build_function_type_list (void_type_node
,
9785 ptr_type_node
, ptr_type_node
,
9786 ptr_type_node
, NULL_TREE
);
9787 local_define_builtin ("__builtin_init_trampoline", ftype
,
9788 BUILT_IN_INIT_TRAMPOLINE
,
9789 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9790 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9791 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9792 "__builtin_init_heap_trampoline",
9793 ECF_NOTHROW
| ECF_LEAF
);
9794 local_define_builtin ("__builtin_init_descriptor", ftype
,
9795 BUILT_IN_INIT_DESCRIPTOR
,
9796 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
9798 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
9799 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
9800 BUILT_IN_ADJUST_TRAMPOLINE
,
9801 "__builtin_adjust_trampoline",
9802 ECF_CONST
| ECF_NOTHROW
);
9803 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
9804 BUILT_IN_ADJUST_DESCRIPTOR
,
9805 "__builtin_adjust_descriptor",
9806 ECF_CONST
| ECF_NOTHROW
);
9808 ftype
= build_function_type_list (void_type_node
,
9809 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9810 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE
))
9811 local_define_builtin ("__builtin___clear_cache", ftype
,
9812 BUILT_IN_CLEAR_CACHE
,
9816 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
9817 BUILT_IN_NONLOCAL_GOTO
,
9818 "__builtin_nonlocal_goto",
9819 ECF_NORETURN
| ECF_NOTHROW
);
9821 ftype
= build_function_type_list (void_type_node
,
9822 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9823 local_define_builtin ("__builtin_setjmp_setup", ftype
,
9824 BUILT_IN_SETJMP_SETUP
,
9825 "__builtin_setjmp_setup", ECF_NOTHROW
);
9827 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9828 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
9829 BUILT_IN_SETJMP_RECEIVER
,
9830 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
9832 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
9833 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
9834 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
9836 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9837 local_define_builtin ("__builtin_stack_restore", ftype
,
9838 BUILT_IN_STACK_RESTORE
,
9839 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
9841 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9842 const_ptr_type_node
, size_type_node
,
9844 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
9845 "__builtin_memcmp_eq",
9846 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9848 local_define_builtin ("__builtin_strncmp_eq", ftype
, BUILT_IN_STRNCMP_EQ
,
9849 "__builtin_strncmp_eq",
9850 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9852 local_define_builtin ("__builtin_strcmp_eq", ftype
, BUILT_IN_STRCMP_EQ
,
9853 "__builtin_strcmp_eq",
9854 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9856 /* If there's a possibility that we might use the ARM EABI, build the
9857 alternate __cxa_end_cleanup node used to resume from C++. */
9858 if (targetm
.arm_eabi_unwinder
)
9860 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
9861 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
9862 BUILT_IN_CXA_END_CLEANUP
,
9863 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
9866 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9867 local_define_builtin ("__builtin_unwind_resume", ftype
,
9868 BUILT_IN_UNWIND_RESUME
,
9869 ((targetm_common
.except_unwind_info (&global_options
)
9871 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9874 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
9876 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
9878 local_define_builtin ("__builtin_return_address", ftype
,
9879 BUILT_IN_RETURN_ADDRESS
,
9880 "__builtin_return_address",
9884 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
9885 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9887 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
9888 ptr_type_node
, NULL_TREE
);
9889 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
9890 local_define_builtin ("__cyg_profile_func_enter", ftype
,
9891 BUILT_IN_PROFILE_FUNC_ENTER
,
9892 "__cyg_profile_func_enter", 0);
9893 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9894 local_define_builtin ("__cyg_profile_func_exit", ftype
,
9895 BUILT_IN_PROFILE_FUNC_EXIT
,
9896 "__cyg_profile_func_exit", 0);
9899 /* The exception object and filter values from the runtime. The argument
9900 must be zero before exception lowering, i.e. from the front end. After
9901 exception lowering, it will be the region number for the exception
9902 landing pad. These functions are PURE instead of CONST to prevent
9903 them from being hoisted past the exception edge that will initialize
9904 its value in the landing pad. */
9905 ftype
= build_function_type_list (ptr_type_node
,
9906 integer_type_node
, NULL_TREE
);
9907 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
9908 /* Only use TM_PURE if we have TM language support. */
9909 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
9910 ecf_flags
|= ECF_TM_PURE
;
9911 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
9912 "__builtin_eh_pointer", ecf_flags
);
9914 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
9915 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
9916 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
9917 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9919 ftype
= build_function_type_list (void_type_node
,
9920 integer_type_node
, integer_type_node
,
9922 local_define_builtin ("__builtin_eh_copy_values", ftype
,
9923 BUILT_IN_EH_COPY_VALUES
,
9924 "__builtin_eh_copy_values", ECF_NOTHROW
);
9926 /* Complex multiplication and division. These are handled as builtins
9927 rather than optabs because emit_library_call_value doesn't support
9928 complex. Further, we can do slightly better with folding these
9929 beasties if the real and complex parts of the arguments are separate. */
9933 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
9935 char mode_name_buf
[4], *q
;
9937 enum built_in_function mcode
, dcode
;
9938 tree type
, inner_type
;
9939 const char *prefix
= "__";
9941 if (targetm
.libfunc_gnu_prefix
)
9944 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
9947 inner_type
= TREE_TYPE (type
);
9949 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
9950 inner_type
, inner_type
, NULL_TREE
);
9952 mcode
= ((enum built_in_function
)
9953 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9954 dcode
= ((enum built_in_function
)
9955 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9957 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
9961 /* For -ftrapping-math these should throw from a former
9962 -fnon-call-exception stmt. */
9963 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
9965 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
9966 built_in_names
[mcode
],
9967 ECF_CONST
| ECF_LEAF
);
9969 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
9971 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
9972 built_in_names
[dcode
],
9973 ECF_CONST
| ECF_LEAF
);
9977 init_internal_fns ();
9980 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9983 If we requested a pointer to a vector, build up the pointers that
9984 we stripped off while looking for the inner type. Similarly for
9985 return values from functions.
9987 The argument TYPE is the top of the chain, and BOTTOM is the
9988 new type which we will point to. */
9991 reconstruct_complex_type (tree type
, tree bottom
)
9995 if (TREE_CODE (type
) == POINTER_TYPE
)
9997 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
9998 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
9999 TYPE_REF_CAN_ALIAS_ALL (type
));
10001 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10003 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10004 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10005 TYPE_REF_CAN_ALIAS_ALL (type
));
10007 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10009 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10010 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10012 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10014 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10015 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
),
10016 TYPE_NO_NAMED_ARGS_STDARG_P (type
));
10018 else if (TREE_CODE (type
) == METHOD_TYPE
)
10020 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10021 /* The build_method_type_directly() routine prepends 'this' to argument list,
10022 so we must compensate by getting rid of it. */
10024 = build_method_type_directly
10025 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10027 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10029 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10031 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10032 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10037 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10038 TYPE_QUALS (type
));
10041 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10044 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
10047 unsigned int bitsize
;
10049 switch (GET_MODE_CLASS (mode
))
10051 case MODE_VECTOR_BOOL
:
10052 case MODE_VECTOR_INT
:
10053 case MODE_VECTOR_FLOAT
:
10054 case MODE_VECTOR_FRACT
:
10055 case MODE_VECTOR_UFRACT
:
10056 case MODE_VECTOR_ACCUM
:
10057 case MODE_VECTOR_UACCUM
:
10058 nunits
= GET_MODE_NUNITS (mode
);
10062 /* Check that there are no leftover bits. */
10063 bitsize
= GET_MODE_BITSIZE (as_a
<scalar_int_mode
> (mode
));
10064 gcc_assert (bitsize
% TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10065 nunits
= bitsize
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10069 gcc_unreachable ();
10072 return make_vector_type (innertype
, nunits
, mode
);
10075 /* Similarly, but takes the inner type and number of units, which must be
10079 build_vector_type (tree innertype
, poly_int64 nunits
)
10081 return make_vector_type (innertype
, nunits
, VOIDmode
);
10084 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10087 build_truth_vector_type_for_mode (poly_uint64 nunits
, machine_mode mask_mode
)
10089 gcc_assert (mask_mode
!= BLKmode
);
10091 unsigned HOST_WIDE_INT esize
;
10092 if (VECTOR_MODE_P (mask_mode
))
10094 poly_uint64 vsize
= GET_MODE_BITSIZE (mask_mode
);
10095 esize
= vector_element_size (vsize
, nunits
);
10100 tree bool_type
= build_nonstandard_boolean_type (esize
);
10102 return make_vector_type (bool_type
, nunits
, mask_mode
);
10105 /* Build a vector type that holds one boolean result for each element of
10106 vector type VECTYPE. The public interface for this operation is
10110 build_truth_vector_type_for (tree vectype
)
10112 machine_mode vector_mode
= TYPE_MODE (vectype
);
10113 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
10115 machine_mode mask_mode
;
10116 if (VECTOR_MODE_P (vector_mode
)
10117 && targetm
.vectorize
.get_mask_mode (vector_mode
).exists (&mask_mode
))
10118 return build_truth_vector_type_for_mode (nunits
, mask_mode
);
10120 poly_uint64 vsize
= tree_to_poly_uint64 (TYPE_SIZE (vectype
));
10121 unsigned HOST_WIDE_INT esize
= vector_element_size (vsize
, nunits
);
10122 tree bool_type
= build_nonstandard_boolean_type (esize
);
10124 return make_vector_type (bool_type
, nunits
, VOIDmode
);
10127 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10131 build_opaque_vector_type (tree innertype
, poly_int64 nunits
)
10133 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10135 /* We always build the non-opaque variant before the opaque one,
10136 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10137 cand
= TYPE_NEXT_VARIANT (t
);
10139 && TYPE_VECTOR_OPAQUE (cand
)
10140 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10142 /* Othewise build a variant type and make sure to queue it after
10143 the non-opaque type. */
10144 cand
= build_distinct_type_copy (t
);
10145 TYPE_VECTOR_OPAQUE (cand
) = true;
10146 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10147 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10148 TYPE_NEXT_VARIANT (t
) = cand
;
10149 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10153 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10155 static poly_wide_int
10156 vector_cst_int_elt (const_tree t
, unsigned int i
)
10158 /* First handle elements that are directly encoded. */
10159 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10160 if (i
< encoded_nelts
)
10161 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, i
));
10163 /* Identify the pattern that contains element I and work out the index of
10164 the last encoded element for that pattern. */
10165 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10166 unsigned int pattern
= i
% npatterns
;
10167 unsigned int count
= i
/ npatterns
;
10168 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10170 /* If there are no steps, the final encoded value is the right one. */
10171 if (!VECTOR_CST_STEPPED_P (t
))
10172 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, final_i
));
10174 /* Otherwise work out the value from the last two encoded elements. */
10175 tree v1
= VECTOR_CST_ENCODED_ELT (t
, final_i
- npatterns
);
10176 tree v2
= VECTOR_CST_ENCODED_ELT (t
, final_i
);
10177 poly_wide_int diff
= wi::to_poly_wide (v2
) - wi::to_poly_wide (v1
);
10178 return wi::to_poly_wide (v2
) + (count
- 2) * diff
;
10181 /* Return the value of element I of VECTOR_CST T. */
10184 vector_cst_elt (const_tree t
, unsigned int i
)
10186 /* First handle elements that are directly encoded. */
10187 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10188 if (i
< encoded_nelts
)
10189 return VECTOR_CST_ENCODED_ELT (t
, i
);
10191 /* If there are no steps, the final encoded value is the right one. */
10192 if (!VECTOR_CST_STEPPED_P (t
))
10194 /* Identify the pattern that contains element I and work out the index of
10195 the last encoded element for that pattern. */
10196 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10197 unsigned int pattern
= i
% npatterns
;
10198 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10199 return VECTOR_CST_ENCODED_ELT (t
, final_i
);
10202 /* Otherwise work out the value from the last two encoded elements. */
10203 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t
)),
10204 vector_cst_int_elt (t
, i
));
10207 /* Given an initializer INIT, return TRUE if INIT is zero or some
10208 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10209 null, set *NONZERO if and only if INIT is known not to be all
10210 zeros. The combination of return value of false and *NONZERO
10211 false implies that INIT may but need not be all zeros. Other
10212 combinations indicate definitive answers. */
10215 initializer_zerop (const_tree init
, bool *nonzero
/* = NULL */)
10221 /* Conservatively clear NONZERO and set it only if INIT is definitely
10227 unsigned HOST_WIDE_INT off
= 0;
10229 switch (TREE_CODE (init
))
10232 if (integer_zerop (init
))
10239 /* ??? Note that this is not correct for C4X float formats. There,
10240 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10241 negative exponent. */
10242 if (real_zerop (init
)
10243 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
)))
10250 if (fixed_zerop (init
))
10257 if (integer_zerop (init
)
10258 || (real_zerop (init
)
10259 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10260 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
)))))
10267 if (VECTOR_CST_NPATTERNS (init
) == 1
10268 && VECTOR_CST_DUPLICATE_P (init
)
10269 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)))
10277 if (TREE_CLOBBER_P (init
))
10280 unsigned HOST_WIDE_INT idx
;
10283 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10284 if (!initializer_zerop (elt
, nonzero
))
10292 tree arg
= TREE_OPERAND (init
, 0);
10293 if (TREE_CODE (arg
) != ADDR_EXPR
)
10295 tree offset
= TREE_OPERAND (init
, 1);
10296 if (TREE_CODE (offset
) != INTEGER_CST
10297 || !tree_fits_uhwi_p (offset
))
10299 off
= tree_to_uhwi (offset
);
10302 arg
= TREE_OPERAND (arg
, 0);
10303 if (TREE_CODE (arg
) != STRING_CST
)
10307 /* Fall through. */
10311 gcc_assert (off
<= INT_MAX
);
10314 int n
= TREE_STRING_LENGTH (init
);
10318 /* We need to loop through all elements to handle cases like
10319 "\0" and "\0foobar". */
10320 for (i
= 0; i
< n
; ++i
)
10321 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10335 /* Return true if EXPR is an initializer expression in which every element
10336 is a constant that is numerically equal to 0 or 1. The elements do not
10337 need to be equal to each other. */
10340 initializer_each_zero_or_onep (const_tree expr
)
10342 STRIP_ANY_LOCATION_WRAPPER (expr
);
10344 switch (TREE_CODE (expr
))
10347 return integer_zerop (expr
) || integer_onep (expr
);
10350 return real_zerop (expr
) || real_onep (expr
);
10354 unsigned HOST_WIDE_INT nelts
= vector_cst_encoded_nelts (expr
);
10355 if (VECTOR_CST_STEPPED_P (expr
)
10356 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
)).is_constant (&nelts
))
10359 for (unsigned int i
= 0; i
< nelts
; ++i
)
10361 tree elt
= vector_cst_elt (expr
, i
);
10362 if (!initializer_each_zero_or_onep (elt
))
10374 /* Check if vector VEC consists of all the equal elements and
10375 that the number of elements corresponds to the type of VEC.
10376 The function returns first element of the vector
10377 or NULL_TREE if the vector is not uniform. */
10379 uniform_vector_p (const_tree vec
)
10382 unsigned HOST_WIDE_INT i
, nelts
;
10384 if (vec
== NULL_TREE
)
10387 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10389 if (TREE_CODE (vec
) == VEC_DUPLICATE_EXPR
)
10390 return TREE_OPERAND (vec
, 0);
10392 else if (TREE_CODE (vec
) == VECTOR_CST
)
10394 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
10395 return VECTOR_CST_ENCODED_ELT (vec
, 0);
10399 else if (TREE_CODE (vec
) == CONSTRUCTOR
10400 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)).is_constant (&nelts
))
10402 first
= error_mark_node
;
10404 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10411 if (!operand_equal_p (first
, t
, 0))
10417 if (TREE_CODE (first
) == CONSTRUCTOR
|| TREE_CODE (first
) == VECTOR_CST
)
10418 return uniform_vector_p (first
);
10425 /* If the argument is INTEGER_CST, return it. If the argument is vector
10426 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10428 Look through location wrappers. */
10431 uniform_integer_cst_p (tree t
)
10433 STRIP_ANY_LOCATION_WRAPPER (t
);
10435 if (TREE_CODE (t
) == INTEGER_CST
)
10438 if (VECTOR_TYPE_P (TREE_TYPE (t
)))
10440 t
= uniform_vector_p (t
);
10441 if (t
&& TREE_CODE (t
) == INTEGER_CST
)
10448 /* Checks to see if T is a constant or a constant vector and if each element E
10449 adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE. */
10452 bitmask_inv_cst_vector_p (tree t
)
10455 tree_code code
= TREE_CODE (t
);
10456 tree type
= TREE_TYPE (t
);
10458 if (!INTEGRAL_TYPE_P (type
)
10459 && !VECTOR_INTEGER_TYPE_P (type
))
10462 unsigned HOST_WIDE_INT nelts
= 1;
10464 unsigned int idx
= 0;
10465 bool uniform
= uniform_integer_cst_p (t
);
10466 tree newtype
= unsigned_type_for (type
);
10467 tree_vector_builder builder
;
10468 if (code
== INTEGER_CST
)
10472 if (!VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10475 cst
= vector_cst_elt (t
, 0);
10476 builder
.new_vector (newtype
, nelts
, 1);
10479 tree ty
= unsigned_type_for (TREE_TYPE (cst
));
10484 cst
= vector_cst_elt (t
, idx
);
10485 wide_int icst
= wi::to_wide (cst
);
10486 wide_int inv
= wi::bit_not (icst
);
10487 icst
= wi::add (1, inv
);
10488 if (wi::popcount (icst
) != 1)
10491 tree newcst
= wide_int_to_tree (ty
, inv
);
10494 return build_uniform_cst (newtype
, newcst
);
10496 builder
.quick_push (newcst
);
10498 while (++idx
< nelts
);
10500 return builder
.build ();
10503 /* If VECTOR_CST T has a single nonzero element, return the index of that
10504 element, otherwise return -1. */
10507 single_nonzero_element (const_tree t
)
10509 unsigned HOST_WIDE_INT nelts
;
10510 unsigned int repeat_nelts
;
10511 if (VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10512 repeat_nelts
= nelts
;
10513 else if (VECTOR_CST_NELTS_PER_PATTERN (t
) == 2)
10515 nelts
= vector_cst_encoded_nelts (t
);
10516 repeat_nelts
= VECTOR_CST_NPATTERNS (t
);
10522 for (unsigned int i
= 0; i
< nelts
; ++i
)
10524 tree elt
= vector_cst_elt (t
, i
);
10525 if (!integer_zerop (elt
) && !real_zerop (elt
))
10527 if (res
>= 0 || i
>= repeat_nelts
)
10535 /* Build an empty statement at location LOC. */
10538 build_empty_stmt (location_t loc
)
10540 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10541 SET_EXPR_LOCATION (t
, loc
);
10546 /* Build an OMP clause with code CODE. LOC is the location of the
10550 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10555 length
= omp_clause_num_ops
[code
];
10556 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10558 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10560 t
= (tree
) ggc_internal_alloc (size
);
10561 memset (t
, 0, size
);
10562 TREE_SET_CODE (t
, OMP_CLAUSE
);
10563 OMP_CLAUSE_SET_CODE (t
, code
);
10564 OMP_CLAUSE_LOCATION (t
) = loc
;
10569 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10570 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10571 Except for the CODE and operand count field, other storage for the
10572 object is initialized to zeros. */
10575 build_vl_exp (enum tree_code code
, int len MEM_STAT_DECL
)
10578 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10580 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10581 gcc_assert (len
>= 1);
10583 record_node_allocation_statistics (code
, length
);
10585 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10587 TREE_SET_CODE (t
, code
);
10589 /* Can't use TREE_OPERAND to store the length because if checking is
10590 enabled, it will try to check the length before we store it. :-P */
10591 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10596 /* Helper function for build_call_* functions; build a CALL_EXPR with
10597 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10598 the argument slots. */
10601 build_call_1 (tree return_type
, tree fn
, int nargs
)
10605 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10606 TREE_TYPE (t
) = return_type
;
10607 CALL_EXPR_FN (t
) = fn
;
10608 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10613 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10614 FN and a null static chain slot. NARGS is the number of call arguments
10615 which are specified as "..." arguments. */
10618 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10622 va_start (args
, nargs
);
10623 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10628 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10629 FN and a null static chain slot. NARGS is the number of call arguments
10630 which are specified as a va_list ARGS. */
10633 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10638 t
= build_call_1 (return_type
, fn
, nargs
);
10639 for (i
= 0; i
< nargs
; i
++)
10640 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10641 process_call_operands (t
);
10645 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10646 FN and a null static chain slot. NARGS is the number of call arguments
10647 which are specified as a tree array ARGS. */
10650 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10651 int nargs
, const tree
*args
)
10656 t
= build_call_1 (return_type
, fn
, nargs
);
10657 for (i
= 0; i
< nargs
; i
++)
10658 CALL_EXPR_ARG (t
, i
) = args
[i
];
10659 process_call_operands (t
);
10660 SET_EXPR_LOCATION (t
, loc
);
10664 /* Like build_call_array, but takes a vec. */
10667 build_call_vec (tree return_type
, tree fn
, const vec
<tree
, va_gc
> *args
)
10672 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10673 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10674 CALL_EXPR_ARG (ret
, ix
) = t
;
10675 process_call_operands (ret
);
10679 /* Conveniently construct a function call expression. FNDECL names the
10680 function to be called and N arguments are passed in the array
10684 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10686 tree fntype
= TREE_TYPE (fndecl
);
10687 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10689 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10692 /* Conveniently construct a function call expression. FNDECL names the
10693 function to be called and the arguments are passed in the vector
10697 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10699 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10700 vec_safe_address (vec
));
10704 /* Conveniently construct a function call expression. FNDECL names the
10705 function to be called, N is the number of arguments, and the "..."
10706 parameters are the argument expressions. */
10709 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10712 tree
*argarray
= XALLOCAVEC (tree
, n
);
10716 for (i
= 0; i
< n
; i
++)
10717 argarray
[i
] = va_arg (ap
, tree
);
10719 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10722 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10723 varargs macros aren't supported by all bootstrap compilers. */
10726 build_call_expr (tree fndecl
, int n
, ...)
10729 tree
*argarray
= XALLOCAVEC (tree
, n
);
10733 for (i
= 0; i
< n
; i
++)
10734 argarray
[i
] = va_arg (ap
, tree
);
10736 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10739 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10740 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10741 It will get gimplified later into an ordinary internal function. */
10744 build_call_expr_internal_loc_array (location_t loc
, internal_fn ifn
,
10745 tree type
, int n
, const tree
*args
)
10747 tree t
= build_call_1 (type
, NULL_TREE
, n
);
10748 for (int i
= 0; i
< n
; ++i
)
10749 CALL_EXPR_ARG (t
, i
) = args
[i
];
10750 SET_EXPR_LOCATION (t
, loc
);
10751 CALL_EXPR_IFN (t
) = ifn
;
10752 process_call_operands (t
);
10756 /* Build internal call expression. This is just like CALL_EXPR, except
10757 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10758 internal function. */
10761 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10762 tree type
, int n
, ...)
10765 tree
*argarray
= XALLOCAVEC (tree
, n
);
10769 for (i
= 0; i
< n
; i
++)
10770 argarray
[i
] = va_arg (ap
, tree
);
10772 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10775 /* Return a function call to FN, if the target is guaranteed to support it,
10778 N is the number of arguments, passed in the "...", and TYPE is the
10779 type of the return value. */
10782 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
10786 tree
*argarray
= XALLOCAVEC (tree
, n
);
10790 for (i
= 0; i
< n
; i
++)
10791 argarray
[i
] = va_arg (ap
, tree
);
10793 if (internal_fn_p (fn
))
10795 internal_fn ifn
= as_internal_fn (fn
);
10796 if (direct_internal_fn_p (ifn
))
10798 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
10799 if (!direct_internal_fn_supported_p (ifn
, types
,
10800 OPTIMIZE_FOR_BOTH
))
10803 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10807 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
10810 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10814 /* Return a function call to the appropriate builtin alloca variant.
10816 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10817 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10818 bound for SIZE in case it is not a fixed value. */
10821 build_alloca_call_expr (tree size
, unsigned int align
, HOST_WIDE_INT max_size
)
10825 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
);
10827 build_call_expr (t
, 3, size
, size_int (align
), size_int (max_size
));
10829 else if (align
> 0)
10831 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10832 return build_call_expr (t
, 2, size
, size_int (align
));
10836 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA
);
10837 return build_call_expr (t
, 1, size
);
10841 /* The built-in decl to use to mark code points believed to be unreachable.
10842 Typically __builtin_unreachable, but __builtin_trap if
10843 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10844 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10845 appropriate ubsan function. When building a call directly, use
10846 {gimple_},build_builtin_unreachable instead. */
10849 builtin_decl_unreachable ()
10851 enum built_in_function fncode
= BUILT_IN_UNREACHABLE
;
10853 if (sanitize_flags_p (SANITIZE_UNREACHABLE
)
10854 ? (flag_sanitize_trap
& SANITIZE_UNREACHABLE
)
10855 : flag_unreachable_traps
)
10856 fncode
= BUILT_IN_TRAP
;
10857 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10858 in the sanopt pass. */
10860 return builtin_decl_explicit (fncode
);
10863 /* Build a call to __builtin_unreachable, possibly rewritten by
10864 -fsanitize=unreachable. Use this rather than the above when practical. */
10867 build_builtin_unreachable (location_t loc
)
10869 tree data
= NULL_TREE
;
10870 tree fn
= sanitize_unreachable_fn (&data
, loc
);
10871 return build_call_expr_loc (loc
, fn
, data
!= NULL_TREE
, data
);
10874 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10875 if SIZE == -1) and return a tree node representing char* pointer to
10876 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10877 the STRING_CST value is the LEN bytes at STR (the representation
10878 of the string, which may be wide). Otherwise it's all zeros. */
10881 build_string_literal (unsigned len
, const char *str
/* = NULL */,
10882 tree eltype
/* = char_type_node */,
10883 unsigned HOST_WIDE_INT size
/* = -1 */)
10885 tree t
= build_string (len
, str
);
10886 /* Set the maximum valid index based on the string length or SIZE. */
10887 unsigned HOST_WIDE_INT maxidx
10888 = (size
== HOST_WIDE_INT_M1U
? len
: size
) - 1;
10890 tree index
= build_index_type (size_int (maxidx
));
10891 eltype
= build_type_variant (eltype
, 1, 0);
10892 tree type
= build_array_type (eltype
, index
);
10893 TREE_TYPE (t
) = type
;
10894 TREE_CONSTANT (t
) = 1;
10895 TREE_READONLY (t
) = 1;
10896 TREE_STATIC (t
) = 1;
10898 type
= build_pointer_type (eltype
);
10899 t
= build1 (ADDR_EXPR
, type
,
10900 build4 (ARRAY_REF
, eltype
,
10901 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10907 /* Return true if T (assumed to be a DECL) must be assigned a memory
10911 needs_to_live_in_memory (const_tree t
)
10913 return (TREE_ADDRESSABLE (t
)
10914 || is_global_var (t
)
10915 || (TREE_CODE (t
) == RESULT_DECL
10916 && !DECL_BY_REFERENCE (t
)
10917 && aggregate_value_p (t
, current_function_decl
)));
10920 /* Return value of a constant X and sign-extend it. */
10923 int_cst_value (const_tree x
)
10925 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10926 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10928 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10929 gcc_assert (cst_and_fits_in_hwi (x
));
10931 if (bits
< HOST_BITS_PER_WIDE_INT
)
10933 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10935 val
|= HOST_WIDE_INT_M1U
<< (bits
- 1) << 1;
10937 val
&= ~(HOST_WIDE_INT_M1U
<< (bits
- 1) << 1);
10943 /* If TYPE is an integral or pointer type, return an integer type with
10944 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10945 if TYPE is already an integer type of signedness UNSIGNEDP.
10946 If TYPE is a floating-point type, return an integer type with the same
10947 bitsize and with the signedness given by UNSIGNEDP; this is useful
10948 when doing bit-level operations on a floating-point value. */
10951 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10953 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
) == unsignedp
)
10956 if (TREE_CODE (type
) == VECTOR_TYPE
)
10958 tree inner
= TREE_TYPE (type
);
10959 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10962 if (inner
== inner2
)
10964 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10967 if (TREE_CODE (type
) == COMPLEX_TYPE
)
10969 tree inner
= TREE_TYPE (type
);
10970 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10973 if (inner
== inner2
)
10975 return build_complex_type (inner2
);
10979 if (INTEGRAL_TYPE_P (type
)
10980 || POINTER_TYPE_P (type
)
10981 || TREE_CODE (type
) == OFFSET_TYPE
)
10982 bits
= TYPE_PRECISION (type
);
10983 else if (TREE_CODE (type
) == REAL_TYPE
)
10984 bits
= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type
));
10988 return build_nonstandard_integer_type (bits
, unsignedp
);
10991 /* If TYPE is an integral or pointer type, return an integer type with
10992 the same precision which is unsigned, or itself if TYPE is already an
10993 unsigned integer type. If TYPE is a floating-point type, return an
10994 unsigned integer type with the same bitsize as TYPE. */
10997 unsigned_type_for (tree type
)
10999 return signed_or_unsigned_type_for (1, type
);
11002 /* If TYPE is an integral or pointer type, return an integer type with
11003 the same precision which is signed, or itself if TYPE is already a
11004 signed integer type. If TYPE is a floating-point type, return a
11005 signed integer type with the same bitsize as TYPE. */
11008 signed_type_for (tree type
)
11010 return signed_or_unsigned_type_for (0, type
);
11013 /* - For VECTOR_TYPEs:
11014 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11015 - The number of elements must match (known_eq).
11016 - targetm.vectorize.get_mask_mode exists, and exactly
11017 the same mode as the truth type.
11018 - Otherwise, the truth type must be a BOOLEAN_TYPE
11019 or useless_type_conversion_p to BOOLEAN_TYPE. */
11021 is_truth_type_for (tree type
, tree truth_type
)
11023 machine_mode mask_mode
= TYPE_MODE (truth_type
);
11024 machine_mode vmode
= TYPE_MODE (type
);
11025 machine_mode tmask_mode
;
11027 if (TREE_CODE (type
) == VECTOR_TYPE
)
11029 if (VECTOR_BOOLEAN_TYPE_P (truth_type
)
11030 && known_eq (TYPE_VECTOR_SUBPARTS (type
),
11031 TYPE_VECTOR_SUBPARTS (truth_type
))
11032 && targetm
.vectorize
.get_mask_mode (vmode
).exists (&tmask_mode
)
11033 && tmask_mode
== mask_mode
)
11039 return useless_type_conversion_p (boolean_type_node
, truth_type
);
11042 /* If TYPE is a vector type, return a signed integer vector type with the
11043 same width and number of subparts. Otherwise return boolean_type_node. */
11046 truth_type_for (tree type
)
11048 if (TREE_CODE (type
) == VECTOR_TYPE
)
11050 if (VECTOR_BOOLEAN_TYPE_P (type
))
11052 return build_truth_vector_type_for (type
);
11055 return boolean_type_node
;
11058 /* Returns the largest value obtainable by casting something in INNER type to
11062 upper_bound_in_type (tree outer
, tree inner
)
11064 unsigned int det
= 0;
11065 unsigned oprec
= TYPE_PRECISION (outer
);
11066 unsigned iprec
= TYPE_PRECISION (inner
);
11069 /* Compute a unique number for every combination. */
11070 det
|= (oprec
> iprec
) ? 4 : 0;
11071 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
11072 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
11074 /* Determine the exponent to use. */
11079 /* oprec <= iprec, outer: signed, inner: don't care. */
11084 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11088 /* oprec > iprec, outer: signed, inner: signed. */
11092 /* oprec > iprec, outer: signed, inner: unsigned. */
11096 /* oprec > iprec, outer: unsigned, inner: signed. */
11100 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11104 gcc_unreachable ();
11107 return wide_int_to_tree (outer
,
11108 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
11111 /* Returns the smallest value obtainable by casting something in INNER type to
11115 lower_bound_in_type (tree outer
, tree inner
)
11117 unsigned oprec
= TYPE_PRECISION (outer
);
11118 unsigned iprec
= TYPE_PRECISION (inner
);
11120 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11122 if (TYPE_UNSIGNED (outer
)
11123 /* If we are widening something of an unsigned type, OUTER type
11124 contains all values of INNER type. In particular, both INNER
11125 and OUTER types have zero in common. */
11126 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
11127 return build_int_cst (outer
, 0);
11130 /* If we are widening a signed type to another signed type, we
11131 want to obtain -2^^(iprec-1). If we are keeping the
11132 precision or narrowing to a signed type, we want to obtain
11134 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
11135 return wide_int_to_tree (outer
,
11136 wi::mask (prec
- 1, true,
11137 TYPE_PRECISION (outer
)));
11141 /* Return nonzero if two operands that are suitable for PHI nodes are
11142 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11143 SSA_NAME or invariant. Note that this is strictly an optimization.
11144 That is, callers of this function can directly call operand_equal_p
11145 and get the same result, only slower. */
11148 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
11152 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
11154 return operand_equal_p (arg0
, arg1
, 0);
11157 /* Returns number of zeros at the end of binary representation of X. */
11160 num_ending_zeros (const_tree x
)
11162 return build_int_cst (TREE_TYPE (x
), wi::ctz (wi::to_wide (x
)));
11166 #define WALK_SUBTREE(NODE) \
11169 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11175 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11176 be walked whenever a type is seen in the tree. Rest of operands and return
11177 value are as for walk_tree. */
11180 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
11181 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11183 tree result
= NULL_TREE
;
11185 switch (TREE_CODE (type
))
11188 case REFERENCE_TYPE
:
11190 /* We have to worry about mutually recursive pointers. These can't
11191 be written in C. They can in Ada. It's pathological, but
11192 there's an ACATS test (c38102a) that checks it. Deal with this
11193 by checking if we're pointing to another pointer, that one
11194 points to another pointer, that one does too, and we have no htab.
11195 If so, get a hash table. We check three levels deep to avoid
11196 the cost of the hash table if we don't need one. */
11197 if (POINTER_TYPE_P (TREE_TYPE (type
))
11198 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
11199 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
11202 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
11213 WALK_SUBTREE (TREE_TYPE (type
));
11217 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
11219 /* Fall through. */
11221 case FUNCTION_TYPE
:
11222 WALK_SUBTREE (TREE_TYPE (type
));
11226 /* We never want to walk into default arguments. */
11227 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
11228 WALK_SUBTREE (TREE_VALUE (arg
));
11233 /* Don't follow this nodes's type if a pointer for fear that
11234 we'll have infinite recursion. If we have a PSET, then we
11237 || (!POINTER_TYPE_P (TREE_TYPE (type
))
11238 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
11239 WALK_SUBTREE (TREE_TYPE (type
));
11240 WALK_SUBTREE (TYPE_DOMAIN (type
));
11244 WALK_SUBTREE (TREE_TYPE (type
));
11245 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
11255 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11256 called with the DATA and the address of each sub-tree. If FUNC returns a
11257 non-NULL value, the traversal is stopped, and the value returned by FUNC
11258 is returned. If PSET is non-NULL it is used to record the nodes visited,
11259 and to avoid visiting a node more than once. */
11262 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11263 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11265 enum tree_code code
;
11269 #define WALK_SUBTREE_TAIL(NODE) \
11273 goto tail_recurse; \
11278 /* Skip empty subtrees. */
11282 /* Don't walk the same tree twice, if the user has requested
11283 that we avoid doing so. */
11284 if (pset
&& pset
->add (*tp
))
11287 /* Call the function. */
11289 result
= (*func
) (tp
, &walk_subtrees
, data
);
11291 /* If we found something, return it. */
11295 code
= TREE_CODE (*tp
);
11297 /* Even if we didn't, FUNC may have decided that there was nothing
11298 interesting below this point in the tree. */
11299 if (!walk_subtrees
)
11301 /* But we still need to check our siblings. */
11302 if (code
== TREE_LIST
)
11303 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11304 else if (code
== OMP_CLAUSE
)
11305 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11312 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11313 if (result
|| !walk_subtrees
)
11320 case IDENTIFIER_NODE
:
11326 case PLACEHOLDER_EXPR
:
11330 /* None of these have subtrees other than those already walked
11335 WALK_SUBTREE (TREE_VALUE (*tp
));
11336 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11340 int len
= TREE_VEC_LENGTH (*tp
);
11345 /* Walk all elements but the last. */
11346 for (int i
= 0; i
< len
- 1; ++i
)
11347 WALK_SUBTREE (TREE_VEC_ELT (*tp
, i
));
11349 /* Now walk the last one as a tail call. */
11350 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, len
- 1));
11355 unsigned len
= vector_cst_encoded_nelts (*tp
);
11358 /* Walk all elements but the last. */
11359 for (unsigned i
= 0; i
< len
- 1; ++i
)
11360 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp
, i
));
11361 /* Now walk the last one as a tail call. */
11362 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp
, len
- 1));
11366 WALK_SUBTREE (TREE_REALPART (*tp
));
11367 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11371 unsigned HOST_WIDE_INT idx
;
11372 constructor_elt
*ce
;
11374 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11376 WALK_SUBTREE (ce
->value
);
11381 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11386 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11388 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11389 into declarations that are just mentioned, rather than
11390 declared; they don't really belong to this part of the tree.
11391 And, we can see cycles: the initializer for a declaration
11392 can refer to the declaration itself. */
11393 WALK_SUBTREE (DECL_INITIAL (decl
));
11394 WALK_SUBTREE (DECL_SIZE (decl
));
11395 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11397 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11400 case STATEMENT_LIST
:
11402 tree_stmt_iterator i
;
11403 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11404 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11410 int len
= omp_clause_num_ops
[OMP_CLAUSE_CODE (*tp
)];
11411 for (int i
= 0; i
< len
; i
++)
11412 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11413 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11420 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11421 But, we only want to walk once. */
11422 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11423 for (i
= 0; i
< len
; ++i
)
11424 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11425 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11429 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11430 defining. We only want to walk into these fields of a type in this
11431 case and not in the general case of a mere reference to the type.
11433 The criterion is as follows: if the field can be an expression, it
11434 must be walked only here. This should be in keeping with the fields
11435 that are directly gimplified in gimplify_type_sizes in order for the
11436 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11437 variable-sized types.
11439 Note that DECLs get walked as part of processing the BIND_EXPR. */
11440 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11442 /* Call the function for the decl so e.g. copy_tree_body_r can
11443 replace it with the remapped one. */
11444 result
= (*func
) (&DECL_EXPR_DECL (*tp
), &walk_subtrees
, data
);
11445 if (result
|| !walk_subtrees
)
11448 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11449 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11452 /* Call the function for the type. See if it returns anything or
11453 doesn't want us to continue. If we are to continue, walk both
11454 the normal fields and those for the declaration case. */
11455 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11456 if (result
|| !walk_subtrees
)
11459 /* But do not walk a pointed-to type since it may itself need to
11460 be walked in the declaration case if it isn't anonymous. */
11461 if (!POINTER_TYPE_P (*type_p
))
11463 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11468 /* If this is a record type, also walk the fields. */
11469 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11473 for (field
= TYPE_FIELDS (*type_p
); field
;
11474 field
= DECL_CHAIN (field
))
11476 /* We'd like to look at the type of the field, but we can
11477 easily get infinite recursion. So assume it's pointed
11478 to elsewhere in the tree. Also, ignore things that
11480 if (TREE_CODE (field
) != FIELD_DECL
)
11483 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11484 WALK_SUBTREE (DECL_SIZE (field
));
11485 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11486 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11487 WALK_SUBTREE (DECL_QUALIFIER (field
));
11491 /* Same for scalar types. */
11492 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11493 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11494 || TREE_CODE (*type_p
) == INTEGER_TYPE
11495 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11496 || TREE_CODE (*type_p
) == REAL_TYPE
)
11498 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11499 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11502 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11503 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11508 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11512 /* Walk over all the sub-trees of this operand. */
11513 len
= TREE_OPERAND_LENGTH (*tp
);
11515 /* Go through the subtrees. We need to do this in forward order so
11516 that the scope of a FOR_EXPR is handled properly. */
11519 for (i
= 0; i
< len
- 1; ++i
)
11520 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11521 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11524 /* If this is a type, walk the needed fields in the type. */
11525 else if (TYPE_P (*tp
))
11526 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11530 /* We didn't find what we were looking for. */
11533 #undef WALK_SUBTREE_TAIL
11535 #undef WALK_SUBTREE
11537 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11540 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11545 hash_set
<tree
> pset
;
11546 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
11552 tree_block (tree t
)
11554 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11556 if (IS_EXPR_CODE_CLASS (c
))
11557 return LOCATION_BLOCK (t
->exp
.locus
);
11558 gcc_unreachable ();
11563 tree_set_block (tree t
, tree b
)
11565 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11567 if (IS_EXPR_CODE_CLASS (c
))
11569 t
->exp
.locus
= set_block (t
->exp
.locus
, b
);
11572 gcc_unreachable ();
11575 /* Create a nameless artificial label and put it in the current
11576 function context. The label has a location of LOC. Returns the
11577 newly created label. */
11580 create_artificial_label (location_t loc
)
11582 tree lab
= build_decl (loc
,
11583 LABEL_DECL
, NULL_TREE
, void_type_node
);
11585 DECL_ARTIFICIAL (lab
) = 1;
11586 DECL_IGNORED_P (lab
) = 1;
11587 DECL_CONTEXT (lab
) = current_function_decl
;
11591 /* Given a tree, try to return a useful variable name that we can use
11592 to prefix a temporary that is being assigned the value of the tree.
11593 I.E. given <temp> = &A, return A. */
11598 tree stripped_decl
;
11601 STRIP_NOPS (stripped_decl
);
11602 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11603 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11604 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11606 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11609 return IDENTIFIER_POINTER (name
);
11613 switch (TREE_CODE (stripped_decl
))
11616 return get_name (TREE_OPERAND (stripped_decl
, 0));
11623 /* Return true if TYPE has a variable argument list. */
11626 stdarg_p (const_tree fntype
)
11628 function_args_iterator args_iter
;
11629 tree n
= NULL_TREE
, t
;
11634 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype
))
11637 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11642 return n
!= NULL_TREE
&& n
!= void_type_node
;
11645 /* Return true if TYPE has a prototype. */
11648 prototype_p (const_tree fntype
)
11652 gcc_assert (fntype
!= NULL_TREE
);
11654 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype
))
11657 t
= TYPE_ARG_TYPES (fntype
);
11658 return (t
!= NULL_TREE
);
11661 /* If BLOCK is inlined from an __attribute__((__artificial__))
11662 routine, return pointer to location from where it has been
11665 block_nonartificial_location (tree block
)
11667 location_t
*ret
= NULL
;
11669 while (block
&& TREE_CODE (block
) == BLOCK
11670 && BLOCK_ABSTRACT_ORIGIN (block
))
11672 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11673 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11675 /* If AO is an artificial inline, point RET to the
11676 call site locus at which it has been inlined and continue
11677 the loop, in case AO's caller is also an artificial
11679 if (DECL_DECLARED_INLINE_P (ao
)
11680 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11681 ret
= &BLOCK_SOURCE_LOCATION (block
);
11685 else if (TREE_CODE (ao
) != BLOCK
)
11688 block
= BLOCK_SUPERCONTEXT (block
);
11694 /* If EXP is inlined from an __attribute__((__artificial__))
11695 function, return the location of the original call expression. */
11698 tree_nonartificial_location (tree exp
)
11700 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11705 return EXPR_LOCATION (exp
);
11708 /* Return the location into which EXP has been inlined. Analogous
11709 to tree_nonartificial_location() above but not limited to artificial
11710 functions declared inline. If SYSTEM_HEADER is true, return
11711 the macro expansion point of the location if it's in a system header */
11714 tree_inlined_location (tree exp
, bool system_header
/* = true */)
11716 location_t loc
= UNKNOWN_LOCATION
;
11718 tree block
= TREE_BLOCK (exp
);
11720 while (block
&& TREE_CODE (block
) == BLOCK
11721 && BLOCK_ABSTRACT_ORIGIN (block
))
11723 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11724 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11725 loc
= BLOCK_SOURCE_LOCATION (block
);
11726 else if (TREE_CODE (ao
) != BLOCK
)
11729 block
= BLOCK_SUPERCONTEXT (block
);
11732 if (loc
== UNKNOWN_LOCATION
)
11734 loc
= EXPR_LOCATION (exp
);
11736 /* Only consider macro expansion when the block traversal failed
11737 to find a location. Otherwise it's not relevant. */
11738 return expansion_point_location_if_in_system_header (loc
);
11744 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11747 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11750 cl_option_hasher::hash (tree x
)
11752 const_tree
const t
= x
;
11754 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11755 return cl_optimization_hash (TREE_OPTIMIZATION (t
));
11756 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11757 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
11759 gcc_unreachable ();
11762 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11763 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11767 cl_option_hasher::equal (tree x
, tree y
)
11769 const_tree
const xt
= x
;
11770 const_tree
const yt
= y
;
11772 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11775 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11776 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt
),
11777 TREE_OPTIMIZATION (yt
));
11778 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11779 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
11780 TREE_TARGET_OPTION (yt
));
11782 gcc_unreachable ();
11785 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11788 build_optimization_node (struct gcc_options
*opts
,
11789 struct gcc_options
*opts_set
)
11793 /* Use the cache of optimization nodes. */
11795 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11798 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
11802 /* Insert this one into the hash table. */
11803 t
= cl_optimization_node
;
11806 /* Make a new node for next time round. */
11807 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11813 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11816 build_target_option_node (struct gcc_options
*opts
,
11817 struct gcc_options
*opts_set
)
11821 /* Use the cache of optimization nodes. */
11823 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11826 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
11830 /* Insert this one into the hash table. */
11831 t
= cl_target_option_node
;
11834 /* Make a new node for next time round. */
11835 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11841 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11842 so that they aren't saved during PCH writing. */
11845 prepare_target_option_nodes_for_pch (void)
11847 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
11848 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
11849 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
11850 TREE_TARGET_GLOBALS (*iter
) = NULL
;
11853 /* Determine the "ultimate origin" of a block. */
11856 block_ultimate_origin (const_tree block
)
11858 tree origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11860 if (origin
== NULL_TREE
)
11864 gcc_checking_assert ((DECL_P (origin
)
11865 && DECL_ORIGIN (origin
) == origin
)
11866 || BLOCK_ORIGIN (origin
) == origin
);
11871 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11875 tree_nop_conversion_p (const_tree outer_type
, const_tree inner_type
)
11877 /* Do not strip casts into or out of differing address spaces. */
11878 if (POINTER_TYPE_P (outer_type
)
11879 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type
)) != ADDR_SPACE_GENERIC
)
11881 if (!POINTER_TYPE_P (inner_type
)
11882 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
11883 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
))))
11886 else if (POINTER_TYPE_P (inner_type
)
11887 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)) != ADDR_SPACE_GENERIC
)
11889 /* We already know that outer_type is not a pointer with
11890 a non-generic address space. */
11894 /* Use precision rather then machine mode when we can, which gives
11895 the correct answer even for submode (bit-field) types. */
11896 if ((INTEGRAL_TYPE_P (outer_type
)
11897 || POINTER_TYPE_P (outer_type
)
11898 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11899 && (INTEGRAL_TYPE_P (inner_type
)
11900 || POINTER_TYPE_P (inner_type
)
11901 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11902 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11904 /* Otherwise fall back on comparing machine modes (e.g. for
11905 aggregate types, floats). */
11906 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11909 /* Return true iff conversion in EXP generates no instruction. Mark
11910 it inline so that we fully inline into the stripping functions even
11911 though we have two uses of this function. */
11914 tree_nop_conversion (const_tree exp
)
11916 tree outer_type
, inner_type
;
11918 if (location_wrapper_p (exp
))
11920 if (!CONVERT_EXPR_P (exp
)
11921 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11924 outer_type
= TREE_TYPE (exp
);
11925 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11926 if (!inner_type
|| inner_type
== error_mark_node
)
11929 return tree_nop_conversion_p (outer_type
, inner_type
);
11932 /* Return true iff conversion in EXP generates no instruction. Don't
11933 consider conversions changing the signedness. */
11936 tree_sign_nop_conversion (const_tree exp
)
11938 tree outer_type
, inner_type
;
11940 if (!tree_nop_conversion (exp
))
11943 outer_type
= TREE_TYPE (exp
);
11944 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11946 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11947 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11950 /* Strip conversions from EXP according to tree_nop_conversion and
11951 return the resulting expression. */
11954 tree_strip_nop_conversions (tree exp
)
11956 while (tree_nop_conversion (exp
))
11957 exp
= TREE_OPERAND (exp
, 0);
11961 /* Strip conversions from EXP according to tree_sign_nop_conversion
11962 and return the resulting expression. */
11965 tree_strip_sign_nop_conversions (tree exp
)
11967 while (tree_sign_nop_conversion (exp
))
11968 exp
= TREE_OPERAND (exp
, 0);
11972 /* Avoid any floating point extensions from EXP. */
11974 strip_float_extensions (tree exp
)
11976 tree sub
, expt
, subt
;
11978 /* For floating point constant look up the narrowest type that can hold
11979 it properly and handle it like (type)(narrowest_type)constant.
11980 This way we can optimize for instance a=a*2.0 where "a" is float
11981 but 2.0 is double constant. */
11982 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11984 REAL_VALUE_TYPE orig
;
11987 orig
= TREE_REAL_CST (exp
);
11988 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11989 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11990 type
= float_type_node
;
11991 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11992 > TYPE_PRECISION (double_type_node
)
11993 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11994 type
= double_type_node
;
11996 return build_real_truncate (type
, orig
);
11999 if (!CONVERT_EXPR_P (exp
))
12002 sub
= TREE_OPERAND (exp
, 0);
12003 subt
= TREE_TYPE (sub
);
12004 expt
= TREE_TYPE (exp
);
12006 if (!FLOAT_TYPE_P (subt
))
12009 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
12012 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
12015 return strip_float_extensions (sub
);
12018 /* Strip out all handled components that produce invariant
12022 strip_invariant_refs (const_tree op
)
12024 while (handled_component_p (op
))
12026 switch (TREE_CODE (op
))
12029 case ARRAY_RANGE_REF
:
12030 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
12031 || TREE_OPERAND (op
, 2) != NULL_TREE
12032 || TREE_OPERAND (op
, 3) != NULL_TREE
)
12036 case COMPONENT_REF
:
12037 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
12043 op
= TREE_OPERAND (op
, 0);
12049 /* Strip handled components with zero offset from OP. */
12052 strip_zero_offset_components (tree op
)
12054 while (TREE_CODE (op
) == COMPONENT_REF
12055 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op
, 1)))
12056 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op
, 1))))
12057 op
= TREE_OPERAND (op
, 0);
12061 static GTY(()) tree gcc_eh_personality_decl
;
12063 /* Return the GCC personality function decl. */
12066 lhd_gcc_personality (void)
12068 if (!gcc_eh_personality_decl
)
12069 gcc_eh_personality_decl
= build_personality_function ("gcc");
12070 return gcc_eh_personality_decl
;
12073 /* TARGET is a call target of GIMPLE call statement
12074 (obtained by gimple_call_fn). Return true if it is
12075 OBJ_TYPE_REF representing an virtual call of C++ method.
12076 (As opposed to OBJ_TYPE_REF representing objc calls
12077 through a cast where middle-end devirtualization machinery
12078 can't apply.) FOR_DUMP_P is true when being called from
12079 the dump routines. */
12082 virtual_method_call_p (const_tree target
, bool for_dump_p
)
12084 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
12086 tree t
= TREE_TYPE (target
);
12087 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
12089 if (TREE_CODE (t
) == FUNCTION_TYPE
)
12091 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
12092 /* If we do not have BINFO associated, it means that type was built
12093 without devirtualization enabled. Do not consider this a virtual
12095 if (!TYPE_BINFO (obj_type_ref_class (target
, for_dump_p
)))
12100 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12103 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
12106 tree base_binfo
, b
;
12108 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12109 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
12110 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
12112 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
12117 /* Try to find a base info of BINFO that would have its field decl at offset
12118 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12119 found, return, otherwise return NULL_TREE. */
12122 get_binfo_at_offset (tree binfo
, poly_int64 offset
, tree expected_type
)
12124 tree type
= BINFO_TYPE (binfo
);
12128 HOST_WIDE_INT pos
, size
;
12132 if (types_same_for_odr (type
, expected_type
))
12134 if (maybe_lt (offset
, 0))
12137 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
12139 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
12142 pos
= int_bit_position (fld
);
12143 size
= tree_to_uhwi (DECL_SIZE (fld
));
12144 if (known_in_range_p (offset
, pos
, size
))
12147 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
12150 /* Offset 0 indicates the primary base, whose vtable contents are
12151 represented in the binfo for the derived class. */
12152 else if (maybe_ne (offset
, 0))
12154 tree found_binfo
= NULL
, base_binfo
;
12155 /* Offsets in BINFO are in bytes relative to the whole structure
12156 while POS is in bits relative to the containing field. */
12157 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
12160 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12161 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
12162 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12164 found_binfo
= base_binfo
;
12168 binfo
= found_binfo
;
12170 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
12174 type
= TREE_TYPE (fld
);
12179 /* Returns true if X is a typedef decl. */
12182 is_typedef_decl (const_tree x
)
12184 return (x
&& TREE_CODE (x
) == TYPE_DECL
12185 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
12188 /* Returns true iff TYPE is a type variant created for a typedef. */
12191 typedef_variant_p (const_tree type
)
12193 return is_typedef_decl (TYPE_NAME (type
));
12196 /* PR 84195: Replace control characters in "unescaped" with their
12197 escaped equivalents. Allow newlines if -fmessage-length has
12198 been set to a non-zero value. This is done here, rather than
12199 where the attribute is recorded as the message length can
12200 change between these two locations. */
12203 escaped_string::escape (const char *unescaped
)
12206 size_t i
, new_i
, len
;
12211 m_str
= const_cast<char *> (unescaped
);
12214 if (unescaped
== NULL
|| *unescaped
== 0)
12217 len
= strlen (unescaped
);
12221 for (i
= 0; i
< len
; i
++)
12223 char c
= unescaped
[i
];
12228 escaped
[new_i
++] = c
;
12232 if (c
!= '\n' || !pp_is_wrapping_line (global_dc
->printer
))
12234 if (escaped
== NULL
)
12236 /* We only allocate space for a new string if we
12237 actually encounter a control character that
12238 needs replacing. */
12239 escaped
= (char *) xmalloc (len
* 2 + 1);
12240 strncpy (escaped
, unescaped
, i
);
12244 escaped
[new_i
++] = '\\';
12248 case '\a': escaped
[new_i
++] = 'a'; break;
12249 case '\b': escaped
[new_i
++] = 'b'; break;
12250 case '\f': escaped
[new_i
++] = 'f'; break;
12251 case '\n': escaped
[new_i
++] = 'n'; break;
12252 case '\r': escaped
[new_i
++] = 'r'; break;
12253 case '\t': escaped
[new_i
++] = 't'; break;
12254 case '\v': escaped
[new_i
++] = 'v'; break;
12255 default: escaped
[new_i
++] = '?'; break;
12259 escaped
[new_i
++] = c
;
12264 escaped
[new_i
] = 0;
12270 /* Warn about a use of an identifier which was marked deprecated. Returns
12271 whether a warning was given. */
12274 warn_deprecated_use (tree node
, tree attr
)
12276 escaped_string msg
;
12278 if (node
== 0 || !warn_deprecated_decl
)
12284 attr
= DECL_ATTRIBUTES (node
);
12285 else if (TYPE_P (node
))
12287 tree decl
= TYPE_STUB_DECL (node
);
12289 attr
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
12290 else if ((decl
= TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node
)))
12293 node
= TREE_TYPE (decl
);
12294 attr
= TYPE_ATTRIBUTES (node
);
12300 attr
= lookup_attribute ("deprecated", attr
);
12303 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12308 auto_diagnostic_group d
;
12310 w
= warning (OPT_Wdeprecated_declarations
,
12311 "%qD is deprecated: %s", node
, (const char *) msg
);
12313 w
= warning (OPT_Wdeprecated_declarations
,
12314 "%qD is deprecated", node
);
12316 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12318 else if (TYPE_P (node
))
12320 tree what
= NULL_TREE
;
12321 tree decl
= TYPE_STUB_DECL (node
);
12323 if (TYPE_NAME (node
))
12325 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12326 what
= TYPE_NAME (node
);
12327 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12328 && DECL_NAME (TYPE_NAME (node
)))
12329 what
= DECL_NAME (TYPE_NAME (node
));
12332 auto_diagnostic_group d
;
12336 w
= warning (OPT_Wdeprecated_declarations
,
12337 "%qE is deprecated: %s", what
, (const char *) msg
);
12339 w
= warning (OPT_Wdeprecated_declarations
,
12340 "%qE is deprecated", what
);
12345 w
= warning (OPT_Wdeprecated_declarations
,
12346 "type is deprecated: %s", (const char *) msg
);
12348 w
= warning (OPT_Wdeprecated_declarations
,
12349 "type is deprecated");
12353 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12359 /* Error out with an identifier which was marked 'unavailable'. */
12361 error_unavailable_use (tree node
, tree attr
)
12363 escaped_string msg
;
12371 attr
= DECL_ATTRIBUTES (node
);
12372 else if (TYPE_P (node
))
12374 tree decl
= TYPE_STUB_DECL (node
);
12376 attr
= lookup_attribute ("unavailable",
12377 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12382 attr
= lookup_attribute ("unavailable", attr
);
12385 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12389 auto_diagnostic_group d
;
12391 error ("%qD is unavailable: %s", node
, (const char *) msg
);
12393 error ("%qD is unavailable", node
);
12394 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12396 else if (TYPE_P (node
))
12398 tree what
= NULL_TREE
;
12399 tree decl
= TYPE_STUB_DECL (node
);
12401 if (TYPE_NAME (node
))
12403 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12404 what
= TYPE_NAME (node
);
12405 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12406 && DECL_NAME (TYPE_NAME (node
)))
12407 what
= DECL_NAME (TYPE_NAME (node
));
12410 auto_diagnostic_group d
;
12414 error ("%qE is unavailable: %s", what
, (const char *) msg
);
12416 error ("%qE is unavailable", what
);
12421 error ("type is unavailable: %s", (const char *) msg
);
12423 error ("type is unavailable");
12427 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12431 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12432 somewhere in it. */
12435 contains_bitfld_component_ref_p (const_tree ref
)
12437 while (handled_component_p (ref
))
12439 if (TREE_CODE (ref
) == COMPONENT_REF
12440 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12442 ref
= TREE_OPERAND (ref
, 0);
12448 /* Try to determine whether a TRY_CATCH expression can fall through.
12449 This is a subroutine of block_may_fallthru. */
12452 try_catch_may_fallthru (const_tree stmt
)
12454 tree_stmt_iterator i
;
12456 /* If the TRY block can fall through, the whole TRY_CATCH can
12458 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12461 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12462 switch (TREE_CODE (tsi_stmt (i
)))
12465 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12466 catch expression and a body. The whole TRY_CATCH may fall
12467 through iff any of the catch bodies falls through. */
12468 for (; !tsi_end_p (i
); tsi_next (&i
))
12470 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12475 case EH_FILTER_EXPR
:
12476 /* The exception filter expression only matters if there is an
12477 exception. If the exception does not match EH_FILTER_TYPES,
12478 we will execute EH_FILTER_FAILURE, and we will fall through
12479 if that falls through. If the exception does match
12480 EH_FILTER_TYPES, the stack unwinder will continue up the
12481 stack, so we will not fall through. We don't know whether we
12482 will throw an exception which matches EH_FILTER_TYPES or not,
12483 so we just ignore EH_FILTER_TYPES and assume that we might
12484 throw an exception which doesn't match. */
12485 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12488 /* This case represents statements to be executed when an
12489 exception occurs. Those statements are implicitly followed
12490 by a RESX statement to resume execution after the exception.
12491 So in this case the TRY_CATCH never falls through. */
12496 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12497 need not be 100% accurate; simply be conservative and return true if we
12498 don't know. This is used only to avoid stupidly generating extra code.
12499 If we're wrong, we'll just delete the extra code later. */
12502 block_may_fallthru (const_tree block
)
12504 /* This CONST_CAST is okay because expr_last returns its argument
12505 unmodified and we assign it to a const_tree. */
12506 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12508 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12512 /* Easy cases. If the last statement of the block implies
12513 control transfer, then we can't fall through. */
12517 /* If there is a default: label or case labels cover all possible
12518 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12519 to some case label in all cases and all we care is whether the
12520 SWITCH_BODY falls through. */
12521 if (SWITCH_ALL_CASES_P (stmt
))
12522 return block_may_fallthru (SWITCH_BODY (stmt
));
12526 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12528 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12531 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12533 case TRY_CATCH_EXPR
:
12534 return try_catch_may_fallthru (stmt
);
12536 case TRY_FINALLY_EXPR
:
12537 /* The finally clause is always executed after the try clause,
12538 so if it does not fall through, then the try-finally will not
12539 fall through. Otherwise, if the try clause does not fall
12540 through, then when the finally clause falls through it will
12541 resume execution wherever the try clause was going. So the
12542 whole try-finally will only fall through if both the try
12543 clause and the finally clause fall through. */
12544 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12545 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12548 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12551 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12552 stmt
= TREE_OPERAND (stmt
, 1);
12558 /* Functions that do not return do not fall through. */
12559 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12561 case CLEANUP_POINT_EXPR
:
12562 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12565 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12571 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */

bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
12593 /* Wrapper for tree_code_name to ensure that tree code is valid */
12595 get_tree_code_name (enum tree_code code
)
12597 const char *invalid
= "<invalid tree code>";
12599 /* The tree_code enum promotes to signed, but we could be getting
12600 invalid values, so force an unsigned comparison. */
12601 if (unsigned (code
) >= MAX_TREE_CODES
)
12603 if ((unsigned)code
== 0xa5a5)
12604 return "ggc_freed";
12608 return tree_code_name
[code
];
12611 /* Drops the TREE_OVERFLOW flag from T. */
12614 drop_tree_overflow (tree t
)
12616 gcc_checking_assert (TREE_OVERFLOW (t
));
12618 /* For tree codes with a sharing machinery re-build the result. */
12619 if (poly_int_tree_p (t
))
12620 return wide_int_to_tree (TREE_TYPE (t
), wi::to_poly_wide (t
));
12622 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12623 and canonicalize the result. */
12624 if (TREE_CODE (t
) == VECTOR_CST
)
12626 tree_vector_builder builder
;
12627 builder
.new_unary_operation (TREE_TYPE (t
), t
, true);
12628 unsigned int count
= builder
.encoded_nelts ();
12629 for (unsigned int i
= 0; i
< count
; ++i
)
12631 tree elt
= VECTOR_CST_ELT (t
, i
);
12632 if (TREE_OVERFLOW (elt
))
12633 elt
= drop_tree_overflow (elt
);
12634 builder
.quick_push (elt
);
12636 return builder
.build ();
12639 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12640 and drop the flag. */
12642 TREE_OVERFLOW (t
) = 0;
12644 /* For constants that contain nested constants, drop the flag
12645 from those as well. */
12646 if (TREE_CODE (t
) == COMPLEX_CST
)
12648 if (TREE_OVERFLOW (TREE_REALPART (t
)))
12649 TREE_REALPART (t
) = drop_tree_overflow (TREE_REALPART (t
));
12650 if (TREE_OVERFLOW (TREE_IMAGPART (t
)))
12651 TREE_IMAGPART (t
) = drop_tree_overflow (TREE_IMAGPART (t
));
12657 /* Given a memory reference expression T, return its base address.
12658 The base address of a memory reference expression is the main
12659 object being referenced. For instance, the base address for
12660 'array[i].fld[j]' is 'array'. You can think of this as stripping
12661 away the offset part from a memory address.
12663 This function calls handled_component_p to strip away all the inner
12664 parts of the memory reference until it reaches the base object. */
12667 get_base_address (tree t
)
12669 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12670 t
= TREE_OPERAND (t
, 0);
12671 while (handled_component_p (t
))
12672 t
= TREE_OPERAND (t
, 0);
12674 if ((TREE_CODE (t
) == MEM_REF
12675 || TREE_CODE (t
) == TARGET_MEM_REF
)
12676 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12677 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12682 /* Return a tree of sizetype representing the size, in bytes, of the element
12683 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12686 array_ref_element_size (tree exp
)
12688 tree aligned_size
= TREE_OPERAND (exp
, 3);
12689 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12690 location_t loc
= EXPR_LOCATION (exp
);
12692 /* If a size was specified in the ARRAY_REF, it's the size measured
12693 in alignment units of the element type. So multiply by that value. */
12696 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12697 sizetype from another type of the same width and signedness. */
12698 if (TREE_TYPE (aligned_size
) != sizetype
)
12699 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
12700 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
12701 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
12704 /* Otherwise, take the size from that of the element type. Substitute
12705 any PLACEHOLDER_EXPR that we have. */
12707 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
12710 /* Return a tree representing the lower bound of the array mentioned in
12711 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12714 array_ref_low_bound (tree exp
)
12716 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12718 /* If a lower bound is specified in EXP, use it. */
12719 if (TREE_OPERAND (exp
, 2))
12720 return TREE_OPERAND (exp
, 2);
12722 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12723 substituting for a PLACEHOLDER_EXPR as needed. */
12724 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12725 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
12727 /* Otherwise, return a zero of the appropriate type. */
12728 tree idxtype
= TREE_TYPE (TREE_OPERAND (exp
, 1));
12729 return (idxtype
== error_mark_node
12730 ? integer_zero_node
: build_int_cst (idxtype
, 0));
12733 /* Return a tree representing the upper bound of the array mentioned in
12734 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12737 array_ref_up_bound (tree exp
)
12739 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12741 /* If there is a domain type and it has an upper bound, use it, substituting
12742 for a PLACEHOLDER_EXPR as needed. */
12743 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
12744 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
12746 /* Otherwise fail. */
12750 /* Returns true if REF is an array reference, a component reference,
12751 or a memory reference to an array whose actual size might be larger
12752 than its upper bound implies, there are multiple cases:
12753 A. a ref to a flexible array member at the end of a structure;
12754 B. a ref to an array with a different type against the original decl;
12757 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 };
12758 (*((char(*)[16])&a[0]))[i+8]
12760 C. a ref to an array that was passed as a parameter;
12763 int test (uint8_t *p, uint32_t t[1][1], int n) {
12764 for (int i = 0; i < 4; i++, p++)
12767 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12771 array_ref_flexible_size_p (tree ref
, bool *is_trailing_array
/* = NULL */)
12773 /* The TYPE for this array referece. */
12774 tree atype
= NULL_TREE
;
12775 /* The FIELD_DECL for the array field in the containing structure. */
12776 tree afield_decl
= NULL_TREE
;
12777 /* Whether this array is the trailing array of a structure. */
12778 bool is_trailing_array_tmp
= false;
12779 if (!is_trailing_array
)
12780 is_trailing_array
= &is_trailing_array_tmp
;
12782 if (TREE_CODE (ref
) == ARRAY_REF
12783 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12785 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
12786 ref
= TREE_OPERAND (ref
, 0);
12788 else if (TREE_CODE (ref
) == COMPONENT_REF
12789 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
12791 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
12792 afield_decl
= TREE_OPERAND (ref
, 1);
12794 else if (TREE_CODE (ref
) == MEM_REF
)
12796 tree arg
= TREE_OPERAND (ref
, 0);
12797 if (TREE_CODE (arg
) == ADDR_EXPR
)
12798 arg
= TREE_OPERAND (arg
, 0);
12799 tree argtype
= TREE_TYPE (arg
);
12800 if (TREE_CODE (argtype
) == RECORD_TYPE
)
12802 if (tree fld
= last_field (argtype
))
12804 atype
= TREE_TYPE (fld
);
12806 if (TREE_CODE (atype
) != ARRAY_TYPE
)
12808 if (VAR_P (arg
) && DECL_SIZE (fld
))
12820 if (TREE_CODE (ref
) == STRING_CST
)
12823 tree ref_to_array
= ref
;
12824 while (handled_component_p (ref
))
12826 /* If the reference chain contains a component reference to a
12827 non-union type and there follows another field the reference
12828 is not at the end of a structure. */
12829 if (TREE_CODE (ref
) == COMPONENT_REF
)
12831 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
12833 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12834 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12835 nextf
= DECL_CHAIN (nextf
);
12840 /* If we have a multi-dimensional array we do not consider
12841 a non-innermost dimension as flex array if the whole
12842 multi-dimensional array is at struct end.
12843 Same for an array of aggregates with a trailing array
12845 else if (TREE_CODE (ref
) == ARRAY_REF
)
12847 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12849 /* If we view an underlying object as sth else then what we
12850 gathered up to now is what we have to rely on. */
12851 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
12854 gcc_unreachable ();
12856 ref
= TREE_OPERAND (ref
, 0);
12859 gcc_assert (!afield_decl
12860 || (afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
));
12862 /* The array now is at struct end. Treat flexible array member as
12863 always subject to extend, even into just padding constrained by
12864 an underlying decl. */
12865 if (! TYPE_SIZE (atype
)
12866 || ! TYPE_DOMAIN (atype
)
12867 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12869 *is_trailing_array
= afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
12870 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12873 /* If the reference is based on a declared entity, the size of the array
12874 is constrained by its given domain. (Do not trust commons PR/69368). */
12875 ref
= get_base_address (ref
);
12878 && !(flag_unconstrained_commons
12879 && VAR_P (ref
) && DECL_COMMON (ref
))
12880 && DECL_SIZE_UNIT (ref
)
12881 && TREE_CODE (DECL_SIZE_UNIT (ref
)) == INTEGER_CST
)
12883 /* If the object itself is the array it is not at struct end. */
12884 if (DECL_P (ref_to_array
))
12887 /* Check whether the array domain covers all of the available
12890 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype
))) != INTEGER_CST
12891 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
12892 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
)
12895 = afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
12896 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12898 if (! get_addr_base_and_unit_offset (ref_to_array
, &offset
))
12901 = afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
12902 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12905 /* If at least one extra element fits it is a flexarray. */
12906 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12907 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
)))
12909 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype
))),
12910 wi::to_offset (DECL_SIZE_UNIT (ref
)) - offset
))
12913 = afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
12914 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12920 *is_trailing_array
= afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
12921 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12925 /* Return a tree representing the offset, in bytes, of the field referenced
12926 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12929 component_ref_field_offset (tree exp
)
12931 tree aligned_offset
= TREE_OPERAND (exp
, 2);
12932 tree field
= TREE_OPERAND (exp
, 1);
12933 location_t loc
= EXPR_LOCATION (exp
);
12935 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12936 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12938 if (aligned_offset
)
12940 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12941 sizetype from another type of the same width and signedness. */
12942 if (TREE_TYPE (aligned_offset
) != sizetype
)
12943 aligned_offset
= fold_convert_loc (loc
, sizetype
, aligned_offset
);
12944 return size_binop_loc (loc
, MULT_EXPR
, aligned_offset
,
12945 size_int (DECL_OFFSET_ALIGN (field
)
12949 /* Otherwise, take the offset from that of the field. Substitute
12950 any PLACEHOLDER_EXPR that we have. */
12952 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
12955 /* Given the initializer INIT, return the initializer for the field
12956 DECL if it exists, otherwise null. Used to obtain the initializer
12957 for a flexible array member and determine its size. */
12960 get_initializer_for (tree init
, tree decl
)
12964 tree fld
, fld_init
;
12965 unsigned HOST_WIDE_INT i
;
12966 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), i
, fld
, fld_init
)
12971 if (TREE_CODE (fld
) == CONSTRUCTOR
)
12973 fld_init
= get_initializer_for (fld_init
, decl
);
12982 /* Determines the special array member type for the array reference REF. */
12983 special_array_member
12984 component_ref_sam_type (tree ref
)
12986 special_array_member sam_type
= special_array_member::none
;
12988 tree member
= TREE_OPERAND (ref
, 1);
12989 tree memsize
= DECL_SIZE_UNIT (member
);
12992 tree memtype
= TREE_TYPE (member
);
12993 if (TREE_CODE (memtype
) != ARRAY_TYPE
)
12996 bool trailing
= false;
12997 (void)array_ref_flexible_size_p (ref
, &trailing
);
12998 bool zero_length
= integer_zerop (memsize
);
12999 if (!trailing
&& !zero_length
)
13000 /* MEMBER is an interior array with
13001 more than one element. */
13002 return special_array_member::int_n
;
13007 return special_array_member::trail_0
;
13009 return special_array_member::int_0
;
13013 if (tree dom
= TYPE_DOMAIN (memtype
))
13014 if (tree min
= TYPE_MIN_VALUE (dom
))
13015 if (tree max
= TYPE_MAX_VALUE (dom
))
13016 if (TREE_CODE (min
) == INTEGER_CST
13017 && TREE_CODE (max
) == INTEGER_CST
)
13019 offset_int minidx
= wi::to_offset (min
);
13020 offset_int maxidx
= wi::to_offset (max
);
13021 offset_int neltsm1
= maxidx
- minidx
;
13023 /* MEMBER is a trailing array with more than
13025 return special_array_member::trail_n
;
13028 return special_array_member::trail_1
;
13035 /* Determines the size of the member referenced by the COMPONENT_REF
13036 REF, using its initializer expression if necessary in order to
13037 determine the size of an initialized flexible array member.
13038 If non-null, set *SAM to the type of special array member.
13039 Returns the size as sizetype (which might be zero for an object
13040 with an uninitialized flexible array member) or null if the size
13041 cannot be determined. */
13044 component_ref_size (tree ref
, special_array_member
*sam
/* = NULL */)
13046 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
13048 special_array_member sambuf
;
13051 *sam
= component_ref_sam_type (ref
);
13053 /* The object/argument referenced by the COMPONENT_REF and its type. */
13054 tree arg
= TREE_OPERAND (ref
, 0);
13055 tree argtype
= TREE_TYPE (arg
);
13056 /* The referenced member. */
13057 tree member
= TREE_OPERAND (ref
, 1);
13059 tree memsize
= DECL_SIZE_UNIT (member
);
13062 tree memtype
= TREE_TYPE (member
);
13063 if (TREE_CODE (memtype
) != ARRAY_TYPE
)
13064 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13065 to the type of a class with a virtual base which doesn't
13066 reflect the size of the virtual's members (see pr97595).
13067 If that's the case fail for now and implement something
13068 more robust in the future. */
13069 return (tree_int_cst_equal (memsize
, TYPE_SIZE_UNIT (memtype
))
13070 ? memsize
: NULL_TREE
);
13072 /* 2-or-more elements arrays are treated as normal arrays by default. */
13073 if (*sam
== special_array_member::int_n
13074 || *sam
== special_array_member::trail_n
)
13077 /* flag_strict_flex_arrays will control how to treat
13078 the trailing arrays as flexiable array members. */
13080 tree afield_decl
= TREE_OPERAND (ref
, 1);
13081 unsigned int strict_flex_array_level
13082 = strict_flex_array_level_of (afield_decl
);
13084 switch (strict_flex_array_level
)
13087 /* Treaing 0-length trailing arrays as normal array. */
13088 if (*sam
== special_array_member::trail_0
)
13089 return size_zero_node
;
13092 /* Treating 1-element trailing arrays as normal array. */
13093 if (*sam
== special_array_member::trail_1
)
13097 /* Treating 2-or-more elements trailing arrays as normal
13099 if (*sam
== special_array_member::trail_n
)
13105 gcc_unreachable ();
13108 if (*sam
== special_array_member::int_0
)
13109 memsize
= NULL_TREE
;
13111 /* For a reference to a flexible array member of a union
13112 use the size of the union instead of the size of the member. */
13113 if (TREE_CODE (argtype
) == UNION_TYPE
)
13114 memsize
= TYPE_SIZE_UNIT (argtype
);
13117 /* MEMBER is either a bona fide flexible array member, or a zero-length
13118 array member, or an array of length one treated as such. */
13120 /* If the reference is to a declared object and the member a true
13121 flexible array, try to determine its size from its initializer. */
13122 poly_int64 baseoff
= 0;
13123 tree base
= get_addr_base_and_unit_offset (ref
, &baseoff
);
13124 if (!base
|| !VAR_P (base
))
13126 if (*sam
!= special_array_member::int_0
)
13129 if (TREE_CODE (arg
) != COMPONENT_REF
)
13133 while (TREE_CODE (base
) == COMPONENT_REF
)
13134 base
= TREE_OPERAND (base
, 0);
13135 baseoff
= tree_to_poly_int64 (byte_position (TREE_OPERAND (ref
, 1)));
13138 /* BASE is the declared object of which MEMBER is either a member
13139 or that is cast to ARGTYPE (e.g., a char buffer used to store
13140 an ARGTYPE object). */
13141 tree basetype
= TREE_TYPE (base
);
13143 /* Determine the base type of the referenced object. If it's
13144 the same as ARGTYPE and MEMBER has a known size, return it. */
13145 tree bt
= basetype
;
13146 if (*sam
!= special_array_member::int_0
)
13147 while (TREE_CODE (bt
) == ARRAY_TYPE
)
13148 bt
= TREE_TYPE (bt
);
13149 bool typematch
= useless_type_conversion_p (argtype
, bt
);
13150 if (memsize
&& typematch
)
13153 memsize
= NULL_TREE
;
13156 /* MEMBER is a true flexible array member. Compute its size from
13157 the initializer of the BASE object if it has one. */
13158 if (tree init
= DECL_P (base
) ? DECL_INITIAL (base
) : NULL_TREE
)
13159 if (init
!= error_mark_node
)
13161 init
= get_initializer_for (init
, member
);
13164 memsize
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
13165 if (tree refsize
= TYPE_SIZE_UNIT (argtype
))
13167 /* Use the larger of the initializer size and the tail
13168 padding in the enclosing struct. */
13169 poly_int64 rsz
= tree_to_poly_int64 (refsize
);
13171 if (known_lt (tree_to_poly_int64 (memsize
), rsz
))
13172 memsize
= wide_int_to_tree (TREE_TYPE (memsize
), rsz
);
13184 && DECL_EXTERNAL (base
)
13186 && *sam
!= special_array_member::int_0
)
13187 /* The size of a flexible array member of an extern struct
13188 with no initializer cannot be determined (it's defined
13189 in another translation unit and can have an initializer
13190 with an arbitrary number of elements). */
13193 /* Use the size of the base struct or, for interior zero-length
13194 arrays, the size of the enclosing type. */
13195 memsize
= TYPE_SIZE_UNIT (bt
);
13197 else if (DECL_P (base
))
13198 /* Use the size of the BASE object (possibly an array of some
13199 other type such as char used to store the struct). */
13200 memsize
= DECL_SIZE_UNIT (base
);
13205 /* If the flexible array member has a known size use the greater
13206 of it and the tail padding in the enclosing struct.
13207 Otherwise, when the size of the flexible array member is unknown
13208 and the referenced object is not a struct, use the size of its
13209 type when known. This detects sizes of array buffers when cast
13210 to struct types with flexible array members. */
13213 if (!tree_fits_poly_int64_p (memsize
))
13215 poly_int64 memsz64
= memsize
? tree_to_poly_int64 (memsize
) : 0;
13216 if (known_lt (baseoff
, memsz64
))
13218 memsz64
-= baseoff
;
13219 return wide_int_to_tree (TREE_TYPE (memsize
), memsz64
);
13221 return size_zero_node
;
13224 /* Return "don't know" for an external non-array object since its
13225 flexible array member can be initialized to have any number of
13226 elements. Otherwise, return zero because the flexible array
13227 member has no elements. */
13228 return (DECL_P (base
)
13229 && DECL_EXTERNAL (base
)
13231 || TREE_CODE (basetype
) != ARRAY_TYPE
)
13232 ? NULL_TREE
: size_zero_node
);
13235 /* Return the machine mode of T. For vectors, returns the mode of the
13236 inner type. The main use case is to feed the result to HONOR_NANS,
13237 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13240 element_mode (const_tree t
)
13244 if (VECTOR_TYPE_P (t
) || TREE_CODE (t
) == COMPLEX_TYPE
)
13246 return TYPE_MODE (t
);
13249 /* Vector types need to re-check the target flags each time we report
13250 the machine mode. We need to do this because attribute target can
13251 change the result of vector_mode_supported_p and have_regs_of_mode
13252 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13253 change on a per-function basis. */
13254 /* ??? Possibly a better solution is to run through all the types
13255 referenced by a function and re-compute the TYPE_MODE once, rather
13256 than make the TYPE_MODE macro call a function. */
13259 vector_type_mode (const_tree t
)
13263 gcc_assert (TREE_CODE (t
) == VECTOR_TYPE
);
13265 mode
= t
->type_common
.mode
;
13266 if (VECTOR_MODE_P (mode
)
13267 && (!targetm
.vector_mode_supported_p (mode
)
13268 || !have_regs_of_mode
[mode
]))
13270 scalar_int_mode innermode
;
13272 /* For integers, try mapping it to a same-sized scalar mode. */
13273 if (is_int_mode (TREE_TYPE (t
)->type_common
.mode
, &innermode
))
13275 poly_int64 size
= (TYPE_VECTOR_SUBPARTS (t
)
13276 * GET_MODE_BITSIZE (innermode
));
13277 scalar_int_mode mode
;
13278 if (int_mode_for_size (size
, 0).exists (&mode
)
13279 && have_regs_of_mode
[mode
])
13289 /* Return the size in bits of each element of vector type TYPE. */
13292 vector_element_bits (const_tree type
)
13294 gcc_checking_assert (VECTOR_TYPE_P (type
));
13295 if (VECTOR_BOOLEAN_TYPE_P (type
))
13296 return TYPE_PRECISION (TREE_TYPE (type
));
13297 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type
)));
13300 /* Calculate the size in bits of each element of vector type TYPE
13301 and return the result as a tree of type bitsizetype. */
13304 vector_element_bits_tree (const_tree type
)
13306 gcc_checking_assert (VECTOR_TYPE_P (type
));
13307 if (VECTOR_BOOLEAN_TYPE_P (type
))
13308 return bitsize_int (vector_element_bits (type
));
13309 return TYPE_SIZE (TREE_TYPE (type
));
13312 /* Verify that basic properties of T match TV and thus T can be a variant of
13313 TV. TV should be the more specified variant (i.e. the main variant). */
13316 verify_type_variant (const_tree t
, tree tv
)
13318 /* Type variant can differ by:
13320 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13321 ENCODE_QUAL_ADDR_SPACE.
13322 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13323 in this case some values may not be set in the variant types
13324 (see TYPE_COMPLETE_P checks).
13325 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13326 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13327 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13328 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13329 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13330 this is necessary to make it possible to merge types form different TUs
13331 - arrays, pointers and references may have TREE_TYPE that is a variant
13332 of TREE_TYPE of their main variants.
13333 - aggregates may have new TYPE_FIELDS list that list variants of
13334 the main variant TYPE_FIELDS.
13335 - vector types may differ by TYPE_VECTOR_OPAQUE
13338 /* Convenience macro for matching individual fields. */
13339 #define verify_variant_match(flag) \
13341 if (flag (tv) != flag (t)) \
13343 error ("type variant differs by %s", #flag); \
13349 /* tree_base checks. */
13351 verify_variant_match (TREE_CODE
);
13352 /* FIXME: Ada builds non-artificial variants of artificial types. */
13354 if (TYPE_ARTIFICIAL (tv
))
13355 verify_variant_match (TYPE_ARTIFICIAL
);
13357 if (POINTER_TYPE_P (tv
))
13358 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
13359 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13360 verify_variant_match (TYPE_UNSIGNED
);
13361 verify_variant_match (TYPE_PACKED
);
13362 if (TREE_CODE (t
) == REFERENCE_TYPE
)
13363 verify_variant_match (TYPE_REF_IS_RVALUE
);
13364 if (AGGREGATE_TYPE_P (t
))
13365 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
13367 verify_variant_match (TYPE_SATURATING
);
13368 /* FIXME: This check trigger during libstdc++ build. */
13370 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
))
13371 verify_variant_match (TYPE_FINAL_P
);
13374 /* tree_type_common checks. */
13376 if (COMPLETE_TYPE_P (t
))
13378 verify_variant_match (TYPE_MODE
);
13379 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
13380 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
13381 verify_variant_match (TYPE_SIZE
);
13382 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
13383 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
13384 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
13386 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
13387 TYPE_SIZE_UNIT (tv
), 0));
13388 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13390 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13391 debug_tree (TYPE_SIZE_UNIT (tv
));
13392 error ("type%'s %<TYPE_SIZE_UNIT%>");
13393 debug_tree (TYPE_SIZE_UNIT (t
));
13396 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13398 verify_variant_match (TYPE_PRECISION
);
13399 if (RECORD_OR_UNION_TYPE_P (t
))
13400 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13401 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13402 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
13403 /* During LTO we merge variant lists from diferent translation units
13404 that may differ BY TYPE_CONTEXT that in turn may point
13405 to TRANSLATION_UNIT_DECL.
13406 Ada also builds variants of types with different TYPE_CONTEXT. */
13408 if (!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
))
13409 verify_variant_match (TYPE_CONTEXT
);
13411 if (TREE_CODE (t
) == ARRAY_TYPE
|| TREE_CODE (t
) == INTEGER_TYPE
)
13412 verify_variant_match (TYPE_STRING_FLAG
);
13413 if (TREE_CODE (t
) == RECORD_TYPE
|| TREE_CODE (t
) == UNION_TYPE
)
13414 verify_variant_match (TYPE_CXX_ODR_P
);
13415 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13417 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13422 /* tree_type_non_common checks. */
13424 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13425 and dangle the pointer from time to time. */
13426 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13427 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13428 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13430 error ("type variant has different %<TYPE_VFIELD%>");
13434 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13435 || TREE_CODE (t
) == INTEGER_TYPE
13436 || TREE_CODE (t
) == BOOLEAN_TYPE
13437 || TREE_CODE (t
) == REAL_TYPE
13438 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13440 verify_variant_match (TYPE_MAX_VALUE
);
13441 verify_variant_match (TYPE_MIN_VALUE
);
13443 if (TREE_CODE (t
) == METHOD_TYPE
)
13444 verify_variant_match (TYPE_METHOD_BASETYPE
);
13445 if (TREE_CODE (t
) == OFFSET_TYPE
)
13446 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13447 if (TREE_CODE (t
) == ARRAY_TYPE
)
13448 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
13449 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13450 or even type's main variant. This is needed to make bootstrap pass
13451 and the bug seems new in GCC 5.
13452 C++ FE should be updated to make this consistent and we should check
13453 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13454 is a match with main variant.
13456 Also disable the check for Java for now because of parser hack that builds
13457 first an dummy BINFO and then sometimes replace it by real BINFO in some
13459 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13460 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
13461 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13462 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13463 at LTO time only. */
13464 && (in_lto_p
&& odr_type_p (t
)))
13466 error ("type variant has different %<TYPE_BINFO%>");
13468 error ("type variant%'s %<TYPE_BINFO%>");
13469 debug_tree (TYPE_BINFO (tv
));
13470 error ("type%'s %<TYPE_BINFO%>");
13471 debug_tree (TYPE_BINFO (t
));
13475 /* Check various uses of TYPE_VALUES_RAW. */
13476 if (TREE_CODE (t
) == ENUMERAL_TYPE
13477 && TYPE_VALUES (t
))
13478 verify_variant_match (TYPE_VALUES
);
13479 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13480 verify_variant_match (TYPE_DOMAIN
);
13481 /* Permit incomplete variants of complete type. While FEs may complete
13482 all variants, this does not happen for C++ templates in all cases. */
13483 else if (RECORD_OR_UNION_TYPE_P (t
)
13484 && COMPLETE_TYPE_P (t
)
13485 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
13489 /* Fortran builds qualified variants as new records with items of
13490 qualified type. Verify that they looks same. */
13491 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
13493 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13494 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
13495 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
13496 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
13497 /* FIXME: gfc_nonrestricted_type builds all types as variants
13498 with exception of pointer types. It deeply copies the type
13499 which means that we may end up with a variant type
13500 referring non-variant pointer. We may change it to
13501 produce types as variants, too, like
13502 objc_get_protocol_qualified_type does. */
13503 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
13504 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
13505 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
13509 error ("type variant has different %<TYPE_FIELDS%>");
13511 error ("first mismatch is field");
13513 error ("and field");
13518 else if ((TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
))
13519 verify_variant_match (TYPE_ARG_TYPES
);
13520 /* For C++ the qualified variant of array type is really an array type
13521 of qualified TREE_TYPE.
13522 objc builds variants of pointer where pointer to type is a variant, too
13523 in objc_get_protocol_qualified_type. */
13524 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
13525 && ((TREE_CODE (t
) != ARRAY_TYPE
13526 && !POINTER_TYPE_P (t
))
13527 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
13528 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
13530 error ("type variant has different %<TREE_TYPE%>");
13532 error ("type variant%'s %<TREE_TYPE%>");
13533 debug_tree (TREE_TYPE (tv
));
13534 error ("type%'s %<TREE_TYPE%>");
13535 debug_tree (TREE_TYPE (t
));
13538 if (type_with_alias_set_p (t
)
13539 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13541 error ("type is not compatible with its variant");
13543 error ("type variant%'s %<TREE_TYPE%>");
13544 debug_tree (TREE_TYPE (tv
));
13545 error ("type%'s %<TREE_TYPE%>");
13546 debug_tree (TREE_TYPE (t
));
13550 #undef verify_variant_match
13554 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13555 the middle-end types_compatible_p function. It needs to avoid
13556 claiming types are different for types that should be treated
13557 the same with respect to TBAA. Canonical types are also used
13558 for IL consistency checks via the useless_type_conversion_p
13559 predicate which does not handle all type kinds itself but falls
13560 back to pointer-comparison of TYPE_CANONICAL for aggregates
13563 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13564 type calculation because we need to allow inter-operability between signed
13565 and unsigned variants. */
13568 type_with_interoperable_signedness (const_tree type
)
13570 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13571 signed char and unsigned char. Similarly fortran FE builds
13572 C_SIZE_T as signed type, while C defines it unsigned. */
13574 return tree_code_for_canonical_type_merging (TREE_CODE (type
))
13576 && (TYPE_PRECISION (type
) == TYPE_PRECISION (signed_char_type_node
)
13577 || TYPE_PRECISION (type
) == TYPE_PRECISION (size_type_node
));
13580 /* Return true iff T1 and T2 are structurally identical for what
13582 This function is used both by lto.cc canonical type merging and by the
13583 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13584 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13585 only for LTO because only in these cases TYPE_CANONICAL equivalence
13586 correspond to one defined by gimple_canonical_types_compatible_p. */
13589 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13590 bool trust_type_canonical
)
13592 /* Type variants should be same as the main variant. When not doing sanity
13593 checking to verify this fact, go to main variants and save some work. */
13594 if (trust_type_canonical
)
13596 t1
= TYPE_MAIN_VARIANT (t1
);
13597 t2
= TYPE_MAIN_VARIANT (t2
);
13600 /* Check first for the obvious case of pointer identity. */
13604 /* Check that we have two types to compare. */
13605 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13608 /* We consider complete types always compatible with incomplete type.
13609 This does not make sense for canonical type calculation and thus we
13610 need to ensure that we are never called on it.
13612 FIXME: For more correctness the function probably should have three modes
13613 1) mode assuming that types are complete mathcing their structure
13614 2) mode allowing incomplete types but producing equivalence classes
13615 and thus ignoring all info from complete types
13616 3) mode allowing incomplete types to match complete but checking
13617 compatibility between complete types.
13619 1 and 2 can be used for canonical type calculation. 3 is the real
13620 definition of type compatibility that can be used i.e. for warnings during
13621 declaration merging. */
13623 gcc_assert (!trust_type_canonical
13624 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13626 /* If the types have been previously registered and found equal
13629 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13630 && trust_type_canonical
)
13632 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13633 they are always NULL, but they are set to non-NULL for types
13634 constructed by build_pointer_type and variants. In this case the
13635 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13636 all pointers are considered equal. Be sure to not return false
13638 gcc_checking_assert (canonical_type_used_p (t1
)
13639 && canonical_type_used_p (t2
));
13640 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
13643 /* For types where we do ODR based TBAA the canonical type is always
13644 set correctly, so we know that types are different if their
13645 canonical types does not match. */
13646 if (trust_type_canonical
13647 && (odr_type_p (t1
) && odr_based_tbaa_p (t1
))
13648 != (odr_type_p (t2
) && odr_based_tbaa_p (t2
)))
13651 /* Can't be the same type if the types don't have the same code. */
13652 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13653 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13656 /* Qualifiers do not matter for canonical type comparison purposes. */
13658 /* Void types and nullptr types are always the same. */
13659 if (TREE_CODE (t1
) == VOID_TYPE
13660 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13663 /* Can't be the same type if they have different mode. */
13664 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13667 /* Non-aggregate types can be handled cheaply. */
13668 if (INTEGRAL_TYPE_P (t1
)
13669 || SCALAR_FLOAT_TYPE_P (t1
)
13670 || FIXED_POINT_TYPE_P (t1
)
13671 || TREE_CODE (t1
) == VECTOR_TYPE
13672 || TREE_CODE (t1
) == COMPLEX_TYPE
13673 || TREE_CODE (t1
) == OFFSET_TYPE
13674 || POINTER_TYPE_P (t1
))
13676 /* Can't be the same type if they have different recision. */
13677 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
13680 /* In some cases the signed and unsigned types are required to be
13682 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13683 && !type_with_interoperable_signedness (t1
))
13686 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13687 interoperable with "signed char". Unless all frontends are revisited
13688 to agree on these types, we must ignore the flag completely. */
13690 /* Fortran standard define C_PTR type that is compatible with every
13691 C pointer. For this reason we need to glob all pointers into one.
13692 Still pointers in different address spaces are not compatible. */
13693 if (POINTER_TYPE_P (t1
))
13695 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
13696 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
13700 /* Tail-recurse to components. */
13701 if (TREE_CODE (t1
) == VECTOR_TYPE
13702 || TREE_CODE (t1
) == COMPLEX_TYPE
)
13703 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
13705 trust_type_canonical
);
13710 /* Do type-specific comparisons. */
13711 switch (TREE_CODE (t1
))
13714 /* Array types are the same if the element types are the same and
13715 the number of elements are the same. */
13716 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13717 trust_type_canonical
)
13718 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
13719 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
13720 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
13724 tree i1
= TYPE_DOMAIN (t1
);
13725 tree i2
= TYPE_DOMAIN (t2
);
13727 /* For an incomplete external array, the type domain can be
13728 NULL_TREE. Check this condition also. */
13729 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
13731 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
13735 tree min1
= TYPE_MIN_VALUE (i1
);
13736 tree min2
= TYPE_MIN_VALUE (i2
);
13737 tree max1
= TYPE_MAX_VALUE (i1
);
13738 tree max2
= TYPE_MAX_VALUE (i2
);
13740 /* The minimum/maximum values have to be the same. */
13743 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
13744 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
13745 || operand_equal_p (min1
, min2
, 0))))
13748 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
13749 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
13750 || operand_equal_p (max1
, max2
, 0)))))
13758 case FUNCTION_TYPE
:
13759 /* Function types are the same if the return type and arguments types
13761 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13762 trust_type_canonical
))
13765 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
)
13766 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1
)
13767 == TYPE_NO_NAMED_ARGS_STDARG_P (t2
)))
13771 tree parms1
, parms2
;
13773 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
13775 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
13777 if (!gimple_canonical_types_compatible_p
13778 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
13779 trust_type_canonical
))
13783 if (parms1
|| parms2
)
13791 case QUAL_UNION_TYPE
:
13795 /* Don't try to compare variants of an incomplete type, before
13796 TYPE_FIELDS has been copied around. */
13797 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
13801 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
13804 /* For aggregate types, all the fields must be the same. */
13805 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
13807 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13809 /* Skip non-fields and zero-sized fields. */
13810 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
13812 && integer_zerop (DECL_SIZE (f1
)))))
13813 f1
= TREE_CHAIN (f1
);
13814 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
13816 && integer_zerop (DECL_SIZE (f2
)))))
13817 f2
= TREE_CHAIN (f2
);
13820 /* The fields must have the same name, offset and type. */
13821 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
13822 || !gimple_compare_field_offset (f1
, f2
)
13823 || !gimple_canonical_types_compatible_p
13824 (TREE_TYPE (f1
), TREE_TYPE (f2
),
13825 trust_type_canonical
))
13829 /* If one aggregate has more fields than the other, they
13830 are not the same. */
13838 /* Consider all types with language specific trees in them mutually
13839 compatible. This is executed only from verify_type and false
13840 positives can be tolerated. */
13841 gcc_assert (!in_lto_p
);
13846 /* For OPAQUE_TYPE T, it should have only size and alignment information
13847 and its mode should be of class MODE_OPAQUE. This function verifies
13848 these properties of T match TV which is the main variant of T and TC
13849 which is the canonical of T. */
13852 verify_opaque_type (const_tree t
, tree tv
, tree tc
)
13854 gcc_assert (OPAQUE_TYPE_P (t
));
13855 gcc_assert (tv
&& tv
== TYPE_MAIN_VARIANT (tv
));
13856 gcc_assert (tc
&& tc
== TYPE_CANONICAL (tc
));
13858 /* For an opaque type T1, check if some of its properties match
13859 the corresponding ones of the other opaque type T2, emit some
13860 error messages for those inconsistent ones. */
13861 auto check_properties_for_opaque_type
= [](const_tree t1
, tree t2
,
13862 const char *kind_msg
)
13864 if (!OPAQUE_TYPE_P (t2
))
13866 error ("type %s is not an opaque type", kind_msg
);
13870 if (!OPAQUE_MODE_P (TYPE_MODE (t2
)))
13872 error ("type %s is not with opaque mode", kind_msg
);
13876 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13878 error ("type %s differs by %<TYPE_MODE%>", kind_msg
);
13882 poly_uint64 t1_size
= tree_to_poly_uint64 (TYPE_SIZE (t1
));
13883 poly_uint64 t2_size
= tree_to_poly_uint64 (TYPE_SIZE (t2
));
13884 if (maybe_ne (t1_size
, t2_size
))
13886 error ("type %s differs by %<TYPE_SIZE%>", kind_msg
);
13890 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
))
13892 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg
);
13896 if (TYPE_USER_ALIGN (t1
) != TYPE_USER_ALIGN (t2
))
13898 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg
);
13905 check_properties_for_opaque_type (t
, tv
, "variant");
13908 check_properties_for_opaque_type (t
, tc
, "canonical");
13911 /* Verify type T. */
13914 verify_type (const_tree t
)
13916 bool error_found
= false;
13917 tree mv
= TYPE_MAIN_VARIANT (t
);
13918 tree ct
= TYPE_CANONICAL (t
);
13920 if (OPAQUE_TYPE_P (t
))
13922 verify_opaque_type (t
, mv
, ct
);
13928 error ("main variant is not defined");
13929 error_found
= true;
13931 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
13933 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13935 error_found
= true;
13937 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
13938 error_found
= true;
13942 else if (TYPE_CANONICAL (ct
) != ct
)
13944 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13946 error_found
= true;
13948 /* Method and function types cannot be used to address memory and thus
13949 TYPE_CANONICAL really matters only for determining useless conversions.
13951 FIXME: C++ FE produce declarations of builtin functions that are not
13952 compatible with main variants. */
13953 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
13956 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13957 with variably sized arrays because their sizes possibly
13958 gimplified to different variables. */
13959 && !variably_modified_type_p (ct
, NULL
)
13960 && !gimple_canonical_types_compatible_p (t
, ct
, false)
13961 && COMPLETE_TYPE_P (t
))
13963 error ("%<TYPE_CANONICAL%> is not compatible");
13965 error_found
= true;
13968 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
13969 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
13971 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13973 error_found
= true;
13975 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
13977 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13979 debug_tree (TYPE_MAIN_VARIANT (ct
));
13980 error_found
= true;
13984 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13985 if (RECORD_OR_UNION_TYPE_P (t
))
13987 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13988 and danagle the pointer from time to time. */
13989 if (TYPE_VFIELD (t
)
13990 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
13991 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
13993 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13994 debug_tree (TYPE_VFIELD (t
));
13995 error_found
= true;
13998 else if (TREE_CODE (t
) == POINTER_TYPE
)
14000 if (TYPE_NEXT_PTR_TO (t
)
14001 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
14003 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14004 debug_tree (TYPE_NEXT_PTR_TO (t
));
14005 error_found
= true;
14008 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
14010 if (TYPE_NEXT_REF_TO (t
)
14011 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
14013 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14014 debug_tree (TYPE_NEXT_REF_TO (t
));
14015 error_found
= true;
14018 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
14019 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
14021 /* FIXME: The following check should pass:
14022 useless_type_conversion_p (const_cast <tree> (t),
14023 TREE_TYPE (TYPE_MIN_VALUE (t))
14024 but does not for C sizetypes in LTO. */
14027 /* Check various uses of TYPE_MAXVAL_RAW. */
14028 if (RECORD_OR_UNION_TYPE_P (t
))
14030 if (!TYPE_BINFO (t
))
14032 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
14034 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14035 debug_tree (TYPE_BINFO (t
));
14036 error_found
= true;
14038 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
14040 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14041 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
14042 error_found
= true;
14045 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
14047 if (TYPE_METHOD_BASETYPE (t
)
14048 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
14049 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
14051 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14052 debug_tree (TYPE_METHOD_BASETYPE (t
));
14053 error_found
= true;
14056 else if (TREE_CODE (t
) == OFFSET_TYPE
)
14058 if (TYPE_OFFSET_BASETYPE (t
)
14059 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
14060 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
14062 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14063 debug_tree (TYPE_OFFSET_BASETYPE (t
));
14064 error_found
= true;
14067 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
14068 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
14070 /* FIXME: The following check should pass:
14071 useless_type_conversion_p (const_cast <tree> (t),
14072 TREE_TYPE (TYPE_MAX_VALUE (t))
14073 but does not for C sizetypes in LTO. */
14075 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14077 if (TYPE_ARRAY_MAX_SIZE (t
)
14078 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
14080 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14081 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
14082 error_found
= true;
14085 else if (TYPE_MAX_VALUE_RAW (t
))
14087 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14088 debug_tree (TYPE_MAX_VALUE_RAW (t
));
14089 error_found
= true;
14092 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
14094 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14095 debug_tree (TYPE_LANG_SLOT_1 (t
));
14096 error_found
= true;
14099 /* Check various uses of TYPE_VALUES_RAW. */
14100 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
14101 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
14103 tree value
= TREE_VALUE (l
);
14104 tree name
= TREE_PURPOSE (l
);
14106 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14107 CONST_DECL of ENUMERAL TYPE. */
14108 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
14110 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14111 debug_tree (value
);
14113 error_found
= true;
14115 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
14116 && TREE_CODE (TREE_TYPE (value
)) != BOOLEAN_TYPE
14117 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
14119 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14121 debug_tree (value
);
14123 error_found
= true;
14125 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
14127 error ("enum value name is not %<IDENTIFIER_NODE%>");
14128 debug_tree (value
);
14130 error_found
= true;
14133 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14135 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
14137 error ("array %<TYPE_DOMAIN%> is not integer type");
14138 debug_tree (TYPE_DOMAIN (t
));
14139 error_found
= true;
14142 else if (RECORD_OR_UNION_TYPE_P (t
))
14144 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
14146 error ("%<TYPE_FIELDS%> defined in incomplete type");
14147 error_found
= true;
14149 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
14151 /* TODO: verify properties of decls. */
14152 if (TREE_CODE (fld
) == FIELD_DECL
)
14154 else if (TREE_CODE (fld
) == TYPE_DECL
)
14156 else if (TREE_CODE (fld
) == CONST_DECL
)
14158 else if (VAR_P (fld
))
14160 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
14162 else if (TREE_CODE (fld
) == USING_DECL
)
14164 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
14168 error ("wrong tree in %<TYPE_FIELDS%> list");
14170 error_found
= true;
14174 else if (TREE_CODE (t
) == INTEGER_TYPE
14175 || TREE_CODE (t
) == BOOLEAN_TYPE
14176 || TREE_CODE (t
) == OFFSET_TYPE
14177 || TREE_CODE (t
) == REFERENCE_TYPE
14178 || TREE_CODE (t
) == NULLPTR_TYPE
14179 || TREE_CODE (t
) == POINTER_TYPE
)
14181 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
14183 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14185 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
14186 error_found
= true;
14188 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
14190 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14191 debug_tree (TYPE_CACHED_VALUES (t
));
14192 error_found
= true;
14194 /* Verify just enough of cache to ensure that no one copied it to new type.
14195 All copying should go by copy_node that should clear it. */
14196 else if (TYPE_CACHED_VALUES_P (t
))
14199 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
14200 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
14201 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
14203 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14204 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
14205 error_found
= true;
14210 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
14211 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
14213 /* C++ FE uses TREE_PURPOSE to store initial values. */
14214 if (TREE_PURPOSE (l
) && in_lto_p
)
14216 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14218 error_found
= true;
14220 if (!TYPE_P (TREE_VALUE (l
)))
14222 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14224 error_found
= true;
14227 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
14229 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14230 debug_tree (TYPE_VALUES_RAW (t
));
14231 error_found
= true;
14233 if (TREE_CODE (t
) != INTEGER_TYPE
14234 && TREE_CODE (t
) != BOOLEAN_TYPE
14235 && TREE_CODE (t
) != OFFSET_TYPE
14236 && TREE_CODE (t
) != REFERENCE_TYPE
14237 && TREE_CODE (t
) != NULLPTR_TYPE
14238 && TREE_CODE (t
) != POINTER_TYPE
14239 && TYPE_CACHED_VALUES_P (t
))
14241 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14242 error_found
= true;
14245 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14246 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14248 if (TREE_CODE (t
) == METHOD_TYPE
14249 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
14251 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14252 error_found
= true;
14257 debug_tree (const_cast <tree
> (t
));
14258 internal_error ("%qs failed", __func__
);
14263 /* Return 1 if ARG interpreted as signed in its precision is known to be
14264 always positive or 2 if ARG is known to be always negative, or 3 if
14265 ARG may be positive or negative. */
14268 get_range_pos_neg (tree arg
)
14270 if (arg
== error_mark_node
)
14273 int prec
= TYPE_PRECISION (TREE_TYPE (arg
));
14275 if (TREE_CODE (arg
) == INTEGER_CST
)
14277 wide_int w
= wi::sext (wi::to_wide (arg
), prec
);
14283 while (CONVERT_EXPR_P (arg
)
14284 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
14285 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg
, 0))) <= prec
)
14287 arg
= TREE_OPERAND (arg
, 0);
14288 /* Narrower value zero extended into wider type
14289 will always result in positive values. */
14290 if (TYPE_UNSIGNED (TREE_TYPE (arg
))
14291 && TYPE_PRECISION (TREE_TYPE (arg
)) < prec
)
14293 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
14298 if (TREE_CODE (arg
) != SSA_NAME
)
14301 while (!get_global_range_query ()->range_of_expr (r
, arg
) || r
.kind () != VR_RANGE
)
14303 gimple
*g
= SSA_NAME_DEF_STMT (arg
);
14304 if (is_gimple_assign (g
)
14305 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g
)))
14307 tree t
= gimple_assign_rhs1 (g
);
14308 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
14309 && TYPE_PRECISION (TREE_TYPE (t
)) <= prec
)
14311 if (TYPE_UNSIGNED (TREE_TYPE (t
))
14312 && TYPE_PRECISION (TREE_TYPE (t
)) < prec
)
14314 prec
= TYPE_PRECISION (TREE_TYPE (t
));
14323 if (TYPE_UNSIGNED (TREE_TYPE (arg
)))
14325 /* For unsigned values, the "positive" range comes
14326 below the "negative" range. */
14327 if (!wi::neg_p (wi::sext (r
.upper_bound (), prec
), SIGNED
))
14329 if (wi::neg_p (wi::sext (r
.lower_bound (), prec
), SIGNED
))
14334 if (!wi::neg_p (wi::sext (r
.lower_bound (), prec
), SIGNED
))
14336 if (wi::neg_p (wi::sext (r
.upper_bound (), prec
), SIGNED
))
14345 /* Return true if ARG is marked with the nonnull attribute in the
14346 current function signature. */
14349 nonnull_arg_p (const_tree arg
)
14351 tree t
, attrs
, fntype
;
14352 unsigned HOST_WIDE_INT arg_num
;
14354 gcc_assert (TREE_CODE (arg
) == PARM_DECL
14355 && (POINTER_TYPE_P (TREE_TYPE (arg
))
14356 || TREE_CODE (TREE_TYPE (arg
)) == OFFSET_TYPE
));
14358 /* The static chain decl is always non null. */
14359 if (arg
== cfun
->static_chain_decl
)
14362 /* THIS argument of method is always non-NULL. */
14363 if (TREE_CODE (TREE_TYPE (cfun
->decl
)) == METHOD_TYPE
14364 && arg
== DECL_ARGUMENTS (cfun
->decl
)
14365 && flag_delete_null_pointer_checks
)
14368 /* Values passed by reference are always non-NULL. */
14369 if (TREE_CODE (TREE_TYPE (arg
)) == REFERENCE_TYPE
14370 && flag_delete_null_pointer_checks
)
14373 fntype
= TREE_TYPE (cfun
->decl
);
14374 for (attrs
= TYPE_ATTRIBUTES (fntype
); attrs
; attrs
= TREE_CHAIN (attrs
))
14376 attrs
= lookup_attribute ("nonnull", attrs
);
14378 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14379 if (attrs
== NULL_TREE
)
14382 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14383 if (TREE_VALUE (attrs
) == NULL_TREE
)
14386 /* Get the position number for ARG in the function signature. */
14387 for (arg_num
= 1, t
= DECL_ARGUMENTS (cfun
->decl
);
14389 t
= DECL_CHAIN (t
), arg_num
++)
14395 gcc_assert (t
== arg
);
14397 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14398 for (t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
14400 if (compare_tree_int (TREE_VALUE (t
), arg_num
) == 0)
14408 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14412 set_block (location_t loc
, tree block
)
14414 location_t pure_loc
= get_pure_location (loc
);
14415 source_range src_range
= get_range_from_loc (line_table
, loc
);
14416 unsigned discriminator
= get_discriminator_from_loc (line_table
, loc
);
14417 return COMBINE_LOCATION_DATA (line_table
, pure_loc
, src_range
, block
, discriminator
);
14421 set_source_range (tree expr
, location_t start
, location_t finish
)
14423 source_range src_range
;
14424 src_range
.m_start
= start
;
14425 src_range
.m_finish
= finish
;
14426 return set_source_range (expr
, src_range
);
14430 set_source_range (tree expr
, source_range src_range
)
14432 if (!EXPR_P (expr
))
14433 return UNKNOWN_LOCATION
;
14435 location_t expr_location
= EXPR_LOCATION (expr
);
14436 location_t pure_loc
= get_pure_location (expr_location
);
14437 unsigned discriminator
= get_discriminator_from_loc (expr_location
);
14438 location_t adhoc
= COMBINE_LOCATION_DATA (line_table
,
14443 SET_EXPR_LOCATION (expr
, adhoc
);
14447 /* Return EXPR, potentially wrapped with a node expression LOC,
14448 if !CAN_HAVE_LOCATION_P (expr).
14450 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14451 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14453 Wrapper nodes can be identified using location_wrapper_p. */
14456 maybe_wrap_with_location (tree expr
, location_t loc
)
14460 if (loc
== UNKNOWN_LOCATION
)
14462 if (CAN_HAVE_LOCATION_P (expr
))
14464 /* We should only be adding wrappers for constants and for decls,
14465 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14466 gcc_assert (CONSTANT_CLASS_P (expr
)
14468 || EXCEPTIONAL_CLASS_P (expr
));
14470 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14471 any impact of the wrapper nodes. */
14472 if (EXCEPTIONAL_CLASS_P (expr
) || error_operand_p (expr
))
14475 /* Compiler-generated temporary variables don't need a wrapper. */
14476 if (DECL_P (expr
) && DECL_ARTIFICIAL (expr
) && DECL_IGNORED_P (expr
))
14479 /* If any auto_suppress_location_wrappers are active, don't create
14481 if (suppress_location_wrappers
> 0)
14485 = (((CONSTANT_CLASS_P (expr
) && TREE_CODE (expr
) != STRING_CST
)
14486 || (TREE_CODE (expr
) == CONST_DECL
&& !TREE_STATIC (expr
)))
14487 ? NON_LVALUE_EXPR
: VIEW_CONVERT_EXPR
);
14488 tree wrapper
= build1_loc (loc
, code
, TREE_TYPE (expr
), expr
);
14489 /* Mark this node as being a wrapper. */
14490 EXPR_LOCATION_WRAPPER_P (wrapper
) = 1;
/* Count of the number of active auto_suppress_location_wrappers;
   while positive, maybe_wrap_with_location returns EXPR unwrapped.  */
int suppress_location_wrappers;
14496 /* Return the name of combined function FN, for debugging purposes. */
14499 combined_fn_name (combined_fn fn
)
14501 if (builtin_fn_p (fn
))
14503 tree fndecl
= builtin_decl_explicit (as_builtin_fn (fn
));
14504 return IDENTIFIER_POINTER (DECL_NAME (fndecl
));
14507 return internal_fn_name (as_internal_fn (fn
));
14510 /* Return a bitmap with a bit set corresponding to each argument in
14511 a function call type FNTYPE declared with attribute nonnull,
14512 or null if none of the function's argument are nonnull. The caller
14513 must free the bitmap. */
14516 get_nonnull_args (const_tree fntype
)
14518 if (fntype
== NULL_TREE
)
14521 bitmap argmap
= NULL
;
14522 if (TREE_CODE (fntype
) == METHOD_TYPE
)
14524 /* The this pointer in C++ non-static member functions is
14525 implicitly nonnull whether or not it's declared as such. */
14526 argmap
= BITMAP_ALLOC (NULL
);
14527 bitmap_set_bit (argmap
, 0);
14530 tree attrs
= TYPE_ATTRIBUTES (fntype
);
14534 /* A function declaration can specify multiple attribute nonnull,
14535 each with zero or more arguments. The loop below creates a bitmap
14536 representing a union of all the arguments. An empty (but non-null)
14537 bitmap means that all arguments have been declaraed nonnull. */
14538 for ( ; attrs
; attrs
= TREE_CHAIN (attrs
))
14540 attrs
= lookup_attribute ("nonnull", attrs
);
14545 argmap
= BITMAP_ALLOC (NULL
);
14547 if (!TREE_VALUE (attrs
))
14549 /* Clear the bitmap in case a previous attribute nonnull
14550 set it and this one overrides it for all arguments. */
14551 bitmap_clear (argmap
);
14555 /* Iterate over the indices of the format arguments declared nonnull
14556 and set a bit for each. */
14557 for (tree idx
= TREE_VALUE (attrs
); idx
; idx
= TREE_CHAIN (idx
))
14559 unsigned int val
= TREE_INT_CST_LOW (TREE_VALUE (idx
)) - 1;
14560 bitmap_set_bit (argmap
, val
);
14567 /* Returns true if TYPE is a type where it and all of its subobjects
14568 (recursively) are of structure, union, or array type. */
14571 is_empty_type (const_tree type
)
14573 if (RECORD_OR_UNION_TYPE_P (type
))
14575 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
14576 if (TREE_CODE (field
) == FIELD_DECL
14577 && !DECL_PADDING_P (field
)
14578 && !is_empty_type (TREE_TYPE (field
)))
14582 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14583 return (integer_minus_onep (array_type_nelts (type
))
14584 || TYPE_DOMAIN (type
) == NULL_TREE
14585 || is_empty_type (TREE_TYPE (type
)));
14589 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14590 that shouldn't be passed via stack. */
14593 default_is_empty_record (const_tree type
)
14595 if (!abi_version_at_least (12))
14598 if (type
== error_mark_node
)
14601 if (TREE_ADDRESSABLE (type
))
14604 return is_empty_type (TYPE_MAIN_VARIANT (type
));
14607 /* Determine whether TYPE is a structure with a flexible array member,
14608 or a union containing such a structure (possibly recursively). */
14611 flexible_array_type_p (const_tree type
)
14614 switch (TREE_CODE (type
))
14618 for (x
= TYPE_FIELDS (type
); x
!= NULL_TREE
; x
= DECL_CHAIN (x
))
14619 if (TREE_CODE (x
) == FIELD_DECL
)
14621 if (last
== NULL_TREE
)
14623 if (TREE_CODE (TREE_TYPE (last
)) == ARRAY_TYPE
14624 && TYPE_SIZE (TREE_TYPE (last
)) == NULL_TREE
14625 && TYPE_DOMAIN (TREE_TYPE (last
)) != NULL_TREE
14626 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last
))) == NULL_TREE
)
14630 for (x
= TYPE_FIELDS (type
); x
!= NULL_TREE
; x
= DECL_CHAIN (x
))
14632 if (TREE_CODE (x
) == FIELD_DECL
14633 && flexible_array_type_p (TREE_TYPE (x
)))
14642 /* Like int_size_in_bytes, but handle empty records specially. */
14645 arg_int_size_in_bytes (const_tree type
)
14647 return TYPE_EMPTY_P (type
) ? 0 : int_size_in_bytes (type
);
14650 /* Like size_in_bytes, but handle empty records specially. */
14653 arg_size_in_bytes (const_tree type
)
14655 return TYPE_EMPTY_P (type
) ? size_zero_node
: size_in_bytes (type
);
14658 /* Return true if an expression with CODE has to have the same result type as
14659 its first operand. */
14662 expr_type_first_operand_type_p (tree_code code
)
14675 case TRUNC_DIV_EXPR
:
14676 case CEIL_DIV_EXPR
:
14677 case FLOOR_DIV_EXPR
:
14678 case ROUND_DIV_EXPR
:
14679 case TRUNC_MOD_EXPR
:
14680 case CEIL_MOD_EXPR
:
14681 case FLOOR_MOD_EXPR
:
14682 case ROUND_MOD_EXPR
:
14684 case EXACT_DIV_EXPR
:
14702 /* Return a typenode for the "standard" C type with a given name. */
14704 get_typenode_from_name (const char *name
)
14706 if (name
== NULL
|| *name
== '\0')
14709 if (strcmp (name
, "char") == 0)
14710 return char_type_node
;
14711 if (strcmp (name
, "unsigned char") == 0)
14712 return unsigned_char_type_node
;
14713 if (strcmp (name
, "signed char") == 0)
14714 return signed_char_type_node
;
14716 if (strcmp (name
, "short int") == 0)
14717 return short_integer_type_node
;
14718 if (strcmp (name
, "short unsigned int") == 0)
14719 return short_unsigned_type_node
;
14721 if (strcmp (name
, "int") == 0)
14722 return integer_type_node
;
14723 if (strcmp (name
, "unsigned int") == 0)
14724 return unsigned_type_node
;
14726 if (strcmp (name
, "long int") == 0)
14727 return long_integer_type_node
;
14728 if (strcmp (name
, "long unsigned int") == 0)
14729 return long_unsigned_type_node
;
14731 if (strcmp (name
, "long long int") == 0)
14732 return long_long_integer_type_node
;
14733 if (strcmp (name
, "long long unsigned int") == 0)
14734 return long_long_unsigned_type_node
;
14736 gcc_unreachable ();
14739 /* List of pointer types used to declare builtins before we have seen their
14742 Keep the size up to date in tree.h ! */
14743 const builtin_structptr_type builtin_structptr_types
[6] =
14745 { fileptr_type_node
, ptr_type_node
, "FILE" },
14746 { const_tm_ptr_type_node
, const_ptr_type_node
, "tm" },
14747 { fenv_t_ptr_type_node
, ptr_type_node
, "fenv_t" },
14748 { const_fenv_t_ptr_type_node
, const_ptr_type_node
, "fenv_t" },
14749 { fexcept_t_ptr_type_node
, ptr_type_node
, "fexcept_t" },
14750 { const_fexcept_t_ptr_type_node
, const_ptr_type_node
, "fexcept_t" }
14753 /* Return the maximum object size. */
14756 max_object_size (void)
14758 /* To do: Make this a configurable parameter. */
14759 return TYPE_MAX_VALUE (ptrdiff_type_node
);
14762 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14763 parameter default to false and that weeds out error_mark_node. */
14766 verify_type_context (location_t loc
, type_context_kind context
,
14767 const_tree type
, bool silent_p
)
14769 if (type
== error_mark_node
)
14772 gcc_assert (TYPE_P (type
));
14773 return (!targetm
.verify_type_context
14774 || targetm
.verify_type_context (loc
, context
, type
, silent_p
));
14777 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14778 delete operators. Return false if they may or may not name such
14779 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14783 valid_new_delete_pair_p (tree new_asm
, tree delete_asm
,
14784 bool *pcertain
/* = NULL */)
14788 pcertain
= &certain
;
14790 const char *new_name
= IDENTIFIER_POINTER (new_asm
);
14791 const char *delete_name
= IDENTIFIER_POINTER (delete_asm
);
14792 unsigned int new_len
= IDENTIFIER_LENGTH (new_asm
);
14793 unsigned int delete_len
= IDENTIFIER_LENGTH (delete_asm
);
14795 /* The following failures are due to invalid names so they're not
14796 considered certain mismatches. */
14799 if (new_len
< 5 || delete_len
< 6)
14801 if (new_name
[0] == '_')
14802 ++new_name
, --new_len
;
14803 if (new_name
[0] == '_')
14804 ++new_name
, --new_len
;
14805 if (delete_name
[0] == '_')
14806 ++delete_name
, --delete_len
;
14807 if (delete_name
[0] == '_')
14808 ++delete_name
, --delete_len
;
14809 if (new_len
< 4 || delete_len
< 5)
14812 /* The following failures are due to names of user-defined operators
14813 so they're also not considered certain mismatches. */
14815 /* *_len is now just the length after initial underscores. */
14816 if (new_name
[0] != 'Z' || new_name
[1] != 'n')
14818 if (delete_name
[0] != 'Z' || delete_name
[1] != 'd')
14821 /* The following failures are certain mismatches. */
14824 /* _Znw must match _Zdl, _Zna must match _Zda. */
14825 if ((new_name
[2] != 'w' || delete_name
[2] != 'l')
14826 && (new_name
[2] != 'a' || delete_name
[2] != 'a'))
14828 /* 'j', 'm' and 'y' correspond to size_t. */
14829 if (new_name
[3] != 'j' && new_name
[3] != 'm' && new_name
[3] != 'y')
14831 if (delete_name
[3] != 'P' || delete_name
[4] != 'v')
14834 || (new_len
== 18 && !memcmp (new_name
+ 4, "RKSt9nothrow_t", 14)))
14836 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14837 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14838 if (delete_len
== 5)
14840 if (delete_len
== 6 && delete_name
[5] == new_name
[3])
14842 if (delete_len
== 19 && !memcmp (delete_name
+ 5, "RKSt9nothrow_t", 14))
14845 else if ((new_len
== 19 && !memcmp (new_name
+ 4, "St11align_val_t", 15))
14847 && !memcmp (new_name
+ 4, "St11align_val_tRKSt9nothrow_t", 29)))
14849 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14850 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or or
14851 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14852 if (delete_len
== 20 && !memcmp (delete_name
+ 5, "St11align_val_t", 15))
14854 if (delete_len
== 21
14855 && delete_name
[5] == new_name
[3]
14856 && !memcmp (delete_name
+ 6, "St11align_val_t", 15))
14858 if (delete_len
== 34
14859 && !memcmp (delete_name
+ 5, "St11align_val_tRKSt9nothrow_t", 29))
14863 /* The negative result is conservative. */
14868 /* Return the zero-based number corresponding to the argument being
14869 deallocated if FNDECL is a deallocation function or an out-of-bounds
14870 value if it isn't. */
14873 fndecl_dealloc_argno (tree fndecl
)
14875 /* A call to operator delete isn't recognized as one to a built-in. */
14876 if (DECL_IS_OPERATOR_DELETE_P (fndecl
))
14878 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl
))
14881 /* Avoid placement delete that's not been inlined. */
14882 tree fname
= DECL_ASSEMBLER_NAME (fndecl
);
14883 if (id_equal (fname
, "_ZdlPvS_") // ordinary form
14884 || id_equal (fname
, "_ZdaPvS_")) // array form
14889 /* TODO: Handle user-defined functions with attribute malloc? Handle
14890 known non-built-ins like fopen? */
14891 if (fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14893 switch (DECL_FUNCTION_CODE (fndecl
))
14895 case BUILT_IN_FREE
:
14896 case BUILT_IN_REALLOC
:
14904 tree attrs
= DECL_ATTRIBUTES (fndecl
);
14908 for (tree atfree
= attrs
;
14909 (atfree
= lookup_attribute ("*dealloc", atfree
));
14910 atfree
= TREE_CHAIN (atfree
))
14912 tree alloc
= TREE_VALUE (atfree
);
14916 tree pos
= TREE_CHAIN (alloc
);
14920 pos
= TREE_VALUE (pos
);
14921 return TREE_INT_CST_LOW (pos
) - 1;
14927 /* If EXPR refers to a character array or pointer declared attribute
14928 nonstring, return a decl for that array or pointer and set *REF
14929 to the referenced enclosing object or pointer. Otherwise return
14933 get_attr_nonstring_decl (tree expr
, tree
*ref
)
14936 tree var
= NULL_TREE
;
14937 if (TREE_CODE (decl
) == SSA_NAME
)
14939 gimple
*def
= SSA_NAME_DEF_STMT (decl
);
14941 if (is_gimple_assign (def
))
14943 tree_code code
= gimple_assign_rhs_code (def
);
14944 if (code
== ADDR_EXPR
14945 || code
== COMPONENT_REF
14946 || code
== VAR_DECL
)
14947 decl
= gimple_assign_rhs1 (def
);
14950 var
= SSA_NAME_VAR (decl
);
14953 if (TREE_CODE (decl
) == ADDR_EXPR
)
14954 decl
= TREE_OPERAND (decl
, 0);
14956 /* To simplify calling code, store the referenced DECL regardless of
14957 the attribute determined below, but avoid storing the SSA_NAME_VAR
14958 obtained above (it's not useful for dataflow purposes). */
14962 /* Use the SSA_NAME_VAR that was determined above to see if it's
14963 declared nonstring. Otherwise drill down into the referenced
14967 else if (TREE_CODE (decl
) == ARRAY_REF
)
14968 decl
= TREE_OPERAND (decl
, 0);
14969 else if (TREE_CODE (decl
) == COMPONENT_REF
)
14970 decl
= TREE_OPERAND (decl
, 1);
14971 else if (TREE_CODE (decl
) == MEM_REF
)
14972 return get_attr_nonstring_decl (TREE_OPERAND (decl
, 0), ref
);
14975 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl
)))
14981 /* Return length of attribute names string,
14982 if arglist chain > 1, -1 otherwise. */
14985 get_target_clone_attr_len (tree arglist
)
14988 int str_len_sum
= 0;
14991 for (arg
= arglist
; arg
; arg
= TREE_CHAIN (arg
))
14993 const char *str
= TREE_STRING_POINTER (TREE_VALUE (arg
));
14994 size_t len
= strlen (str
);
14995 str_len_sum
+= len
+ 1;
14996 for (const char *p
= strchr (str
, ','); p
; p
= strchr (p
+ 1, ','))
15002 return str_len_sum
;
15006 tree_cc_finalize (void)
15008 clear_nonstandard_integer_type_cache ();
15013 namespace selftest
{
15015 /* Selftests for tree. */
15017 /* Verify that integer constants are sane. */
15020 test_integer_constants ()
15022 ASSERT_TRUE (integer_type_node
!= NULL
);
15023 ASSERT_TRUE (build_int_cst (integer_type_node
, 0) != NULL
);
15025 tree type
= integer_type_node
;
15027 tree zero
= build_zero_cst (type
);
15028 ASSERT_EQ (INTEGER_CST
, TREE_CODE (zero
));
15029 ASSERT_EQ (type
, TREE_TYPE (zero
));
15031 tree one
= build_int_cst (type
, 1);
15032 ASSERT_EQ (INTEGER_CST
, TREE_CODE (one
));
15033 ASSERT_EQ (type
, TREE_TYPE (zero
));
15036 /* Verify identifiers. */
15039 test_identifiers ()
15041 tree identifier
= get_identifier ("foo");
15042 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier
));
15043 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier
));
15046 /* Verify LABEL_DECL. */
15051 tree identifier
= get_identifier ("err");
15052 tree label_decl
= build_decl (UNKNOWN_LOCATION
, LABEL_DECL
,
15053 identifier
, void_type_node
);
15054 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl
));
15055 ASSERT_FALSE (FORCED_LABEL (label_decl
));
15058 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15059 are given by VALS. */
15062 build_vector (tree type
, const vec
<tree
> &vals MEM_STAT_DECL
)
15064 gcc_assert (known_eq (vals
.length (), TYPE_VECTOR_SUBPARTS (type
)));
15065 tree_vector_builder
builder (type
, vals
.length (), 1);
15066 builder
.splice (vals
);
15067 return builder
.build ();
15070 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15073 check_vector_cst (const vec
<tree
> &expected
, tree actual
)
15075 ASSERT_KNOWN_EQ (expected
.length (),
15076 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
15077 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
15078 ASSERT_EQ (wi::to_wide (expected
[i
]),
15079 wi::to_wide (vector_cst_elt (actual
, i
)));
15082 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15083 and that its elements match EXPECTED. */
15086 check_vector_cst_duplicate (const vec
<tree
> &expected
, tree actual
,
15087 unsigned int npatterns
)
15089 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
15090 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
15091 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
15092 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
15093 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
15094 check_vector_cst (expected
, actual
);
15097 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15098 and NPATTERNS background elements, and that its elements match
15102 check_vector_cst_fill (const vec
<tree
> &expected
, tree actual
,
15103 unsigned int npatterns
)
15105 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
15106 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual
));
15107 ASSERT_EQ (2 * npatterns
, vector_cst_encoded_nelts (actual
));
15108 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
15109 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
15110 check_vector_cst (expected
, actual
);
15113 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15114 and that its elements match EXPECTED. */
15117 check_vector_cst_stepped (const vec
<tree
> &expected
, tree actual
,
15118 unsigned int npatterns
)
15120 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
15121 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual
));
15122 ASSERT_EQ (3 * npatterns
, vector_cst_encoded_nelts (actual
));
15123 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
15124 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual
));
15125 check_vector_cst (expected
, actual
);
15128 /* Test the creation of VECTOR_CSTs. */
15131 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO
)
15133 auto_vec
<tree
, 8> elements (8);
15134 elements
.quick_grow (8);
15135 tree element_type
= build_nonstandard_integer_type (16, true);
15136 tree vector_type
= build_vector_type (element_type
, 8);
15138 /* Test a simple linear series with a base of 0 and a step of 1:
15139 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15140 for (unsigned int i
= 0; i
< 8; ++i
)
15141 elements
[i
] = build_int_cst (element_type
, i
);
15142 tree vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15143 check_vector_cst_stepped (elements
, vector
, 1);
15145 /* Try the same with the first element replaced by 100:
15146 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15147 elements
[0] = build_int_cst (element_type
, 100);
15148 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15149 check_vector_cst_stepped (elements
, vector
, 1);
15151 /* Try a series that wraps around.
15152 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15153 for (unsigned int i
= 1; i
< 8; ++i
)
15154 elements
[i
] = build_int_cst (element_type
, (65530 + i
) & 0xffff);
15155 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15156 check_vector_cst_stepped (elements
, vector
, 1);
15158 /* Try a downward series:
15159 { 100, 79, 78, 77, 76, 75, 75, 73 }. */
15160 for (unsigned int i
= 1; i
< 8; ++i
)
15161 elements
[i
] = build_int_cst (element_type
, 80 - i
);
15162 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15163 check_vector_cst_stepped (elements
, vector
, 1);
15165 /* Try two interleaved series with different bases and steps:
15166 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15167 elements
[1] = build_int_cst (element_type
, 53);
15168 for (unsigned int i
= 2; i
< 8; i
+= 2)
15170 elements
[i
] = build_int_cst (element_type
, 70 - i
* 2);
15171 elements
[i
+ 1] = build_int_cst (element_type
, 200 + i
* 3);
15173 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15174 check_vector_cst_stepped (elements
, vector
, 2);
15176 /* Try a duplicated value:
15177 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15178 for (unsigned int i
= 1; i
< 8; ++i
)
15179 elements
[i
] = elements
[0];
15180 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15181 check_vector_cst_duplicate (elements
, vector
, 1);
15183 /* Try an interleaved duplicated value:
15184 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15185 elements
[1] = build_int_cst (element_type
, 55);
15186 for (unsigned int i
= 2; i
< 8; ++i
)
15187 elements
[i
] = elements
[i
- 2];
15188 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15189 check_vector_cst_duplicate (elements
, vector
, 2);
15191 /* Try a duplicated value with 2 exceptions
15192 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15193 elements
[0] = build_int_cst (element_type
, 41);
15194 elements
[1] = build_int_cst (element_type
, 97);
15195 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15196 check_vector_cst_fill (elements
, vector
, 2);
15198 /* Try with and without a step
15199 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15200 for (unsigned int i
= 3; i
< 8; i
+= 2)
15201 elements
[i
] = build_int_cst (element_type
, i
* 7);
15202 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15203 check_vector_cst_stepped (elements
, vector
, 2);
15205 /* Try a fully-general constant:
15206 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15207 elements
[5] = build_int_cst (element_type
, 9990);
15208 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15209 check_vector_cst_fill (elements
, vector
, 4);
15212 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15213 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15214 modifying its argument in-place. */
15217 check_strip_nops (tree node
, tree expected
)
15220 ASSERT_EQ (expected
, node
);
15223 /* Verify location wrappers. */
15226 test_location_wrappers ()
15228 location_t loc
= BUILTINS_LOCATION
;
15230 ASSERT_EQ (NULL_TREE
, maybe_wrap_with_location (NULL_TREE
, loc
));
15232 /* Wrapping a constant. */
15233 tree int_cst
= build_int_cst (integer_type_node
, 42);
15234 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst
));
15235 ASSERT_FALSE (location_wrapper_p (int_cst
));
15237 tree wrapped_int_cst
= maybe_wrap_with_location (int_cst
, loc
);
15238 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst
));
15239 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_cst
));
15240 ASSERT_EQ (int_cst
, tree_strip_any_location_wrapper (wrapped_int_cst
));
15242 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15243 ASSERT_EQ (int_cst
, maybe_wrap_with_location (int_cst
, UNKNOWN_LOCATION
));
15245 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15246 tree cast
= build1 (NOP_EXPR
, char_type_node
, int_cst
);
15247 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast
));
15248 ASSERT_EQ (cast
, maybe_wrap_with_location (cast
, loc
));
15250 /* Wrapping a STRING_CST. */
15251 tree string_cst
= build_string (4, "foo");
15252 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst
));
15253 ASSERT_FALSE (location_wrapper_p (string_cst
));
15255 tree wrapped_string_cst
= maybe_wrap_with_location (string_cst
, loc
);
15256 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst
));
15257 ASSERT_EQ (VIEW_CONVERT_EXPR
, TREE_CODE (wrapped_string_cst
));
15258 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_string_cst
));
15259 ASSERT_EQ (string_cst
, tree_strip_any_location_wrapper (wrapped_string_cst
));
15262 /* Wrapping a variable. */
15263 tree int_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
15264 get_identifier ("some_int_var"),
15265 integer_type_node
);
15266 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var
));
15267 ASSERT_FALSE (location_wrapper_p (int_var
));
15269 tree wrapped_int_var
= maybe_wrap_with_location (int_var
, loc
);
15270 ASSERT_TRUE (location_wrapper_p (wrapped_int_var
));
15271 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_var
));
15272 ASSERT_EQ (int_var
, tree_strip_any_location_wrapper (wrapped_int_var
));
15274 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15276 tree r_cast
= build1 (NON_LVALUE_EXPR
, integer_type_node
, int_var
);
15277 ASSERT_FALSE (location_wrapper_p (r_cast
));
15278 ASSERT_EQ (r_cast
, tree_strip_any_location_wrapper (r_cast
));
15280 /* Verify that STRIP_NOPS removes wrappers. */
15281 check_strip_nops (wrapped_int_cst
, int_cst
);
15282 check_strip_nops (wrapped_string_cst
, string_cst
);
15283 check_strip_nops (wrapped_int_var
, int_var
);
15286 /* Test various tree predicates. Verify that location wrappers don't
15287 affect the results. */
15292 /* Build various constants and wrappers around them. */
15294 location_t loc
= BUILTINS_LOCATION
;
15296 tree i_0
= build_int_cst (integer_type_node
, 0);
15297 tree wr_i_0
= maybe_wrap_with_location (i_0
, loc
);
15299 tree i_1
= build_int_cst (integer_type_node
, 1);
15300 tree wr_i_1
= maybe_wrap_with_location (i_1
, loc
);
15302 tree i_m1
= build_int_cst (integer_type_node
, -1);
15303 tree wr_i_m1
= maybe_wrap_with_location (i_m1
, loc
);
15305 tree f_0
= build_real_from_int_cst (float_type_node
, i_0
);
15306 tree wr_f_0
= maybe_wrap_with_location (f_0
, loc
);
15307 tree f_1
= build_real_from_int_cst (float_type_node
, i_1
);
15308 tree wr_f_1
= maybe_wrap_with_location (f_1
, loc
);
15309 tree f_m1
= build_real_from_int_cst (float_type_node
, i_m1
);
15310 tree wr_f_m1
= maybe_wrap_with_location (f_m1
, loc
);
15312 tree c_i_0
= build_complex (NULL_TREE
, i_0
, i_0
);
15313 tree c_i_1
= build_complex (NULL_TREE
, i_1
, i_0
);
15314 tree c_i_m1
= build_complex (NULL_TREE
, i_m1
, i_0
);
15316 tree c_f_0
= build_complex (NULL_TREE
, f_0
, f_0
);
15317 tree c_f_1
= build_complex (NULL_TREE
, f_1
, f_0
);
15318 tree c_f_m1
= build_complex (NULL_TREE
, f_m1
, f_0
);
15320 /* TODO: vector constants. */
15322 /* Test integer_onep. */
15323 ASSERT_FALSE (integer_onep (i_0
));
15324 ASSERT_FALSE (integer_onep (wr_i_0
));
15325 ASSERT_TRUE (integer_onep (i_1
));
15326 ASSERT_TRUE (integer_onep (wr_i_1
));
15327 ASSERT_FALSE (integer_onep (i_m1
));
15328 ASSERT_FALSE (integer_onep (wr_i_m1
));
15329 ASSERT_FALSE (integer_onep (f_0
));
15330 ASSERT_FALSE (integer_onep (wr_f_0
));
15331 ASSERT_FALSE (integer_onep (f_1
));
15332 ASSERT_FALSE (integer_onep (wr_f_1
));
15333 ASSERT_FALSE (integer_onep (f_m1
));
15334 ASSERT_FALSE (integer_onep (wr_f_m1
));
15335 ASSERT_FALSE (integer_onep (c_i_0
));
15336 ASSERT_TRUE (integer_onep (c_i_1
));
15337 ASSERT_FALSE (integer_onep (c_i_m1
));
15338 ASSERT_FALSE (integer_onep (c_f_0
));
15339 ASSERT_FALSE (integer_onep (c_f_1
));
15340 ASSERT_FALSE (integer_onep (c_f_m1
));
15342 /* Test integer_zerop. */
15343 ASSERT_TRUE (integer_zerop (i_0
));
15344 ASSERT_TRUE (integer_zerop (wr_i_0
));
15345 ASSERT_FALSE (integer_zerop (i_1
));
15346 ASSERT_FALSE (integer_zerop (wr_i_1
));
15347 ASSERT_FALSE (integer_zerop (i_m1
));
15348 ASSERT_FALSE (integer_zerop (wr_i_m1
));
15349 ASSERT_FALSE (integer_zerop (f_0
));
15350 ASSERT_FALSE (integer_zerop (wr_f_0
));
15351 ASSERT_FALSE (integer_zerop (f_1
));
15352 ASSERT_FALSE (integer_zerop (wr_f_1
));
15353 ASSERT_FALSE (integer_zerop (f_m1
));
15354 ASSERT_FALSE (integer_zerop (wr_f_m1
));
15355 ASSERT_TRUE (integer_zerop (c_i_0
));
15356 ASSERT_FALSE (integer_zerop (c_i_1
));
15357 ASSERT_FALSE (integer_zerop (c_i_m1
));
15358 ASSERT_FALSE (integer_zerop (c_f_0
));
15359 ASSERT_FALSE (integer_zerop (c_f_1
));
15360 ASSERT_FALSE (integer_zerop (c_f_m1
));
15362 /* Test integer_all_onesp. */
15363 ASSERT_FALSE (integer_all_onesp (i_0
));
15364 ASSERT_FALSE (integer_all_onesp (wr_i_0
));
15365 ASSERT_FALSE (integer_all_onesp (i_1
));
15366 ASSERT_FALSE (integer_all_onesp (wr_i_1
));
15367 ASSERT_TRUE (integer_all_onesp (i_m1
));
15368 ASSERT_TRUE (integer_all_onesp (wr_i_m1
));
15369 ASSERT_FALSE (integer_all_onesp (f_0
));
15370 ASSERT_FALSE (integer_all_onesp (wr_f_0
));
15371 ASSERT_FALSE (integer_all_onesp (f_1
));
15372 ASSERT_FALSE (integer_all_onesp (wr_f_1
));
15373 ASSERT_FALSE (integer_all_onesp (f_m1
));
15374 ASSERT_FALSE (integer_all_onesp (wr_f_m1
));
15375 ASSERT_FALSE (integer_all_onesp (c_i_0
));
15376 ASSERT_FALSE (integer_all_onesp (c_i_1
));
15377 ASSERT_FALSE (integer_all_onesp (c_i_m1
));
15378 ASSERT_FALSE (integer_all_onesp (c_f_0
));
15379 ASSERT_FALSE (integer_all_onesp (c_f_1
));
15380 ASSERT_FALSE (integer_all_onesp (c_f_m1
));
15382 /* Test integer_minus_onep. */
15383 ASSERT_FALSE (integer_minus_onep (i_0
));
15384 ASSERT_FALSE (integer_minus_onep (wr_i_0
));
15385 ASSERT_FALSE (integer_minus_onep (i_1
));
15386 ASSERT_FALSE (integer_minus_onep (wr_i_1
));
15387 ASSERT_TRUE (integer_minus_onep (i_m1
));
15388 ASSERT_TRUE (integer_minus_onep (wr_i_m1
));
15389 ASSERT_FALSE (integer_minus_onep (f_0
));
15390 ASSERT_FALSE (integer_minus_onep (wr_f_0
));
15391 ASSERT_FALSE (integer_minus_onep (f_1
));
15392 ASSERT_FALSE (integer_minus_onep (wr_f_1
));
15393 ASSERT_FALSE (integer_minus_onep (f_m1
));
15394 ASSERT_FALSE (integer_minus_onep (wr_f_m1
));
15395 ASSERT_FALSE (integer_minus_onep (c_i_0
));
15396 ASSERT_FALSE (integer_minus_onep (c_i_1
));
15397 ASSERT_TRUE (integer_minus_onep (c_i_m1
));
15398 ASSERT_FALSE (integer_minus_onep (c_f_0
));
15399 ASSERT_FALSE (integer_minus_onep (c_f_1
));
15400 ASSERT_FALSE (integer_minus_onep (c_f_m1
));
15402 /* Test integer_each_onep. */
15403 ASSERT_FALSE (integer_each_onep (i_0
));
15404 ASSERT_FALSE (integer_each_onep (wr_i_0
));
15405 ASSERT_TRUE (integer_each_onep (i_1
));
15406 ASSERT_TRUE (integer_each_onep (wr_i_1
));
15407 ASSERT_FALSE (integer_each_onep (i_m1
));
15408 ASSERT_FALSE (integer_each_onep (wr_i_m1
));
15409 ASSERT_FALSE (integer_each_onep (f_0
));
15410 ASSERT_FALSE (integer_each_onep (wr_f_0
));
15411 ASSERT_FALSE (integer_each_onep (f_1
));
15412 ASSERT_FALSE (integer_each_onep (wr_f_1
));
15413 ASSERT_FALSE (integer_each_onep (f_m1
));
15414 ASSERT_FALSE (integer_each_onep (wr_f_m1
));
15415 ASSERT_FALSE (integer_each_onep (c_i_0
));
15416 ASSERT_FALSE (integer_each_onep (c_i_1
));
15417 ASSERT_FALSE (integer_each_onep (c_i_m1
));
15418 ASSERT_FALSE (integer_each_onep (c_f_0
));
15419 ASSERT_FALSE (integer_each_onep (c_f_1
));
15420 ASSERT_FALSE (integer_each_onep (c_f_m1
));
15422 /* Test integer_truep. */
15423 ASSERT_FALSE (integer_truep (i_0
));
15424 ASSERT_FALSE (integer_truep (wr_i_0
));
15425 ASSERT_TRUE (integer_truep (i_1
));
15426 ASSERT_TRUE (integer_truep (wr_i_1
));
15427 ASSERT_FALSE (integer_truep (i_m1
));
15428 ASSERT_FALSE (integer_truep (wr_i_m1
));
15429 ASSERT_FALSE (integer_truep (f_0
));
15430 ASSERT_FALSE (integer_truep (wr_f_0
));
15431 ASSERT_FALSE (integer_truep (f_1
));
15432 ASSERT_FALSE (integer_truep (wr_f_1
));
15433 ASSERT_FALSE (integer_truep (f_m1
));
15434 ASSERT_FALSE (integer_truep (wr_f_m1
));
15435 ASSERT_FALSE (integer_truep (c_i_0
));
15436 ASSERT_TRUE (integer_truep (c_i_1
));
15437 ASSERT_FALSE (integer_truep (c_i_m1
));
15438 ASSERT_FALSE (integer_truep (c_f_0
));
15439 ASSERT_FALSE (integer_truep (c_f_1
));
15440 ASSERT_FALSE (integer_truep (c_f_m1
));
15442 /* Test integer_nonzerop. */
15443 ASSERT_FALSE (integer_nonzerop (i_0
));
15444 ASSERT_FALSE (integer_nonzerop (wr_i_0
));
15445 ASSERT_TRUE (integer_nonzerop (i_1
));
15446 ASSERT_TRUE (integer_nonzerop (wr_i_1
));
15447 ASSERT_TRUE (integer_nonzerop (i_m1
));
15448 ASSERT_TRUE (integer_nonzerop (wr_i_m1
));
15449 ASSERT_FALSE (integer_nonzerop (f_0
));
15450 ASSERT_FALSE (integer_nonzerop (wr_f_0
));
15451 ASSERT_FALSE (integer_nonzerop (f_1
));
15452 ASSERT_FALSE (integer_nonzerop (wr_f_1
));
15453 ASSERT_FALSE (integer_nonzerop (f_m1
));
15454 ASSERT_FALSE (integer_nonzerop (wr_f_m1
));
15455 ASSERT_FALSE (integer_nonzerop (c_i_0
));
15456 ASSERT_TRUE (integer_nonzerop (c_i_1
));
15457 ASSERT_TRUE (integer_nonzerop (c_i_m1
));
15458 ASSERT_FALSE (integer_nonzerop (c_f_0
));
15459 ASSERT_FALSE (integer_nonzerop (c_f_1
));
15460 ASSERT_FALSE (integer_nonzerop (c_f_m1
));
15462 /* Test real_zerop. */
15463 ASSERT_FALSE (real_zerop (i_0
));
15464 ASSERT_FALSE (real_zerop (wr_i_0
));
15465 ASSERT_FALSE (real_zerop (i_1
));
15466 ASSERT_FALSE (real_zerop (wr_i_1
));
15467 ASSERT_FALSE (real_zerop (i_m1
));
15468 ASSERT_FALSE (real_zerop (wr_i_m1
));
15469 ASSERT_TRUE (real_zerop (f_0
));
15470 ASSERT_TRUE (real_zerop (wr_f_0
));
15471 ASSERT_FALSE (real_zerop (f_1
));
15472 ASSERT_FALSE (real_zerop (wr_f_1
));
15473 ASSERT_FALSE (real_zerop (f_m1
));
15474 ASSERT_FALSE (real_zerop (wr_f_m1
));
15475 ASSERT_FALSE (real_zerop (c_i_0
));
15476 ASSERT_FALSE (real_zerop (c_i_1
));
15477 ASSERT_FALSE (real_zerop (c_i_m1
));
15478 ASSERT_TRUE (real_zerop (c_f_0
));
15479 ASSERT_FALSE (real_zerop (c_f_1
));
15480 ASSERT_FALSE (real_zerop (c_f_m1
));
15482 /* Test real_onep. */
15483 ASSERT_FALSE (real_onep (i_0
));
15484 ASSERT_FALSE (real_onep (wr_i_0
));
15485 ASSERT_FALSE (real_onep (i_1
));
15486 ASSERT_FALSE (real_onep (wr_i_1
));
15487 ASSERT_FALSE (real_onep (i_m1
));
15488 ASSERT_FALSE (real_onep (wr_i_m1
));
15489 ASSERT_FALSE (real_onep (f_0
));
15490 ASSERT_FALSE (real_onep (wr_f_0
));
15491 ASSERT_TRUE (real_onep (f_1
));
15492 ASSERT_TRUE (real_onep (wr_f_1
));
15493 ASSERT_FALSE (real_onep (f_m1
));
15494 ASSERT_FALSE (real_onep (wr_f_m1
));
15495 ASSERT_FALSE (real_onep (c_i_0
));
15496 ASSERT_FALSE (real_onep (c_i_1
));
15497 ASSERT_FALSE (real_onep (c_i_m1
));
15498 ASSERT_FALSE (real_onep (c_f_0
));
15499 ASSERT_TRUE (real_onep (c_f_1
));
15500 ASSERT_FALSE (real_onep (c_f_m1
));
15502 /* Test real_minus_onep. */
15503 ASSERT_FALSE (real_minus_onep (i_0
));
15504 ASSERT_FALSE (real_minus_onep (wr_i_0
));
15505 ASSERT_FALSE (real_minus_onep (i_1
));
15506 ASSERT_FALSE (real_minus_onep (wr_i_1
));
15507 ASSERT_FALSE (real_minus_onep (i_m1
));
15508 ASSERT_FALSE (real_minus_onep (wr_i_m1
));
15509 ASSERT_FALSE (real_minus_onep (f_0
));
15510 ASSERT_FALSE (real_minus_onep (wr_f_0
));
15511 ASSERT_FALSE (real_minus_onep (f_1
));
15512 ASSERT_FALSE (real_minus_onep (wr_f_1
));
15513 ASSERT_TRUE (real_minus_onep (f_m1
));
15514 ASSERT_TRUE (real_minus_onep (wr_f_m1
));
15515 ASSERT_FALSE (real_minus_onep (c_i_0
));
15516 ASSERT_FALSE (real_minus_onep (c_i_1
));
15517 ASSERT_FALSE (real_minus_onep (c_i_m1
));
15518 ASSERT_FALSE (real_minus_onep (c_f_0
));
15519 ASSERT_FALSE (real_minus_onep (c_f_1
));
15520 ASSERT_TRUE (real_minus_onep (c_f_m1
));
15523 ASSERT_TRUE (zerop (i_0
));
15524 ASSERT_TRUE (zerop (wr_i_0
));
15525 ASSERT_FALSE (zerop (i_1
));
15526 ASSERT_FALSE (zerop (wr_i_1
));
15527 ASSERT_FALSE (zerop (i_m1
));
15528 ASSERT_FALSE (zerop (wr_i_m1
));
15529 ASSERT_TRUE (zerop (f_0
));
15530 ASSERT_TRUE (zerop (wr_f_0
));
15531 ASSERT_FALSE (zerop (f_1
));
15532 ASSERT_FALSE (zerop (wr_f_1
));
15533 ASSERT_FALSE (zerop (f_m1
));
15534 ASSERT_FALSE (zerop (wr_f_m1
));
15535 ASSERT_TRUE (zerop (c_i_0
));
15536 ASSERT_FALSE (zerop (c_i_1
));
15537 ASSERT_FALSE (zerop (c_i_m1
));
15538 ASSERT_TRUE (zerop (c_f_0
));
15539 ASSERT_FALSE (zerop (c_f_1
));
15540 ASSERT_FALSE (zerop (c_f_m1
));
15542 /* Test tree_expr_nonnegative_p. */
15543 ASSERT_TRUE (tree_expr_nonnegative_p (i_0
));
15544 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0
));
15545 ASSERT_TRUE (tree_expr_nonnegative_p (i_1
));
15546 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1
));
15547 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1
));
15548 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1
));
15549 ASSERT_TRUE (tree_expr_nonnegative_p (f_0
));
15550 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0
));
15551 ASSERT_TRUE (tree_expr_nonnegative_p (f_1
));
15552 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1
));
15553 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1
));
15554 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1
));
15555 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0
));
15556 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1
));
15557 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1
));
15558 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0
));
15559 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1
));
15560 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1
));
15562 /* Test tree_expr_nonzero_p. */
15563 ASSERT_FALSE (tree_expr_nonzero_p (i_0
));
15564 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0
));
15565 ASSERT_TRUE (tree_expr_nonzero_p (i_1
));
15566 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1
));
15567 ASSERT_TRUE (tree_expr_nonzero_p (i_m1
));
15568 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1
));
15570 /* Test integer_valued_real_p. */
15571 ASSERT_FALSE (integer_valued_real_p (i_0
));
15572 ASSERT_TRUE (integer_valued_real_p (f_0
));
15573 ASSERT_TRUE (integer_valued_real_p (wr_f_0
));
15574 ASSERT_TRUE (integer_valued_real_p (f_1
));
15575 ASSERT_TRUE (integer_valued_real_p (wr_f_1
));
15577 /* Test integer_pow2p. */
15578 ASSERT_FALSE (integer_pow2p (i_0
));
15579 ASSERT_TRUE (integer_pow2p (i_1
));
15580 ASSERT_TRUE (integer_pow2p (wr_i_1
));
15582 /* Test uniform_integer_cst_p. */
15583 ASSERT_TRUE (uniform_integer_cst_p (i_0
));
15584 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0
));
15585 ASSERT_TRUE (uniform_integer_cst_p (i_1
));
15586 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1
));
15587 ASSERT_TRUE (uniform_integer_cst_p (i_m1
));
15588 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1
));
15589 ASSERT_FALSE (uniform_integer_cst_p (f_0
));
15590 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0
));
15591 ASSERT_FALSE (uniform_integer_cst_p (f_1
));
15592 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1
));
15593 ASSERT_FALSE (uniform_integer_cst_p (f_m1
));
15594 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1
));
15595 ASSERT_FALSE (uniform_integer_cst_p (c_i_0
));
15596 ASSERT_FALSE (uniform_integer_cst_p (c_i_1
));
15597 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1
));
15598 ASSERT_FALSE (uniform_integer_cst_p (c_f_0
));
15599 ASSERT_FALSE (uniform_integer_cst_p (c_f_1
));
15600 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1
));
15603 /* Check that string escaping works correctly. */
15606 test_escaped_strings (void)
15609 escaped_string msg
;
15612 /* ASSERT_STREQ does not accept NULL as a valid test
15613 result, so we have to use ASSERT_EQ instead. */
15614 ASSERT_EQ (NULL
, (const char *) msg
);
15617 ASSERT_STREQ ("", (const char *) msg
);
15619 msg
.escape ("foobar");
15620 ASSERT_STREQ ("foobar", (const char *) msg
);
15622 /* Ensure that we have -fmessage-length set to 0. */
15623 saved_cutoff
= pp_line_cutoff (global_dc
->printer
);
15624 pp_line_cutoff (global_dc
->printer
) = 0;
15626 msg
.escape ("foo\nbar");
15627 ASSERT_STREQ ("foo\\nbar", (const char *) msg
);
15629 msg
.escape ("\a\b\f\n\r\t\v");
15630 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg
);
15632 /* Now repeat the tests with -fmessage-length set to 5. */
15633 pp_line_cutoff (global_dc
->printer
) = 5;
15635 /* Note that the newline is not translated into an escape. */
15636 msg
.escape ("foo\nbar");
15637 ASSERT_STREQ ("foo\nbar", (const char *) msg
);
15639 msg
.escape ("\a\b\f\n\r\t\v");
15640 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg
);
15642 /* Restore the original message length setting. */
15643 pp_line_cutoff (global_dc
->printer
) = saved_cutoff
;
15646 /* Run all of the selftests within this file. */
15651 test_integer_constants ();
15652 test_identifiers ();
15654 test_vector_cst_patterns ();
15655 test_location_wrappers ();
15656 test_predicates ();
15657 test_escaped_strings ();
15660 } // namespace selftest
15662 #endif /* CHECKING_P */
15664 #include "gt-tree.h"