]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree.c
c++: ICE on enum with bool value [PR99968]
[thirdparty/gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but may occasionally
   call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72
/* Tree code classes.  */

/* Expand each DEFTREECODE entry in all-tree.def to its TYPE field,
   yielding the tree_code_class for every tree code, in enumeration
   order.  Base codes are followed by frontend codes, separated by
   END_OF_BASE_TREE_CODES.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

/* Same expansion trick as above, this time keeping the LENGTH field.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
110
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries (order is
   significant: the array is indexed by enum tree_code_class).  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};

/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

/* Per-tree-code allocation counts.  */
static uint64_t tree_code_counts[MAX_TREE_CODES];
/* Per-kind allocation counts and cumulative byte sizes.  */
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
157
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;  /* Saved hash code for TYPE.  */
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Hasher for the type hash table; hash values are precomputed and
   stored in the type_hash entry itself.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep an entry across a GC only while its type is still marked.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;

/* General tree->tree mapping structure for use in hash tables.  */


static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

/* Hasher for decl -> tree vector maps (used by debug_args_for_decl
   below); entries are keyed by DECL_UID and survive GC only while the
   key decl is marked.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

/* Forward declarations for helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

bool tree_contains_struct[MAX_TREE_CODES][64];
277
/* Number of operands for each OMP clause.  Indexed by clause code;
   the order of entries must match the per-entry comments (the clause
   enumeration), so add new entries in the corresponding position.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  2, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
367
/* Printable name for each OMP clause, in the same order as
   omp_clause_num_ops above.  Note "to" appears twice: the entry in the
   OMP_CLAUSE_TO_DECLARE position and the one in the OMP_CLAUSE_TO
   position print identically.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "affinity",
  "aligned",
  "allocate",
  "depend",
  "nontemporal",
  "uniform",
  "to",
  "link",
  "detach",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "from",
  "to",
  "map",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "filter",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "if_present",
  "finalize",
  "nohost",
};
456
457
458 /* Return the tree node structure used by tree code CODE. */
459
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
462 {
463 switch (TREE_CODE_CLASS (code))
464 {
465 case tcc_declaration:
466 switch (code)
467 {
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
479 }
480
481 case tcc_type: return TS_TYPE_NON_COMMON;
482
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
490
491 default: /* tcc_constant and tcc_exceptional */
492 break;
493 }
494
495 switch (code)
496 {
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
506
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
521
522 default:
523 gcc_unreachable ();
524 }
525 }
526
527
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
530
531 static void
532 initialize_tree_contains_struct (void)
533 {
534 unsigned i;
535
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
537 {
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
540
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
543
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
546
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
549 {
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
556
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
571
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
581
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
585
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
589
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
593
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
598
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
602
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
609
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
613
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
617
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
622
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
626
627 default:
628 gcc_unreachable ();
629 }
630 }
631
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
673 }
674
675
/* Init tree.c.  Creates the various GC-rooted hash tables and scratch
   nodes used by this file, then fills in tree_contains_struct and lets
   the frontend register its own tree structures.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Temporary node used when looking up hashed integer constants.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes used to build option nodes before hashing them.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
706
707 \f
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
713 {
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
717 }
718
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE, if this changes the
721 name. */
722
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
725 {
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
728 }
729
730 /* Return true if DECL may need an assembler name to be set. */
731
732 static inline bool
733 need_assembler_name_p (tree decl)
734 {
735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
736 Rule merging. This makes type_odr_p to return true on those types during
737 LTO and by comparing the mangled name, we can say what types are intended
738 to be equivalent across compilation unit.
739
740 We do not store names of type_in_anonymous_namespace_p.
741
742 Record, union and enumeration type have linkage that allows use
743 to check type_in_anonymous_namespace_p. We do not mangle compound types
744 that always can be compared structurally.
745
746 Similarly for builtin types, we compare properties of their main variant.
747 A special case are integer types where mangling do make differences
748 between char/signed char/unsigned char etc. Storing name for these makes
749 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
750 See cp/mangle.c:write_builtin_type for details. */
751
752 if (TREE_CODE (decl) == TYPE_DECL)
753 {
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
766 }
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
770
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
776
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
780
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
788
789 if (TREE_CODE (decl) == FUNCTION_DECL)
790 {
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
796
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
800
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
804 }
805
806 return true;
807 }
808
809 /* If T needs an assembler name, have one created for it. */
810
811 void
812 assign_assembler_name_if_needed (tree t)
813 {
814 if (need_assembler_name_p (t))
815 {
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
822
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
829
830 decl_assembler_name (t);
831
832 input_location = saved_location;
833 }
834 }
835
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
841 {
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
846 }
847
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
851 {
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
856 }
857
858 /* When the target supports named section, return its name as IDENTIFIER_NODE
859 or NULL if it is in no section. */
860 const char *
861 decl_section_name (const_tree node)
862 {
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
867 }
868
869 /* Set section name of NODE to VALUE (that is expected to be
870 identifier node) */
871 void
872 set_decl_section_name (tree node, const char *value)
873 {
874 struct symtab_node *snode;
875
876 if (value == NULL)
877 {
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
881 }
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
887 }
888
889 /* Set section name of NODE to match the section name of OTHER.
890
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
896 {
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
899 {
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
906 }
907 else
908 {
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
913 }
914 }
915
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
919 {
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
924 }
925
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
929 {
930 struct varpool_node *vnode;
931
932 if (model == TLS_MODEL_NONE)
933 {
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
937 }
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
941 }
942
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR
   (those hit gcc_unreachable below; use tree_size instead).  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  /* Frontend-specific decl codes defer to the language hook.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case OPAQUE_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
1054
1055 /* Compute the number of bytes occupied by NODE. This routine only
1056 looks at TREE_CODE, except for those nodes that have variable sizes. */
1057 size_t
1058 tree_size (const_tree node)
1059 {
1060 const enum tree_code code = TREE_CODE (node);
1061 switch (code)
1062 {
1063 case INTEGER_CST:
1064 return (sizeof (struct tree_int_cst)
1065 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1066
1067 case TREE_BINFO:
1068 return (offsetof (struct tree_binfo, base_binfos)
1069 + vec<tree, va_gc>
1070 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1071
1072 case TREE_VEC:
1073 return (sizeof (struct tree_vec)
1074 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1075
1076 case VECTOR_CST:
1077 return (sizeof (struct tree_vector)
1078 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1079
1080 case STRING_CST:
1081 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1082
1083 case OMP_CLAUSE:
1084 return (sizeof (struct tree_omp_clause)
1085 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1086 * sizeof (tree));
1087
1088 default:
1089 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1090 return (sizeof (struct tree_exp)
1091 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1092 else
1093 return tree_code_size (code);
1094 }
1095 }
1096
1097 /* Return tree node kind based on tree CODE. */
1098
1099 static tree_node_kind
1100 get_stats_node_kind (enum tree_code code)
1101 {
1102 enum tree_code_class type = TREE_CODE_CLASS (code);
1103
1104 switch (type)
1105 {
1106 case tcc_declaration: /* A decl node */
1107 return d_kind;
1108 case tcc_type: /* a type node */
1109 return t_kind;
1110 case tcc_statement: /* an expression with side effects */
1111 return s_kind;
1112 case tcc_reference: /* a reference */
1113 return r_kind;
1114 case tcc_expression: /* an expression */
1115 case tcc_comparison: /* a comparison expression */
1116 case tcc_unary: /* a unary arithmetic expression */
1117 case tcc_binary: /* a binary arithmetic expression */
1118 return e_kind;
1119 case tcc_constant: /* a constant */
1120 return c_kind;
1121 case tcc_exceptional: /* something random, like an identifier. */
1122 switch (code)
1123 {
1124 case IDENTIFIER_NODE:
1125 return id_kind;
1126 case TREE_VEC:
1127 return vec_kind;
1128 case TREE_BINFO:
1129 return binfo_kind;
1130 case SSA_NAME:
1131 return ssa_name_kind;
1132 case BLOCK:
1133 return b_kind;
1134 case CONSTRUCTOR:
1135 return constr_kind;
1136 case OMP_CLAUSE:
1137 return omp_clause_kind;
1138 default:
1139 return x_kind;
1140 }
1141 break;
1142 case tcc_vl_exp:
1143 return e_kind;
1144 default:
1145 gcc_unreachable ();
1146 }
1147 }
1148
1149 /* Record interesting allocation statistics for a tree node with CODE
1150 and LENGTH. */
1151
1152 static void
1153 record_node_allocation_statistics (enum tree_code code, size_t length)
1154 {
1155 if (!GATHER_STATISTICS)
1156 return;
1157
1158 tree_node_kind kind = get_stats_node_kind (code);
1159
1160 tree_code_counts[(int) code]++;
1161 tree_node_counts[(int) kind]++;
1162 tree_node_sizes[(int) kind] += length;
1163 }
1164
1165 /* Allocate and return a new UID from the DECL_UID namespace. */
1166
1167 int
1168 allocate_decl_uid (void)
1169 {
1170 return next_decl_uid++;
1171 }
1172
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* The allocation is zero-cleared, so every field not explicitly set
     below starts out as zero/null.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements have side effects by default; debug begin markers
	 are the one exception.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* DEBUG_EXPR_DECLs draw UIDs from a separate, descending
	 counter so that they never perturb ordinary DECL_UID
	 ordering.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and its own canonical
	 type until something links it elsewhere.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	/* Option nodes own a separately allocated option structure.  */
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1288
1289 /* Free tree node. */
1290
1291 void
1292 free_node (tree node)
1293 {
1294 enum tree_code code = TREE_CODE (node);
1295 if (GATHER_STATISTICS)
1296 {
1297 enum tree_node_kind kind = get_stats_node_kind (code);
1298
1299 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1300 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1301 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1302
1303 tree_code_counts[(int) TREE_CODE (node)]--;
1304 tree_node_counts[(int) kind]--;
1305 tree_node_sizes[(int) kind] -= tree_size (node);
1306 }
1307 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1308 vec_free (CONSTRUCTOR_ELTS (node));
1309 else if (code == BLOCK)
1310 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1311 else if (code == TREE_BINFO)
1312 vec_free (BINFO_BASE_ACCESSES (node));
1313 else if (code == OPTIMIZATION_NODE)
1314 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1315 else if (code == TARGET_OPTION_NODE)
1316 cl_target_option_free (TREE_TARGET_OPTION (node));
1317 ggc_free (node);
1318 }
1319 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs own auxiliary structure a flat copy would share;
     copying them this way is not supported.  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  /* No need for a cleared allocation: memcpy overwrites every byte.  */
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Give the copy a UID of its own (debug decls use the separate
	 descending namespace, as in make_node).  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* Value expressions are keyed off the decl, so re-register
	     the copy explicitly.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not the node the symbol table knows about.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the option structure so the copy owns its own.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1408
1409 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1410 For example, this can copy a list made of TREE_LIST nodes. */
1411
1412 tree
1413 copy_list (tree list)
1414 {
1415 tree head;
1416 tree prev, next;
1417
1418 if (list == 0)
1419 return 0;
1420
1421 head = prev = copy_node (list);
1422 next = TREE_CHAIN (list);
1423 while (next)
1424 {
1425 TREE_CHAIN (prev) = copy_node (next);
1426 prev = TREE_CHAIN (prev);
1427 next = TREE_CHAIN (next);
1428 }
1429 return head;
1430 }
1431
1432 \f
1433 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1434 INTEGER_CST with value CST and type TYPE. */
1435
1436 static unsigned int
1437 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1438 {
1439 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1440 /* We need extra HWIs if CST is an unsigned integer with its
1441 upper bit set. */
1442 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1443 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1444 return cst.get_len ();
1445 }
1446
/* Return a new INTEGER_CST with value CST and type TYPE.  The node
   stores LEN significant HOST_WIDE_INT elements and EXT_LEN extended
   elements (EXT_LEN > LEN for unsigned values whose top bit is set;
   see get_int_cst_ext_nunits).  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned value with its top bit set: fill in the extension
	 elements explicitly.  The topmost element keeps only the bits
	 that lie within the precision; any elements between LEN and
	 the top are all-ones.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Unsigned type narrower than the stored elements: zero-extend
	 the top element past the precision so hashing and equality on
	 raw HOST_WIDE_INTs work without masking.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1478
1479 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1480
1481 static tree
1482 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1483 CXX_MEM_STAT_INFO)
1484 {
1485 size_t length = sizeof (struct tree_poly_int_cst);
1486 record_node_allocation_statistics (POLY_INT_CST, length);
1487
1488 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1489
1490 TREE_SET_CODE (t, POLY_INT_CST);
1491 TREE_CONSTANT (t) = 1;
1492 TREE_TYPE (t) = type;
1493 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1494 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1495 return t;
1496 }
1497
1498 /* Create a constant tree that contains CST sign-extended to TYPE. */
1499
1500 tree
1501 build_int_cst (tree type, poly_int64 cst)
1502 {
1503 /* Support legacy code. */
1504 if (!type)
1505 type = integer_type_node;
1506
1507 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1508 }
1509
1510 /* Create a constant tree that contains CST zero-extended to TYPE. */
1511
1512 tree
1513 build_int_cstu (tree type, poly_uint64 cst)
1514 {
1515 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1516 }
1517
1518 /* Create a constant tree that contains CST sign-extended to TYPE. */
1519
1520 tree
1521 build_int_cst_type (tree type, poly_int64 cst)
1522 {
1523 gcc_assert (type);
1524 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1525 }
1526
1527 /* Constructs tree in type TYPE from with value given by CST. Signedness
1528 of CST is assumed to be the same as the signedness of TYPE. */
1529
1530 tree
1531 double_int_to_tree (tree type, double_int cst)
1532 {
1533 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1534 }
1535
/* Force the wide_int CST into the range of TYPE by sign- or zero-
   extending it.  OVERFLOWABLE says which overflows are interesting:
   when >0 only signed overflow matters, when <0 any overflow matters.
   OVERFLOWED indicates whether overflow has already occurred.  If an
   interesting overflow happened (or was passed in), return a fresh
   unshared node with TREE_OVERFLOW set; otherwise return the shared
   constant for CST reduced to TYPE's precision.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision before
	     building the node.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* For a non-constant poly_int, mark every coefficient
		 as overflowed as well as the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1587
1588 /* These are the hash table functions for the hash table of INTEGER_CST
1589 nodes of a sizetype. */
1590
1591 /* Return the hash code X, an INTEGER_CST. */
1592
1593 hashval_t
1594 int_cst_hasher::hash (tree x)
1595 {
1596 const_tree const t = x;
1597 hashval_t code = TYPE_UID (TREE_TYPE (t));
1598 int i;
1599
1600 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1601 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1602
1603 return code;
1604 }
1605
1606 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1607 is the same as that given by *Y, which is the same. */
1608
1609 bool
1610 int_cst_hasher::equal (tree x, tree y)
1611 {
1612 const_tree const xt = x;
1613 const_tree const yt = y;
1614
1615 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1616 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1617 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1618 return false;
1619
1620 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1621 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1622 return false;
1623
1624 return true;
1625 }
1626
1627 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1628 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1629 number of slots that can be cached for the type. */
1630
1631 static inline tree
1632 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1633 int slot, int max_slots)
1634 {
1635 gcc_checking_assert (slot >= 0);
1636 /* Initialize cache. */
1637 if (!TYPE_CACHED_VALUES_P (type))
1638 {
1639 TYPE_CACHED_VALUES_P (type) = 1;
1640 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1641 }
1642 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1643 if (!t)
1644 {
1645 /* Create a new shared int. */
1646 t = build_new_int_cst (type, cst);
1647 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1648 }
1649 return t;
1650 }
1651
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.

   The cache slot assignments here must stay in sync with those in
   cache_integer_cst.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;		/* cache slot, or -1 if not cacheable */
  int limit = 0;	/* number of slots in the per-type cache */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  /* Enums use TYPE_CACHED_VALUES for their members, so do not
	     use the per-type small-value cache here.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	ggc_free (nt);
    }

  return t;
}
1810
1811 hashval_t
1812 poly_int_cst_hasher::hash (tree t)
1813 {
1814 inchash::hash hstate;
1815
1816 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1817 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1818 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1819
1820 return hstate.end ();
1821 }
1822
1823 bool
1824 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1825 {
1826 if (TREE_TYPE (x) != y.first)
1827 return false;
1828 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1829 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1830 return false;
1831 return true;
1832 }
1833
1834 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1835 The elements must also have type TYPE. */
1836
1837 tree
1838 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1839 {
1840 unsigned int prec = TYPE_PRECISION (type);
1841 gcc_assert (prec <= values.coeffs[0].get_precision ());
1842 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1843
1844 inchash::hash h;
1845 h.add_int (TYPE_UID (type));
1846 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1847 h.add_wide_int (c.coeffs[i]);
1848 poly_int_cst_hasher::compare_type comp (type, &c);
1849 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1850 INSERT);
1851 if (*slot == NULL_TREE)
1852 {
1853 tree coeffs[NUM_POLY_INT_COEFFS];
1854 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1855 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1856 *slot = build_new_poly_int_cst (type, coeffs);
1857 }
1858 return *slot;
1859 }
1860
1861 /* Create a constant tree with value VALUE in type TYPE. */
1862
1863 tree
1864 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1865 {
1866 if (value.is_constant ())
1867 return wide_int_to_tree_1 (type, value.coeffs[0]);
1868 return build_poly_int_cst (type, value);
1869 }
1870
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;		/* cache slot, or -1 if not cacheable */
  int limit = 0;	/* number of slots in the per-type cache */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* Slot 0 is the null pointer, slot 2 the value 1 (non-zero
	   range); slot 1 (pointer upper bound) is not cached here.  */
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  /* An existing entry can only be legitimate when duplicates
	     were anticipated by the caller.  */
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
2000
2001
2002 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2003 and the rest are zeros. */
2004
2005 tree
2006 build_low_bits_mask (tree type, unsigned bits)
2007 {
2008 gcc_assert (bits <= TYPE_PRECISION (type));
2009
2010 return wide_int_to_tree (type, wi::mask (bits, false,
2011 TYPE_PRECISION (type)));
2012 }
2013
2014 /* Checks that X is integer constant that can be expressed in (unsigned)
2015 HOST_WIDE_INT without loss of precision. */
2016
2017 bool
2018 cst_and_fits_in_hwi (const_tree x)
2019 {
2020 return (TREE_CODE (x) == INTEGER_CST
2021 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2022 }
2023
2024 /* Build a newly constructed VECTOR_CST with the given values of
2025 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2026
2027 tree
2028 make_vector (unsigned log2_npatterns,
2029 unsigned int nelts_per_pattern MEM_STAT_DECL)
2030 {
2031 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2032 tree t;
2033 unsigned npatterns = 1 << log2_npatterns;
2034 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2035 unsigned length = (sizeof (struct tree_vector)
2036 + (encoded_nelts - 1) * sizeof (tree));
2037
2038 record_node_allocation_statistics (VECTOR_CST, length);
2039
2040 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2041
2042 TREE_SET_CODE (t, VECTOR_CST);
2043 TREE_CONSTANT (t) = 1;
2044 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2045 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2046
2047 return t;
2048 }
2049
2050 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2051 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2052
2053 tree
2054 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2055 {
2056 if (vec_safe_length (v) == 0)
2057 return build_zero_cst (type);
2058
2059 unsigned HOST_WIDE_INT idx, nelts;
2060 tree value;
2061
2062 /* We can't construct a VECTOR_CST for a variable number of elements. */
2063 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2064 tree_vector_builder vec (type, nelts, 1);
2065 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2066 {
2067 if (TREE_CODE (value) == VECTOR_CST)
2068 {
2069 /* If NELTS is constant then this must be too. */
2070 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2071 for (unsigned i = 0; i < sub_nelts; ++i)
2072 vec.quick_push (VECTOR_CST_ELT (value, i));
2073 }
2074 else
2075 vec.quick_push (value);
2076 }
2077 while (vec.length () < nelts)
2078 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2079
2080 return vec.build ();
2081 }
2082
/* Build a vector of type VECTYPE where all the elements are SCs.
   Returns a VECTOR_CST when SC is constant, a VEC_DUPLICATE_EXPR for
   variable-length vectors, and a CONSTRUCTOR otherwise.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A single-pattern, single-element encoding replicates SC to
	 every lane.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Non-constant element, fixed length: spell out a CONSTRUCTOR
	 with NUNITS copies of SC.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
2118
2119 /* If TYPE is not a vector type, just return SC, otherwise return
2120 build_vector_from_val (TYPE, SC). */
2121
2122 tree
2123 build_uniform_cst (tree type, tree sc)
2124 {
2125 if (!VECTOR_TYPE_P (type))
2126 return sc;
2127
2128 return build_vector_from_val (type, sc);
2129 }
2130
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  /* A zero step degenerates into a uniform vector of BASE.  */
  if (integer_zerop (step))
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      /* Encode the series as one pattern of three elements
	 { BASE, BASE + STEP, BASE + 2*STEP }; the builder's stepped
	 encoding derives the remaining elements from the constant
	 difference between the last two.  */
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  /* Non-constant operands: leave the series to be expanded at
     run time.  */
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
2154
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  /* If the element type is not already an unsigned integer, switch
     to an unsigned integer type with the same bit size so the series
     values can be represented.  */
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Push the first three elements of a single stepped pattern; the
     builder extrapolates the rest from the last two values.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
2177
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* Build COUNT patterns of two elements each; pushing COUNT * 2
     values supplies one full period of the encoding.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
2194
2195 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2196 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2197
2198 void
2199 recompute_constructor_flags (tree c)
2200 {
2201 unsigned int i;
2202 tree val;
2203 bool constant_p = true;
2204 bool side_effects_p = false;
2205 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2206
2207 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2208 {
2209 /* Mostly ctors will have elts that don't have side-effects, so
2210 the usual case is to scan all the elements. Hence a single
2211 loop for both const and side effects, rather than one loop
2212 each (with early outs). */
2213 if (!TREE_CONSTANT (val))
2214 constant_p = false;
2215 if (TREE_SIDE_EFFECTS (val))
2216 side_effects_p = true;
2217 }
2218
2219 TREE_SIDE_EFFECTS (c) = side_effects_p;
2220 TREE_CONSTANT (c) = constant_p;
2221 }
2222
2223 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2224 CONSTRUCTOR C. */
2225
2226 void
2227 verify_constructor_flags (tree c)
2228 {
2229 unsigned int i;
2230 tree val;
2231 bool constant_p = TREE_CONSTANT (c);
2232 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2233 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2234
2235 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2236 {
2237 if (constant_p && !TREE_CONSTANT (val))
2238 internal_error ("non-constant element in constant CONSTRUCTOR");
2239 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2240 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2241 }
2242 }
2243
2244 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2245 are in the vec pointed to by VALS. */
2246 tree
2247 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2248 {
2249 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2250
2251 TREE_TYPE (c) = type;
2252 CONSTRUCTOR_ELTS (c) = vals;
2253
2254 recompute_constructor_flags (c);
2255
2256 return c;
2257 }
2258
2259 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2260 INDEX and VALUE. */
2261 tree
2262 build_constructor_single (tree type, tree index, tree value)
2263 {
2264 vec<constructor_elt, va_gc> *v;
2265 constructor_elt elt = {index, value};
2266
2267 vec_alloc (v, 1);
2268 v->quick_push (elt);
2269
2270 return build_constructor (type, v);
2271 }
2272
2273
2274 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2275 are in a list pointed to by VALS. */
2276 tree
2277 build_constructor_from_list (tree type, tree vals)
2278 {
2279 tree t;
2280 vec<constructor_elt, va_gc> *v = NULL;
2281
2282 if (vals)
2283 {
2284 vec_alloc (v, list_length (vals));
2285 for (t = vals; t; t = TREE_CHAIN (t))
2286 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2287 }
2288
2289 return build_constructor (type, v);
2290 }
2291
2292 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2293 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2294 fields in the constructor remain null. */
2295
2296 tree
2297 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2298 {
2299 vec<constructor_elt, va_gc> *v = NULL;
2300
2301 for (tree t : vals)
2302 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2303
2304 return build_constructor (type, v);
2305 }
2306
2307 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2308 of elements, provided as index/value pairs. */
2309
2310 tree
2311 build_constructor_va (tree type, int nelts, ...)
2312 {
2313 vec<constructor_elt, va_gc> *v = NULL;
2314 va_list p;
2315
2316 va_start (p, nelts);
2317 vec_alloc (v, nelts);
2318 while (nelts--)
2319 {
2320 tree index = va_arg (p, tree);
2321 tree value = va_arg (p, tree);
2322 CONSTRUCTOR_APPEND_ELT (v, index, value);
2323 }
2324 va_end (p);
2325 return build_constructor (type, v);
2326 }
2327
2328 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2329
2330 tree
2331 build_clobber (tree type)
2332 {
2333 tree clobber = build_constructor (type, NULL);
2334 TREE_THIS_VOLATILE (clobber) = true;
2335 return clobber;
2336 }
2337
2338 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2339
2340 tree
2341 build_fixed (tree type, FIXED_VALUE_TYPE f)
2342 {
2343 tree v;
2344 FIXED_VALUE_TYPE *fp;
2345
2346 v = make_node (FIXED_CST);
2347 fp = ggc_alloc<fixed_value> ();
2348 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2349
2350 TREE_TYPE (v) = type;
2351 TREE_FIXED_CST_PTR (v) = fp;
2352 return v;
2353 }
2354
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  /* The REAL_VALUE_TYPE is stored out of line in GC memory; the node
     only holds a pointer to it.  */
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  /* OVERFLOW is currently always zero; see the ??? note above.  */
  TREE_OVERFLOW (v) = overflow;
  return v;
}
2376
2377 /* Like build_real, but first truncate D to the type. */
2378
2379 tree
2380 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2381 {
2382 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2383 }
2384
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value of the INTEGER_CST node I.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  /* TYPE may be null, in which case the conversion uses VOIDmode.
     The sign of the conversion always comes from I's own type.  */
  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}
2401
2402 /* Given a tree representing an integer constant I, return a tree
2403 representing the same value as a floating-point constant of type TYPE. */
2404
2405 tree
2406 build_real_from_int_cst (tree type, const_tree i)
2407 {
2408 tree v;
2409 int overflow = TREE_OVERFLOW (i);
2410
2411 v = build_real (type, real_value_from_int_cst (type, i));
2412
2413 TREE_OVERFLOW (v) |= overflow;
2414 return v;
2415 }
2416
2417 /* Return a new REAL_CST node whose type is TYPE
2418 and whose value is the integer value I which has sign SGN. */
2419
2420 tree
2421 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2422 {
2423 REAL_VALUE_TYPE d;
2424
2425 /* Clear all bits of the real value type so that we can later do
2426 bitwise comparisons to see if two values are the same. */
2427 memset (&d, 0, sizeof d);
2428
2429 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2430 return build_real (type, d);
2431 }
2432
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  /* The node and its string payload are allocated as one block.  */
  tree s = (tree) ggc_internal_alloc (size);

  /* Only the tree_typed header needs clearing; every other field is
     written explicitly below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  /* Always NUL-terminate one past LEN, so the payload is a valid C
     string even when LEN itself does not count a trailing NUL.  */
  s->string.str[len] = '\0';

  return s;
}
2460
2461 /* Return a newly constructed COMPLEX_CST node whose value is
2462 specified by the real and imaginary parts REAL and IMAG.
2463 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2464 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2465
2466 tree
2467 build_complex (tree type, tree real, tree imag)
2468 {
2469 gcc_assert (CONSTANT_CLASS_P (real));
2470 gcc_assert (CONSTANT_CLASS_P (imag));
2471
2472 tree t = make_node (COMPLEX_CST);
2473
2474 TREE_REALPART (t) = real;
2475 TREE_IMAGPART (t) = imag;
2476 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2477 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2478 return t;
2479 }
2480
2481 /* Build a complex (inf +- 0i), such as for the result of cproj.
2482 TYPE is the complex tree type of the result. If NEG is true, the
2483 imaginary zero is negative. */
2484
2485 tree
2486 build_complex_inf (tree type, bool neg)
2487 {
2488 REAL_VALUE_TYPE rinf, rzero = dconst0;
2489
2490 real_inf (&rinf);
2491 rzero.sign = neg;
2492 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2493 build_real (TREE_TYPE (type), rzero));
2494 }
2495
2496 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2497 element is set to 1. In particular, this is 1 + i for complex types. */
2498
2499 tree
2500 build_each_one_cst (tree type)
2501 {
2502 if (TREE_CODE (type) == COMPLEX_TYPE)
2503 {
2504 tree scalar = build_one_cst (TREE_TYPE (type));
2505 return build_complex (type, scalar, scalar);
2506 }
2507 else
2508 return build_one_cst (type);
2509 }
2510
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector whose every element is 1.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The complex multiplicative identity is 1 + 0i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2548
2549 /* Return an integer of type TYPE containing all 1's in as much precision as
2550 it contains, or a complex or vector whose subparts are such integers. */
2551
2552 tree
2553 build_all_ones_cst (tree type)
2554 {
2555 if (TREE_CODE (type) == COMPLEX_TYPE)
2556 {
2557 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2558 return build_complex (type, scalar, scalar);
2559 }
2560 else
2561 return build_minus_one_cst (type);
2562 }
2563
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate -1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector whose every element is -1.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The complex value is -1 + 0i.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2603
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      /* Positive zero, not negative zero.  */
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector whose every element is zero.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	/* 0 + 0i.  */
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Remaining scalar types are converted from integer zero;
	 aggregates become an empty CONSTRUCTOR.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2644
2645
/* Build a BINFO with LEN language slots.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfos vector is embedded directly after the fixed part
     of the node, so size the whole allocation up front.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the fixed header; the embedded vector is initialized
     explicitly below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2667
2668 /* Create a CASE_LABEL_EXPR tree node and return it. */
2669
2670 tree
2671 build_case_label (tree low_value, tree high_value, tree label_decl)
2672 {
2673 tree t = make_node (CASE_LABEL_EXPR);
2674
2675 TREE_TYPE (t) = void_type_node;
2676 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2677
2678 CASE_LOW (t) = low_value;
2679 CASE_HIGH (t) = high_value;
2680 CASE_LABEL (t) = label_decl;
2681 CASE_CHAIN (t) = NULL_TREE;
2682
2683 return t;
2684 }
2685
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_int_cst already has room for one HOST_WIDE_INT;
     allocate space for the remaining EXT_LEN - 1 after it.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2717
2718 /* Build a newly constructed TREE_VEC node of length LEN. */
2719
2720 tree
2721 make_tree_vec (int len MEM_STAT_DECL)
2722 {
2723 tree t;
2724 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2725
2726 record_node_allocation_statistics (TREE_VEC, length);
2727
2728 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2729
2730 TREE_SET_CODE (t, TREE_VEC);
2731 TREE_VEC_LENGTH (t) = len;
2732
2733 return t;
2734 }
2735
2736 /* Grow a TREE_VEC node to new length LEN. */
2737
2738 tree
2739 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2740 {
2741 gcc_assert (TREE_CODE (v) == TREE_VEC);
2742
2743 int oldlen = TREE_VEC_LENGTH (v);
2744 gcc_assert (len > oldlen);
2745
2746 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2747 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2748
2749 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2750
2751 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2752
2753 TREE_VEC_LENGTH (v) = len;
2754
2755 return v;
2756 }
2757 \f
2758 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2759 fixed, and scalar, complex or vector. */
2760
2761 bool
2762 zerop (const_tree expr)
2763 {
2764 return (integer_zerop (expr)
2765 || real_zerop (expr)
2766 || fixed_zerop (expr));
2767 }
2768
2769 /* Return 1 if EXPR is the integer constant zero or a complex constant
2770 of zero, or a location wrapper for such a constant. */
2771
2772 bool
2773 integer_zerop (const_tree expr)
2774 {
2775 STRIP_ANY_LOCATION_WRAPPER (expr);
2776
2777 switch (TREE_CODE (expr))
2778 {
2779 case INTEGER_CST:
2780 return wi::to_wide (expr) == 0;
2781 case COMPLEX_CST:
2782 return (integer_zerop (TREE_REALPART (expr))
2783 && integer_zerop (TREE_IMAGPART (expr)));
2784 case VECTOR_CST:
2785 return (VECTOR_CST_NPATTERNS (expr) == 1
2786 && VECTOR_CST_DUPLICATE_P (expr)
2787 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2788 default:
2789 return false;
2790 }
2791 }
2792
2793 /* Return 1 if EXPR is the integer constant one or the corresponding
2794 complex constant, or a location wrapper for such a constant. */
2795
2796 bool
2797 integer_onep (const_tree expr)
2798 {
2799 STRIP_ANY_LOCATION_WRAPPER (expr);
2800
2801 switch (TREE_CODE (expr))
2802 {
2803 case INTEGER_CST:
2804 return wi::eq_p (wi::to_widest (expr), 1);
2805 case COMPLEX_CST:
2806 return (integer_onep (TREE_REALPART (expr))
2807 && integer_zerop (TREE_IMAGPART (expr)));
2808 case VECTOR_CST:
2809 return (VECTOR_CST_NPATTERNS (expr) == 1
2810 && VECTOR_CST_DUPLICATE_P (expr)
2811 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2812 default:
2813 return false;
2814 }
2815 }
2816
2817 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2818 return 1 if every piece is the integer constant one.
2819 Also return 1 for location wrappers for such a constant. */
2820
2821 bool
2822 integer_each_onep (const_tree expr)
2823 {
2824 STRIP_ANY_LOCATION_WRAPPER (expr);
2825
2826 if (TREE_CODE (expr) == COMPLEX_CST)
2827 return (integer_onep (TREE_REALPART (expr))
2828 && integer_onep (TREE_IMAGPART (expr)));
2829 else
2830 return integer_onep (expr);
2831 }
2832
2833 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2834 it contains, or a complex or vector whose subparts are such integers,
2835 or a location wrapper for such a constant. */
2836
2837 bool
2838 integer_all_onesp (const_tree expr)
2839 {
2840 STRIP_ANY_LOCATION_WRAPPER (expr);
2841
2842 if (TREE_CODE (expr) == COMPLEX_CST
2843 && integer_all_onesp (TREE_REALPART (expr))
2844 && integer_all_onesp (TREE_IMAGPART (expr)))
2845 return true;
2846
2847 else if (TREE_CODE (expr) == VECTOR_CST)
2848 return (VECTOR_CST_NPATTERNS (expr) == 1
2849 && VECTOR_CST_DUPLICATE_P (expr)
2850 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2851
2852 else if (TREE_CODE (expr) != INTEGER_CST)
2853 return false;
2854
2855 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2856 == wi::to_wide (expr));
2857 }
2858
2859 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2860 for such a constant. */
2861
2862 bool
2863 integer_minus_onep (const_tree expr)
2864 {
2865 STRIP_ANY_LOCATION_WRAPPER (expr);
2866
2867 if (TREE_CODE (expr) == COMPLEX_CST)
2868 return (integer_all_onesp (TREE_REALPART (expr))
2869 && integer_zerop (TREE_IMAGPART (expr)));
2870 else
2871 return integer_all_onesp (expr);
2872 }
2873
2874 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2875 one bit on), or a location wrapper for such a constant. */
2876
2877 bool
2878 integer_pow2p (const_tree expr)
2879 {
2880 STRIP_ANY_LOCATION_WRAPPER (expr);
2881
2882 if (TREE_CODE (expr) == COMPLEX_CST
2883 && integer_pow2p (TREE_REALPART (expr))
2884 && integer_zerop (TREE_IMAGPART (expr)))
2885 return true;
2886
2887 if (TREE_CODE (expr) != INTEGER_CST)
2888 return false;
2889
2890 return wi::popcount (wi::to_wide (expr)) == 1;
2891 }
2892
2893 /* Return 1 if EXPR is an integer constant other than zero or a
2894 complex constant other than zero, or a location wrapper for such a
2895 constant. */
2896
2897 bool
2898 integer_nonzerop (const_tree expr)
2899 {
2900 STRIP_ANY_LOCATION_WRAPPER (expr);
2901
2902 return ((TREE_CODE (expr) == INTEGER_CST
2903 && wi::to_wide (expr) != 0)
2904 || (TREE_CODE (expr) == COMPLEX_CST
2905 && (integer_nonzerop (TREE_REALPART (expr))
2906 || integer_nonzerop (TREE_IMAGPART (expr)))));
2907 }
2908
2909 /* Return 1 if EXPR is the integer constant one. For vector,
2910 return 1 if every piece is the integer constant minus one
2911 (representing the value TRUE).
2912 Also return 1 for location wrappers for such a constant. */
2913
2914 bool
2915 integer_truep (const_tree expr)
2916 {
2917 STRIP_ANY_LOCATION_WRAPPER (expr);
2918
2919 if (TREE_CODE (expr) == VECTOR_CST)
2920 return integer_all_onesp (expr);
2921 return integer_onep (expr);
2922 }
2923
2924 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2925 for such a constant. */
2926
2927 bool
2928 fixed_zerop (const_tree expr)
2929 {
2930 STRIP_ANY_LOCATION_WRAPPER (expr);
2931
2932 return (TREE_CODE (expr) == FIXED_CST
2933 && TREE_FIXED_CST (expr).data.is_zero ());
2934 }
2935
2936 /* Return the power of two represented by a tree node known to be a
2937 power of two. */
2938
2939 int
2940 tree_log2 (const_tree expr)
2941 {
2942 if (TREE_CODE (expr) == COMPLEX_CST)
2943 return tree_log2 (TREE_REALPART (expr));
2944
2945 return wi::exact_log2 (wi::to_wide (expr));
2946 }
2947
2948 /* Similar, but return the largest integer Y such that 2 ** Y is less
2949 than or equal to EXPR. */
2950
2951 int
2952 tree_floor_log2 (const_tree expr)
2953 {
2954 if (TREE_CODE (expr) == COMPLEX_CST)
2955 return tree_log2 (TREE_REALPART (expr));
2956
2957 return wi::floor_log2 (wi::to_wide (expr));
2958 }
2959
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of it's type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer-typed expressions have a meaningful
     trailing-zero count here.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bits information recorded for the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These operations preserve the trailing zeros common to both
	 operands.  Skip the second recursion when the first operand
	 already yields nothing.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so the larger of the two counts
	 survives.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Multiplying sums the trailing-zero counts of the factors.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      /* A constant in-range shift count adds that many zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A constant in-range shift count consumes that many zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift
	 by its log2.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If every bit of the inner value is a trailing zero, the value
	 is zero and so is the converted value.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must contribute; bail early if the first gives
	 nothing.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the pointer's known alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
3070
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* real_equal against dconst0 accepts both +0.0 and -0.0.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
3102
3103 /* Return 1 if EXPR is the real constant one in real or complex form.
3104 Trailing zeroes matter for decimal float constants, so don't return
3105 1 for them.
3106 Also return 1 for location wrappers around such a constant. */
3107
3108 bool
3109 real_onep (const_tree expr)
3110 {
3111 STRIP_ANY_LOCATION_WRAPPER (expr);
3112
3113 switch (TREE_CODE (expr))
3114 {
3115 case REAL_CST:
3116 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3117 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3118 case COMPLEX_CST:
3119 return real_onep (TREE_REALPART (expr))
3120 && real_zerop (TREE_IMAGPART (expr));
3121 case VECTOR_CST:
3122 return (VECTOR_CST_NPATTERNS (expr) == 1
3123 && VECTOR_CST_DUPLICATE_P (expr)
3124 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3125 default:
3126 return false;
3127 }
3128 }
3129
3130 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3131 matter for decimal float constants, so don't return 1 for them.
3132 Also return 1 for location wrappers around such a constant. */
3133
3134 bool
3135 real_minus_onep (const_tree expr)
3136 {
3137 STRIP_ANY_LOCATION_WRAPPER (expr);
3138
3139 switch (TREE_CODE (expr))
3140 {
3141 case REAL_CST:
3142 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3143 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3144 case COMPLEX_CST:
3145 return real_minus_onep (TREE_REALPART (expr))
3146 && real_zerop (TREE_IMAGPART (expr));
3147 case VECTOR_CST:
3148 return (VECTOR_CST_NPATTERNS (expr) == 1
3149 && VECTOR_CST_DUPLICATE_P (expr)
3150 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3151 default:
3152 return false;
3153 }
3154 }
3155
3156 /* Nonzero if EXP is a constant or a cast of a constant. */
3157
3158 bool
3159 really_constant_p (const_tree exp)
3160 {
3161 /* This is not quite the same as STRIP_NOPS. It does more. */
3162 while (CONVERT_EXPR_P (exp)
3163 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3164 exp = TREE_OPERAND (exp, 0);
3165 return TREE_CONSTANT (exp);
3166 }
3167
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Validate every coefficient before writing any of them, so
	 *VALUE is left untouched on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
3198
3199 poly_int64
3200 tree_to_poly_int64 (const_tree t)
3201 {
3202 gcc_assert (tree_fits_poly_int64_p (t));
3203 if (POLY_INT_CST_P (t))
3204 return poly_int_cst_value (t).force_shwi ();
3205 return TREE_INT_CST_LOW (t);
3206 }
3207
3208 poly_uint64
3209 tree_to_poly_uint64 (const_tree t)
3210 {
3211 gcc_assert (tree_fits_poly_uint64_p (t));
3212 if (POLY_INT_CST_P (t))
3213 return poly_int_cst_value (t).force_uhwi ();
3214 return TREE_INT_CST_LOW (t);
3215 }
3216 \f
3217 /* Return first list element whose TREE_VALUE is ELEM.
3218 Return 0 if ELEM is not in LIST. */
3219
3220 tree
3221 value_member (tree elem, tree list)
3222 {
3223 while (list)
3224 {
3225 if (elem == TREE_VALUE (list))
3226 return list;
3227 list = TREE_CHAIN (list);
3228 }
3229 return NULL_TREE;
3230 }
3231
3232 /* Return first list element whose TREE_PURPOSE is ELEM.
3233 Return 0 if ELEM is not in LIST. */
3234
3235 tree
3236 purpose_member (const_tree elem, tree list)
3237 {
3238 while (list)
3239 {
3240 if (elem == TREE_PURPOSE (list))
3241 return list;
3242 list = TREE_CHAIN (list);
3243 }
3244 return NULL_TREE;
3245 }
3246
3247 /* Return true if ELEM is in V. */
3248
3249 bool
3250 vec_member (const_tree elem, vec<tree, va_gc> *v)
3251 {
3252 unsigned ix;
3253 tree t;
3254 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3255 if (elem == t)
3256 return true;
3257 return false;
3258 }
3259
3260 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3261 NULL_TREE. */
3262
3263 tree
3264 chain_index (int idx, tree chain)
3265 {
3266 for (; chain && idx > 0; --idx)
3267 chain = TREE_CHAIN (chain);
3268 return chain;
3269 }
3270
3271 /* Return nonzero if ELEM is part of the chain CHAIN. */
3272
3273 bool
3274 chain_member (const_tree elem, const_tree chain)
3275 {
3276 while (chain)
3277 {
3278 if (elem == chain)
3279 return true;
3280 chain = DECL_CHAIN (chain);
3281 }
3282
3283 return false;
3284 }
3285
3286 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3287 We expect a null pointer to mark the end of the chain.
3288 This is the Lisp primitive `length'. */
3289
3290 int
3291 list_length (const_tree t)
3292 {
3293 const_tree p = t;
3294 #ifdef ENABLE_TREE_CHECKING
3295 const_tree q = t;
3296 #endif
3297 int len = 0;
3298
3299 while (p)
3300 {
3301 p = TREE_CHAIN (p);
3302 #ifdef ENABLE_TREE_CHECKING
3303 if (len % 2)
3304 q = TREE_CHAIN (q);
3305 gcc_assert (p != q);
3306 #endif
3307 len++;
3308 }
3309
3310 return len;
3311 }
3312
3313 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3314 UNION_TYPE TYPE, or NULL_TREE if none. */
3315
3316 tree
3317 first_field (const_tree type)
3318 {
3319 tree t = TYPE_FIELDS (type);
3320 while (t && TREE_CODE (t) != FIELD_DECL)
3321 t = TREE_CHAIN (t);
3322 return t;
3323 }
3324
3325 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3326 UNION_TYPE TYPE, or NULL_TREE if none. */
3327
3328 tree
3329 last_field (const_tree type)
3330 {
3331 tree last = NULL_TREE;
3332
3333 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3334 {
3335 if (TREE_CODE (fld) != FIELD_DECL)
3336 continue;
3337
3338 last = fld;
3339 }
3340
3341 return last;
3342 }
3343
3344 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3345 by modifying the last node in chain 1 to point to chain 2.
3346 This is the Lisp primitive `nconc'. */
3347
3348 tree
3349 chainon (tree op1, tree op2)
3350 {
3351 tree t1;
3352
3353 if (!op1)
3354 return op2;
3355 if (!op2)
3356 return op1;
3357
3358 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3359 continue;
3360 TREE_CHAIN (t1) = op2;
3361
3362 #ifdef ENABLE_TREE_CHECKING
3363 {
3364 tree t2;
3365 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3366 gcc_assert (t2 != t1);
3367 }
3368 #endif
3369
3370 return op1;
3371 }
3372
3373 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3374
3375 tree
3376 tree_last (tree chain)
3377 {
3378 tree next;
3379 if (chain)
3380 while ((next = TREE_CHAIN (chain)))
3381 chain = next;
3382 return chain;
3383 }
3384
3385 /* Reverse the order of elements in the chain T,
3386 and return the new head of the chain (old last element). */
3387
3388 tree
3389 nreverse (tree t)
3390 {
3391 tree prev = 0, decl, next;
3392 for (decl = t; decl; decl = next)
3393 {
3394 /* We shouldn't be using this function to reverse BLOCK chains; we
3395 have blocks_nreverse for that. */
3396 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3397 next = TREE_CHAIN (decl);
3398 TREE_CHAIN (decl) = prev;
3399 prev = decl;
3400 }
3401 return prev;
3402 }
3403 \f
3404 /* Return a newly created TREE_LIST node whose
3405 purpose and value fields are PARM and VALUE. */
3406
3407 tree
3408 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3409 {
3410 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3411 TREE_PURPOSE (t) = parm;
3412 TREE_VALUE (t) = value;
3413 return t;
3414 }
3415
3416 /* Build a chain of TREE_LIST nodes from a vector. */
3417
3418 tree
3419 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3420 {
3421 tree ret = NULL_TREE;
3422 tree *pp = &ret;
3423 unsigned int i;
3424 tree t;
3425 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3426 {
3427 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3428 pp = &TREE_CHAIN (*pp);
3429 }
3430 return ret;
3431 }
3432
3433 /* Return a newly created TREE_LIST node whose
3434 purpose and value fields are PURPOSE and VALUE
3435 and whose TREE_CHAIN is CHAIN. */
3436
3437 tree
3438 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3439 {
3440 tree node;
3441
3442 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3443 memset (node, 0, sizeof (struct tree_common));
3444
3445 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3446
3447 TREE_SET_CODE (node, TREE_LIST);
3448 TREE_CHAIN (node) = chain;
3449 TREE_PURPOSE (node) = purpose;
3450 TREE_VALUE (node) = value;
3451 return node;
3452 }
3453
3454 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3455 trees. */
3456
3457 vec<tree, va_gc> *
3458 ctor_to_vec (tree ctor)
3459 {
3460 vec<tree, va_gc> *vec;
3461 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3462 unsigned int ix;
3463 tree val;
3464
3465 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3466 vec->quick_push (val);
3467
3468 return vec;
3469 }
3470 \f
3471 /* Return the size nominally occupied by an object of type TYPE
3472 when it resides in memory. The value is measured in units of bytes,
3473 and its data type is that normally used for type sizes
3474 (which is the first type created by make_signed_type or
3475 make_unsigned_type). */
3476
3477 tree
3478 size_in_bytes_loc (location_t loc, const_tree type)
3479 {
3480 tree t;
3481
3482 if (type == error_mark_node)
3483 return integer_zero_node;
3484
3485 type = TYPE_MAIN_VARIANT (type);
3486 t = TYPE_SIZE_UNIT (type);
3487
3488 if (t == 0)
3489 {
3490 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3491 return size_zero_node;
3492 }
3493
3494 return t;
3495 }
3496
3497 /* Return the size of TYPE (in bytes) as a wide integer
3498 or return -1 if the size can vary or is larger than an integer. */
3499
3500 HOST_WIDE_INT
3501 int_size_in_bytes (const_tree type)
3502 {
3503 tree t;
3504
3505 if (type == error_mark_node)
3506 return 0;
3507
3508 type = TYPE_MAIN_VARIANT (type);
3509 t = TYPE_SIZE_UNIT (type);
3510
3511 if (t && tree_fits_uhwi_p (t))
3512 return TREE_INT_CST_LOW (t);
3513 else
3514 return -1;
3515 }
3516
3517 /* Return the maximum size of TYPE (in bytes) as a wide integer
3518 or return -1 if the size can vary or is larger than an integer. */
3519
3520 HOST_WIDE_INT
3521 max_int_size_in_bytes (const_tree type)
3522 {
3523 HOST_WIDE_INT size = -1;
3524 tree size_tree;
3525
3526 /* If this is an array type, check for a possible MAX_SIZE attached. */
3527
3528 if (TREE_CODE (type) == ARRAY_TYPE)
3529 {
3530 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3531
3532 if (size_tree && tree_fits_uhwi_p (size_tree))
3533 size = tree_to_uhwi (size_tree);
3534 }
3535
3536 /* If we still haven't been able to get a size, see if the language
3537 can compute a maximum size. */
3538
3539 if (size == -1)
3540 {
3541 size_tree = lang_hooks.types.max_size (type);
3542
3543 if (size_tree && tree_fits_uhwi_p (size_tree))
3544 size = tree_to_uhwi (size_tree);
3545 }
3546
3547 return size;
3548 }
3549 \f
3550 /* Return the bit position of FIELD, in bits from the start of the record.
3551 This is a tree of type bitsizetype. */
3552
3553 tree
3554 bit_position (const_tree field)
3555 {
3556 return bit_from_pos (DECL_FIELD_OFFSET (field),
3557 DECL_FIELD_BIT_OFFSET (field));
3558 }
3559 \f
3560 /* Return the byte position of FIELD, in bytes from the start of the record.
3561 This is a tree of type sizetype. */
3562
3563 tree
3564 byte_position (const_tree field)
3565 {
3566 return byte_from_pos (DECL_FIELD_OFFSET (field),
3567 DECL_FIELD_BIT_OFFSET (field));
3568 }
3569
3570 /* Likewise, but return as an integer. It must be representable in
3571 that way (since it could be a signed value, we don't have the
3572 option of returning -1 like int_size_in_byte can. */
3573
3574 HOST_WIDE_INT
3575 int_byte_position (const_tree field)
3576 {
3577 return tree_to_shwi (byte_position (field));
3578 }
3579 \f
3580 /* Return, as a tree node, the number of elements for TYPE (which is an
3581 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3582
3583 tree
3584 array_type_nelts (const_tree type)
3585 {
3586 tree index_type, min, max;
3587
3588 /* If they did it with unspecified bounds, then we should have already
3589 given an error about it before we got here. */
3590 if (! TYPE_DOMAIN (type))
3591 return error_mark_node;
3592
3593 index_type = TYPE_DOMAIN (type);
3594 min = TYPE_MIN_VALUE (index_type);
3595 max = TYPE_MAX_VALUE (index_type);
3596
3597 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3598 if (!max)
3599 {
3600 /* zero sized arrays are represented from C FE as complete types with
3601 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3602 them as min 0, max -1. */
3603 if (COMPLETE_TYPE_P (type)
3604 && integer_zerop (TYPE_SIZE (type))
3605 && integer_zerop (min))
3606 return build_int_cst (TREE_TYPE (min), -1);
3607
3608 return error_mark_node;
3609 }
3610
3611 return (integer_zerop (min)
3612 ? max
3613 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3614 }
3615 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* A variable is static here only if it lives in static storage and
	 is neither thread-local nor dllimport'd: those two have addresses
	 that are not link-time constants.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise a component is static iff the containing object is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      /* *P is static when P itself is a compile-time constant address.  */
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* A constant index into a fixed-size element of a static array
	 recurses on the array itself.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3681
3682 \f
3683
3684
3685 /* Return whether OP is a DECL whose address is function-invariant. */
3686
3687 bool
3688 decl_address_invariant_p (const_tree op)
3689 {
3690 /* The conditions below are slightly less strict than the one in
3691 staticp. */
3692
3693 switch (TREE_CODE (op))
3694 {
3695 case PARM_DECL:
3696 case RESULT_DECL:
3697 case LABEL_DECL:
3698 case FUNCTION_DECL:
3699 return true;
3700
3701 case VAR_DECL:
3702 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3703 || DECL_THREAD_LOCAL_P (op)
3704 || DECL_CONTEXT (op) == current_function_decl
3705 || decl_function_context (op) == current_function_decl)
3706 return true;
3707 break;
3708
3709 case CONST_DECL:
3710 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3711 || decl_function_context (op) == current_function_decl)
3712 return true;
3713 break;
3714
3715 default:
3716 break;
3717 }
3718
3719 return false;
3720 }
3721
3722 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3723
3724 bool
3725 decl_address_ip_invariant_p (const_tree op)
3726 {
3727 /* The conditions below are slightly less strict than the one in
3728 staticp. */
3729
3730 switch (TREE_CODE (op))
3731 {
3732 case LABEL_DECL:
3733 case FUNCTION_DECL:
3734 case STRING_CST:
3735 return true;
3736
3737 case VAR_DECL:
3738 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3739 && !DECL_DLLIMPORT_P (op))
3740 || DECL_THREAD_LOCAL_P (op))
3741 return true;
3742 break;
3743
3744 case CONST_DECL:
3745 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3746 return true;
3747 break;
3748
3749 default:
3750 break;
3751 }
3752
3753 return false;
3754 }
3755
3756
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and side-effect-free read-only trees are trivially
     invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      /* A SAVE_EXPR is evaluated exactly once; reusing it is invariant.  */
      return true;

    case ADDR_EXPR:
      /* The address is invariant if every index in the component-reference
	 chain is invariant and the base object's address is invariant.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* Operands 2 and 3 are a non-default lower bound and element
		 size; give up if either is present.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2 is a non-default field offset; give up if set.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3807
3808 /* Return true if T is function-invariant. */
3809
3810 bool
3811 tree_invariant_p (tree t)
3812 {
3813 tree inner = skip_simple_arithmetic (t);
3814 return tree_invariant_p_1 (inner);
3815 }
3816
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  /* Invariant expressions are safe to reevaluate; return them unwrapped.  */
  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3876
3877 /* Look inside EXPR into any simple arithmetic operations. Return the
3878 outermost non-arithmetic or non-invariant node. */
3879
3880 tree
3881 skip_simple_arithmetic (tree expr)
3882 {
3883 /* We don't care about whether this can be used as an lvalue in this
3884 context. */
3885 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3886 expr = TREE_OPERAND (expr, 0);
3887
3888 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3889 a constant, it will be more efficient to not make another SAVE_EXPR since
3890 it will allow better simplification and GCSE will be able to merge the
3891 computations if they actually occur. */
3892 while (true)
3893 {
3894 if (UNARY_CLASS_P (expr))
3895 expr = TREE_OPERAND (expr, 0);
3896 else if (BINARY_CLASS_P (expr))
3897 {
3898 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3899 expr = TREE_OPERAND (expr, 0);
3900 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3901 expr = TREE_OPERAND (expr, 1);
3902 else
3903 break;
3904 }
3905 else
3906 break;
3907 }
3908
3909 return expr;
3910 }
3911
3912 /* Look inside EXPR into simple arithmetic operations involving constants.
3913 Return the outermost non-arithmetic or non-constant node. */
3914
3915 tree
3916 skip_simple_constant_arithmetic (tree expr)
3917 {
3918 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3919 expr = TREE_OPERAND (expr, 0);
3920
3921 while (true)
3922 {
3923 if (UNARY_CLASS_P (expr))
3924 expr = TREE_OPERAND (expr, 0);
3925 else if (BINARY_CLASS_P (expr))
3926 {
3927 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3928 expr = TREE_OPERAND (expr, 0);
3929 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3930 expr = TREE_OPERAND (expr, 1);
3931 else
3932 break;
3933 }
3934 else
3935 break;
3936 }
3937
3938 return expr;
3939 }
3940
3941 /* Return which tree structure is used by T. */
3942
3943 enum tree_node_structure_enum
3944 tree_node_structure (const_tree t)
3945 {
3946 const enum tree_code code = TREE_CODE (t);
3947 return tree_node_structure_for_code (code);
3948 }
3949
3950 /* Set various status flags when building a CALL_EXPR object T. */
3951
3952 static void
3953 process_call_operands (tree t)
3954 {
3955 bool side_effects = TREE_SIDE_EFFECTS (t);
3956 bool read_only = false;
3957 int i = call_expr_flags (t);
3958
3959 /* Calls have side-effects, except those to const or pure functions. */
3960 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3961 side_effects = true;
3962 /* Propagate TREE_READONLY of arguments for const functions. */
3963 if (i & ECF_CONST)
3964 read_only = true;
3965
3966 if (!side_effects || read_only)
3967 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3968 {
3969 tree op = TREE_OPERAND (t, i);
3970 if (op && TREE_SIDE_EFFECTS (op))
3971 side_effects = true;
3972 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3973 read_only = false;
3974 }
3975
3976 TREE_SIDE_EFFECTS (t) = side_effects;
3977 TREE_READONLY (t) = read_only;
3978 }
3979 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will assume
	 here will be valid.  Only the base object (operand 0) matters.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* Generic unary/binary case: check all operands.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* A call contains a placeholder if any argument does.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
4066
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* Nothing beyond the size (already checked) can hold a
	 placeholder for these codes.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (QUAL_UNION only), and
	   type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
4135
4136 /* Wrapper around above function used to cache its result. */
4137
4138 bool
4139 type_contains_placeholder_p (tree type)
4140 {
4141 bool result;
4142
4143 /* If the contains_placeholder_bits field has been initialized,
4144 then we know the answer. */
4145 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4146 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4147
4148 /* Indicate that we've seen this type node, and the answer is false.
4149 This is what we want to return if we run into recursion via fields. */
4150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4151
4152 /* Compute the real value. */
4153 result = type_contains_placeholder_1 (type);
4154
4155 /* Store the real value. */
4156 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4157
4158 return result;
4159 }
4160 \f
/* Push tree EXP onto vector QUEUE if it is not already present.  */

static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Linear duplicate scan; FOR_EACH_VEC_ELT leaves ITER null when the
     loop runs off the end without a `break', which is what the test
     below relies on.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  if (!iter)
    queue->safe_push (exp);
}
4176
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down to the innermost non-reference base object.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* A component chain rooted at a PLACEHOLDER_EXPR is recorded whole;
	 otherwise recurse into the base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Operand 0 of a variable-length expression is its length;
	   start at operand 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4250
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      /* Share the original node when nothing changed.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute in each operand; rebuild (and refold) only when at
	   least one operand changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* Operands 0-2 of a CALL_EXPR are length, function and
		   static chain; arguments start at operand 3.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Copy-on-write: clone EXP only on the first changed operand.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original expression on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4446
/* Similar to SUBSTITUTE_IN_EXPR, but look for a PLACEHOLDER_EXPR in EXP
   and find a replacement for it within OBJ, a tree that is an object or
   a chain of references.

   EXP is walked recursively; whenever no operand changes, EXP itself is
   returned so unchanged subtrees are shared rather than copied.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: walk down OBJ (through COMPOUND_EXPR/COND_EXPR second
	 operands, or operand 0 of reference/unary/binary/vl/expression
	 nodes) looking for an element whose main type variant matches the
	 placeholder's type directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: same walk, but accept an element that is a pointer
	 to the needed type, dereferencing it with an INDIRECT_REF.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	/* Constants and declarations contain no placeholders.  */
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Recurse on each operand; rebuild (and refold) only if at least
	   one operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Operand 0 is the node's length slot; start at operand 1 and
	     copy the node lazily, only when an operand changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Folding may have dropped flags that still apply to the rebuilt node.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4617 \f
4618
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.

   E is the expression to stabilize; the (possibly wrapped or rebuilt)
   equivalent expression is returned.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt leaves these unset; copy them from the original node.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4704
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The address itself may have side-effects, so use the _1 helper.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* Stabilize both the base (a reference) and the index (any expr).  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt leaves these unset; copy them from the original node.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
  protected_set_expr_location (result, EXPR_LOCATION (ref));

  return result;
}
4788 \f
4789 /* Low-level constructors for expressions. */
4790
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR T, based on the component
   references and the decl or indirection it ultimately takes the
   address of.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Clear TC / set SE according to NODE's flags; a NULL NODE is a no-op.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Operand 1 is the index, 2 and 3 are optional lower bound and
	     element size.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Give the language a chance to map NODE to the decl it denotes and
     adjust TC/SE itself.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4867
4868 /* Build an expression of code CODE, data type TYPE, and operands as
4869 specified. Expressions and reference nodes can be created this way.
4870 Constants, decls, types and misc nodes cannot be.
4871
4872 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4873 enough for all extant tree codes. */
4874
4875 tree
4876 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4877 {
4878 tree t;
4879
4880 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4881
4882 t = make_node (code PASS_MEM_STAT);
4883 TREE_TYPE (t) = tt;
4884
4885 return t;
4886 }
4887
/* Build a unary-operand expression of code CODE and type TYPE whose
   single operand is NODE.  Derive TREE_SIDE_EFFECTS, TREE_READONLY,
   TREE_CONSTANT and TREE_THIS_VOLATILE from NODE where appropriate.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part needs clearing; the rest is filled in below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      /* Statements have side effects, except debug begin markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4951
/* Helper for build2..build5 below: store ARG#N as operand N of T and fold
   the argument's flags into the caller's local variables SIDE_EFFECTS,
   READ_ONLY and CONSTANT.  Type nodes contribute nothing.  The (void)
   casts silence unused-value warnings in callers that ignore some of the
   accumulators.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4966
/* Build a binary expression of code CODE and type TT with operands
   ARG0 and ARG1.  Flags are derived from the operands; a literal
   division by zero is never marked TREE_CONSTANT.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Pointer arithmetic must be expressed with POINTER_PLUS_EXPR, not
     PLUS/MINUS/MULT — except for constant folding when sizetype and the
     pointer type differ in precision.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &DECL inherits DECL's readonly/volatile flags.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
5045
5046
/* Build a ternary expression of code CODE and type TT with operands
   ARG0, ARG1 and ARG2.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR propagates the accumulated readonly flag here.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5087
/* Build a four-operand expression of code CODE and type TT with operands
   ARG0..ARG3.  CONSTANT and READ_ONLY are accumulated by PROCESS_ARG but
   not used by this builder.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5114
/* Build a five-operand expression of code CODE and type TT with operands
   ARG0..ARG4.  A TARGET_MEM_REF of &DECL inherits DECL's readonly and
   volatile flags.  CONSTANT and READ_ONLY are accumulated by PROCESS_ARG
   but not used by this builder.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5152
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR, with source location LOC.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's offset into ours and use its base.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* The second MEM_REF operand carries both the offset and, via its
     type, the pointer type used for the access.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
5184
5185 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5186
5187 poly_offset_int
5188 mem_ref_offset (const_tree t)
5189 {
5190 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5191 SIGNED);
5192 }
5193
5194 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5195 offsetted by OFFSET units. */
5196
5197 tree
5198 build_invariant_address (tree type, tree base, poly_int64 offset)
5199 {
5200 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5201 build_fold_addr_expr (base),
5202 build_int_cst (ptr_type_node, offset));
5203 tree addr = build1 (ADDR_EXPR, type, ref);
5204 recompute_tree_invariant_for_addr_expr (addr);
5205 return addr;
5206 }
5207
5208 /* Similar except don't specify the TREE_TYPE
5209 and leave the TREE_SIDE_EFFECTS as 0.
5210 It is permissible for arguments to be null,
5211 or even garbage if their values do not matter. */
5212
5213 tree
5214 build_nt (enum tree_code code, ...)
5215 {
5216 tree t;
5217 int length;
5218 int i;
5219 va_list p;
5220
5221 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5222
5223 va_start (p, code);
5224
5225 t = make_node (code);
5226 length = TREE_CODE_LENGTH (code);
5227
5228 for (i = 0; i < length; i++)
5229 TREE_OPERAND (t, i) = va_arg (p, tree);
5230
5231 va_end (p);
5232 return t;
5233 }
5234
/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec ARGS as arguments and FN as the function expression.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  /* A CALL_EXPR has 3 fixed operands (length, fn, static chain) before
     the arguments.  */
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
5251 \f
5252 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5253 and data type TYPE.
5254 We do NOT enter this node in any sort of symbol table.
5255
5256 LOC is the location of the decl.
5257
5258 layout_decl is used to set up the decl's storage layout.
5259 Other slots are initialized to 0 or null pointers. */
5260
5261 tree
5262 build_decl (location_t loc, enum tree_code code, tree name,
5263 tree type MEM_STAT_DECL)
5264 {
5265 tree t;
5266
5267 t = make_node (code PASS_MEM_STAT);
5268 DECL_SOURCE_LOCATION (t) = loc;
5269
5270 /* if (type == error_mark_node)
5271 type = integer_type_node; */
5272 /* That is not done, deliberately, so that having error_mark_node
5273 as the type can suppress useless errors in the use of this variable. */
5274
5275 DECL_NAME (t) = name;
5276 TREE_TYPE (t) = type;
5277
5278 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5279 layout_decl (t, 0);
5280
5281 return t;
5282 }
5283
5284 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5285
5286 tree
5287 build_debug_expr_decl (tree type)
5288 {
5289 tree vexpr = make_node (DEBUG_EXPR_DECL);
5290 DECL_ARTIFICIAL (vexpr) = 1;
5291 TREE_TYPE (vexpr) = type;
5292 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5293 return vexpr;
5294 }
5295
5296 /* Builds and returns function declaration with NAME and TYPE. */
5297
5298 tree
5299 build_fn_decl (const char *name, tree type)
5300 {
5301 tree id = get_identifier (name);
5302 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5303
5304 DECL_EXTERNAL (decl) = 1;
5305 TREE_PUBLIC (decl) = 1;
5306 DECL_ARTIFICIAL (decl) = 1;
5307 TREE_NOTHROW (decl) = 1;
5308
5309 return decl;
5310 }
5311
/* Global list of all TRANSLATION_UNIT_DECLs created so far; maintained
   by build_translation_unit_decl below.  */
vec<tree, va_gc> *all_translation_units;
5313
5314 /* Builds a new translation-unit decl with name NAME, queues it in the
5315 global list of translation-unit decls and returns it. */
5316
5317 tree
5318 build_translation_unit_decl (tree name)
5319 {
5320 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5321 name, NULL_TREE);
5322 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5323 vec_safe_push (all_translation_units, tu);
5324 return tu;
5325 }
5326
5327 \f
5328 /* BLOCK nodes are used to represent the structure of binding contours
5329 and declarations, once those contours have been exited and their contents
5330 compiled. This information is used for outputting debugging info. */
5331
5332 tree
5333 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5334 {
5335 tree block = make_node (BLOCK);
5336
5337 BLOCK_VARS (block) = vars;
5338 BLOCK_SUBBLOCKS (block) = subblocks;
5339 BLOCK_SUPERCONTEXT (block) = supercontext;
5340 BLOCK_CHAIN (block) = chain;
5341 return block;
5342 }
5343
5344 \f
5345 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5346
5347 LOC is the location to use in tree T. */
5348
5349 void
5350 protected_set_expr_location (tree t, location_t loc)
5351 {
5352 if (CAN_HAVE_LOCATION_P (t))
5353 SET_EXPR_LOCATION (t, loc);
5354 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5355 {
5356 t = expr_single (t);
5357 if (t && CAN_HAVE_LOCATION_P (t))
5358 SET_EXPR_LOCATION (t, loc);
5359 }
5360 }
5361
5362 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5363 UNKNOWN_LOCATION. */
5364
5365 void
5366 protected_set_expr_location_if_unset (tree t, location_t loc)
5367 {
5368 t = expr_single (t);
5369 if (t && !EXPR_HAS_LOCATION (t))
5370 protected_set_expr_location (t, loc);
5371 }
5372 \f
5373 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5374 of the various TYPE_QUAL values. */
5375
5376 static void
5377 set_type_quals (tree type, int type_quals)
5378 {
5379 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5380 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5381 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5382 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5383 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5384 }
5385
5386 /* Returns true iff CAND and BASE have equivalent language-specific
5387 qualifiers. */
5388
5389 bool
5390 check_lang_type (const_tree cand, const_tree base)
5391 {
5392 if (lang_hooks.types.type_hash_eq == NULL)
5393 return true;
5394 /* type_hash_eq currently only applies to these types. */
5395 if (TREE_CODE (cand) != FUNCTION_TYPE
5396 && TREE_CODE (cand) != METHOD_TYPE)
5397 return true;
5398 return lang_hooks.types.type_hash_eq (cand, base);
5399 }
5400
5401 /* This function checks to see if TYPE matches the size one of the built-in
5402 atomic types, and returns that core atomic type. */
5403
5404 static tree
5405 find_atomic_core_type (const_tree type)
5406 {
5407 tree base_atomic_type;
5408
5409 /* Only handle complete types. */
5410 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5411 return NULL_TREE;
5412
5413 switch (tree_to_uhwi (TYPE_SIZE (type)))
5414 {
5415 case 8:
5416 base_atomic_type = atomicQI_type_node;
5417 break;
5418
5419 case 16:
5420 base_atomic_type = atomicHI_type_node;
5421 break;
5422
5423 case 32:
5424 base_atomic_type = atomicSI_type_node;
5425 break;
5426
5427 case 64:
5428 base_atomic_type = atomicDI_type_node;
5429 break;
5430
5431 case 128:
5432 base_atomic_type = atomicTI_type_node;
5433 break;
5434
5435 default:
5436 base_atomic_type = NULL_TREE;
5437 }
5438
5439 return base_atomic_type;
5440 }
5441
/* Returns true iff unqualified CAND and BASE are equivalent: same name,
   context and attributes, and compatible alignment.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
				TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must to do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
	return true;
    }
  return false;
}
5468
5469 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5470
5471 bool
5472 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5473 {
5474 return (TYPE_QUALS (cand) == type_quals
5475 && check_base_type (cand, base)
5476 && check_lang_type (cand, base));
5477 }
5478
5479 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5480
5481 static bool
5482 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5483 {
5484 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5485 && TYPE_NAME (cand) == TYPE_NAME (base)
5486 /* Apparently this is needed for Objective-C. */
5487 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5488 /* Check alignment. */
5489 && TYPE_ALIGN (cand) == align
5490 /* Check this is a user-aligned type as build_aligned_type
5491 would create. */
5492 && TYPE_USER_ALIGN (cand)
5493 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5494 TYPE_ATTRIBUTES (base))
5495 && check_lang_type (cand, base));
5496 }
5497
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  /* TYPE itself may already carry exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* Check the main variant first; it heads the variant chain.  */
  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	/* Unlink T from its current position (TP points at the previous
	   node's next field) ...  */
	*tp = TYPE_NEXT_VARIANT (t);
	/* ... and relink it directly after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
5530
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  /* Recurse on the canonical type with the same qualifiers; the
	     result's canonical type becomes T's canonical type.  */
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
5579
5580 /* Create a variant of type T with alignment ALIGN. */
5581
5582 tree
5583 build_aligned_type (tree type, unsigned int align)
5584 {
5585 tree t;
5586
5587 if (TYPE_PACKED (type)
5588 || TYPE_ALIGN (type) == align)
5589 return type;
5590
5591 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5592 if (check_aligned_type (t, type, align))
5593 return t;
5594
5595 t = build_variant_type_copy (type);
5596 SET_TYPE_ALIGN (t, align);
5597 TYPE_USER_ALIGN (t) = 1;
5598
5599 return t;
5600 }
5601
5602 /* Create a new distinct copy of TYPE. The new type is made its own
5603 MAIN_VARIANT. If TYPE requires structural equality checks, the
5604 resulting type requires structural equality checks; otherwise, its
5605 TYPE_CANONICAL points to itself. */
5606
5607 tree
5608 build_distinct_type_copy (tree type MEM_STAT_DECL)
5609 {
5610 tree t = copy_node (type PASS_MEM_STAT);
5611
5612 TYPE_POINTER_TO (t) = 0;
5613 TYPE_REFERENCE_TO (t) = 0;
5614
5615 /* Set the canonical type either to a new equivalence class, or
5616 propagate the need for structural equality checks. */
5617 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5618 SET_TYPE_STRUCTURAL_EQUALITY (t);
5619 else
5620 TYPE_CANONICAL (t) = t;
5621
5622 /* Make it its own variant. */
5623 TYPE_MAIN_VARIANT (t) = t;
5624 TYPE_NEXT_VARIANT (t) = 0;
5625
5626 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5627 whose TREE_TYPE is not t. This can also happen in the Ada
5628 frontend when using subtypes. */
5629
5630 return t;
5631 }
5632
5633 /* Create a new variant of TYPE, equivalent but distinct. This is so
5634 the caller can modify it. TYPE_CANONICAL for the return type will
5635 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5636 are considered equal by the language itself (or that both types
5637 require structural equality checks). */
5638
5639 tree
5640 build_variant_type_copy (tree type MEM_STAT_DECL)
5641 {
5642 tree t, m = TYPE_MAIN_VARIANT (type);
5643
5644 t = build_distinct_type_copy (type PASS_MEM_STAT);
5645
5646 /* Since we're building a variant, assume that it is a non-semantic
5647 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5648 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5649 /* Type variants have no alias set defined. */
5650 TYPE_ALIAS_SET (t) = -1;
5651
5652 /* Add the new type to the chain of variants of TYPE. */
5653 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5654 TYPE_NEXT_VARIANT (m) = t;
5655 TYPE_MAIN_VARIANT (t) = m;
5656
5657 return t;
5658 }
5659 \f
5660 /* Return true if the from tree in both tree maps are equal. */
5661
5662 int
5663 tree_map_base_eq (const void *va, const void *vb)
5664 {
5665 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5666 *const b = (const struct tree_map_base *) vb;
5667 return (a->from == b->from);
5668 }
5669
5670 /* Hash a from tree in a tree_base_map. */
5671
5672 unsigned int
5673 tree_map_base_hash (const void *item)
5674 {
5675 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5676 }
5677
5678 /* Return true if this tree map structure is marked for garbage collection
5679 purposes. We simply return true if the from tree is marked, so that this
5680 structure goes away when the from tree goes away. */
5681
5682 int
5683 tree_map_base_marked_p (const void *p)
5684 {
5685 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5686 }
5687
5688 /* Hash a from tree in a tree_map. */
5689
5690 unsigned int
5691 tree_map_hash (const void *item)
5692 {
5693 return (((const struct tree_map *) item)->hash);
5694 }
5695
5696 /* Hash a from tree in a tree_decl_map. */
5697
5698 unsigned int
5699 tree_decl_map_hash (const void *item)
5700 {
5701 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5702 }
5703
5704 /* Return the initialization priority for DECL. */
5705
5706 priority_type
5707 decl_init_priority_lookup (tree decl)
5708 {
5709 symtab_node *snode = symtab_node::get (decl);
5710
5711 if (!snode)
5712 return DEFAULT_INIT_PRIORITY;
5713 return
5714 snode->get_init_priority ();
5715 }
5716
5717 /* Return the finalization priority for DECL. */
5718
5719 priority_type
5720 decl_fini_priority_lookup (tree decl)
5721 {
5722 cgraph_node *node = cgraph_node::get (decl);
5723
5724 if (!node)
5725 return DEFAULT_INIT_PRIORITY;
5726 return
5727 node->get_fini_priority ();
5728 }
5729
5730 /* Set the initialization priority for DECL to PRIORITY. */
5731
5732 void
5733 decl_init_priority_insert (tree decl, priority_type priority)
5734 {
5735 struct symtab_node *snode;
5736
5737 if (priority == DEFAULT_INIT_PRIORITY)
5738 {
5739 snode = symtab_node::get (decl);
5740 if (!snode)
5741 return;
5742 }
5743 else if (VAR_P (decl))
5744 snode = varpool_node::get_create (decl);
5745 else
5746 snode = cgraph_node::get_create (decl);
5747 snode->set_init_priority (priority);
5748 }
5749
5750 /* Set the finalization priority for DECL to PRIORITY. */
5751
5752 void
5753 decl_fini_priority_insert (tree decl, priority_type priority)
5754 {
5755 struct cgraph_node *node;
5756
5757 if (priority == DEFAULT_INIT_PRIORITY)
5758 {
5759 node = cgraph_node::get (decl);
5760 if (!node)
5761 return;
5762 }
5763 else
5764 node = cgraph_node::get_create (decl);
5765 node->set_fini_priority (priority);
5766 }
5767
5768 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5769
5770 static void
5771 print_debug_expr_statistics (void)
5772 {
5773 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5774 (long) debug_expr_for_decl->size (),
5775 (long) debug_expr_for_decl->elements (),
5776 debug_expr_for_decl->collisions ());
5777 }
5778
5779 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5780
5781 static void
5782 print_value_expr_statistics (void)
5783 {
5784 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5785 (long) value_expr_for_decl->size (),
5786 (long) value_expr_for_decl->elements (),
5787 value_expr_for_decl->collisions ());
5788 }
5789
5790 /* Lookup a debug expression for FROM, and return it if we find one. */
5791
5792 tree
5793 decl_debug_expr_lookup (tree from)
5794 {
5795 struct tree_decl_map *h, in;
5796 in.base.from = from;
5797
5798 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5799 if (h)
5800 return h->to;
5801 return NULL_TREE;
5802 }
5803
5804 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5805
5806 void
5807 decl_debug_expr_insert (tree from, tree to)
5808 {
5809 struct tree_decl_map *h;
5810
5811 h = ggc_alloc<tree_decl_map> ();
5812 h->base.from = from;
5813 h->to = to;
5814 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5815 }
5816
5817 /* Lookup a value expression for FROM, and return it if we find one. */
5818
5819 tree
5820 decl_value_expr_lookup (tree from)
5821 {
5822 struct tree_decl_map *h, in;
5823 in.base.from = from;
5824
5825 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5826 if (h)
5827 return h->to;
5828 return NULL_TREE;
5829 }
5830
5831 /* Insert a mapping FROM->TO in the value expression hashtable. */
5832
5833 void
5834 decl_value_expr_insert (tree from, tree to)
5835 {
5836 struct tree_decl_map *h;
5837
5838 h = ggc_alloc<tree_decl_map> ();
5839 h->base.from = from;
5840 h->to = to;
5841 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5842 }
5843
5844 /* Lookup a vector of debug arguments for FROM, and return it if we
5845 find one. */
5846
5847 vec<tree, va_gc> **
5848 decl_debug_args_lookup (tree from)
5849 {
5850 struct tree_vec_map *h, in;
5851
5852 if (!DECL_HAS_DEBUG_ARGS_P (from))
5853 return NULL;
5854 gcc_checking_assert (debug_args_for_decl != NULL);
5855 in.base.from = from;
5856 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5857 if (h)
5858 return &h->to;
5859 return NULL;
5860 }
5861
5862 /* Insert a mapping FROM->empty vector of debug arguments in the value
5863 expression hashtable. */
5864
5865 vec<tree, va_gc> **
5866 decl_debug_args_insert (tree from)
5867 {
5868 struct tree_vec_map *h;
5869 tree_vec_map **loc;
5870
5871 if (DECL_HAS_DEBUG_ARGS_P (from))
5872 return decl_debug_args_lookup (from);
5873 if (debug_args_for_decl == NULL)
5874 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5875 h = ggc_alloc<tree_vec_map> ();
5876 h->base.from = from;
5877 h->to = NULL;
5878 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5879 *loc = h;
5880 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5881 return &h->to;
5882 }
5883
5884 /* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  NOTE: what is hashed here must
   stay in sync with what type_cache_hasher::equal compares, or
   equal types would land in different buckets.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  /* Mix in the hash of the underlying type (element, pointed-to or
     return type), when there is one.  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Hash the argument types; error_mark_node entries are skipped
	 since they carry no useful identity.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE participates in identity only for
	   non-aggregate element types; see type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash one of the bounds: prefer the max value, fall back to
	   the min value when the max is absent.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
5959
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  This must agree with
   what type_hash_canon_hash hashes.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      /* COMPLEX_TYPE identity does not depend on TYPE_NAME.  */
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  switch (TREE_CODE (a->type))
    {
    /* These codes are fully determined by the shared checks above.  */
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* Enumerator lists must be pointer-identical or element-wise
	 equal TREE_LISTs; then fall through to compare the bounds
	 like an integer type.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds match if pointer-identical or numerically equal
	 INTEGER_CSTs (tree_int_cst_equal returns 0 for NULL args).  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	/* Potentially equal; let the language hook below decide.  */
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	/* Potentially equal; let the language hook below decide.  */
	break;
      return 0;

    default:
      return 0;
    }

  /* Give the front end a final veto for codes that reached `break'.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
6083
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.

   NOTE: when a pre-existing type is found, TYPE itself is freed
   (ggc_free'd), so callers must not use TYPE afterwards.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists: free TYPE and return it.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* Reclaim the UID if TYPE was the most recently created type.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No match: record TYPE as the canonical representative.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
6155
6156 static void
6157 print_type_hash_statistics (void)
6158 {
6159 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6160 (long) type_hash_table->size (),
6161 (long) type_hash_table->elements (),
6162 type_hash_table->collisions ());
6163 }
6164
6165 /* Given two lists of types
6166 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6167 return 1 if the lists contain the same types in the same order.
6168 Also, the TREE_PURPOSEs must match. */
6169
6170 bool
6171 type_list_equal (const_tree l1, const_tree l2)
6172 {
6173 const_tree t1, t2;
6174
6175 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6176 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6177 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6178 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6179 && (TREE_TYPE (TREE_PURPOSE (t1))
6180 == TREE_TYPE (TREE_PURPOSE (t2))))))
6181 return false;
6182
6183 return t1 == t2;
6184 }
6185
6186 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6187 given by TYPE. If the argument list accepts variable arguments,
6188 then this function counts only the ordinary arguments. */
6189
6190 int
6191 type_num_arguments (const_tree fntype)
6192 {
6193 int i = 0;
6194
6195 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6196 /* If the function does not take a variable number of arguments,
6197 the last element in the list will have type `void'. */
6198 if (VOID_TYPE_P (TREE_VALUE (t)))
6199 break;
6200 else
6201 ++i;
6202
6203 return i;
6204 }
6205
6206 /* Return the type of the function TYPE's argument ARGNO if known.
6207 For vararg function's where ARGNO refers to one of the variadic
6208 arguments return null. Otherwise, return a void_type_node for
6209 out-of-bounds ARGNO. */
6210
6211 tree
6212 type_argument_type (const_tree fntype, unsigned argno)
6213 {
6214 /* Treat zero the same as an out-of-bounds argument number. */
6215 if (!argno)
6216 return void_type_node;
6217
6218 function_args_iterator iter;
6219
6220 tree argtype;
6221 unsigned i = 1;
6222 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6223 {
6224 /* A vararg function's argument list ends in a null. Otherwise,
6225 an ordinary function's argument list ends with void. Return
6226 null if ARGNO refers to a vararg argument, void_type_node if
6227 it's out of bounds, and the formal argument type otherwise. */
6228 if (!argtype)
6229 break;
6230
6231 if (i == argno || VOID_TYPE_P (argtype))
6232 return argtype;
6233
6234 ++i;
6235 }
6236
6237 return NULL_TREE;
6238 }
6239
6240 /* Nonzero if integer constants T1 and T2
6241 represent the same constant value. */
6242
6243 int
6244 tree_int_cst_equal (const_tree t1, const_tree t2)
6245 {
6246 if (t1 == t2)
6247 return 1;
6248
6249 if (t1 == 0 || t2 == 0)
6250 return 0;
6251
6252 STRIP_ANY_LOCATION_WRAPPER (t1);
6253 STRIP_ANY_LOCATION_WRAPPER (t2);
6254
6255 if (TREE_CODE (t1) == INTEGER_CST
6256 && TREE_CODE (t2) == INTEGER_CST
6257 && wi::to_widest (t1) == wi::to_widest (t2))
6258 return 1;
6259
6260 return 0;
6261 }
6262
6263 /* Return true if T is an INTEGER_CST whose numerical value (extended
6264 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6265
6266 bool
6267 tree_fits_shwi_p (const_tree t)
6268 {
6269 return (t != NULL_TREE
6270 && TREE_CODE (t) == INTEGER_CST
6271 && wi::fits_shwi_p (wi::to_widest (t)));
6272 }
6273
6274 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6275 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6276
6277 bool
6278 tree_fits_poly_int64_p (const_tree t)
6279 {
6280 if (t == NULL_TREE)
6281 return false;
6282 if (POLY_INT_CST_P (t))
6283 {
6284 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6285 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6286 return false;
6287 return true;
6288 }
6289 return (TREE_CODE (t) == INTEGER_CST
6290 && wi::fits_shwi_p (wi::to_widest (t)));
6291 }
6292
6293 /* Return true if T is an INTEGER_CST whose numerical value (extended
6294 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6295
6296 bool
6297 tree_fits_uhwi_p (const_tree t)
6298 {
6299 return (t != NULL_TREE
6300 && TREE_CODE (t) == INTEGER_CST
6301 && wi::fits_uhwi_p (wi::to_widest (t)));
6302 }
6303
6304 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6305 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6306
6307 bool
6308 tree_fits_poly_uint64_p (const_tree t)
6309 {
6310 if (t == NULL_TREE)
6311 return false;
6312 if (POLY_INT_CST_P (t))
6313 {
6314 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6315 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6316 return false;
6317 return true;
6318 }
6319 return (TREE_CODE (t) == INTEGER_CST
6320 && wi::fits_uhwi_p (wi::to_widest (t)));
6321 }
6322
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Callers must have checked with tree_fits_shwi_p
   first; the assert enforces that precondition.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6333
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Callers must have checked with tree_fits_uhwi_p
   first; the assert enforces that precondition.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6344
6345 /* Return the most significant (sign) bit of T. */
6346
6347 int
6348 tree_int_cst_sign_bit (const_tree t)
6349 {
6350 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6351
6352 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6353 }
6354
6355 /* Return an indication of the sign of the integer constant T.
6356 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6357 Note that -1 will never be returned if T's type is unsigned. */
6358
6359 int
6360 tree_int_cst_sgn (const_tree t)
6361 {
6362 if (wi::to_wide (t) == 0)
6363 return 0;
6364 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6365 return 1;
6366 else if (wi::neg_p (wi::to_wide (t)))
6367 return -1;
6368 else
6369 return 1;
6370 }
6371
6372 /* Return the minimum number of bits needed to represent VALUE in a
6373 signed or unsigned type, UNSIGNEDP says which. */
6374
6375 unsigned int
6376 tree_int_cst_min_precision (tree value, signop sgn)
6377 {
6378 /* If the value is negative, compute its negative minus 1. The latter
6379 adjustment is because the absolute value of the largest negative value
6380 is one larger than the largest positive value. This is equivalent to
6381 a bit-wise negation, so use that operation instead. */
6382
6383 if (tree_int_cst_sgn (value) < 0)
6384 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6385
6386 /* Return the number of bits needed, taking into account the fact
6387 that we need one more bit for a signed than unsigned type.
6388 If value is 0 or -1, the minimum precision is 1 no matter
6389 whether unsignedp is true or false. */
6390
6391 if (integer_zerop (value))
6392 return 1;
6393 else
6394 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
6395 }
6396
/* Return truthvalue of whether T1 is the same tree structure as T2.
   Return 1 if they are the same.
   Return 0 if they are understandably different.
   Return -1 if either contains tree structure not understood by
   this function.  */

int
simple_cst_equal (const_tree t1, const_tree t2)
{
  enum tree_code code1, code2;
  int cmp;
  int i;

  if (t1 == t2)
    return 1;
  if (t1 == 0 || t2 == 0)
    return 0;

  /* For location wrappers to be the same, they must be at the same
     source location (and wrap the same thing).  */
  if (location_wrapper_p (t1) && location_wrapper_p (t2))
    {
      if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
	return 0;
      return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
    }

  code1 = TREE_CODE (t1);
  code2 = TREE_CODE (t2);

  /* Conversions and NON_LVALUE_EXPRs are stripped from either side
     before comparing, so they do not affect equality.  */
  if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
    {
      if (CONVERT_EXPR_CODE_P (code2)
	  || code2 == NON_LVALUE_EXPR)
	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      else
	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
    }

  else if (CONVERT_EXPR_CODE_P (code2)
	   || code2 == NON_LVALUE_EXPR)
    return simple_cst_equal (t1, TREE_OPERAND (t2, 0));

  if (code1 != code2)
    return 0;

  switch (code1)
    {
    case INTEGER_CST:
      return wi::to_widest (t1) == wi::to_widest (t2);

    case REAL_CST:
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);

	if (vec_safe_length (v1) != vec_safe_length (v2))
	  return false;

	for (idx = 0; idx < vec_safe_length (v1); ++idx)
	  /* ??? Should we handle also fields here? */
	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
	    return false;
	return true;
      }

    case SAVE_EXPR:
      return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case CALL_EXPR:
      /* Callees must compare equal (a <= 0 result from the recursion,
	 i.e. "different" or "don't know", is propagated).  */
      cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
      if (cmp <= 0)
	return cmp;
      if (call_expr_nargs (t1) != call_expr_nargs (t2))
	return 0;
      {
	const_tree arg1, arg2;
	const_call_expr_arg_iterator iter1, iter2;
	/* Compare arguments pairwise until one runs out or differs.  */
	for (arg1 = first_const_call_expr_arg (t1, &iter1),
	       arg2 = first_const_call_expr_arg (t2, &iter2);
	     arg1 && arg2;
	     arg1 = next_const_call_expr_arg (&iter1),
	       arg2 = next_const_call_expr_arg (&iter2))
	  {
	    cmp = simple_cst_equal (arg1, arg2);
	    if (cmp <= 0)
	      return cmp;
	  }
	return arg1 == arg2;
      }

    case TARGET_EXPR:
      /* Special case: if either target is an unallocated VAR_DECL,
	 it means that it's going to be unified with whatever the
	 TARGET_EXPR is really supposed to initialize, so treat it
	 as being equivalent to anything.  */
      if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
	cmp = 1;
      else
	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      if (cmp <= 0)
	return cmp;

      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case WITH_CLEANUP_EXPR:
      cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      if (cmp <= 0)
	return cmp;

      /* NOTE(review): this compares TREE_OPERAND (t1, 1) against itself,
	 so it always reports equality for operand 1 — it looks like a
	 typo for TREE_OPERAND (t2, 1); confirm against upstream before
	 changing, as this behavior is long-standing.  */
      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));

    case COMPONENT_REF:
      /* Same field required; then compare the objects.  */
      if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      return 0;

    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case FUNCTION_DECL:
      /* Distinct decls are never "simply" equal (pointer identity was
	 already handled at the top).  */
      return 0;

    default:
      if (POLY_INT_CST_P (t1))
	/* A false return means maybe_ne rather than known_ne.  */
	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
						TYPE_SIGN (TREE_TYPE (t1))),
			 poly_widest_int::from (poly_int_cst_value (t2),
						TYPE_SIGN (TREE_TYPE (t2))));
      break;
    }

  /* This general rule works for most tree codes.  All exceptions should be
     handled above.  If this is a language-specific tree code, we can't
     trust what might be in the operand, so say we don't know
     the situation.  */
  if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
    return -1;

  switch (TREE_CODE_CLASS (code1))
    {
    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
      /* Compare all operands pairwise; any "different"/"unknown" result
	 short-circuits.  */
      cmp = 1;
      for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
	{
	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
	  if (cmp <= 0)
	    return cmp;
	}

      return cmp;

    default:
      return -1;
    }
}
6578
6579 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6580 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6581 than U, respectively. */
6582
6583 int
6584 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6585 {
6586 if (tree_int_cst_sgn (t) < 0)
6587 return -1;
6588 else if (!tree_fits_uhwi_p (t))
6589 return 1;
6590 else if (TREE_INT_CST_LOW (t) == u)
6591 return 0;
6592 else if (TREE_INT_CST_LOW (t) < u)
6593 return -1;
6594 else
6595 return 1;
6596 }
6597
/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space).
   When PERR is non-null, set *PERR on failure to the description of
   why SIZE is not valid.  */

bool
valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
{
  if (POLY_INT_CST_P (size))
    {
      /* A poly_int size is valid only when it hasn't overflowed and
	 every coefficient is itself a valid constant size.  Note the
	 recursive calls don't propagate *PERR.  */
      if (TREE_OVERFLOW (size))
	return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
	  return false;
      return true;
    }

  /* Point PERR at a local dummy when the caller doesn't care, so the
     checks below can set it unconditionally.  */
  cst_size_error error;
  if (!perr)
    perr = &error;

  if (TREE_CODE (size) != INTEGER_CST)
    {
      *perr = cst_size_not_constant;
      return false;
    }

  if (TREE_OVERFLOW_P (size))
    {
      *perr = cst_size_overflow;
      return false;
    }

  if (tree_int_cst_sgn (size) < 0)
    {
      *perr = cst_size_negative;
      return false;
    }
  /* Reject sizes covering more than half the address space: SIZE * 2
     must still fit in sizetype.  */
  if (!tree_fits_uhwi_p (size)
      || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
	  < wi::to_widest (size) * 2))
    {
      *perr = cst_size_too_big;
      return false;
    }

  return true;
}
6648
6649 /* Return the precision of the type, or for a complex or vector type the
6650 precision of the type of its elements. */
6651
6652 unsigned int
6653 element_precision (const_tree type)
6654 {
6655 if (!TYPE_P (type))
6656 type = TREE_TYPE (type);
6657 enum tree_code code = TREE_CODE (type);
6658 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6659 type = TREE_TYPE (type);
6660
6661 return TYPE_PRECISION (type);
6662 }
6663
6664 /* Return true if CODE represents an associative tree code. Otherwise
6665 return false. */
6666 bool
6667 associative_tree_code (enum tree_code code)
6668 {
6669 switch (code)
6670 {
6671 case BIT_IOR_EXPR:
6672 case BIT_AND_EXPR:
6673 case BIT_XOR_EXPR:
6674 case PLUS_EXPR:
6675 case MULT_EXPR:
6676 case MIN_EXPR:
6677 case MAX_EXPR:
6678 return true;
6679
6680 default:
6681 break;
6682 }
6683 return false;
6684 }
6685
6686 /* Return true if CODE represents a commutative tree code. Otherwise
6687 return false. */
6688 bool
6689 commutative_tree_code (enum tree_code code)
6690 {
6691 switch (code)
6692 {
6693 case PLUS_EXPR:
6694 case MULT_EXPR:
6695 case MULT_HIGHPART_EXPR:
6696 case MIN_EXPR:
6697 case MAX_EXPR:
6698 case BIT_IOR_EXPR:
6699 case BIT_XOR_EXPR:
6700 case BIT_AND_EXPR:
6701 case NE_EXPR:
6702 case EQ_EXPR:
6703 case UNORDERED_EXPR:
6704 case ORDERED_EXPR:
6705 case UNEQ_EXPR:
6706 case LTGT_EXPR:
6707 case TRUTH_AND_EXPR:
6708 case TRUTH_XOR_EXPR:
6709 case TRUTH_OR_EXPR:
6710 case WIDEN_MULT_EXPR:
6711 case VEC_WIDEN_MULT_HI_EXPR:
6712 case VEC_WIDEN_MULT_LO_EXPR:
6713 case VEC_WIDEN_MULT_EVEN_EXPR:
6714 case VEC_WIDEN_MULT_ODD_EXPR:
6715 return true;
6716
6717 default:
6718 break;
6719 }
6720 return false;
6721 }
6722
6723 /* Return true if CODE represents a ternary tree code for which the
6724 first two operands are commutative. Otherwise return false. */
6725 bool
6726 commutative_ternary_tree_code (enum tree_code code)
6727 {
6728 switch (code)
6729 {
6730 case WIDEN_MULT_PLUS_EXPR:
6731 case WIDEN_MULT_MINUS_EXPR:
6732 case DOT_PROD_EXPR:
6733 return true;
6734
6735 default:
6736 break;
6737 }
6738 return false;
6739 }
6740
6741 /* Returns true if CODE can overflow. */
6742
6743 bool
6744 operation_can_overflow (enum tree_code code)
6745 {
6746 switch (code)
6747 {
6748 case PLUS_EXPR:
6749 case MINUS_EXPR:
6750 case MULT_EXPR:
6751 case LSHIFT_EXPR:
6752 /* Can overflow in various ways. */
6753 return true;
6754 case TRUNC_DIV_EXPR:
6755 case EXACT_DIV_EXPR:
6756 case FLOOR_DIV_EXPR:
6757 case CEIL_DIV_EXPR:
6758 /* For INT_MIN / -1. */
6759 return true;
6760 case NEGATE_EXPR:
6761 case ABS_EXPR:
6762 /* For -INT_MIN. */
6763 return true;
6764 default:
6765 /* These operators cannot overflow. */
6766 return false;
6767 }
6768 }
6769
6770 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6771 ftrapv doesn't generate trapping insns for CODE. */
6772
6773 bool
6774 operation_no_trapping_overflow (tree type, enum tree_code code)
6775 {
6776 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6777
6778 /* We don't generate instructions that trap on overflow for complex or vector
6779 types. */
6780 if (!INTEGRAL_TYPE_P (type))
6781 return true;
6782
6783 if (!TYPE_OVERFLOW_TRAPS (type))
6784 return true;
6785
6786 switch (code)
6787 {
6788 case PLUS_EXPR:
6789 case MINUS_EXPR:
6790 case MULT_EXPR:
6791 case NEGATE_EXPR:
6792 case ABS_EXPR:
6793 /* These operators can overflow, and -ftrapv generates trapping code for
6794 these. */
6795 return false;
6796 case TRUNC_DIV_EXPR:
6797 case EXACT_DIV_EXPR:
6798 case FLOOR_DIV_EXPR:
6799 case CEIL_DIV_EXPR:
6800 case LSHIFT_EXPR:
6801 /* These operators can overflow, but -ftrapv does not generate trapping
6802 code for these. */
6803 return true;
6804 default:
6805 /* These operators cannot overflow. */
6806 return true;
6807 }
6808 }
6809
6810 /* Constructors for pointer, array and function types.
6811 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6812 constructed by language-dependent code, not here.) */
6813
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If MODE is VOIDmode, a pointer mode for the address
   space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
   indicate this type can reference all of memory. If such a type has
   already been constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request; the may_alias attribute
     below may force CAN_ALIAS_ALL on, but TYPE_CANONICAL is keyed off
     what the caller asked for.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Chain the new type onto TO_TYPE's list of pointer types so the
     lookup loop above will find it next time.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
6881
6882 /* By default build pointers in ptr_mode. */
6883
6884 tree
6885 build_pointer_type (tree to_type)
6886 {
6887 return build_pointer_type_for_mode (to_type, VOIDmode, false);
6888 }
6889
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request; the may_alias attribute
     below may force CAN_ALIAS_ALL on, but TYPE_CANONICAL is keyed off
     what the caller asked for.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Chain the new type onto TO_TYPE's list of reference types so the
     lookup loop above will find it next time.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
6951
6952
6953 /* Build the node for the type of references-to-TO_TYPE by default
6954 in ptr_mode. */
6955
6956 tree
6957 build_reference_type (tree to_type)
6958 {
6959 return build_reference_type_for_mode (to_type, VOIDmode, false);
6960 }
6961
6962 #define MAX_INT_CACHED_PREC \
6963 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6964 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6965
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* Repurpose UNSIGNEDP as an offset into the cache: signed types live
     in entries [0, MAX_INT_CACHED_PREC], unsigned types in the second
     half of the array.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Derive min/max values, size, and mode from the precision.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Share structurally identical types through the type hash table;
     hashing TYPE_MAX_VALUE captures both precision and signedness.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
7001
7002 #define MAX_BOOL_CACHED_PREC \
7003 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7004 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7005
7006 /* Builds a boolean type of precision PRECISION.
7007 Used for boolean vectors to choose proper vector element size. */
7008 tree
7009 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7010 {
7011 tree type;
7012
7013 if (precision <= MAX_BOOL_CACHED_PREC)
7014 {
7015 type = nonstandard_boolean_type_cache[precision];
7016 if (type)
7017 return type;
7018 }
7019
7020 type = make_node (BOOLEAN_TYPE);
7021 TYPE_PRECISION (type) = precision;
7022 fixup_signed_type (type);
7023
7024 if (precision <= MAX_INT_CACHED_PREC)
7025 nonstandard_boolean_type_cache[precision] = type;
7026
7027 return type;
7028 }
7029
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type; HIGHVAL may be absent, which
     leaves the range open-ended.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type shares the representation of its base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Constant bounds: share any previously built identical range type
     through the type hash table.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
7071
7072 /* Wrapper around build_range_type_1 with SHARED set to true. */
7073
7074 tree
7075 build_range_type (tree type, tree lowval, tree highval)
7076 {
7077 return build_range_type_1 (type, lowval, highval, true);
7078 }
7079
7080 /* Wrapper around build_range_type_1 with SHARED set to false. */
7081
7082 tree
7083 build_nonshared_range_type (tree type, tree lowval, tree highval)
7084 {
7085 return build_range_type_1 (type, lowval, highval, false);
7086 }
7087
7088 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7089 MAXVAL should be the maximum value in the domain
7090 (one less than the length of the array).
7091
7092 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7093 We don't enforce this limit, that is up to caller (e.g. language front end).
7094 The limit exists because the result is a signed type and we don't handle
7095 sizes that use more than one HOST_WIDE_INT. */
7096
7097 tree
7098 build_index_type (tree maxval)
7099 {
7100 return build_range_type (sizetype, size_zero_node, maxval);
7101 }
7102
/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  Prefer the front end's view
     when it provides one.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  /* Output parameters are optional.  */
  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
7142
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  /* Diagnose and recover from an invalid element type rather than
     building a nonsensical array type.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* When sharing, look the freshly built node up in the type hash
     table; T may be replaced by an existing equivalent type.  */
  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only compute a canonical type if T wasn't merged with an existing
     node (which would already carry one).  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array type from the canonical element
	   and index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
7191
7192 /* Wrapper around build_array_type_1 with SHARED set to true. */
7193
7194 tree
7195 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7196 {
7197 return
7198 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7199 }
7200
7201 /* Wrapper around build_array_type_1 with SHARED set to false. */
7202
7203 tree
7204 build_nonshared_array_type (tree elt_type, tree index_type)
7205 {
7206 return build_array_type_1 (elt_type, index_type, false, false, true);
7207 }
7208
7209 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7210 sizetype. */
7211
7212 tree
7213 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7214 {
7215 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7216 }
7217
7218 /* Recursively examines the array elements of TYPE, until a non-array
7219 element type is found. */
7220
7221 tree
7222 strip_array_types (tree type)
7223 {
7224 while (TREE_CODE (type) == ARRAY_TYPE)
7225 type = TREE_TYPE (type);
7226
7227 return type;
7228 }
7229
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the arguments; stop early once a structural
     type makes canonicalization pointless.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      /* Accumulate in reverse, dropping the void terminator, which is
	 re-attached below after nreverse.  */
      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
7301
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  gcc_assert (arg_types != error_mark_node);

  /* Diagnose and recover from an invalid return type rather than
     building a nonsensical function type.  */
  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  The return type and every argument
     type must be canonical for T itself to be canonical.  */
  any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  /* A type merged through type_hash_canon may already be laid out.  */
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
7350
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must be always be terminated be a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types into a TREE_LIST, in reverse order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* A varargs type is left unterminated (no trailing
	 void_list_node).  LAST is the head before reversal, i.e. the
	 tail afterwards; the assert checks the caller didn't already
	 terminate the list with void.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No arguments at all: the prototype is (void).  */
    args = void_list_node;
  else
    {
      /* Reverse into source order and terminate with void.  LAST is
	 the head before reversal, i.e. the tail afterwards.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
7383
7384 /* Build a function type. The RETURN_TYPE is the type returned by the
7385 function. If additional arguments are provided, they are
7386 additional argument types. The list of argument types must always
7387 be terminated by NULL_TREE. */
7388
7389 tree
7390 build_function_type_list (tree return_type, ...)
7391 {
7392 tree args;
7393 va_list p;
7394
7395 va_start (p, return_type);
7396 args = build_function_type_list_1 (false, return_type, p);
7397 va_end (p);
7398 return args;
7399 }
7400
7401 /* Build a variable argument function type. The RETURN_TYPE is the
7402 type returned by the function. If additional arguments are provided,
7403 they are additional argument types. The list of argument types must
7404 always be terminated by NULL_TREE. */
7405
7406 tree
7407 build_varargs_function_type_list (tree return_type, ...)
7408 {
7409 tree args;
7410 va_list p;
7411
7412 va_start (p, return_type);
7413 args = build_function_type_list_1 (true, return_type, p);
7414 va_end (p);
7415
7416 return args;
7417 }
7418
7419 /* Build a function type. RETURN_TYPE is the type returned by the
7420 function; VAARGS indicates whether the function takes varargs. The
7421 function takes N named arguments, the types of which are provided in
7422 ARG_TYPES. */
7423
7424 static tree
7425 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7426 tree *arg_types)
7427 {
7428 int i;
7429 tree t = vaargs ? NULL_TREE : void_list_node;
7430
7431 for (i = n - 1; i >= 0; i--)
7432 t = tree_cons (NULL_TREE, arg_types[i], t);
7433
7434 return build_function_type (return_type, t);
7435 }
7436
7437 /* Build a function type. RETURN_TYPE is the type returned by the
7438 function. The function takes N named arguments, the types of which
7439 are provided in ARG_TYPES. */
7440
7441 tree
7442 build_function_type_array (tree return_type, int n, tree *arg_types)
7443 {
7444 return build_function_type_array_1 (false, return_type, n, arg_types);
7445 }
7446
7447 /* Build a variable argument function type. RETURN_TYPE is the type
7448 returned by the function. The function takes N named arguments, the
7449 types of which are provided in ARG_TYPES. */
7450
7451 tree
7452 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7453 {
7454 return build_function_type_array_1 (true, return_type, n, arg_types);
7455 }
7456
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  The basetype, return type, and every
     explicit argument type must be canonical for T to be canonical;
     the implicit `this' pointer is skipped via TREE_CHAIN below since
     it derives from BASETYPE, which is checked directly.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  /* A type merged through type_hash_canon may already be laid out.  */
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
7510
7511 /* Construct, lay out and return the type of methods belonging to class
7512 BASETYPE and whose arguments and values are described by TYPE.
7513 If that type exists already, reuse it.
7514 TYPE must be a FUNCTION_TYPE node. */
7515
7516 tree
7517 build_method_type (tree basetype, tree type)
7518 {
7519 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7520
7521 return build_method_type_directly (basetype,
7522 TREE_TYPE (type),
7523 TYPE_ARG_TYPES (type));
7524 }
7525
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* A type merged through type_hash_canon may already be laid out.  */
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute a canonical type if T wasn't merged with an existing
     node (which would already carry one).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
7562
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Complex types are only defined over integral, real and
     fixed-point scalar components.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  /* Build over the main variant; the component's qualifiers are
     layered back on at the very end.  */
  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Reapply the component type's qualifiers to the result.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
7637
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
	  ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* Modes used below to recognize which types the target asks to widen.
     float16_type_node does not exist on every target.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex types with floating-point components carry
	   excess precision; the widening mirrors the REAL_TYPE case
	   but returns the corresponding complex type node.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
7742 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN tracks the innermost expression we have proved safe to strip
     down to; OP walks inward past each conversion.  */
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion widened its operand;
	 negative means it truncated.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  /* The frontend may not have an integer type of exactly PREC
	     bits; in that case keep WIN unchanged.  */
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
7839 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  /* FIRST is set until the first extension/conversion has been
     stripped; it decides whether UNS may still be chosen freely.  */
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow only the value operand (the rightmost one), then rebuild
	 the COMPOUND_EXPR chain around the narrowed value so the side
	 effects of the left-hand operands are preserved.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
7952 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

 retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types.  */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more
     we can check ourselves here.  Look at the base type if we have one and
     it has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
8049
8050 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8051 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8052 represented (assuming two's-complement arithmetic) within the bit
8053 precision of the type are returned instead. */
8054
8055 void
8056 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8057 {
8058 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8059 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8060 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8061 else
8062 {
8063 if (TYPE_UNSIGNED (type))
8064 mpz_set_ui (min, 0);
8065 else
8066 {
8067 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8068 wi::to_mpz (mn, min, SIGNED);
8069 }
8070 }
8071
8072 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8073 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8074 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8075 else
8076 {
8077 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8078 wi::to_mpz (mn, max, TYPE_SIGN (type));
8079 }
8080 }
8081
8082 /* Return true if VAR is an automatic variable. */
8083
8084 bool
8085 auto_var_p (const_tree var)
8086 {
8087 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8088 || TREE_CODE (var) == PARM_DECL)
8089 && ! TREE_STATIC (var))
8090 || TREE_CODE (var) == RESULT_DECL);
8091 }
8092
8093 /* Return true if VAR is an automatic variable defined in function FN. */
8094
8095 bool
8096 auto_var_in_fn_p (const_tree var, const_tree fn)
8097 {
8098 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8099 && (auto_var_p (var)
8100 || TREE_CODE (var) == LABEL_DECL));
8101 }
8102
8103 /* Subprogram of following function. Called by walk_tree.
8104
8105 Return *TP if it is an automatic variable or parameter of the
8106 function passed in as DATA. */
8107
8108 static tree
8109 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8110 {
8111 tree fn = (tree) data;
8112
8113 if (TYPE_P (*tp))
8114 *walk_subtrees = 0;
8115
8116 else if (DECL_P (*tp)
8117 && auto_var_in_fn_p (*tp, fn))
8118 return *tp;
8119
8120 return NULL_TREE;
8121 }
8122
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  If TYPE isn't gimplified, return true also if
     gimplify_one_sizepos would gimplify the expression into a local
     variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly.
	 Use TREE_VISITED as a recursion guard; it is cleared again on
	 every exit path so the flag never leaks out of this function.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
8248
8249 /* Given a DECL or TYPE, return the scope in which it was declared, or
8250 NULL_TREE if there is no containing scope. */
8251
8252 tree
8253 get_containing_scope (const_tree t)
8254 {
8255 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8256 }
8257
8258 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8259
8260 const_tree
8261 get_ultimate_context (const_tree decl)
8262 {
8263 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8264 {
8265 if (TREE_CODE (decl) == BLOCK)
8266 decl = BLOCK_SUPERCONTEXT (decl);
8267 else
8268 decl = get_containing_scope (decl);
8269 }
8270 return decl;
8271 }
8272
8273 /* Return the innermost context enclosing DECL that is
8274 a FUNCTION_DECL, or zero if none. */
8275
8276 tree
8277 decl_function_context (const_tree decl)
8278 {
8279 tree context;
8280
8281 if (TREE_CODE (decl) == ERROR_MARK)
8282 return 0;
8283
8284 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8285 where we look up the function at runtime. Such functions always take
8286 a first argument of type 'pointer to real context'.
8287
8288 C++ should really be fixed to use DECL_CONTEXT for the real context,
8289 and use something else for the "virtual context". */
8290 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8291 context
8292 = TYPE_MAIN_VARIANT
8293 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8294 else
8295 context = DECL_CONTEXT (decl);
8296
8297 while (context && TREE_CODE (context) != FUNCTION_DECL)
8298 {
8299 if (TREE_CODE (context) == BLOCK)
8300 context = BLOCK_SUPERCONTEXT (context);
8301 else
8302 context = get_containing_scope (context);
8303 }
8304
8305 return context;
8306 }
8307
8308 /* Return the innermost context enclosing DECL that is
8309 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8310 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8311
8312 tree
8313 decl_type_context (const_tree decl)
8314 {
8315 tree context = DECL_CONTEXT (decl);
8316
8317 while (context)
8318 switch (TREE_CODE (context))
8319 {
8320 case NAMESPACE_DECL:
8321 case TRANSLATION_UNIT_DECL:
8322 return NULL_TREE;
8323
8324 case RECORD_TYPE:
8325 case UNION_TYPE:
8326 case QUAL_UNION_TYPE:
8327 return context;
8328
8329 case TYPE_DECL:
8330 case FUNCTION_DECL:
8331 context = DECL_CONTEXT (context);
8332 break;
8333
8334 case BLOCK:
8335 context = BLOCK_SUPERCONTEXT (context);
8336 break;
8337
8338 default:
8339 gcc_unreachable ();
8340 }
8341
8342 return NULL_TREE;
8343 }
8344
8345 /* CALL is a CALL_EXPR. Return the declaration for the function
8346 called, or NULL_TREE if the called function cannot be
8347 determined. */
8348
8349 tree
8350 get_callee_fndecl (const_tree call)
8351 {
8352 tree addr;
8353
8354 if (call == error_mark_node)
8355 return error_mark_node;
8356
8357 /* It's invalid to call this function with anything but a
8358 CALL_EXPR. */
8359 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8360
8361 /* The first operand to the CALL is the address of the function
8362 called. */
8363 addr = CALL_EXPR_FN (call);
8364
8365 /* If there is no function, return early. */
8366 if (addr == NULL_TREE)
8367 return NULL_TREE;
8368
8369 STRIP_NOPS (addr);
8370
8371 /* If this is a readonly function pointer, extract its initial value. */
8372 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8373 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8374 && DECL_INITIAL (addr))
8375 addr = DECL_INITIAL (addr);
8376
8377 /* If the address is just `&f' for some function `f', then we know
8378 that `f' is being called. */
8379 if (TREE_CODE (addr) == ADDR_EXPR
8380 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8381 return TREE_OPERAND (addr, 0);
8382
8383 /* We couldn't figure out what was being called. */
8384 return NULL_TREE;
8385 }
8386
8387 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8388 return the associated function code, otherwise return CFN_LAST. */
8389
8390 combined_fn
8391 get_call_combined_fn (const_tree call)
8392 {
8393 /* It's invalid to call this function with anything but a CALL_EXPR. */
8394 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8395
8396 if (!CALL_EXPR_FN (call))
8397 return as_combined_fn (CALL_EXPR_IFN (call));
8398
8399 tree fndecl = get_callee_fndecl (call);
8400 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8401 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8402
8403 return CFN_LAST;
8404 }
8405
8406 /* Comparator of indices based on tree_node_counts. */
8407
8408 static int
8409 tree_nodes_cmp (const void *p1, const void *p2)
8410 {
8411 const unsigned *n1 = (const unsigned *)p1;
8412 const unsigned *n2 = (const unsigned *)p2;
8413
8414 return tree_node_counts[*n1] - tree_node_counts[*n2];
8415 }
8416
8417 /* Comparator of indices based on tree_code_counts. */
8418
8419 static int
8420 tree_codes_cmp (const void *p1, const void *p2)
8421 {
8422 const unsigned *n1 = (const unsigned *)p1;
8423 const unsigned *n2 = (const unsigned *)p2;
8424
8425 return tree_code_counts[*n1] - tree_code_counts[*n2];
8426 }
8427
/* Column width used by the dashed separator lines below.  */
#define TREE_MEM_USAGE_SPACES 40

/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  Output goes to stderr.  Detailed
   per-node/per-code tables are only available when the compiler was built
   with GATHER_STATISTICS.  */

void
dump_tree_statistics (void)
{
  if (GATHER_STATISTICS)
    {
      uint64_t total_nodes, total_bytes;
      fprintf (stderr, "\nKind                   Nodes      Bytes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      total_nodes = total_bytes = 0;

      {
	/* Table of node kinds, sorted by ascending node count.  */
	auto_vec<unsigned> indices (all_kinds);
	for (unsigned i = 0; i < all_kinds; i++)
	  indices.quick_push (i);
	indices.qsort (tree_nodes_cmp);

	for (unsigned i = 0; i < (int) all_kinds; i++)
	  {
	    unsigned j = indices[i];
	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
		     SIZE_AMOUNT (tree_node_sizes[j]));
	    total_nodes += tree_node_counts[j];
	    total_bytes += tree_node_sizes[j];
	  }
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      }

      {
	/* Table of individual tree codes, sorted by ascending count.  */
	fprintf (stderr, "Code                              Nodes\n");
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);

	auto_vec<unsigned> indices (MAX_TREE_CODES);
	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
	  indices.quick_push (i);
	indices.qsort (tree_codes_cmp);

	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
	  {
	    unsigned j = indices[i];
	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
		     get_tree_code_name ((enum tree_code) j),
		     SIZE_AMOUNT (tree_code_counts[j]));
	  }
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
	fprintf (stderr, "\n");
	ssanames_print_statistics ();
	fprintf (stderr, "\n");
	phinodes_print_statistics ();
	fprintf (stderr, "\n");
      }
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  /* These summaries are available regardless of GATHER_STATISTICS.  */
  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}
8496 \f
8497 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8498
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* CRC-32 with the IEEE 802.3 polynomial 0x04c11db7, folded in one
     nibble at a time via a 16-entry syndrome table.  This relies on
     the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the low BYTES bytes of VALUE in the 32-bit word.  */
  value <<= (32 - bytes * 8);

  /* Fold in two nibbles per byte, most significant nibble first.  */
  for (unsigned nibbles = bytes * 2; nibbles--; value <<= 4)
    chksum = (chksum << 4) ^ syndromes[((value ^ chksum) >> 28) & 0xf];

  return chksum;
}
8528
/* Generate a crc32 of a string.  The NUL terminator is included in
   the checksum, matching the historical behavior.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  for (;;)
    {
      chksum = crc32_byte (chksum, *string);
      if (*string++ == 0)
	break;
    }
  return chksum;
}
8539
/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    {
      /* Alphanumerics are always acceptable in a symbol name.  */
      int valid = ISALNUM (*p);
#ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
      valid |= *p == '$';
#endif
#ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
      valid |= *p == '.';
#endif
      if (!valid)
	*p = '_';
    }
}
8557
8558 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8559
8560 /* Create a unique anonymous identifier. The identifier is still a
8561 valid assembly label. */
8562
8563 tree
8564 make_anon_name ()
8565 {
8566 const char *fmt =
8567 #if !defined (NO_DOT_IN_LABEL)
8568 "."
8569 #elif !defined (NO_DOLLAR_IN_LABEL)
8570 "$"
8571 #else
8572 "_"
8573 #endif
8574 "_anon_%d";
8575
8576 char buf[24];
8577 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8578 gcc_checking_assert (len < int (sizeof (buf)));
8579
8580 tree id = get_identifier_with_length (buf, len);
8581 IDENTIFIER_ANON_P (id) = true;
8582
8583 return id;
8584 }
8585
8586 /* Generate a name for a special-purpose function.
8587 The generated name may need to be unique across the whole link.
8588 Changes to this function may also require corresponding changes to
8589 xstrdup_mask_random.
8590 TYPE is some string to identify the purpose of this function to the
8591 linker or collect2; it must start with an uppercase letter,
8592 one of:
8593 I - for constructors
8594 D - for destructors
8595 N - for C++ anonymous namespaces
8596 F - for DWARF unwind frame information. */
8597
8598 tree
8599 get_file_function_name (const char *type)
8600 {
8601 char *buf;
8602 const char *p;
8603 char *q;
8604
8605 /* If we already have a name we know to be unique, just use that. */
8606 if (first_global_object_name)
8607 p = q = ASTRDUP (first_global_object_name);
8608 /* If the target is handling the constructors/destructors, they
8609 will be local to this file and the name is only necessary for
8610 debugging purposes.
8611 We also assign sub_I and sub_D sufixes to constructors called from
8612 the global static constructors. These are always local. */
8613 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8614 || (startswith (type, "sub_")
8615 && (type[4] == 'I' || type[4] == 'D')))
8616 {
8617 const char *file = main_input_filename;
8618 if (! file)
8619 file = LOCATION_FILE (input_location);
8620 /* Just use the file's basename, because the full pathname
8621 might be quite long. */
8622 p = q = ASTRDUP (lbasename (file));
8623 }
8624 else
8625 {
8626 /* Otherwise, the name must be unique across the entire link.
8627 We don't have anything that we know to be unique to this translation
8628 unit, so use what we do have and throw in some randomness. */
8629 unsigned len;
8630 const char *name = weak_global_object_name;
8631 const char *file = main_input_filename;
8632
8633 if (! name)
8634 name = "";
8635 if (! file)
8636 file = LOCATION_FILE (input_location);
8637
8638 len = strlen (file);
8639 q = (char *) alloca (9 + 19 + len + 1);
8640 memcpy (q, file, len + 1);
8641
8642 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8643 crc32_string (0, name), get_random_seed (false));
8644
8645 p = q;
8646 }
8647
8648 clean_symbol_name (q);
8649 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8650 + strlen (type));
8651
8652 /* Set up the name of the file-level functions we may need.
8653 Use a global object (which is already required to be unique over
8654 the program) rather than the file name (which imposes extra
8655 constraints). */
8656 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8657
8658 return get_identifier (buf);
8659 }
8660 \f
8661 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8662
8663 /* Complain that the tree code of NODE does not match the expected 0
8664 terminated list of trailing codes. The trailing code list can be
8665 empty, for a more vague error message. FILE, LINE, and FUNCTION
8666 are of the caller. */
8667
8668 void
8669 tree_check_failed (const_tree node, const char *file,
8670 int line, const char *function, ...)
8671 {
8672 va_list args;
8673 const char *buffer;
8674 unsigned length = 0;
8675 enum tree_code code;
8676
8677 va_start (args, function);
8678 while ((code = (enum tree_code) va_arg (args, int)))
8679 length += 4 + strlen (get_tree_code_name (code));
8680 va_end (args);
8681 if (length)
8682 {
8683 char *tmp;
8684 va_start (args, function);
8685 length += strlen ("expected ");
8686 buffer = tmp = (char *) alloca (length);
8687 length = 0;
8688 while ((code = (enum tree_code) va_arg (args, int)))
8689 {
8690 const char *prefix = length ? " or " : "expected ";
8691
8692 strcpy (tmp + length, prefix);
8693 length += strlen (prefix);
8694 strcpy (tmp + length, get_tree_code_name (code));
8695 length += strlen (get_tree_code_name (code));
8696 }
8697 va_end (args);
8698 }
8699 else
8700 buffer = "unexpected node";
8701
8702 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8703 buffer, get_tree_code_name (TREE_CODE (node)),
8704 function, trim_filename (file), line);
8705 }
8706
8707 /* Complain that the tree code of NODE does match the expected 0
8708 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8709 the caller. */
8710
8711 void
8712 tree_not_check_failed (const_tree node, const char *file,
8713 int line, const char *function, ...)
8714 {
8715 va_list args;
8716 char *buffer;
8717 unsigned length = 0;
8718 enum tree_code code;
8719
8720 va_start (args, function);
8721 while ((code = (enum tree_code) va_arg (args, int)))
8722 length += 4 + strlen (get_tree_code_name (code));
8723 va_end (args);
8724 va_start (args, function);
8725 buffer = (char *) alloca (length);
8726 length = 0;
8727 while ((code = (enum tree_code) va_arg (args, int)))
8728 {
8729 if (length)
8730 {
8731 strcpy (buffer + length, " or ");
8732 length += 4;
8733 }
8734 strcpy (buffer + length, get_tree_code_name (code));
8735 length += strlen (get_tree_code_name (code));
8736 }
8737 va_end (args);
8738
8739 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8740 buffer, get_tree_code_name (TREE_CODE (node)),
8741 function, trim_filename (file), line);
8742 }
8743
8744 /* Similar to tree_check_failed, except that we check for a class of tree
8745 code, given in CL. */
8746
8747 void
8748 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8749 const char *file, int line, const char *function)
8750 {
8751 internal_error
8752 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8753 TREE_CODE_CLASS_STRING (cl),
8754 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8755 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8756 }
8757
8758 /* Similar to tree_check_failed, except that instead of specifying a
8759 dozen codes, use the knowledge that they're all sequential. */
8760
8761 void
8762 tree_range_check_failed (const_tree node, const char *file, int line,
8763 const char *function, enum tree_code c1,
8764 enum tree_code c2)
8765 {
8766 char *buffer;
8767 unsigned length = 0;
8768 unsigned int c;
8769
8770 for (c = c1; c <= c2; ++c)
8771 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8772
8773 length += strlen ("expected ");
8774 buffer = (char *) alloca (length);
8775 length = 0;
8776
8777 for (c = c1; c <= c2; ++c)
8778 {
8779 const char *prefix = length ? " or " : "expected ";
8780
8781 strcpy (buffer + length, prefix);
8782 length += strlen (prefix);
8783 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8784 length += strlen (get_tree_code_name ((enum tree_code) c));
8785 }
8786
8787 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8788 buffer, get_tree_code_name (TREE_CODE (node)),
8789 function, trim_filename (file), line);
8790 }
8791
8792
8793 /* Similar to tree_check_failed, except that we check that a tree does
8794 not have the specified code, given in CL. */
8795
8796 void
8797 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8798 const char *file, int line, const char *function)
8799 {
8800 internal_error
8801 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8802 TREE_CODE_CLASS_STRING (cl),
8803 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8804 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8805 }
8806
8807
8808 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8809
8810 void
8811 omp_clause_check_failed (const_tree node, const char *file, int line,
8812 const char *function, enum omp_clause_code code)
8813 {
8814 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8815 "in %s, at %s:%d",
8816 omp_clause_code_name[code],
8817 get_tree_code_name (TREE_CODE (node)),
8818 function, trim_filename (file), line);
8819 }
8820
8821
8822 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8823
8824 void
8825 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8826 const char *function, enum omp_clause_code c1,
8827 enum omp_clause_code c2)
8828 {
8829 char *buffer;
8830 unsigned length = 0;
8831 unsigned int c;
8832
8833 for (c = c1; c <= c2; ++c)
8834 length += 4 + strlen (omp_clause_code_name[c]);
8835
8836 length += strlen ("expected ");
8837 buffer = (char *) alloca (length);
8838 length = 0;
8839
8840 for (c = c1; c <= c2; ++c)
8841 {
8842 const char *prefix = length ? " or " : "expected ";
8843
8844 strcpy (buffer + length, prefix);
8845 length += strlen (prefix);
8846 strcpy (buffer + length, omp_clause_code_name[c]);
8847 length += strlen (omp_clause_code_name[c]);
8848 }
8849
8850 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8851 buffer, omp_clause_code_name[TREE_CODE (node)],
8852 function, trim_filename (file), line);
8853 }
8854
8855
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names of the tree-structure variants, indexed by
   tree_node_structure_enum; expanded from treestruct.def.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value EN to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8865
8866 /* Similar to tree_class_check_failed, except that we check for
8867 whether CODE contains the tree structure identified by EN. */
8868
8869 void
8870 tree_contains_struct_check_failed (const_tree node,
8871 const enum tree_node_structure_enum en,
8872 const char *file, int line,
8873 const char *function)
8874 {
8875 internal_error
8876 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8877 TS_ENUM_NAME (en),
8878 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8879 }
8880
8881
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) vector of elements.  IDX is the
   (zero-based) element index accessed and LEN the actual number of
   elements; FILE, LINE and FUNCTION are of the caller.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
8894
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  IDX is the (zero-based) element index
   accessed and LEN the actual number of elements; FILE, LINE and
   FUNCTION are of the caller.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
8906
8907 /* Similar to above, except that the check is for the bounds of the operand
8908 vector of an expression node EXP. */
8909
8910 void
8911 tree_operand_check_failed (int idx, const_tree exp, const char *file,
8912 int line, const char *function)
8913 {
8914 enum tree_code code = TREE_CODE (exp);
8915 internal_error
8916 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8917 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
8918 function, trim_filename (file), line);
8919 }
8920
8921 /* Similar to above, except that the check is for the number of
8922 operands of an OMP_CLAUSE node. */
8923
8924 void
8925 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
8926 int line, const char *function)
8927 {
8928 internal_error
8929 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8930 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
8931 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
8932 trim_filename (file), line);
8933 }
8934 #endif /* ENABLE_TREE_CHECKING */
8935 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Vector types are built on the main variant of the element type;
     INNERTYPE's qualifiers/attributes are reapplied at the end.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  /* Either inherit structural equality from the element type, or build
     a canonical vector type.  The recursive call passes VOIDmode, which
     also serves to terminate the recursion (see the mode test below).  */
  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share an existing identical vector type if one was already built;
     hashing happens after layout so size/alignment are final.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
8974
8975 static tree
8976 make_or_reuse_type (unsigned size, int unsignedp)
8977 {
8978 int i;
8979
8980 if (size == INT_TYPE_SIZE)
8981 return unsignedp ? unsigned_type_node : integer_type_node;
8982 if (size == CHAR_TYPE_SIZE)
8983 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
8984 if (size == SHORT_TYPE_SIZE)
8985 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
8986 if (size == LONG_TYPE_SIZE)
8987 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
8988 if (size == LONG_LONG_TYPE_SIZE)
8989 return (unsignedp ? long_long_unsigned_type_node
8990 : long_long_integer_type_node);
8991
8992 for (i = 0; i < NUM_INT_N_ENTS; i ++)
8993 if (size == int_n_data[i].bitsize
8994 && int_n_enabled_p[i])
8995 return (unsignedp ? int_n_trees[i].unsigned_type
8996 : int_n_trees[i].signed_type);
8997
8998 if (unsignedp)
8999 return make_unsigned_type (size);
9000 else
9001 return make_signed_type (size);
9002 }
9003
9004 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9005
9006 static tree
9007 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9008 {
9009 if (satp)
9010 {
9011 if (size == SHORT_FRACT_TYPE_SIZE)
9012 return unsignedp ? sat_unsigned_short_fract_type_node
9013 : sat_short_fract_type_node;
9014 if (size == FRACT_TYPE_SIZE)
9015 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9016 if (size == LONG_FRACT_TYPE_SIZE)
9017 return unsignedp ? sat_unsigned_long_fract_type_node
9018 : sat_long_fract_type_node;
9019 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9020 return unsignedp ? sat_unsigned_long_long_fract_type_node
9021 : sat_long_long_fract_type_node;
9022 }
9023 else
9024 {
9025 if (size == SHORT_FRACT_TYPE_SIZE)
9026 return unsignedp ? unsigned_short_fract_type_node
9027 : short_fract_type_node;
9028 if (size == FRACT_TYPE_SIZE)
9029 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9030 if (size == LONG_FRACT_TYPE_SIZE)
9031 return unsignedp ? unsigned_long_fract_type_node
9032 : long_fract_type_node;
9033 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9034 return unsignedp ? unsigned_long_long_fract_type_node
9035 : long_long_fract_type_node;
9036 }
9037
9038 return make_fract_type (size, unsignedp, satp);
9039 }
9040
9041 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9042
9043 static tree
9044 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9045 {
9046 if (satp)
9047 {
9048 if (size == SHORT_ACCUM_TYPE_SIZE)
9049 return unsignedp ? sat_unsigned_short_accum_type_node
9050 : sat_short_accum_type_node;
9051 if (size == ACCUM_TYPE_SIZE)
9052 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9053 if (size == LONG_ACCUM_TYPE_SIZE)
9054 return unsignedp ? sat_unsigned_long_accum_type_node
9055 : sat_long_accum_type_node;
9056 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9057 return unsignedp ? sat_unsigned_long_long_accum_type_node
9058 : sat_long_long_accum_type_node;
9059 }
9060 else
9061 {
9062 if (size == SHORT_ACCUM_TYPE_SIZE)
9063 return unsignedp ? unsigned_short_accum_type_node
9064 : short_accum_type_node;
9065 if (size == ACCUM_TYPE_SIZE)
9066 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9067 if (size == LONG_ACCUM_TYPE_SIZE)
9068 return unsignedp ? unsigned_long_accum_type_node
9069 : long_accum_type_node;
9070 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9071 return unsignedp ? unsigned_long_long_accum_type_node
9072 : long_long_accum_type_node;
9073 }
9074
9075 return make_accum_type (size, unsignedp, satp);
9076 }
9077
9078
9079 /* Create an atomic variant node for TYPE. This routine is called
9080 during initialization of data types to create the 5 basic atomic
9081 types. The generic build_variant_type function requires these to
9082 already be set up in order to function properly, so cannot be
9083 called from there. If ALIGN is non-zero, then ensure alignment is
9084 overridden to this value. */
9085
9086 static tree
9087 build_atomic_base (tree type, unsigned int align)
9088 {
9089 tree t;
9090
9091 /* Make sure its not already registered. */
9092 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9093 return t;
9094
9095 t = build_variant_type_copy (type);
9096 set_type_quals (t, TYPE_QUAL_ATOMIC);
9097
9098 if (align)
9099 SET_TYPE_ALIGN (t, align);
9100
9101 return t;
9102 }
9103
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry
   gives the N in _FloatN/_FloatNx and whether the type is one of the
   extended (_FloatNx) kinds.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
{
  { 16, false },   /* _Float16 */
  { 32, false },   /* _Float32 */
  { 64, false },   /* _Float64 */
  { 128, false },  /* _Float128 */
  { 32, true },    /* _Float32x */
  { 64, true },    /* _Float64x */
  { 128, true },   /* _Float128x */
};
9116
9117
/* Create nodes for all integer types (and error_mark_node) using the sizes
   of C datatypes.  SIGNED_CHAR specifies whether char is signed.  Also
   builds the boolean, size_t/ptrdiff_t, floating-point, fixed-point,
   atomic, complex and va_list nodes that the rest of the compiler relies
   on; initialization order matters, as later nodes are built from
   earlier ones.  */

void
build_common_tree_nodes (bool signed_char)
{
  int i;

  /* error_mark_node is its own type so error propagation never
     dereferences a NULL TREE_TYPE.  */
  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build the __intN types; only the enabled ones are entered in
     integer_types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.
     Its precision is forced to 1 even when its mode is wider.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  SIZE_TYPE is a target-supplied
     spelling; fall back to matching it against the __intN names.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* Build the _FloatN/_FloatNx nodes for every width the target
     supports.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);
  if (targetm.scalar_mode_supported_p (TImode))
    uint128_type_node = make_or_reuse_type (128, 1);

  /* Decimal float types. */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* As MAKE_FIXED_TYPE_NODE, but for the short_/long_/long_long_ width
   prefixes.  */
#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

  /* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

  /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
		(GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
9457
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  /* Simple bit flags map directly onto decl/tree bits.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* Noreturn functions are historically encoded as "volatile" decls.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* These flags are represented as attributes prepended to the decl's
     existing attribute list.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* ECF_RET1 ("returns argument 1") is expressed via a "fn spec" string.  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
		   DECL_ATTRIBUTES (decl));
  /* TM purity lives on the type, and only matters when TM is enabled.  */
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
9499
9500
9501 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9502
9503 static void
9504 local_define_builtin (const char *name, tree type, enum built_in_function code,
9505 const char *library_name, int ecf_flags)
9506 {
9507 tree decl;
9508
9509 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9510 library_name, NULL_TREE);
9511 set_call_expr_flags (decl, ecf_flags);
9512
9513 set_builtin_decl (code, decl, true);
9514 }
9515
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* __builtin_clear_padding (ptr, ptr, int): defined here only when no
     front end has already declared it.  */
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
    {
      ftype = build_function_type_list (void_type_node,
					ptr_type_node,
					ptr_type_node,
					integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_clear_padding", ftype,
			    BUILT_IN_CLEAR_PADDING,
			    "__builtin_clear_padding",
			    ECF_LEAF | ECF_NOTHROW);
    }

  /* Control-flow terminators: __builtin_unreachable and abort.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Memory block builtins the middle-end synthesizes calls to.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  /* Internal alloca variants carrying an alignment (and a maximum size);
     these are always defined here.  */
  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline / descriptor support for nested functions.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
			  BUILT_IN_CLEAR_CACHE,
			  "__clear_cache",
			  ECF_NOTHROW);

  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  /* setjmp/longjmp lowering support.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Specialized equality-only comparison builtins used by string folding;
     all share the (const void *, const void *, size_t) signature.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* Exception resumption; the library entry point differs for SJLJ
     versus table-driven unwinding.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions hooks; no special ECF flags.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	/* The libcalls take the real and imaginary parts separately:
	   (re1, im1, re2, im2) -> complex result.  */
	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to form e.g. "__mulsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
9810
9811 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9812 better way.
9813
9814 If we requested a pointer to a vector, build up the pointers that
9815 we stripped off while looking for the inner type. Similarly for
9816 return values from functions.
9817
9818 The argument TYPE is the top of the chain, and BOTTOM is the
9819 new type which we will point to. */
9820
9821 tree
9822 reconstruct_complex_type (tree type, tree bottom)
9823 {
9824 tree inner, outer;
9825
9826 if (TREE_CODE (type) == POINTER_TYPE)
9827 {
9828 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9829 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9830 TYPE_REF_CAN_ALIAS_ALL (type));
9831 }
9832 else if (TREE_CODE (type) == REFERENCE_TYPE)
9833 {
9834 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9835 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9836 TYPE_REF_CAN_ALIAS_ALL (type));
9837 }
9838 else if (TREE_CODE (type) == ARRAY_TYPE)
9839 {
9840 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9841 outer = build_array_type (inner, TYPE_DOMAIN (type));
9842 }
9843 else if (TREE_CODE (type) == FUNCTION_TYPE)
9844 {
9845 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9846 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9847 }
9848 else if (TREE_CODE (type) == METHOD_TYPE)
9849 {
9850 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9851 /* The build_method_type_directly() routine prepends 'this' to argument list,
9852 so we must compensate by getting rid of it. */
9853 outer
9854 = build_method_type_directly
9855 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9856 inner,
9857 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9858 }
9859 else if (TREE_CODE (type) == OFFSET_TYPE)
9860 {
9861 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9862 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9863 }
9864 else
9865 return bottom;
9866
9867 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9868 TYPE_QUALS (type));
9869 }
9870
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      /* A true vector mode carries its own element count.  */
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      /* Derive the element count from the integer mode's total width.  */
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      /* Other mode classes (including BLKmode) are not supported here.  */
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
9904
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  /* VOIDmode lets make_vector_type choose an appropriate mode itself.  */
  return make_vector_type (innertype, nunits, VOIDmode);
}
9913
9914 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9915
9916 tree
9917 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9918 {
9919 gcc_assert (mask_mode != BLKmode);
9920
9921 unsigned HOST_WIDE_INT esize;
9922 if (VECTOR_MODE_P (mask_mode))
9923 {
9924 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9925 esize = vector_element_size (vsize, nunits);
9926 }
9927 else
9928 esize = 1;
9929
9930 tree bool_type = build_nonstandard_boolean_type (esize);
9931
9932 return make_vector_type (bool_type, nunits, mask_mode);
9933 }
9934
9935 /* Build a vector type that holds one boolean result for each element of
9936 vector type VECTYPE. The public interface for this operation is
9937 truth_type_for. */
9938
9939 static tree
9940 build_truth_vector_type_for (tree vectype)
9941 {
9942 machine_mode vector_mode = TYPE_MODE (vectype);
9943 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
9944
9945 machine_mode mask_mode;
9946 if (VECTOR_MODE_P (vector_mode)
9947 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
9948 return build_truth_vector_type_for_mode (nunits, mask_mode);
9949
9950 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
9951 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
9952 tree bool_type = build_nonstandard_boolean_type (esize);
9953
9954 return make_vector_type (bool_type, nunits, VOIDmode);
9955 }
9956
/* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
   set.  */

tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  /* Share the canonical type and splice the new variant directly after
     T on its variant chain so the lookup above finds it next time.  */
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
9982
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

static poly_wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  /* COUNT is I's position within its pattern (0-based across pattern
     repetitions).  */
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements:
     extrapolate the arithmetic series v1, v2, v2 + diff, ...  The final
     encoded element is at position COUNT == 2 within its pattern, hence
     the (count - 2) scaling of the step.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
  return wi::to_poly_wide (v2) + (count - 2) * diff;
}
10010
10011 /* Return the value of element I of VECTOR_CST T. */
10012
10013 tree
10014 vector_cst_elt (const_tree t, unsigned int i)
10015 {
10016 /* First handle elements that are directly encoded. */
10017 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10018 if (i < encoded_nelts)
10019 return VECTOR_CST_ENCODED_ELT (t, i);
10020
10021 /* If there are no steps, the final encoded value is the right one. */
10022 if (!VECTOR_CST_STEPPED_P (t))
10023 {
10024 /* Identify the pattern that contains element I and work out the index of
10025 the last encoded element for that pattern. */
10026 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10027 unsigned int pattern = i % npatterns;
10028 unsigned int final_i = encoded_nelts - npatterns + pattern;
10029 return VECTOR_CST_ENCODED_ELT (t, final_i);
10030 }
10031
10032 /* Otherwise work out the value from the last two encoded elements. */
10033 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10034 vector_cst_int_elt (t, i));
10035 }
10036
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a STRING_CST, set by the MEM_REF case below.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Both the real and imaginary parts must be +0.0 (or integer 0).  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* Only a uniform (single repeated element) vector of zeros counts.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber carries no value at all, so it is neither known zero
	   nor known nonzero.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* Recurse into every element; NONZERO propagates upward.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Handle &"string"[off]: reduce to the STRING_CST case with a
	   starting offset.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      /* Unknown node: give the conservative "don't know" answer.  */
      return false;
    }
}
10164
10165 /* Return true if EXPR is an initializer expression in which every element
10166 is a constant that is numerically equal to 0 or 1. The elements do not
10167 need to be equal to each other. */
10168
10169 bool
10170 initializer_each_zero_or_onep (const_tree expr)
10171 {
10172 STRIP_ANY_LOCATION_WRAPPER (expr);
10173
10174 switch (TREE_CODE (expr))
10175 {
10176 case INTEGER_CST:
10177 return integer_zerop (expr) || integer_onep (expr);
10178
10179 case REAL_CST:
10180 return real_zerop (expr) || real_onep (expr);
10181
10182 case VECTOR_CST:
10183 {
10184 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10185 if (VECTOR_CST_STEPPED_P (expr)
10186 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10187 return false;
10188
10189 for (unsigned int i = 0; i < nelts; ++i)
10190 {
10191 tree elt = vector_cst_elt (expr, i);
10192 if (!initializer_each_zero_or_onep (elt))
10193 return false;
10194 }
10195
10196 return true;
10197 }
10198
10199 default:
10200 return false;
10201 }
10202 }
10203
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* A single duplicated pattern means every element is the same.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* After the loop I is the number of elements visited; a short
	 CONSTRUCTOR would leave trailing elements implicitly zero, so it
	 is only uniform if all NELTS elements were explicit.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
10252
10253 /* If the argument is INTEGER_CST, return it. If the argument is vector
10254 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10255 return NULL_TREE.
10256 Look through location wrappers. */
10257
10258 tree
10259 uniform_integer_cst_p (tree t)
10260 {
10261 STRIP_ANY_LOCATION_WRAPPER (t);
10262
10263 if (TREE_CODE (t) == INTEGER_CST)
10264 return t;
10265
10266 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10267 {
10268 t = uniform_vector_p (t);
10269 if (t && TREE_CODE (t) == INTEGER_CST)
10270 return t;
10271 }
10272
10273 return NULL_TREE;
10274 }
10275
/* Checks to see if T is a constant or a constant vector and if each element E
   adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE.  */

tree
bitmask_inv_cst_vector_p (tree t)
{

  tree_code code = TREE_CODE (t);
  tree type = TREE_TYPE (t);

  if (!INTEGRAL_TYPE_P (type)
      && !VECTOR_INTEGER_TYPE_P (type))
    return NULL_TREE;

  unsigned HOST_WIDE_INT nelts = 1;
  tree cst;
  unsigned int idx = 0;
  /* True when T is a scalar or a uniform vector: then only one element
     needs checking and a uniform result can be built directly.  */
  bool uniform = uniform_integer_cst_p (t);
  tree newtype = unsigned_type_for (type);
  tree_vector_builder builder;
  if (code == INTEGER_CST)
    cst = t;
  else
    {
      /* Variable-length vectors cannot be walked element by element.  */
      if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
	return NULL_TREE;

      cst = vector_cst_elt (t, 0);
      builder.new_vector (newtype, nelts, 1);
    }

  tree ty = unsigned_type_for (TREE_TYPE (cst));

  /* do/while so the scalar case (nelts == 1) runs the body exactly once.  */
  do
    {
      if (idx > 0)
	cst = vector_cst_elt (t, idx);
      wide_int icst = wi::to_wide (cst);
      wide_int inv = wi::bit_not (icst);
      /* ~E + 1 must be a power of two, i.e. have exactly one bit set.  */
      icst = wi::add (1, inv);
      if (wi::popcount (icst) != 1)
	return NULL_TREE;

      tree newcst = wide_int_to_tree (ty, inv);

      if (uniform)
	return build_uniform_cst (newtype, newcst);

      builder.quick_push (newcst);
    }
  while (++idx < nelts);

  return builder.build ();
}
10330
/* If VECTOR_CST T has a single nonzero element, return the index of that
   element, otherwise return -1.  */

int
single_nonzero_element (const_tree t)
{
  /* NELTS is the number of elements to inspect; REPEAT_NELTS bounds where
     a nonzero element may legitimately appear (beyond it the encoding
     repeats, so a nonzero there would occur more than once).  */
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      /* Variable-length duplicate encoding: inspecting the encoded
	 elements is enough, but only the leading NPATTERNS positions
	 occur exactly once.  */
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* Fail on a second nonzero element, or on one that repeats.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
10362
10363 /* Build an empty statement at location LOC. */
10364
10365 tree
10366 build_empty_stmt (location_t loc)
10367 {
10368 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10369 SET_EXPR_LOCATION (t, loc);
10370 return t;
10371 }
10372
10373
/* Build an OMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* OMP clauses are variable-sized: the struct already contains room for
     one operand, so add space for the remaining LENGTH - 1.  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
10396
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* tree_exp already has one operand slot; add space for the rest.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10423
10424 /* Helper function for build_call_* functions; build a CALL_EXPR with
10425 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10426 the argument slots. */
10427
10428 static tree
10429 build_call_1 (tree return_type, tree fn, int nargs)
10430 {
10431 tree t;
10432
10433 t = build_vl_exp (CALL_EXPR, nargs + 3);
10434 TREE_TYPE (t) = return_type;
10435 CALL_EXPR_FN (t) = fn;
10436 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10437
10438 return t;
10439 }
10440
10441 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10442 FN and a null static chain slot. NARGS is the number of call arguments
10443 which are specified as "..." arguments. */
10444
10445 tree
10446 build_call_nary (tree return_type, tree fn, int nargs, ...)
10447 {
10448 tree ret;
10449 va_list args;
10450 va_start (args, nargs);
10451 ret = build_call_valist (return_type, fn, nargs, args);
10452 va_end (args);
10453 return ret;
10454 }
10455
10456 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10457 FN and a null static chain slot. NARGS is the number of call arguments
10458 which are specified as a va_list ARGS. */
10459
10460 tree
10461 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10462 {
10463 tree t;
10464 int i;
10465
10466 t = build_call_1 (return_type, fn, nargs);
10467 for (i = 0; i < nargs; i++)
10468 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10469 process_call_operands (t);
10470 return t;
10471 }
10472
10473 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10474 FN and a null static chain slot. NARGS is the number of call arguments
10475 which are specified as a tree array ARGS. */
10476
10477 tree
10478 build_call_array_loc (location_t loc, tree return_type, tree fn,
10479 int nargs, const tree *args)
10480 {
10481 tree t;
10482 int i;
10483
10484 t = build_call_1 (return_type, fn, nargs);
10485 for (i = 0; i < nargs; i++)
10486 CALL_EXPR_ARG (t, i) = args[i];
10487 process_call_operands (t);
10488 SET_EXPR_LOCATION (t, loc);
10489 return t;
10490 }
10491
10492 /* Like build_call_array, but takes a vec. */
10493
10494 tree
10495 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10496 {
10497 tree ret, t;
10498 unsigned int ix;
10499
10500 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10501 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10502 CALL_EXPR_ARG (ret, ix) = t;
10503 process_call_operands (ret);
10504 return ret;
10505 }
10506
10507 /* Conveniently construct a function call expression. FNDECL names the
10508 function to be called and N arguments are passed in the array
10509 ARGARRAY. */
10510
10511 tree
10512 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10513 {
10514 tree fntype = TREE_TYPE (fndecl);
10515 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10516
10517 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10518 }
10519
10520 /* Conveniently construct a function call expression. FNDECL names the
10521 function to be called and the arguments are passed in the vector
10522 VEC. */
10523
10524 tree
10525 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10526 {
10527 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10528 vec_safe_address (vec));
10529 }
10530
10531
10532 /* Conveniently construct a function call expression. FNDECL names the
10533 function to be called, N is the number of arguments, and the "..."
10534 parameters are the argument expressions. */
10535
10536 tree
10537 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10538 {
10539 va_list ap;
10540 tree *argarray = XALLOCAVEC (tree, n);
10541 int i;
10542
10543 va_start (ap, n);
10544 for (i = 0; i < n; i++)
10545 argarray[i] = va_arg (ap, tree);
10546 va_end (ap);
10547 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10548 }
10549
10550 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10551 varargs macros aren't supported by all bootstrap compilers. */
10552
10553 tree
10554 build_call_expr (tree fndecl, int n, ...)
10555 {
10556 va_list ap;
10557 tree *argarray = XALLOCAVEC (tree, n);
10558 int i;
10559
10560 va_start (ap, n);
10561 for (i = 0; i < n; i++)
10562 argarray[i] = va_arg (ap, tree);
10563 va_end (ap);
10564 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10565 }
10566
10567 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10568 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10569 It will get gimplified later into an ordinary internal function. */
10570
10571 tree
10572 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10573 tree type, int n, const tree *args)
10574 {
10575 tree t = build_call_1 (type, NULL_TREE, n);
10576 for (int i = 0; i < n; ++i)
10577 CALL_EXPR_ARG (t, i) = args[i];
10578 SET_EXPR_LOCATION (t, loc);
10579 CALL_EXPR_IFN (t) = ifn;
10580 process_call_operands (t);
10581 return t;
10582 }
10583
10584 /* Build internal call expression. This is just like CALL_EXPR, except
10585 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10586 internal function. */
10587
10588 tree
10589 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10590 tree type, int n, ...)
10591 {
10592 va_list ap;
10593 tree *argarray = XALLOCAVEC (tree, n);
10594 int i;
10595
10596 va_start (ap, n);
10597 for (i = 0; i < n; i++)
10598 argarray[i] = va_arg (ap, tree);
10599 va_end (ap);
10600 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10601 }
10602
10603 /* Return a function call to FN, if the target is guaranteed to support it,
10604 or null otherwise.
10605
10606 N is the number of arguments, passed in the "...", and TYPE is the
10607 type of the return value. */
10608
10609 tree
10610 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10611 int n, ...)
10612 {
10613 va_list ap;
10614 tree *argarray = XALLOCAVEC (tree, n);
10615 int i;
10616
10617 va_start (ap, n);
10618 for (i = 0; i < n; i++)
10619 argarray[i] = va_arg (ap, tree);
10620 va_end (ap);
10621 if (internal_fn_p (fn))
10622 {
10623 internal_fn ifn = as_internal_fn (fn);
10624 if (direct_internal_fn_p (ifn))
10625 {
10626 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10627 if (!direct_internal_fn_supported_p (ifn, types,
10628 OPTIMIZE_FOR_BOTH))
10629 return NULL_TREE;
10630 }
10631 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10632 }
10633 else
10634 {
10635 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10636 if (!fndecl)
10637 return NULL_TREE;
10638 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10639 }
10640 }
10641
10642 /* Return a function call to the appropriate builtin alloca variant.
10643
10644 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10645 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10646 bound for SIZE in case it is not a fixed value. */
10647
10648 tree
10649 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10650 {
10651 if (max_size >= 0)
10652 {
10653 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10654 return
10655 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10656 }
10657 else if (align > 0)
10658 {
10659 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10660 return build_call_expr (t, 2, size, size_int (align));
10661 }
10662 else
10663 {
10664 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10665 return build_call_expr (t, 1, size);
10666 }
10667 }
10668
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   the STRING_CST value is the LEN bytes at STR (the representation
   of the string, which may be wide).  Otherwise it's all zeros.  */

tree
build_string_literal (unsigned len, const char *str /* = NULL */,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  /* The array elements are const-qualified but not volatile.  */
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &str[0] rather than the bare STRING_CST: a pointer to the
     first (const) element of the array.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
10699
10700
10701
10702 /* Return true if T (assumed to be a DECL) must be assigned a memory
10703 location. */
10704
10705 bool
10706 needs_to_live_in_memory (const_tree t)
10707 {
10708 return (TREE_ADDRESSABLE (t)
10709 || is_global_var (t)
10710 || (TREE_CODE (t) == RESULT_DECL
10711 && !DECL_BY_REFERENCE (t)
10712 && aggregate_value_p (t, current_function_decl)));
10713 }
10714
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from bit BITS-1: set all higher bits if the value
	 is negative, clear them otherwise.  The two-step shift builds
	 the mask of bits at and above position BITS without ever
	 shifting by the full word width.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
10737
10738 /* If TYPE is an integral or pointer type, return an integer type with
10739 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10740 if TYPE is already an integer type of signedness UNSIGNEDP.
10741 If TYPE is a floating-point type, return an integer type with the same
10742 bitsize and with the signedness given by UNSIGNEDP; this is useful
10743 when doing bit-level operations on a floating-point value. */
10744
10745 tree
10746 signed_or_unsigned_type_for (int unsignedp, tree type)
10747 {
10748 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10749 return type;
10750
10751 if (TREE_CODE (type) == VECTOR_TYPE)
10752 {
10753 tree inner = TREE_TYPE (type);
10754 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10755 if (!inner2)
10756 return NULL_TREE;
10757 if (inner == inner2)
10758 return type;
10759 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10760 }
10761
10762 if (TREE_CODE (type) == COMPLEX_TYPE)
10763 {
10764 tree inner = TREE_TYPE (type);
10765 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10766 if (!inner2)
10767 return NULL_TREE;
10768 if (inner == inner2)
10769 return type;
10770 return build_complex_type (inner2);
10771 }
10772
10773 unsigned int bits;
10774 if (INTEGRAL_TYPE_P (type)
10775 || POINTER_TYPE_P (type)
10776 || TREE_CODE (type) == OFFSET_TYPE)
10777 bits = TYPE_PRECISION (type);
10778 else if (TREE_CODE (type) == REAL_TYPE)
10779 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10780 else
10781 return NULL_TREE;
10782
10783 return build_nonstandard_integer_type (bits, unsignedp);
10784 }
10785
10786 /* If TYPE is an integral or pointer type, return an integer type with
10787 the same precision which is unsigned, or itself if TYPE is already an
10788 unsigned integer type. If TYPE is a floating-point type, return an
10789 unsigned integer type with the same bitsize as TYPE. */
10790
10791 tree
10792 unsigned_type_for (tree type)
10793 {
10794 return signed_or_unsigned_type_for (1, type);
10795 }
10796
10797 /* If TYPE is an integral or pointer type, return an integer type with
10798 the same precision which is signed, or itself if TYPE is already a
10799 signed integer type. If TYPE is a floating-point type, return a
10800 signed integer type with the same bitsize as TYPE. */
10801
10802 tree
10803 signed_type_for (tree type)
10804 {
10805 return signed_or_unsigned_type_for (0, type);
10806 }
10807
10808 /* - For VECTOR_TYPEs:
10809 - The truth type must be a VECTOR_BOOLEAN_TYPE.
10810 - The number of elements must match (known_eq).
10811 - targetm.vectorize.get_mask_mode exists, and exactly
10812 the same mode as the truth type.
10813 - Otherwise, the truth type must be a BOOLEAN_TYPE
10814 or useless_type_conversion_p to BOOLEAN_TYPE. */
10815 bool
10816 is_truth_type_for (tree type, tree truth_type)
10817 {
10818 machine_mode mask_mode = TYPE_MODE (truth_type);
10819 machine_mode vmode = TYPE_MODE (type);
10820 machine_mode tmask_mode;
10821
10822 if (TREE_CODE (type) == VECTOR_TYPE)
10823 {
10824 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
10825 && known_eq (TYPE_VECTOR_SUBPARTS (type),
10826 TYPE_VECTOR_SUBPARTS (truth_type))
10827 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
10828 && tmask_mode == mask_mode)
10829 return true;
10830
10831 return false;
10832 }
10833
10834 return useless_type_conversion_p (boolean_type_node, truth_type);
10835 }
10836
10837 /* If TYPE is a vector type, return a signed integer vector type with the
10838 same width and number of subparts. Otherwise return boolean_type_node. */
10839
10840 tree
10841 truth_type_for (tree type)
10842 {
10843 if (TREE_CODE (type) == VECTOR_TYPE)
10844 {
10845 if (VECTOR_BOOLEAN_TYPE_P (type))
10846 return type;
10847 return build_truth_vector_type_for (type);
10848 }
10849 else
10850 return boolean_type_node;
10851 }
10852
10853 /* Returns the largest value obtainable by casting something in INNER type to
10854 OUTER type. */
10855
10856 tree
10857 upper_bound_in_type (tree outer, tree inner)
10858 {
10859 unsigned int det = 0;
10860 unsigned oprec = TYPE_PRECISION (outer);
10861 unsigned iprec = TYPE_PRECISION (inner);
10862 unsigned prec;
10863
10864 /* Compute a unique number for every combination. */
10865 det |= (oprec > iprec) ? 4 : 0;
10866 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10867 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10868
10869 /* Determine the exponent to use. */
10870 switch (det)
10871 {
10872 case 0:
10873 case 1:
10874 /* oprec <= iprec, outer: signed, inner: don't care. */
10875 prec = oprec - 1;
10876 break;
10877 case 2:
10878 case 3:
10879 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10880 prec = oprec;
10881 break;
10882 case 4:
10883 /* oprec > iprec, outer: signed, inner: signed. */
10884 prec = iprec - 1;
10885 break;
10886 case 5:
10887 /* oprec > iprec, outer: signed, inner: unsigned. */
10888 prec = iprec;
10889 break;
10890 case 6:
10891 /* oprec > iprec, outer: unsigned, inner: signed. */
10892 prec = oprec;
10893 break;
10894 case 7:
10895 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10896 prec = iprec;
10897 break;
10898 default:
10899 gcc_unreachable ();
10900 }
10901
10902 return wide_int_to_tree (outer,
10903 wi::mask (prec, false, TYPE_PRECISION (outer)));
10904 }
10905
10906 /* Returns the smallest value obtainable by casting something in INNER type to
10907 OUTER type. */
10908
10909 tree
10910 lower_bound_in_type (tree outer, tree inner)
10911 {
10912 unsigned oprec = TYPE_PRECISION (outer);
10913 unsigned iprec = TYPE_PRECISION (inner);
10914
10915 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10916 and obtain 0. */
10917 if (TYPE_UNSIGNED (outer)
10918 /* If we are widening something of an unsigned type, OUTER type
10919 contains all values of INNER type. In particular, both INNER
10920 and OUTER types have zero in common. */
10921 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10922 return build_int_cst (outer, 0);
10923 else
10924 {
10925 /* If we are widening a signed type to another signed type, we
10926 want to obtain -2^^(iprec-1). If we are keeping the
10927 precision or narrowing to a signed type, we want to obtain
10928 -2^(oprec-1). */
10929 unsigned prec = oprec > iprec ? iprec : oprec;
10930 return wide_int_to_tree (outer,
10931 wi::mask (prec - 1, true,
10932 TYPE_PRECISION (outer)));
10933 }
10934 }
10935
10936 /* Return nonzero if two operands that are suitable for PHI nodes are
10937 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10938 SSA_NAME or invariant. Note that this is strictly an optimization.
10939 That is, callers of this function can directly call operand_equal_p
10940 and get the same result, only slower. */
10941
10942 int
10943 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10944 {
10945 if (arg0 == arg1)
10946 return 1;
10947 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10948 return 0;
10949 return operand_equal_p (arg0, arg1, 0);
10950 }
10951
10952 /* Returns number of zeros at the end of binary representation of X. */
10953
10954 tree
10955 num_ending_zeros (const_tree x)
10956 {
10957 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10958 }
10959
10960
/* Walk the sub-tree NODE; if the walk yields a non-NULL value, return
   it from the enclosing function immediately.  Relies on locals
   RESULT, FUNC, DATA, PSET and LH being in scope at the expansion
   site (see walk_type_fields and walk_tree_1 below).  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
10969
/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      /* Walk the pointed-to/element type.  */
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      /* Walk the return type, then each argument type.  */
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this nodes's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      /* Walk both the member type and the enclosing base type.  */
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      /* Other type codes have no additional fields to walk.  */
      break;
    }

  return NULL_TREE;
}
11049
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

  /* Like WALK_SUBTREE, but for the last sub-tree of a node: jump back
     to the top of this function instead of recursing, keeping stack
     depth proportional to tree depth rather than chain length.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the optional language hook a chance to walk (or prune) this
     node itself.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      /* Walk the value, then tail-recurse down the chain.  */
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case VECTOR_CST:
      {
	unsigned len = vector_cst_encoded_nelts (*tp);
	if (len == 0)
	  break;
	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	/* Walk each element value; indices are not walked.  */
	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      {
	/* Walk each operand of the clause, then tail-recurse down the
	   clause chain.  */
	int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
	for (int i = 0; i < len; i++)
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      }

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  /* Call the function for the decl so e.g. copy_tree_body_r can
	     replace it with the remapped one.  */
	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
11330 #undef WALK_SUBTREE
11331
11332 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11333
11334 tree
11335 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11336 walk_tree_lh lh)
11337 {
11338 tree result;
11339
11340 hash_set<tree> pset;
11341 result = walk_tree_1 (tp, func, data, &pset, lh);
11342 return result;
11343 }
11344
11345
11346 tree
11347 tree_block (tree t)
11348 {
11349 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11350
11351 if (IS_EXPR_CODE_CLASS (c))
11352 return LOCATION_BLOCK (t->exp.locus);
11353 gcc_unreachable ();
11354 return NULL;
11355 }
11356
11357 void
11358 tree_set_block (tree t, tree b)
11359 {
11360 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11361
11362 if (IS_EXPR_CODE_CLASS (c))
11363 {
11364 t->exp.locus = set_block (t->exp.locus, b);
11365 }
11366 else
11367 gcc_unreachable ();
11368 }
11369
11370 /* Create a nameless artificial label and put it in the current
11371 function context. The label has a location of LOC. Returns the
11372 newly created label. */
11373
11374 tree
11375 create_artificial_label (location_t loc)
11376 {
11377 tree lab = build_decl (loc,
11378 LABEL_DECL, NULL_TREE, void_type_node);
11379
11380 DECL_ARTIFICIAL (lab) = 1;
11381 DECL_IGNORED_P (lab) = 1;
11382 DECL_CONTEXT (lab) = current_function_decl;
11383 return lab;
11384 }
11385
11386 /* Given a tree, try to return a useful variable name that we can use
11387 to prefix a temporary that is being assigned the value of the tree.
11388 I.E. given <temp> = &A, return A. */
11389
11390 const char *
11391 get_name (tree t)
11392 {
11393 tree stripped_decl;
11394
11395 stripped_decl = t;
11396 STRIP_NOPS (stripped_decl);
11397 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11398 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11399 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11400 {
11401 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11402 if (!name)
11403 return NULL;
11404 return IDENTIFIER_POINTER (name);
11405 }
11406 else
11407 {
11408 switch (TREE_CODE (stripped_decl))
11409 {
11410 case ADDR_EXPR:
11411 return get_name (TREE_OPERAND (stripped_decl, 0));
11412 default:
11413 return NULL;
11414 }
11415 }
11416 }
11417
11418 /* Return true if TYPE has a variable argument list. */
11419
11420 bool
11421 stdarg_p (const_tree fntype)
11422 {
11423 function_args_iterator args_iter;
11424 tree n = NULL_TREE, t;
11425
11426 if (!fntype)
11427 return false;
11428
11429 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11430 {
11431 n = t;
11432 }
11433
11434 return n != NULL_TREE && n != void_type_node;
11435 }
11436
11437 /* Return true if TYPE has a prototype. */
11438
11439 bool
11440 prototype_p (const_tree fntype)
11441 {
11442 tree t;
11443
11444 gcc_assert (fntype != NULL_TREE);
11445
11446 t = TYPE_ARG_TYPES (fntype);
11447 return (t != NULL_TREE);
11448 }
11449
11450 /* If BLOCK is inlined from an __attribute__((__artificial__))
11451 routine, return pointer to location from where it has been
11452 called. */
11453 location_t *
11454 block_nonartificial_location (tree block)
11455 {
11456 location_t *ret = NULL;
11457
11458 while (block && TREE_CODE (block) == BLOCK
11459 && BLOCK_ABSTRACT_ORIGIN (block))
11460 {
11461 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11462 if (TREE_CODE (ao) == FUNCTION_DECL)
11463 {
11464 /* If AO is an artificial inline, point RET to the
11465 call site locus at which it has been inlined and continue
11466 the loop, in case AO's caller is also an artificial
11467 inline. */
11468 if (DECL_DECLARED_INLINE_P (ao)
11469 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11470 ret = &BLOCK_SOURCE_LOCATION (block);
11471 else
11472 break;
11473 }
11474 else if (TREE_CODE (ao) != BLOCK)
11475 break;
11476
11477 block = BLOCK_SUPERCONTEXT (block);
11478 }
11479 return ret;
11480 }
11481
11482
11483 /* If EXP is inlined from an __attribute__((__artificial__))
11484 function, return the location of the original call expression. */
11485
11486 location_t
11487 tree_nonartificial_location (tree exp)
11488 {
11489 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11490
11491 if (loc)
11492 return *loc;
11493 else
11494 return EXPR_LOCATION (exp);
11495 }
11496
/* Return the location into which EXP has been inlined.  Analogous
   to tree_nonartificial_location() above but not limited to artificial
   functions declared inline.  If SYSTEM_HEADER is true, return
   the macro expansion point of the location if it's in a system header */

location_t
tree_inlined_location (tree exp, bool system_header /* = true */)
{
  location_t loc = UNKNOWN_LOCATION;

  tree block = TREE_BLOCK (exp);

  /* Walk outward through the enclosing BLOCKs.  Every block whose
     abstract origin is a FUNCTION_DECL is an inlined function body and
     BLOCK_SOURCE_LOCATION is the call site it was inlined from; LOC
     keeps the last (outermost) such call site seen.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	loc = BLOCK_SOURCE_LOCATION (block);
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }

  if (loc == UNKNOWN_LOCATION)
    {
      /* The block traversal found no inlining; use EXP's own
	 location instead.  */
      loc = EXPR_LOCATION (exp);
      if (system_header)
	/* Only consider macro expansion when the block traversal failed
	   to find a location.  Otherwise it's not relevant.  */
	return expansion_point_location_if_in_system_header (loc);
    }

  return loc;
}
11532
11533 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11534 nodes. */
11535
11536 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11537
11538 hashval_t
11539 cl_option_hasher::hash (tree x)
11540 {
11541 const_tree const t = x;
11542
11543 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11544 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11545 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11546 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11547 else
11548 gcc_unreachable ();
11549 }
11550
11551 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11552 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11553 same. */
11554
11555 bool
11556 cl_option_hasher::equal (tree x, tree y)
11557 {
11558 const_tree const xt = x;
11559 const_tree const yt = y;
11560
11561 if (TREE_CODE (xt) != TREE_CODE (yt))
11562 return 0;
11563
11564 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11565 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11566 TREE_OPTIMIZATION (yt));
11567 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11568 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11569 TREE_TARGET_OPTION (yt));
11570 else
11571 gcc_unreachable ();
11572 }
11573
/* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET.
   Nodes are shared: the same option set always yields the same tree.  */

tree
build_optimization_node (struct gcc_options *opts,
			 struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Serialize OPTS/OPTS_SET into the pre-allocated scratch node
     cl_optimization_node, which then serves as the hash-table probe.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  The scratch node has been
	 consumed by the table, so allocate a fresh one.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
11601
/* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET.
   Nodes are shared: the same option set always yields the same tree.  */

tree
build_target_option_node (struct gcc_options *opts,
			  struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Serialize OPTS/OPTS_SET into the pre-allocated scratch node
     cl_target_option_node, which then serves as the hash-table probe.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  The scratch node has been
	 consumed by the table, so allocate a fresh one.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
11629
11630 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11631 so that they aren't saved during PCH writing. */
11632
11633 void
11634 prepare_target_option_nodes_for_pch (void)
11635 {
11636 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11637 for (; iter != cl_option_hash_table->end (); ++iter)
11638 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11639 TREE_TARGET_GLOBALS (*iter) = NULL;
11640 }
11641
11642 /* Determine the "ultimate origin" of a block. */
11643
11644 tree
11645 block_ultimate_origin (const_tree block)
11646 {
11647 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11648
11649 if (origin == NULL_TREE)
11650 return NULL_TREE;
11651 else
11652 {
11653 gcc_checking_assert ((DECL_P (origin)
11654 && DECL_ORIGIN (origin) == origin)
11655 || BLOCK_ORIGIN (origin) == origin);
11656 return origin;
11657 }
11658 }
11659
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction, i.e. it merely reinterprets the value.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      /* A non-generic-space pointer only matches a pointer into the
	 same address space.  */
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather then machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
11697
11698 /* Return true iff conversion in EXP generates no instruction. Mark
11699 it inline so that we fully inline into the stripping functions even
11700 though we have two uses of this function. */
11701
11702 static inline bool
11703 tree_nop_conversion (const_tree exp)
11704 {
11705 tree outer_type, inner_type;
11706
11707 if (location_wrapper_p (exp))
11708 return true;
11709 if (!CONVERT_EXPR_P (exp)
11710 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11711 return false;
11712
11713 outer_type = TREE_TYPE (exp);
11714 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11715 if (!inner_type || inner_type == error_mark_node)
11716 return false;
11717
11718 return tree_nop_conversion_p (outer_type, inner_type);
11719 }
11720
11721 /* Return true iff conversion in EXP generates no instruction. Don't
11722 consider conversions changing the signedness. */
11723
11724 static bool
11725 tree_sign_nop_conversion (const_tree exp)
11726 {
11727 tree outer_type, inner_type;
11728
11729 if (!tree_nop_conversion (exp))
11730 return false;
11731
11732 outer_type = TREE_TYPE (exp);
11733 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11734
11735 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11736 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11737 }
11738
11739 /* Strip conversions from EXP according to tree_nop_conversion and
11740 return the resulting expression. */
11741
11742 tree
11743 tree_strip_nop_conversions (tree exp)
11744 {
11745 while (tree_nop_conversion (exp))
11746 exp = TREE_OPERAND (exp, 0);
11747 return exp;
11748 }
11749
11750 /* Strip conversions from EXP according to tree_sign_nop_conversion
11751 and return the resulting expression. */
11752
11753 tree
11754 tree_strip_sign_nop_conversions (tree exp)
11755 {
11756 while (tree_sign_nop_conversion (exp))
11757 exp = TREE_OPERAND (exp, 0);
11758 return exp;
11759 }
11760
/* Avoid any floating point extensions from EXP.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double.  NOTE(review): presumably
	 exact_real_truncate tests whether ORIG is exactly representable
	 in the narrower mode -- confirm against real.c.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  /* Only conversions can be stripped.  */
  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  /* The inner operand must itself be floating point ...  */
  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* ... in the same (binary vs. decimal) family ...  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* ... and the conversion must not be a narrowing one.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse, since extensions may be nested.  */
  return strip_float_extensions (sub);
}
11806
/* Strip out all handled components that produce invariant
   offsets.  Returns the innermost reference, or NULL if some component
   has a variable or non-default offset.  */

const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Require a constant index and default lower bound (operand 2)
	     and element size (operand 3); otherwise the offset is not
	     invariant.  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* A non-default field offset (operand 2) is not invariant.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      /* Descend to the object the component refers to.  */
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
11837
11838 static GTY(()) tree gcc_eh_personality_decl;
11839
11840 /* Return the GCC personality function decl. */
11841
11842 tree
11843 lhd_gcc_personality (void)
11844 {
11845 if (!gcc_eh_personality_decl)
11846 gcc_eh_personality_decl = build_personality_function ("gcc");
11847 return gcc_eh_personality_decl;
11848 }
11849
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing an virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  FOR_DUMP_P is true when being called from
   the dump routines.  */

bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* An OBJ_TYPE_REF is expected to be a pointer to the callee type;
     anything else trips the checking asserts below.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* A plain FUNCTION_TYPE callee is not a C++ method call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
11876
11877 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11878
11879 static tree
11880 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
11881 {
11882 unsigned int i;
11883 tree base_binfo, b;
11884
11885 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11886 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
11887 && types_same_for_odr (TREE_TYPE (base_binfo), type))
11888 return base_binfo;
11889 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
11890 return b;
11891 return NULL;
11892 }
11893
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      /* Arrived at the expected type: BINFO is the answer.  */
      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial (base/vtable) field whose bit range
	 contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.
	     Note the precedence here: this computes
	     byte_offset + pos / BITS_PER_UNIT, which equals
	     (byte_offset * BITS_PER_UNIT + pos) / BITS_PER_UNIT since
	     byte_offset is a whole number of bytes.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* Prefer a direct base binfo at the computed byte offset.  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    /* Otherwise search the base hierarchy recursively.  */
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field, making OFFSET relative to it.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
11955
11956 /* Returns true if X is a typedef decl. */
11957
11958 bool
11959 is_typedef_decl (const_tree x)
11960 {
11961 return (x && TREE_CODE (x) == TYPE_DECL
11962 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11963 }
11964
11965 /* Returns true iff TYPE is a type variant created for a typedef. */
11966
11967 bool
11968 typedef_variant_p (const_tree type)
11969 {
11970 return is_typedef_decl (TYPE_NAME (type));
11971 }
11972
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any string a previous call made us own.  */
  if (m_owned)
    free (m_str);

  /* Until a control character is found, alias the caller's string
     without taking ownership.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (!ISCNTRL (c))
	{
	  /* Ordinary character: copy only if we already switched to
	     the escaping buffer.  */
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Newlines are kept literally when diagnostics wrap lines.  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Each input byte expands to at most
		 two output bytes, hence len * 2 + 1.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default:   escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      /* Escaping happened: terminate the buffer and take ownership.  */
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12046
/* Warn about a use of an identifier which was marked deprecated.  Returns
   whether a warning was given.  NODE is the decl or type being used;
   ATTR, if non-NULL, is its attribute list (otherwise it is looked up
   from NODE).  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* Locate the attribute list on the decl or the type's stub decl.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The optional attribute argument is the user-supplied message;
     escape control characters for safe diagnostic output.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* For types, try to find a printable name to mention.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  /* Anonymous type: warn without naming it.  */
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
12130
/* Error out with an identifier which was marked 'unavailable'.
   Mirrors warn_deprecated_use above, but emits a hard error and is
   unconditional (not gated on any warning option).  */
void
error_unavailable_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0)
    return;

  /* Locate the attribute list on the decl or the type's stub decl.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("unavailable",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("unavailable", attr);

  /* The optional attribute argument is the user-supplied message;
     escape control characters for safe diagnostic output.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	error ("%qD is unavailable: %s", node, (const char *) msg);
      else
	error ("%qD is unavailable", node);
      inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* For types, try to find a printable name to mention.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    error ("%qE is unavailable: %s", what, (const char *) msg);
	  else
	    error ("%qE is unavailable", what);
	}
      else
	{
	  /* Anonymous type: report without naming it.  */
	  if (msg)
	    error ("type is unavailable: %s", (const char *) msg);
	  else
	    error ("type is unavailable");
	}

      if (decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }
}
12202
12203 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12204 somewhere in it. */
12205
12206 bool
12207 contains_bitfld_component_ref_p (const_tree ref)
12208 {
12209 while (handled_component_p (ref))
12210 {
12211 if (TREE_CODE (ref) == COMPONENT_REF
12212 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12213 return true;
12214 ref = TREE_OPERAND (ref, 0);
12215 }
12216
12217 return false;
12218 }
12219
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  STMT is the
   TRY_CATCH_EXPR; operand 0 is the try body, operand 1 the handler
   sequence.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* The kind of the first handler statement determines how the whole
     handler sequence is interpreted.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
12267
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* A null last statement maps to ERROR_MARK, i.e. "assume it may
     fall through".  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* A call on the RHS is inspected below; any other assignment
	 falls through.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
12346
/* True if we are using EH to handle cleanups.  Set once by a front end
   and never cleared (a one-way latch).  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
12357
/* Query whether EH is used for cleanups.  Returns the latch set by
   using_eh_for_cleanups above.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
12364
12365 /* Wrapper for tree_code_name to ensure that tree code is valid */
12366 const char *
12367 get_tree_code_name (enum tree_code code)
12368 {
12369 const char *invalid = "<invalid tree code>";
12370
12371 /* The tree_code enum promotes to signed, but we could be getting
12372 invalid values, so force an unsigned comparison. */
12373 if (unsigned (code) >= MAX_TREE_CODES)
12374 {
12375 if ((unsigned)code == 0xa5a5)
12376 return "ggc_freed";
12377 return invalid;
12378 }
12379
12380 return tree_code_name[code];
12381 }
12382
/* Drops the TREE_OVERFLOW flag from T and returns the (possibly new)
   overflow-free constant.  T must have TREE_OVERFLOW set.  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.
     wide_int_to_tree returns the canonical shared node without the
     overflow flag.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
12428
12429 /* Given a memory reference expression T, return its base address.
12430 The base address of a memory reference expression is the main
12431 object being referenced. For instance, the base address for
12432 'array[i].fld[j]' is 'array'. You can think of this as stripping
12433 away the offset part from a memory address.
12434
12435 This function calls handled_component_p to strip away all the inner
12436 parts of the memory reference until it reaches the base object. */
12437
12438 tree
12439 get_base_address (tree t)
12440 {
12441 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12442 t = TREE_OPERAND (t, 0);
12443 while (handled_component_p (t))
12444 t = TREE_OPERAND (t, 0);
12445
12446 if ((TREE_CODE (t) == MEM_REF
12447 || TREE_CODE (t) == TARGET_MEM_REF)
12448 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12449 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12450
12451 return t;
12452 }
12453
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  Operand 3 of EXP, when
   present, overrides the element type's own size.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
12481
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  Operand 2 of EXP, when
   present, overrides the domain's own lower bound.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  Guard against an
     erroneous index type.  */
  tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
  return (idxtype == error_mark_node
	  ? integer_zero_node : build_int_cst (idxtype, 0));
}
12504
12505 /* Return a tree representing the upper bound of the array mentioned in
12506 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12507
12508 tree
12509 array_ref_up_bound (tree exp)
12510 {
12511 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12512
12513 /* If there is a domain type and it has an upper bound, use it, substituting
12514 for a PLACEHOLDER_EXPR as needed. */
12515 if (domain_type && TYPE_MAX_VALUE (domain_type))
12516 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12517
12518 /* Otherwise fail. */
12519 return NULL_TREE;
12520 }
12521
12522 /* Returns true if REF is an array reference, component reference,
12523 or memory reference to an array at the end of a structure.
12524 If this is the case, the array may be allocated larger
12525 than its upper bound implies. */
12526
bool
array_at_struct_end_p (tree ref)
{
  /* Type of the array whose position is being examined; set by one of
     the cases below.  */
  tree atype;

  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      /* For a MEM_REF look at the last field of the record the address
	 points to, if any.  */
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      /* A trailing array of a declared variable with a known
		 field size cannot be extended.  */
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  /* A string constant has fixed contents; it is never over-allocated.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Remember the innermost array reference for the offset computation
     further down.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip over non-FIELD_DECL chain entries when looking for
		 a following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  ref = get_base_address (ref);
  if (ref
      && DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
	 padding.  */
      poly_int64 offset;
      /* Non-constant bounds or element size: conservatively treat the
	 array as extendable.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
12646
12647 /* Return a tree representing the offset, in bytes, of the field referenced
12648 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12649
12650 tree
12651 component_ref_field_offset (tree exp)
12652 {
12653 tree aligned_offset = TREE_OPERAND (exp, 2);
12654 tree field = TREE_OPERAND (exp, 1);
12655 location_t loc = EXPR_LOCATION (exp);
12656
12657 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12658 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12659 value. */
12660 if (aligned_offset)
12661 {
12662 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12663 sizetype from another type of the same width and signedness. */
12664 if (TREE_TYPE (aligned_offset) != sizetype)
12665 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12666 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12667 size_int (DECL_OFFSET_ALIGN (field)
12668 / BITS_PER_UNIT));
12669 }
12670
12671 /* Otherwise, take the offset from that of the field. Substitute
12672 any PLACEHOLDER_EXPR that we have. */
12673 else
12674 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12675 }
12676
12677 /* Given the initializer INIT, return the initializer for the field
12678 DECL if it exists, otherwise null. Used to obtain the initializer
12679 for a flexible array member and determine its size. */
12680
12681 static tree
12682 get_initializer_for (tree init, tree decl)
12683 {
12684 STRIP_NOPS (init);
12685
12686 tree fld, fld_init;
12687 unsigned HOST_WIDE_INT i;
12688 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12689 {
12690 if (decl == fld)
12691 return fld_init;
12692
12693 if (TREE_CODE (fld) == CONSTRUCTOR)
12694 {
12695 fld_init = get_initializer_for (fld_init, decl);
12696 if (fld_init)
12697 return fld_init;
12698 }
12699 }
12700
12701 return NULL_TREE;
12702 }
12703
12704 /* Determines the size of the member referenced by the COMPONENT_REF
12705 REF, using its initializer expression if necessary in order to
12706 determine the size of an initialized flexible array member.
   If non-null, set *SAM when REF refers to an interior zero-length
12708 array or a trailing one-element array.
12709 Returns the size as sizetype (which might be zero for an object
12710 with an uninitialized flexible array member) or null if the size
12711 cannot be determined. */
12712
tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* Point SAM at a local buffer when the caller doesn't care about it,
     so the rest of the function can set *SAM unconditionally.  */
  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      /* Classify the special array member; an interior zero-length
	 array has no usable size of its own.  */
      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      /* For a non-zero-length trailing array with constant bounds,
	 decide between "more than one element" (ordinary array) and
	 "exactly one element" (treated like a flexible array).  */
      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      /* Only interior zero-length arrays can proceed without a
	 variable base; everything else is unknowable here.  */
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      /* Walk to the outermost enclosing object and use the member's
	 byte position as the offset instead.  */
      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      /* The initializer gives the member's full size; do not
		 subtract the offset again below.  */
	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
12899
12900 /* Return the machine mode of T. For vectors, returns the mode of the
12901 inner type. The main use case is to feed the result to HONOR_NANS,
12902 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12903
12904 machine_mode
12905 element_mode (const_tree t)
12906 {
12907 if (!TYPE_P (t))
12908 t = TREE_TYPE (t);
12909 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12910 t = TREE_TYPE (t);
12911 return TYPE_MODE (t);
12912 }
12913
12914 /* Vector types need to re-check the target flags each time we report
12915 the machine mode. We need to do this because attribute target can
12916 change the result of vector_mode_supported_p and have_regs_of_mode
12917 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12918 change on a per-function basis. */
12919 /* ??? Possibly a better solution is to run through all the types
12920 referenced by a function and re-compute the TYPE_MODE once, rather
12921 than make the TYPE_MODE macro call a function. */
12922
12923 machine_mode
12924 vector_type_mode (const_tree t)
12925 {
12926 machine_mode mode;
12927
12928 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12929
12930 mode = t->type_common.mode;
12931 if (VECTOR_MODE_P (mode)
12932 && (!targetm.vector_mode_supported_p (mode)
12933 || !have_regs_of_mode[mode]))
12934 {
12935 scalar_int_mode innermode;
12936
12937 /* For integers, try mapping it to a same-sized scalar mode. */
12938 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12939 {
12940 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12941 * GET_MODE_BITSIZE (innermode));
12942 scalar_int_mode mode;
12943 if (int_mode_for_size (size, 0).exists (&mode)
12944 && have_regs_of_mode[mode])
12945 return mode;
12946 }
12947
12948 return BLKmode;
12949 }
12950
12951 return mode;
12952 }
12953
12954 /* Return the size in bits of each element of vector type TYPE. */
12955
12956 unsigned int
12957 vector_element_bits (const_tree type)
12958 {
12959 gcc_checking_assert (VECTOR_TYPE_P (type));
12960 if (VECTOR_BOOLEAN_TYPE_P (type))
12961 return TYPE_PRECISION (TREE_TYPE (type));
12962 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
12963 }
12964
12965 /* Calculate the size in bits of each element of vector type TYPE
12966 and return the result as a tree of type bitsizetype. */
12967
12968 tree
12969 vector_element_bits_tree (const_tree type)
12970 {
12971 gcc_checking_assert (VECTOR_TYPE_P (type));
12972 if (VECTOR_BOOLEAN_TYPE_P (type))
12973 return bitsize_int (vector_element_bits (type));
12974 return TYPE_SIZE (TREE_TYPE (type));
12975 }
12976
12977 /* Verify that basic properties of T match TV and thus T can be a variant of
12978 TV. TV should be the more specified variant (i.e. the main variant). */
12979
static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  Reports the
     mismatching field, dumps the variant and bails out.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
#if 0
  if (TYPE_ARTIFICIAL (tv))
    verify_variant_match (TYPE_ARTIFICIAL);
#endif
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
#if 0
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
    verify_variant_match (TYPE_FINAL_P);
#endif

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* Sizes containing a PLACEHOLDER_EXPR are not directly comparable;
	 skip them.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.  */
#if 0
  if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
    verify_variant_match (TYPE_CONTEXT);
#endif
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dumped TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		!= TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
13217
13218
13219 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13220 the middle-end types_compatible_p function. It needs to avoid
13221 claiming types are different for types that should be treated
13222 the same with respect to TBAA. Canonical types are also used
13223 for IL consistency checks via the useless_type_conversion_p
13224 predicate which does not handle all type kinds itself but falls
13225 back to pointer-comparison of TYPE_CANONICAL for aggregates
13226 for example. */
13227
13228 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13229 type calculation because we need to allow inter-operability between signed
13230 and unsigned variants. */
13231
13232 bool
13233 type_with_interoperable_signedness (const_tree type)
13234 {
13235 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13236 signed char and unsigned char. Similarly fortran FE builds
13237 C_SIZE_T as signed type, while C defines it unsigned. */
13238
13239 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13240 == INTEGER_TYPE
13241 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13242 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13243 }
13244
13245 /* Return true iff T1 and T2 are structurally identical for what
13246 TBAA is concerned.
13247 This function is used both by lto.c canonical type merging and by the
13248 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13249 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13250 only for LTO because only in these cases TYPE_CANONICAL equivalence
13251 correspond to one defined by gimple_canonical_types_compatible_p. */
13252
13253 bool
13254 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13255 bool trust_type_canonical)
13256 {
13257 /* Type variants should be same as the main variant. When not doing sanity
13258 checking to verify this fact, go to main variants and save some work. */
13259 if (trust_type_canonical)
13260 {
13261 t1 = TYPE_MAIN_VARIANT (t1);
13262 t2 = TYPE_MAIN_VARIANT (t2);
13263 }
13264
13265 /* Check first for the obvious case of pointer identity. */
13266 if (t1 == t2)
13267 return true;
13268
13269 /* Check that we have two types to compare. */
13270 if (t1 == NULL_TREE || t2 == NULL_TREE)
13271 return false;
13272
13273 /* We consider complete types always compatible with incomplete type.
13274 This does not make sense for canonical type calculation and thus we
13275 need to ensure that we are never called on it.
13276
13277 FIXME: For more correctness the function probably should have three modes
     1) mode assuming that types are complete matching their structure
13279 2) mode allowing incomplete types but producing equivalence classes
13280 and thus ignoring all info from complete types
13281 3) mode allowing incomplete types to match complete but checking
13282 compatibility between complete types.
13283
13284 1 and 2 can be used for canonical type calculation. 3 is the real
13285 definition of type compatibility that can be used i.e. for warnings during
13286 declaration merging. */
13287
13288 gcc_assert (!trust_type_canonical
13289 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13290
13291 /* If the types have been previously registered and found equal
13292 they still are. */
13293
13294 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13295 && trust_type_canonical)
13296 {
13297 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13298 they are always NULL, but they are set to non-NULL for types
13299 constructed by build_pointer_type and variants. In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13301 all pointers are considered equal. Be sure to not return false
13302 negatives. */
13303 gcc_checking_assert (canonical_type_used_p (t1)
13304 && canonical_type_used_p (t2));
13305 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13306 }
13307
13308 /* For types where we do ODR based TBAA the canonical type is always
13309 set correctly, so we know that types are different if their
13310 canonical types does not match. */
13311 if (trust_type_canonical
13312 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13313 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13314 return false;
13315
13316 /* Can't be the same type if the types don't have the same code. */
13317 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13318 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13319 return false;
13320
13321 /* Qualifiers do not matter for canonical type comparison purposes. */
13322
13323 /* Void types and nullptr types are always the same. */
13324 if (TREE_CODE (t1) == VOID_TYPE
13325 || TREE_CODE (t1) == NULLPTR_TYPE)
13326 return true;
13327
13328 /* Can't be the same type if they have different mode. */
13329 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13330 return false;
13331
13332 /* Non-aggregate types can be handled cheaply. */
13333 if (INTEGRAL_TYPE_P (t1)
13334 || SCALAR_FLOAT_TYPE_P (t1)
13335 || FIXED_POINT_TYPE_P (t1)
13336 || TREE_CODE (t1) == VECTOR_TYPE
13337 || TREE_CODE (t1) == COMPLEX_TYPE
13338 || TREE_CODE (t1) == OFFSET_TYPE
13339 || POINTER_TYPE_P (t1))
13340 {
      /* Can't be the same type if they have different precision.  */
13342 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13343 return false;
13344
13345 /* In some cases the signed and unsigned types are required to be
13346 inter-operable. */
13347 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13348 && !type_with_interoperable_signedness (t1))
13349 return false;
13350
13351 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13352 interoperable with "signed char". Unless all frontends are revisited
13353 to agree on these types, we must ignore the flag completely. */
13354
      /* The Fortran standard defines the C_PTR type which is compatible with
	 every C pointer.  For this reason we need to glob all pointers into
	 one.  Still, pointers in different address spaces are not
	 compatible.  */
13358 if (POINTER_TYPE_P (t1))
13359 {
13360 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13361 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13362 return false;
13363 }
13364
13365 /* Tail-recurse to components. */
13366 if (TREE_CODE (t1) == VECTOR_TYPE
13367 || TREE_CODE (t1) == COMPLEX_TYPE)
13368 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13369 TREE_TYPE (t2),
13370 trust_type_canonical);
13371
13372 return true;
13373 }
13374
13375 /* Do type-specific comparisons. */
13376 switch (TREE_CODE (t1))
13377 {
13378 case ARRAY_TYPE:
13379 /* Array types are the same if the element types are the same and
13380 the number of elements are the same. */
13381 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13382 trust_type_canonical)
13383 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13384 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13385 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13386 return false;
13387 else
13388 {
13389 tree i1 = TYPE_DOMAIN (t1);
13390 tree i2 = TYPE_DOMAIN (t2);
13391
13392 /* For an incomplete external array, the type domain can be
13393 NULL_TREE. Check this condition also. */
13394 if (i1 == NULL_TREE && i2 == NULL_TREE)
13395 return true;
13396 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13397 return false;
13398 else
13399 {
13400 tree min1 = TYPE_MIN_VALUE (i1);
13401 tree min2 = TYPE_MIN_VALUE (i2);
13402 tree max1 = TYPE_MAX_VALUE (i1);
13403 tree max2 = TYPE_MAX_VALUE (i2);
13404
13405 /* The minimum/maximum values have to be the same. */
13406 if ((min1 == min2
13407 || (min1 && min2
13408 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13409 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13410 || operand_equal_p (min1, min2, 0))))
13411 && (max1 == max2
13412 || (max1 && max2
13413 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13414 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13415 || operand_equal_p (max1, max2, 0)))))
13416 return true;
13417 else
13418 return false;
13419 }
13420 }
13421
13422 case METHOD_TYPE:
13423 case FUNCTION_TYPE:
13424 /* Function types are the same if the return type and arguments types
13425 are the same. */
13426 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13427 trust_type_canonical))
13428 return false;
13429
13430 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13431 return true;
13432 else
13433 {
13434 tree parms1, parms2;
13435
13436 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13437 parms1 && parms2;
13438 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13439 {
13440 if (!gimple_canonical_types_compatible_p
13441 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13442 trust_type_canonical))
13443 return false;
13444 }
13445
13446 if (parms1 || parms2)
13447 return false;
13448
13449 return true;
13450 }
13451
13452 case RECORD_TYPE:
13453 case UNION_TYPE:
13454 case QUAL_UNION_TYPE:
13455 {
13456 tree f1, f2;
13457
13458 /* Don't try to compare variants of an incomplete type, before
13459 TYPE_FIELDS has been copied around. */
13460 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13461 return true;
13462
13463
13464 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13465 return false;
13466
13467 /* For aggregate types, all the fields must be the same. */
13468 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13469 f1 || f2;
13470 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13471 {
13472 /* Skip non-fields and zero-sized fields. */
13473 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13474 || (DECL_SIZE (f1)
13475 && integer_zerop (DECL_SIZE (f1)))))
13476 f1 = TREE_CHAIN (f1);
13477 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13478 || (DECL_SIZE (f2)
13479 && integer_zerop (DECL_SIZE (f2)))))
13480 f2 = TREE_CHAIN (f2);
13481 if (!f1 || !f2)
13482 break;
13483 /* The fields must have the same name, offset and type. */
13484 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13485 || !gimple_compare_field_offset (f1, f2)
13486 || !gimple_canonical_types_compatible_p
13487 (TREE_TYPE (f1), TREE_TYPE (f2),
13488 trust_type_canonical))
13489 return false;
13490 }
13491
13492 /* If one aggregate has more fields than the other, they
13493 are not the same. */
13494 if (f1 || f2)
13495 return false;
13496
13497 return true;
13498 }
13499
13500 default:
13501 /* Consider all types with language specific trees in them mutually
13502 compatible. This is executed only from verify_type and false
13503 positives can be tolerated. */
13504 gcc_assert (!in_lto_p);
13505 return true;
13506 }
13507 }
13508
/* Verify type T.  Checks low-level invariants of the tree representation:
   main-variant and canonical-type linkage, the per-code uses of the
   TYPE_MIN_VALUE_RAW / TYPE_MAX_VALUE_RAW / TYPE_VALUES_RAW slots, and the
   integer-constant cache.  On any violation the details are dumped via
   debug_tree and the compiler aborts through internal_error.  */

void
verify_type (const_tree t)
{
  bool error_found = false;

  /* Every type must have a main variant, and the main variant must be its
     own main variant.  */
  tree mv = TYPE_MAIN_VARIANT (t);
  if (!mv)
    {
      error ("main variant is not defined");
      error_found = true;
    }
  else if (mv != TYPE_MAIN_VARIANT (mv))
    {
      error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
      debug_tree (mv);
      error_found = true;
    }
  else if (t != mv && !verify_type_variant (t, mv))
    error_found = true;

  /* TYPE_CANONICAL, when set, must be idempotent and compatible with T.  */
  tree ct = TYPE_CANONICAL (t);
  if (!ct)
    ;
  else if (TYPE_CANONICAL (ct) != ct)
    {
      error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
      debug_tree (ct);
      error_found = true;
    }
  /* Method and function types cannot be used to address memory and thus
     TYPE_CANONICAL really matters only for determining useless conversions.

     FIXME: C++ FE produce declarations of builtin functions that are not
     compatible with main variants.  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    ;
  else if (t != ct
	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
	      with variably sized arrays because their sizes possibly
	      gimplified to different variables.  */
	   && !variably_modified_type_p (ct, NULL)
	   && !gimple_canonical_types_compatible_p (t, ct, false)
	   && COMPLETE_TYPE_P (t))
    {
      error ("%<TYPE_CANONICAL%> is not compatible");
      debug_tree (ct);
      error_found = true;
    }

  if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
      && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
    {
      error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
      debug_tree (ct);
      error_found = true;
    }
  if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
    {
      error ("%<TYPE_CANONICAL%> of main variant is not main variant");
      debug_tree (ct);
      debug_tree (TYPE_MAIN_VARIANT (ct));
      error_found = true;
    }


  /* Check various uses of TYPE_MIN_VALUE_RAW.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
	 and dangle the pointer from time to time.  */
      if (TYPE_VFIELD (t)
	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
	{
	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
	  debug_tree (TYPE_VFIELD (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == POINTER_TYPE)
    {
      if (TYPE_NEXT_PTR_TO (t)
	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
	{
	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
	  debug_tree (TYPE_NEXT_PTR_TO (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == REFERENCE_TYPE)
    {
      if (TYPE_NEXT_REF_TO (t)
	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
	{
	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
	  debug_tree (TYPE_NEXT_REF_TO (t));
	  error_found = true;
	}
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
	   || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
	  useless_type_conversion_p (const_cast <tree> (t),
				     TREE_TYPE (TYPE_MIN_VALUE (t))
	 but does not for C sizetypes in LTO.  */
    }

  /* Check various uses of TYPE_MAX_VALUE_RAW.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (!TYPE_BINFO (t))
	;
      else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
	{
	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
	  debug_tree (TYPE_BINFO (t));
	  error_found = true;
	}
      else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
	{
	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    {
      if (TYPE_METHOD_BASETYPE (t)
	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
	{
	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
	  debug_tree (TYPE_METHOD_BASETYPE (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == OFFSET_TYPE)
    {
      if (TYPE_OFFSET_BASETYPE (t)
	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
	{
	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
	  debug_tree (TYPE_OFFSET_BASETYPE (t));
	  error_found = true;
	}
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
	   || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
	  useless_type_conversion_p (const_cast <tree> (t),
				     TREE_TYPE (TYPE_MAX_VALUE (t))
	 but does not for C sizetypes in LTO.  */
    }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_ARRAY_MAX_SIZE (t)
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
	{
	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
	  error_found = true;
	}
    }
  else if (TYPE_MAX_VALUE_RAW (t))
    {
      /* Remaining codes have no use for the slot; it must stay NULL.  */
      error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
      debug_tree (TYPE_MAX_VALUE_RAW (t));
      error_found = true;
    }

  /* Language-specific data must have been freed during LTO streaming.  */
  if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
    {
      error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
      debug_tree (TYPE_LANG_SLOT_1 (t));
      error_found = true;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
      {
	tree value = TREE_VALUE (l);
	tree name = TREE_PURPOSE (l);

	/* C FE produce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
	   CONST_DECL of ENUMERAL TYPE.  */
	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
	  {
	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
	    && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
	  {
	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
		   "to the enum");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
	if (TREE_CODE (name) != IDENTIFIER_NODE)
	  {
	    error ("enum value name is not %<IDENTIFIER_NODE%>");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
      }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
	{
	  error ("array %<TYPE_DOMAIN%> is not integer type");
	  debug_tree (TYPE_DOMAIN (t));
	  error_found = true;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
	{
	  error ("%<TYPE_FIELDS%> defined in incomplete type");
	  error_found = true;
	}
      for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
	{
	  /* TODO: verify properties of decls.  */
	  if (TREE_CODE (fld) == FIELD_DECL)
	    ;
	  else if (TREE_CODE (fld) == TYPE_DECL)
	    ;
	  else if (TREE_CODE (fld) == CONST_DECL)
	    ;
	  else if (VAR_P (fld))
	    ;
	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
	    ;
	  else if (TREE_CODE (fld) == USING_DECL)
	    ;
	  else if (TREE_CODE (fld) == FUNCTION_DECL)
	    ;
	  else
	    {
	      error ("wrong tree in %<TYPE_FIELDS%> list");
	      debug_tree (fld);
	      error_found = true;
	    }
	}
    }
  else if (TREE_CODE (t) == INTEGER_TYPE
	   || TREE_CODE (t) == BOOLEAN_TYPE
	   || TREE_CODE (t) == OFFSET_TYPE
	   || TREE_CODE (t) == REFERENCE_TYPE
	   || TREE_CODE (t) == NULLPTR_TYPE
	   || TREE_CODE (t) == POINTER_TYPE)
    {
      /* These codes reuse the slot for the small-integer-constant cache;
	 the flag and the vector must agree.  */
      if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
	{
	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
		 "is %p",
		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
	  error_found = true;
	}
      else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
	{
	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
	  debug_tree (TYPE_CACHED_VALUES (t));
	  error_found = true;
	}
      /* Verify just enough of cache to ensure that no one copied it to new type.
	 All copying should go by copy_node that should clear it.  */
      else if (TYPE_CACHED_VALUES_P (t))
	{
	  int i;
	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
	      {
		error ("wrong %<TYPE_CACHED_VALUES%> entry");
		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
		error_found = true;
		break;
	      }
	}
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
      {
	/* C++ FE uses TREE_PURPOSE to store initial values.  */
	if (TREE_PURPOSE (l) && in_lto_p)
	  {
	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
	    debug_tree (l);
	    error_found = true;
	  }
	if (!TYPE_P (TREE_VALUE (l)))
	  {
	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
	    debug_tree (l);
	    error_found = true;
	  }
      }
  else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
    {
      error ("%<TYPE_VALUES_RAW%> field is non-NULL");
      debug_tree (TYPE_VALUES_RAW (t));
      error_found = true;
    }
  /* Conversely, only the integer-like codes above may have the cache flag.  */
  if (TREE_CODE (t) != INTEGER_TYPE
      && TREE_CODE (t) != BOOLEAN_TYPE
      && TREE_CODE (t) != OFFSET_TYPE
      && TREE_CODE (t) != REFERENCE_TYPE
      && TREE_CODE (t) != NULLPTR_TYPE
      && TREE_CODE (t) != POINTER_TYPE
      && TYPE_CACHED_VALUES_P (t))
    {
      error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
      error_found = true;
    }

  /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
     of a type.  */
  if (TREE_CODE (t) == METHOD_TYPE
      && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
    {
      error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
      error_found = true;
    }

  if (error_found)
    {
      debug_tree (const_cast <tree> (t));
      internal_error ("%qs failed", __func__);
    }
}
13852
13853
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;	/* Guards against unbounded walks through conversions.  */
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* A constant: sign-extend to its precision and test the sign bit.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Strip non-widening integral conversions on GENERIC trees, narrowing
     PREC as we go; stop after 30 levels to bound the walk.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* For SSA names, consult the global range info; while it is unavailable,
     keep looking through defining conversion statements.  */
  value_range r;
  while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* Same zero-extension reasoning as above, now on GIMPLE.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
13932
13933
13934
13935
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  ARG must be a PARM_DECL of cfun whose type
   is a pointer or pointer-to-member (OFFSET_TYPE).  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  /* Walk every "nonnull" attribute on the function type.  Note the loop
     variable ATTRS is re-anchored by lookup_attribute inside the body, so
     each iteration examines the next "nonnull" occurrence.  */
  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
13998
13999 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14000 information. */
14001
14002 location_t
14003 set_block (location_t loc, tree block)
14004 {
14005 location_t pure_loc = get_pure_location (loc);
14006 source_range src_range = get_range_from_loc (line_table, loc);
14007 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14008 }
14009
14010 location_t
14011 set_source_range (tree expr, location_t start, location_t finish)
14012 {
14013 source_range src_range;
14014 src_range.m_start = start;
14015 src_range.m_finish = finish;
14016 return set_source_range (expr, src_range);
14017 }
14018
14019 location_t
14020 set_source_range (tree expr, source_range src_range)
14021 {
14022 if (!EXPR_P (expr))
14023 return UNKNOWN_LOCATION;
14024
14025 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14026 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14027 pure_loc,
14028 src_range,
14029 NULL);
14030 SET_EXPR_LOCATION (expr, adhoc);
14031 return adhoc;
14032 }
14033
14034 /* Return EXPR, potentially wrapped with a node expression LOC,
14035 if !CAN_HAVE_LOCATION_P (expr).
14036
14037 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14038 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14039
14040 Wrapper nodes can be identified using location_wrapper_p. */
14041
14042 tree
14043 maybe_wrap_with_location (tree expr, location_t loc)
14044 {
14045 if (expr == NULL)
14046 return NULL;
14047 if (loc == UNKNOWN_LOCATION)
14048 return expr;
14049 if (CAN_HAVE_LOCATION_P (expr))
14050 return expr;
14051 /* We should only be adding wrappers for constants and for decls,
14052 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14053 gcc_assert (CONSTANT_CLASS_P (expr)
14054 || DECL_P (expr)
14055 || EXCEPTIONAL_CLASS_P (expr));
14056
14057 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14058 any impact of the wrapper nodes. */
14059 if (EXCEPTIONAL_CLASS_P (expr))
14060 return expr;
14061
14062 /* Compiler-generated temporary variables don't need a wrapper. */
14063 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14064 return expr;
14065
14066 /* If any auto_suppress_location_wrappers are active, don't create
14067 wrappers. */
14068 if (suppress_location_wrappers > 0)
14069 return expr;
14070
14071 tree_code code
14072 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14073 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14074 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14075 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14076 /* Mark this node as being a wrapper. */
14077 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14078 return wrapper;
14079 }
14080
14081 int suppress_location_wrappers;
14082
14083 /* Return the name of combined function FN, for debugging purposes. */
14084
14085 const char *
14086 combined_fn_name (combined_fn fn)
14087 {
14088 if (builtin_fn_p (fn))
14089 {
14090 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14091 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14092 }
14093 else
14094 return internal_fn_name (as_internal_fn (fn));
14095 }
14096
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's argument are nonnull.  The caller
   must free the bitmap.  Bit 0 corresponds to the implicit THIS argument
   of a METHOD_TYPE; an empty (but non-null) bitmap means every argument
   is nonnull.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.
     Note ATTRS is re-anchored by lookup_attribute each iteration so the
     loop visits every "nonnull" occurrence in the attribute chain.  */
  for ( ;  attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  /* Attribute indices are 1-based; the bitmap is 0-based.  */
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
14153
14154 /* Returns true if TYPE is a type where it and all of its subobjects
14155 (recursively) are of structure, union, or array type. */
14156
14157 bool
14158 is_empty_type (const_tree type)
14159 {
14160 if (RECORD_OR_UNION_TYPE_P (type))
14161 {
14162 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14163 if (TREE_CODE (field) == FIELD_DECL
14164 && !DECL_PADDING_P (field)
14165 && !is_empty_type (TREE_TYPE (field)))
14166 return false;
14167 return true;
14168 }
14169 else if (TREE_CODE (type) == ARRAY_TYPE)
14170 return (integer_minus_onep (array_type_nelts (type))
14171 || TYPE_DOMAIN (type) == NULL_TREE
14172 || is_empty_type (TREE_TYPE (type)));
14173 return false;
14174 }
14175
14176 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14177 that shouldn't be passed via stack. */
14178
14179 bool
14180 default_is_empty_record (const_tree type)
14181 {
14182 if (!abi_version_at_least (12))
14183 return false;
14184
14185 if (type == error_mark_node)
14186 return false;
14187
14188 if (TREE_ADDRESSABLE (type))
14189 return false;
14190
14191 return is_empty_type (TYPE_MAIN_VARIANT (type));
14192 }
14193
14194 /* Determine whether TYPE is a structure with a flexible array member,
14195 or a union containing such a structure (possibly recursively). */
14196
14197 bool
14198 flexible_array_type_p (const_tree type)
14199 {
14200 tree x, last;
14201 switch (TREE_CODE (type))
14202 {
14203 case RECORD_TYPE:
14204 last = NULL_TREE;
14205 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14206 if (TREE_CODE (x) == FIELD_DECL)
14207 last = x;
14208 if (last == NULL_TREE)
14209 return false;
14210 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14211 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14212 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14213 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14214 return true;
14215 return false;
14216 case UNION_TYPE:
14217 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14218 {
14219 if (TREE_CODE (x) == FIELD_DECL
14220 && flexible_array_type_p (TREE_TYPE (x)))
14221 return true;
14222 }
14223 return false;
14224 default:
14225 return false;
14226 }
14227 }
14228
14229 /* Like int_size_in_bytes, but handle empty records specially. */
14230
14231 HOST_WIDE_INT
14232 arg_int_size_in_bytes (const_tree type)
14233 {
14234 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14235 }
14236
14237 /* Like size_in_bytes, but handle empty records specially. */
14238
14239 tree
14240 arg_size_in_bytes (const_tree type)
14241 {
14242 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14243 }
14244
14245 /* Return true if an expression with CODE has to have the same result type as
14246 its first operand. */
14247
14248 bool
14249 expr_type_first_operand_type_p (tree_code code)
14250 {
14251 switch (code)
14252 {
14253 case NEGATE_EXPR:
14254 case ABS_EXPR:
14255 case BIT_NOT_EXPR:
14256 case PAREN_EXPR:
14257 case CONJ_EXPR:
14258
14259 case PLUS_EXPR:
14260 case MINUS_EXPR:
14261 case MULT_EXPR:
14262 case TRUNC_DIV_EXPR:
14263 case CEIL_DIV_EXPR:
14264 case FLOOR_DIV_EXPR:
14265 case ROUND_DIV_EXPR:
14266 case TRUNC_MOD_EXPR:
14267 case CEIL_MOD_EXPR:
14268 case FLOOR_MOD_EXPR:
14269 case ROUND_MOD_EXPR:
14270 case RDIV_EXPR:
14271 case EXACT_DIV_EXPR:
14272 case MIN_EXPR:
14273 case MAX_EXPR:
14274 case BIT_IOR_EXPR:
14275 case BIT_XOR_EXPR:
14276 case BIT_AND_EXPR:
14277
14278 case LSHIFT_EXPR:
14279 case RSHIFT_EXPR:
14280 case LROTATE_EXPR:
14281 case RROTATE_EXPR:
14282 return true;
14283
14284 default:
14285 return false;
14286 }
14287 }
14288
14289 /* Return a typenode for the "standard" C type with a given name. */
14290 tree
14291 get_typenode_from_name (const char *name)
14292 {
14293 if (name == NULL || *name == '\0')
14294 return NULL_TREE;
14295
14296 if (strcmp (name, "char") == 0)
14297 return char_type_node;
14298 if (strcmp (name, "unsigned char") == 0)
14299 return unsigned_char_type_node;
14300 if (strcmp (name, "signed char") == 0)
14301 return signed_char_type_node;
14302
14303 if (strcmp (name, "short int") == 0)
14304 return short_integer_type_node;
14305 if (strcmp (name, "short unsigned int") == 0)
14306 return short_unsigned_type_node;
14307
14308 if (strcmp (name, "int") == 0)
14309 return integer_type_node;
14310 if (strcmp (name, "unsigned int") == 0)
14311 return unsigned_type_node;
14312
14313 if (strcmp (name, "long int") == 0)
14314 return long_integer_type_node;
14315 if (strcmp (name, "long unsigned int") == 0)
14316 return long_unsigned_type_node;
14317
14318 if (strcmp (name, "long long int") == 0)
14319 return long_long_integer_type_node;
14320 if (strcmp (name, "long long unsigned int") == 0)
14321 return long_long_unsigned_type_node;
14322
14323 gcc_unreachable ();
14324 }
14325
/* List of pointer types used to declare builtins before we have seen their
   real declaration.  Each entry pairs the precise pointer type with the
   plain (const) void pointer used as a stand-in, plus the struct tag name.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
14339
/* Return the maximum object size usable in object-size checks, currently
   the maximum value of ptrdiff_t.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
14348
14349 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14350 parameter default to false and that weeds out error_mark_node. */
14351
14352 bool
14353 verify_type_context (location_t loc, type_context_kind context,
14354 const_tree type, bool silent_p)
14355 {
14356 if (type == error_mark_node)
14357 return true;
14358
14359 gcc_assert (TYPE_P (type));
14360 return (!targetm.verify_type_context
14361 || targetm.verify_type_context (loc, context, type, silent_p));
14362 }
14363
/* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators.  Return false if they may or may not name such
   a pair and, when nonnull, set *PCERTAIN to true if they certainly
   do not.  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  /* Use a local flag when the caller didn't supply one so the code
     below can store through PCERTAIN unconditionally.  */
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  /* The shortest possibly-valid mangled names are 5 chars for new
     (e.g. _Znwj) and 6 for delete (e.g. _ZdlPv).  */
  if (new_len < 5 || delete_len < 6)
    return false;
  /* Strip up to two leading underscores from each name (e.g. a
     user-label prefix on top of the mangling's own underscore).  */
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  /* Operator delete's first parameter must be void* ("Pv").  */
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      /* Sized delete: the size parameter must use the same size_t
	 mangling letter as the new operator.  */
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
14454
/* Return the zero-based number corresponding to the argument being
   deallocated if FNDECL is a deallocation function or an out-of-bounds
   value if it isn't.  */

unsigned
fndecl_dealloc_argno (tree fndecl)
{
  /* A call to operator delete isn't recognized as one to a built-in.  */
  if (DECL_IS_OPERATOR_DELETE_P (fndecl))
    {
      /* Replaceable global operator delete always frees its first
	 argument.  */
      if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
	return 0;

      /* Avoid placement delete that's not been inlined.  */
      tree fname = DECL_ASSEMBLER_NAME (fndecl);
      if (id_equal (fname, "_ZdlPvS_")	      // ordinary form
	  || id_equal (fname, "_ZdaPvS_"))  // array form
	return UINT_MAX;
      return 0;
    }

  /* TODO: Handle user-defined functions with attribute malloc?  Handle
     known non-built-ins like fopen?  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_FREE:
	case BUILT_IN_REALLOC:
	  /* free and realloc both deallocate their first argument.  */
	  return 0;
	default:
	  break;
	}
      /* Any other built-in is not a deallocator.  */
      return UINT_MAX;
    }

  tree attrs = DECL_ATTRIBUTES (fndecl);
  if (!attrs)
    return UINT_MAX;

  /* Walk all "*dealloc" attributes on FNDECL; each records the associated
     allocator and, optionally, the 1-based position of the pointer
     argument being deallocated.  */
  for (tree atfree = attrs;
       (atfree = lookup_attribute ("*dealloc", atfree));
       atfree = TREE_CHAIN (atfree))
    {
      tree alloc = TREE_VALUE (atfree);
      if (!alloc)
	continue;

      tree pos = TREE_CHAIN (alloc);
      if (!pos)
	/* No explicit position: the first argument is deallocated.  */
	return 0;

      pos = TREE_VALUE (pos);
      /* Convert the attribute's 1-based position to zero-based.  */
      return TREE_INT_CST_LOW (pos) - 1;
    }

  return UINT_MAX;
}
14513
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF
   to the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  /* Set to the SSA_NAME_VAR of EXPR when it has no useful defining
     assignment, so the attribute lookup can use the underlying decl.  */
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  /* Look through a simple copy/address of the referenced
	     object to get at the decl itself.  */
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    /* For a member access, check the FIELD_DECL.  */
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    /* Recurse on the base pointer of the memory reference.  */
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
14567
14568 #if CHECKING_P
14569
14570 namespace selftest {
14571
14572 /* Selftests for tree. */
14573
14574 /* Verify that integer constants are sane. */
14575
14576 static void
14577 test_integer_constants ()
14578 {
14579 ASSERT_TRUE (integer_type_node != NULL);
14580 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14581
14582 tree type = integer_type_node;
14583
14584 tree zero = build_zero_cst (type);
14585 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14586 ASSERT_EQ (type, TREE_TYPE (zero));
14587
14588 tree one = build_int_cst (type, 1);
14589 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14590 ASSERT_EQ (type, TREE_TYPE (zero));
14591 }
14592
14593 /* Verify identifiers. */
14594
14595 static void
14596 test_identifiers ()
14597 {
14598 tree identifier = get_identifier ("foo");
14599 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14600 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14601 }
14602
/* Verify LABEL_DECL.  */

static void
test_labels ()
{
  tree identifier = get_identifier ("err");
  tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
				identifier, void_type_node);
  /* A freshly built label has UID -1 (not yet assigned) and is not
     forced.  */
  ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
  ASSERT_FALSE (FORCED_LABEL (label_decl));
}
14614
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by VALS.  */

static tree
build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
{
  /* Require exactly one value per vector lane.  */
  gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
  /* Feed every element to the builder and let it pick the most compact
     encoding (duplicate / fill / stepped).  */
  tree_vector_builder builder (type, vals.length (), 1);
  builder.splice (vals);
  return builder.build ();
}
14626
/* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */

static void
check_vector_cst (const vec<tree> &expected, tree actual)
{
  ASSERT_KNOWN_EQ (expected.length (),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
  /* Compare element-by-element as wide ints so the encoding used by
     ACTUAL is irrelevant.  */
  for (unsigned int i = 0; i < expected.length (); ++i)
    ASSERT_EQ (wi::to_wide (expected[i]),
	       wi::to_wide (vector_cst_elt (actual, i)));
}
14638
/* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
			    unsigned int npatterns)
{
  /* A duplicate encoding stores one element per pattern.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
14653
/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  */

static void
check_vector_cst_fill (const vec<tree> &expected, tree actual,
		       unsigned int npatterns)
{
  /* A fill encoding stores two elements per pattern: the foreground
     value and the repeated background value.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
14669
/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_stepped (const vec<tree> &expected, tree actual,
			  unsigned int npatterns)
{
  /* A stepped encoding stores three elements per pattern: the initial
     value plus the two that determine the step.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
14684
/* Test the creation of VECTOR_CSTs.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  /* Eight 16-bit unsigned lanes.  */
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
14768
/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place.  */

static void
check_strip_nops (tree node, tree expected)
{
  /* NODE is passed by value, so stripping here leaves the caller's
     tree untouched.  */
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}
14779
/* Verify location wrappers.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* Wrapping NULL_TREE must be a no-op.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  Note the length of 4 covers "foo" plus its
     terminating NUL.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  /* STRING_CSTs wrap in a VIEW_CONVERT_EXPR rather than a NON_LVALUE_EXPR.  */
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
14842
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  The wr_* trees
     are location wrappers around the corresponding bare constants, and
     every predicate below is expected to look through them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants; the imaginary part is always zero.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  /* Note: unlike integer_minus_onep, complex -1 is not all-ones.  */
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  /* Note: unlike integer_onep, complex 1+0i is not "each one".  */
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
15159
/* Check that string escaping works correctly.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  /* Escaping NULL yields NULL.  */
  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  /* Strings with nothing to escape come through unchanged.  */
  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  /* With unlimited message length, every control character is
     escaped, including newlines.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15202
/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15216
15217 } // namespace selftest
15218
15219 #endif /* CHECKING_P */
15220
15221 #include "gt-tree.h"