]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree.c
Move more warning code to gimple-ssa-warn-access etc.
[thirdparty/gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72
/* Tree code classes.  */

/* Expand each DEFTREECODE in all-tree.def to its tree_code_class,
   producing a table indexed by tree code.  The sentinel entry between
   the base codes and any language-specific codes is tcc_exceptional.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
84
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

/* Same expansion trick as tree_code_type above, this time extracting
   the LENGTH field of each DEFTREECODE entry.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
98
/* Names of tree components.
   Used for printing out the tree and error messages.  */

/* Extract the NAME field of each DEFTREECODE entry; "@dummy" fills the
   sentinel slot so indices stay aligned with tree_code_type.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
110
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries (i.e. stay in
   the same order as enum tree_code_class).  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
128
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

/* Counters indexed by tree code.  */
static uint64_t tree_code_counts[MAX_TREE_CODES];
/* Node counts and cumulative sizes, indexed by tree_node_kind.  */
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];
137
/* Human-readable labels for the statistics arrays above.
   Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
157
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  Starts at 1 so 0 is never a valid
   type uid.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

/* Entry in type_hash_table: a type together with its precomputed
   hash value.  */
struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
176
/* Hasher for type_hash_table.  The hash is the one stored in the
   entry; equality is defined out-of-line.  Entries whose type is not
   GC-marked are dropped from the cache at collection time.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
188
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.
   int_cst_node is reused as a lookup key so no node need be allocated
   when the constant is already in the table.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
208
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  Lookups compare an existing node against a
   (type, value) pair, so compare_type differs from the stored type.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
219
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

/* Hasher for cl_option_hash_table; handles both OPTIMIZATION_NODE and
   TARGET_OPTION_NODE entries.  */
struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
235
/* General tree->tree mapping  structure for use in hash tables.  */

/* Map from a decl to its DECL_DEBUG_EXPR.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

/* Map from a decl to its DECL_VALUE_EXPR.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

/* Hasher for debug_args_for_decl: keys on the DECL_UID of the source
   decl; entries survive GC only while that decl is marked.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
264
/* Forward declarations for local helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

/* Well-known trees (error_mark_node, sizetype, ...) indexed by
   tree_index, and the standard C integer types indexed by
   integer_type_kind.  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

/* Per-entry availability and type records for the __intN extended
   integer types.  */
bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[C][S] is nonzero if nodes with code C contain
   the tree structure S; filled in by initialize_tree_contains_struct.  */
bool tree_contains_struct[MAX_TREE_CODES][64];
277
/* Number of operands for each OpenMP clause.  Indexed by
   enum omp_clause_code; entries must stay in the same order as that
   enum (each entry's comment names its clause).  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  2, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
367
/* Printable name of each OpenMP clause.  Indexed by
   enum omp_clause_code, in the same order as omp_clause_num_ops
   above.  Note "to" appears twice: once for OMP_CLAUSE_TO_DECLARE
   (the "declare target to" form) and once for OMP_CLAUSE_TO.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "affinity",
  "aligned",
  "allocate",
  "depend",
  "nontemporal",
  "uniform",
  "to", /* OMP_CLAUSE_TO_DECLARE */
  "link",
  "detach",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "from",
  "to", /* OMP_CLAUSE_TO */
  "map",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "filter",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "if_present",
  "finalize",
  "nohost",
};
456
457
/* Return the tree node structure used by tree code CODE.
   Decl and type codes are dispatched by class first; constants and
   exceptional codes are resolved individually in the second switch.
   Any code not handled there is a bug (gcc_unreachable).  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      switch (code)
	{
	case CONST_DECL:	return TS_CONST_DECL;
	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
	case FIELD_DECL:	return TS_FIELD_DECL;
	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
	case LABEL_DECL:	return TS_LABEL_DECL;
	case PARM_DECL:		return TS_PARM_DECL;
	case RESULT_DECL:	return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL:		return TS_TYPE_DECL;
	case VAR_DECL:		return TS_VAR_DECL;
	/* Decl codes not listed above (e.g. language-specific ones)
	   all use the generic non-common structure.  */
	default: 		return TS_DECL_NON_COMMON;
	}

    case tcc_type:		return TS_TYPE_NON_COMMON;

    /* All expression-like classes share a single structure.  */
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp:		return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST:		return TS_COMPLEX;
    case FIXED_CST:		return TS_FIXED_CST;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case STRING_CST:		return TS_STRING;
    case VECTOR_CST:		return TS_VECTOR;
    case VOID_CST:		return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case SSA_NAME:		return TS_SSA_NAME;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    case TREE_BINFO:		return TS_BINFO;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
526
527
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  For every tree code, mark the TS structure it uses directly,
   then chase the derivation chain (each case marks only its immediate
   base; the MARK_TS_* macros recurse up from there) so that every base
   structure of the node is also marked.  Ends with sanity asserts for
   the combinations relied on by fold and friends.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
674
675
/* Init tree.c.  Creates the GC-cached sharing tables declared above
   and the scratch nodes used as lookup keys, then fills in
   tree_contains_struct.  Must run before any tree node is built.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  /* Side tables mapping decls to their DECL_DEBUG_EXPR / DECL_VALUE_EXPR.  */
  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  /* Sharing tables for INTEGER_CST and POLY_INT_CST nodes.  */
  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Temporary node reused as a key for larger integer constants.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes for building the current option sets.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
706
707 \f
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
713 {
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
717 }
718
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE, if this changes the
721 name. */
722
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
725 {
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
728 }
729
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the canonical TYPE_DECL of a main-variant, non-artificial
	 type qualifies; record/union types additionally need to be C++
	 ODR types, and the type must have linkage (or be an integer
	 type, per the char-signedness special case above).  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
808
809 /* If T needs an assembler name, have one created for it. */
810
811 void
812 assign_assembler_name_if_needed (tree t)
813 {
814 if (need_assembler_name_p (t))
815 {
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
822
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
829
830 decl_assembler_name (t);
831
832 input_location = saved_location;
833 }
834 }
835
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
841 {
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
846 }
847
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
851 {
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
856 }
857
858 /* When the target supports named section, return its name as IDENTIFIER_NODE
859 or NULL if it is in no section. */
860 const char *
861 decl_section_name (const_tree node)
862 {
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
867 }
868
869 /* Set section name of NODE to VALUE (that is expected to be
870 identifier node) */
871 void
872 set_decl_section_name (tree node, const char *value)
873 {
874 struct symtab_node *snode;
875
876 if (value == NULL)
877 {
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
881 }
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
887 }
888
889 /* Set section name of NODE to match the section name of OTHER.
890
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
896 {
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
899 {
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
906 }
907 else
908 {
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
913 }
914 }
915
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
919 {
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
924 }
925
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
929 {
930 struct varpool_node *vnode;
931
932 if (model == TLS_MODEL_NONE)
933 {
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
937 }
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
941 }
942
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR
   (those trip gcc_unreachable here; use tree_size on a node instead).
   Codes beyond NUM_TREE_CODES are language-specific and deferred to
   the lang_hooks.tree_size hook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case OPAQUE_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* Expression nodes embed their operands: one tree pointer is in
	 tree_exp itself, the rest follow it.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
1054
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes,
   whose size is derived from per-node length fields instead.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* Trailing array of HOST_WIDE_INTs; one element is already part
	 of struct tree_int_cst, hence the "- 1".  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      /* The base_binfos vec is embedded at the end of the node.  */
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      /* Only the encoded elements are stored, not the full vector.  */
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      /* "+ 1" for the NUL terminator, which TREE_STRING_LENGTH does
	 not count.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	/* Variable-length expressions record their operand count.  */
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	/* Everything else has a size fixed by its code.  */
	return tree_code_size (code);
    }
}
1096
1097 /* Return tree node kind based on tree CODE. */
1098
1099 static tree_node_kind
1100 get_stats_node_kind (enum tree_code code)
1101 {
1102 enum tree_code_class type = TREE_CODE_CLASS (code);
1103
1104 switch (type)
1105 {
1106 case tcc_declaration: /* A decl node */
1107 return d_kind;
1108 case tcc_type: /* a type node */
1109 return t_kind;
1110 case tcc_statement: /* an expression with side effects */
1111 return s_kind;
1112 case tcc_reference: /* a reference */
1113 return r_kind;
1114 case tcc_expression: /* an expression */
1115 case tcc_comparison: /* a comparison expression */
1116 case tcc_unary: /* a unary arithmetic expression */
1117 case tcc_binary: /* a binary arithmetic expression */
1118 return e_kind;
1119 case tcc_constant: /* a constant */
1120 return c_kind;
1121 case tcc_exceptional: /* something random, like an identifier. */
1122 switch (code)
1123 {
1124 case IDENTIFIER_NODE:
1125 return id_kind;
1126 case TREE_VEC:
1127 return vec_kind;
1128 case TREE_BINFO:
1129 return binfo_kind;
1130 case SSA_NAME:
1131 return ssa_name_kind;
1132 case BLOCK:
1133 return b_kind;
1134 case CONSTRUCTOR:
1135 return constr_kind;
1136 case OMP_CLAUSE:
1137 return omp_clause_kind;
1138 default:
1139 return x_kind;
1140 }
1141 break;
1142 case tcc_vl_exp:
1143 return e_kind;
1144 default:
1145 gcc_unreachable ();
1146 }
1147 }
1148
1149 /* Record interesting allocation statistics for a tree node with CODE
1150 and LENGTH. */
1151
1152 static void
1153 record_node_allocation_statistics (enum tree_code code, size_t length)
1154 {
1155 if (!GATHER_STATISTICS)
1156 return;
1157
1158 tree_node_kind kind = get_stats_node_kind (code);
1159
1160 tree_code_counts[(int) code]++;
1161 tree_node_counts[(int) kind]++;
1162 tree_node_sizes[(int) kind] += length;
1163 }
1164
1165 /* Allocate and return a new UID from the DECL_UID namespace. */
1166
1167 int
1168 allocate_decl_uid (void)
1169 {
1170 return next_decl_uid++;
1171 }
1172
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GGC returns zeroed memory, so only nonzero fields need explicit
     initialization below.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Debug begin markers are the one statement code without side
	 effects.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* Debug decls draw UIDs from a separate, descending namespace so
	 that -g does not perturb the UIDs of ordinary decls.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  /* Option nodes own an out-of-line option struct, allocated
	     here and freed by free_node.  */
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1288
/* Free tree node NODE.  Undoes the allocation statistics recorded when
   the node was created, releases any out-of-line data the node owns,
   and returns the memory to the GC.  NODE must not be used again.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      enum tree_node_kind kind = get_stats_node_kind (code);

      /* Guard against counter underflow (e.g. double free).  */
      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  /* Release side data that is allocated outside the node itself.  */
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  else if (code == OPTIMIZATION_NODE)
    cl_optimization_option_free (TREE_OPTIMIZATION (node));
  else if (code == TARGET_OPTION_NODE)
    cl_target_option_free (TREE_TARGET_OPTION (node));
  ggc_free (node);
}
1319 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.
   TREE_ASM_WRITTEN and TREE_VISITED are cleared too, and decl, type
   and option nodes get the per-kind fixups below.  STATEMENT_LISTs
   cannot be copied this way.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  /* Start from a bitwise copy of the original node.  */
  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Debug decls use their own descending UID namespace.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Share the original's points-to UID so alias info keyed
	       on it still applies to the copy.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* The value-expr lives in a side table, so the memcpy above
	     did not carry it over; re-register it for the copy.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not associated with any function body or
	     symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the out-of-line option struct; the memcpy above only
	 duplicated the pointer to it.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1408
1409 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1410 For example, this can copy a list made of TREE_LIST nodes. */
1411
1412 tree
1413 copy_list (tree list)
1414 {
1415 tree head;
1416 tree prev, next;
1417
1418 if (list == 0)
1419 return 0;
1420
1421 head = prev = copy_node (list);
1422 next = TREE_CHAIN (list);
1423 while (next)
1424 {
1425 TREE_CHAIN (prev) = copy_node (next);
1426 prev = TREE_CHAIN (prev);
1427 next = TREE_CHAIN (next);
1428 }
1429 return head;
1430 }
1431
1432 \f
1433 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1434 INTEGER_CST with value CST and type TYPE. */
1435
1436 static unsigned int
1437 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1438 {
1439 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1440 /* We need extra HWIs if CST is an unsigned integer with its
1441 upper bit set. */
1442 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1443 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1444 return cst.get_len ();
1445 }
1446
/* Return a new INTEGER_CST with value CST and type TYPE.  The stored
   HOST_WIDE_INT elements are extended according to the sign of TYPE so
   that hashing and equality can compare elements directly.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned value with its top bit set: the extra elements hold
	 the zero extension.  The topmost element keeps only the bits
	 within the precision; any elements in between are all-ones.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Mask off bits above the precision in the top element so the
	 stored form is zero- rather than sign-extended.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1478
1479 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1480
1481 static tree
1482 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1483 CXX_MEM_STAT_INFO)
1484 {
1485 size_t length = sizeof (struct tree_poly_int_cst);
1486 record_node_allocation_statistics (POLY_INT_CST, length);
1487
1488 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1489
1490 TREE_SET_CODE (t, POLY_INT_CST);
1491 TREE_CONSTANT (t) = 1;
1492 TREE_TYPE (t) = type;
1493 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1494 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1495 return t;
1496 }
1497
1498 /* Create a constant tree that contains CST sign-extended to TYPE. */
1499
1500 tree
1501 build_int_cst (tree type, poly_int64 cst)
1502 {
1503 /* Support legacy code. */
1504 if (!type)
1505 type = integer_type_node;
1506
1507 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1508 }
1509
1510 /* Create a constant tree that contains CST zero-extended to TYPE. */
1511
1512 tree
1513 build_int_cstu (tree type, poly_uint64 cst)
1514 {
1515 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1516 }
1517
1518 /* Create a constant tree that contains CST sign-extended to TYPE. */
1519
1520 tree
1521 build_int_cst_type (tree type, poly_int64 cst)
1522 {
1523 gcc_assert (type);
1524 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1525 }
1526
1527 /* Constructs tree in type TYPE from with value given by CST. Signedness
1528 of CST is assumed to be the same as the signedness of TYPE. */
1529
1530 tree
1531 double_int_to_tree (tree type, double_int cst)
1532 {
1533 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1534 }
1535
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision, then build an
	     unshared constant so the overflow flag does not leak into
	     the shared caches.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Polynomial value: flag every coefficient as well as
		 the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1587
1588 /* These are the hash table functions for the hash table of INTEGER_CST
1589 nodes of a sizetype. */
1590
1591 /* Return the hash code X, an INTEGER_CST. */
1592
1593 hashval_t
1594 int_cst_hasher::hash (tree x)
1595 {
1596 const_tree const t = x;
1597 hashval_t code = TYPE_UID (TREE_TYPE (t));
1598 int i;
1599
1600 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1601 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1602
1603 return code;
1604 }
1605
1606 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1607 is the same as that given by *Y, which is the same. */
1608
1609 bool
1610 int_cst_hasher::equal (tree x, tree y)
1611 {
1612 const_tree const xt = x;
1613 const_tree const yt = y;
1614
1615 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1616 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1617 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1618 return false;
1619
1620 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1621 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1622 return false;
1623
1624 return true;
1625 }
1626
1627 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1628 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1629 number of slots that can be cached for the type. */
1630
1631 static inline tree
1632 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1633 int slot, int max_slots)
1634 {
1635 gcc_checking_assert (slot >= 0);
1636 /* Initialize cache. */
1637 if (!TYPE_CACHED_VALUES_P (type))
1638 {
1639 TYPE_CACHED_VALUES_P (type) = 1;
1640 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1641 }
1642 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1643 if (!t)
1644 {
1645 /* Create a new shared int. */
1646 t = build_new_int_cst (type, cst);
1647 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1648 }
1649 return t;
1650 }
1651
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.

   NOTE: the cache slot assignments here must be mirrored by
   cache_integer_cst below.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;			/* Cache slot, -1 = not cacheable.  */
  int limit = 0;		/* Size of the per-type cache vector.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether the value falls in the type's small-value
	 cache, and at which slot.  */
      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  Slot 0 holds -1.  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  /* Enums use TYPE_CACHED_VALUES for their members instead.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal node already existed; free the freshly built one.  */
	ggc_free (nt);
    }

  return t;
}
1810
1811 hashval_t
1812 poly_int_cst_hasher::hash (tree t)
1813 {
1814 inchash::hash hstate;
1815
1816 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1817 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1818 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1819
1820 return hstate.end ();
1821 }
1822
1823 bool
1824 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1825 {
1826 if (TREE_TYPE (x) != y.first)
1827 return false;
1828 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1829 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1830 return false;
1831 return true;
1832 }
1833
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  The result is shared through
   a hash table so equal POLY_INT_CSTs are represented by one node.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  /* Canonicalize the coefficients to the type's precision.  */
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  /* Hash the type and every coefficient, matching
     poly_int_cst_hasher::hash.  */
  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  if (*slot == NULL_TREE)
    {
      /* Not seen before: build shared INTEGER_CST coefficients and the
	 canonical POLY_INT_CST node.  */
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1860
1861 /* Create a constant tree with value VALUE in type TYPE. */
1862
1863 tree
1864 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1865 {
1866 if (value.is_constant ())
1867 return wide_int_to_tree_1 (type, value.coeffs[0]);
1868 return build_poly_int_cst (type, value);
1869 }
1870
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;			/* Cache slot, -1 = not cacheable.  */
  int limit = 0;		/* Size of the per-type cache vector.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* Slot 0 is the null pointer, slot 2 the value 1; slot 1 (the
	   pointer upper bound) is not re-cached here.  */
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      /* Non-negative, so slot is value + 1 (slot 0 is -1).  */
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  /* An entry may only pre-exist if duplicates were allowed.  */
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
2000
2001
2002 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2003 and the rest are zeros. */
2004
2005 tree
2006 build_low_bits_mask (tree type, unsigned bits)
2007 {
2008 gcc_assert (bits <= TYPE_PRECISION (type));
2009
2010 return wide_int_to_tree (type, wi::mask (bits, false,
2011 TYPE_PRECISION (type)));
2012 }
2013
2014 /* Checks that X is integer constant that can be expressed in (unsigned)
2015 HOST_WIDE_INT without loss of precision. */
2016
2017 bool
2018 cst_and_fits_in_hwi (const_tree x)
2019 {
2020 return (TREE_CODE (x) == INTEGER_CST
2021 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2022 }
2023
2024 /* Build a newly constructed VECTOR_CST with the given values of
2025 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2026
2027 tree
2028 make_vector (unsigned log2_npatterns,
2029 unsigned int nelts_per_pattern MEM_STAT_DECL)
2030 {
2031 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2032 tree t;
2033 unsigned npatterns = 1 << log2_npatterns;
2034 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2035 unsigned length = (sizeof (struct tree_vector)
2036 + (encoded_nelts - 1) * sizeof (tree));
2037
2038 record_node_allocation_statistics (VECTOR_CST, length);
2039
2040 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2041
2042 TREE_SET_CODE (t, VECTOR_CST);
2043 TREE_CONSTANT (t) = 1;
2044 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2045 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2046
2047 return t;
2048 }
2049
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  VECTOR_CST
   elements in V are flattened into scalars; missing trailing elements
   are filled with zeros.  */

tree
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Pad out any remaining elements with zero.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
2082
/* Build a vector of type VECTYPE where all the elements are SCs.
   Returns a VECTOR_CST for constant SC, a VEC_DUPLICATE_EXPR for a
   variable-length vector, and otherwise a CONSTRUCTOR.  Propagates
   error_mark_node.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A single-pattern, single-element encoding replicates SC.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Non-constant SC with a known element count: build an explicit
	 CONSTRUCTOR repeating SC.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
2118
2119 /* If TYPE is not a vector type, just return SC, otherwise return
2120 build_vector_from_val (TYPE, SC). */
2121
2122 tree
2123 build_uniform_cst (tree type, tree sc)
2124 {
2125 if (!VECTOR_TYPE_P (type))
2126 return sc;
2127
2128 return build_vector_from_val (type, sc);
2129 }
2130
2131 /* Build a vector series of type TYPE in which element I has the value
2132 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2133 and a VEC_SERIES_EXPR otherwise. */
2134
2135 tree
2136 build_vec_series (tree type, tree base, tree step)
2137 {
2138 if (integer_zerop (step))
2139 return build_vector_from_val (type, base);
2140 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2141 {
2142 tree_vector_builder builder (type, 1, 3);
2143 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2144 wi::to_wide (base) + wi::to_wide (step));
2145 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (elt1) + wi::to_wide (step));
2147 builder.quick_push (base);
2148 builder.quick_push (elt1);
2149 builder.quick_push (elt2);
2150 return builder.build ();
2151 }
2152 return build2 (VEC_SERIES_EXPR, type, base, step);
2153 }
2154
2155 /* Return a vector with the same number of units and number of bits
2156 as VEC_TYPE, but in which the elements are a linear series of unsigned
2157 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2158
2159 tree
2160 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2161 {
2162 tree index_vec_type = vec_type;
2163 tree index_elt_type = TREE_TYPE (vec_type);
2164 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2165 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2166 {
2167 index_elt_type = build_nonstandard_integer_type
2168 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2169 index_vec_type = build_vector_type (index_elt_type, nunits);
2170 }
2171
2172 tree_vector_builder v (index_vec_type, 1, 3);
2173 for (unsigned int i = 0; i < 3; ++i)
2174 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2175 return v.build ();
2176 }
2177
2178 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2179 elements are A and the rest are B. */
2180
2181 tree
2182 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2183 {
2184 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2185 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2186 /* Optimize the constant case. */
2187 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2188 count /= 2;
2189 tree_vector_builder builder (vec_type, count, 2);
2190 for (unsigned int i = 0; i < count * 2; ++i)
2191 builder.quick_push (i < num_a ? a : b);
2192 return builder.build ();
2193 }
2194
2195 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2196 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2197
2198 void
2199 recompute_constructor_flags (tree c)
2200 {
2201 unsigned int i;
2202 tree val;
2203 bool constant_p = true;
2204 bool side_effects_p = false;
2205 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2206
2207 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2208 {
2209 /* Mostly ctors will have elts that don't have side-effects, so
2210 the usual case is to scan all the elements. Hence a single
2211 loop for both const and side effects, rather than one loop
2212 each (with early outs). */
2213 if (!TREE_CONSTANT (val))
2214 constant_p = false;
2215 if (TREE_SIDE_EFFECTS (val))
2216 side_effects_p = true;
2217 }
2218
2219 TREE_SIDE_EFFECTS (c) = side_effects_p;
2220 TREE_CONSTANT (c) = constant_p;
2221 }
2222
2223 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2224 CONSTRUCTOR C. */
2225
2226 void
2227 verify_constructor_flags (tree c)
2228 {
2229 unsigned int i;
2230 tree val;
2231 bool constant_p = TREE_CONSTANT (c);
2232 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2233 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2234
2235 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2236 {
2237 if (constant_p && !TREE_CONSTANT (val))
2238 internal_error ("non-constant element in constant CONSTRUCTOR");
2239 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2240 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2241 }
2242 }
2243
2244 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2245 are in the vec pointed to by VALS. */
2246 tree
2247 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2248 {
2249 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2250
2251 TREE_TYPE (c) = type;
2252 CONSTRUCTOR_ELTS (c) = vals;
2253
2254 recompute_constructor_flags (c);
2255
2256 return c;
2257 }
2258
2259 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2260 INDEX and VALUE. */
2261 tree
2262 build_constructor_single (tree type, tree index, tree value)
2263 {
2264 vec<constructor_elt, va_gc> *v;
2265 constructor_elt elt = {index, value};
2266
2267 vec_alloc (v, 1);
2268 v->quick_push (elt);
2269
2270 return build_constructor (type, v);
2271 }
2272
2273
2274 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2275 are in a list pointed to by VALS. */
2276 tree
2277 build_constructor_from_list (tree type, tree vals)
2278 {
2279 tree t;
2280 vec<constructor_elt, va_gc> *v = NULL;
2281
2282 if (vals)
2283 {
2284 vec_alloc (v, list_length (vals));
2285 for (t = vals; t; t = TREE_CHAIN (t))
2286 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2287 }
2288
2289 return build_constructor (type, v);
2290 }
2291
2292 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2293 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2294 fields in the constructor remain null. */
2295
2296 tree
2297 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2298 {
2299 vec<constructor_elt, va_gc> *v = NULL;
2300
2301 for (tree t : vals)
2302 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2303
2304 return build_constructor (type, v);
2305 }
2306
2307 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2308 of elements, provided as index/value pairs. */
2309
2310 tree
2311 build_constructor_va (tree type, int nelts, ...)
2312 {
2313 vec<constructor_elt, va_gc> *v = NULL;
2314 va_list p;
2315
2316 va_start (p, nelts);
2317 vec_alloc (v, nelts);
2318 while (nelts--)
2319 {
2320 tree index = va_arg (p, tree);
2321 tree value = va_arg (p, tree);
2322 CONSTRUCTOR_APPEND_ELT (v, index, value);
2323 }
2324 va_end (p);
2325 return build_constructor (type, v);
2326 }
2327
2328 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2329
2330 tree
2331 build_clobber (tree type)
2332 {
2333 tree clobber = build_constructor (type, NULL);
2334 TREE_THIS_VOLATILE (clobber) = true;
2335 return clobber;
2336 }
2337
2338 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2339
2340 tree
2341 build_fixed (tree type, FIXED_VALUE_TYPE f)
2342 {
2343 tree v;
2344 FIXED_VALUE_TYPE *fp;
2345
2346 v = make_node (FIXED_CST);
2347 fp = ggc_alloc<fixed_value> ();
2348 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2349
2350 TREE_TYPE (v) = type;
2351 TREE_FIXED_CST_PTR (v) = fp;
2352 return v;
2353 }
2354
2355 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2356
2357 tree
2358 build_real (tree type, REAL_VALUE_TYPE d)
2359 {
2360 tree v;
2361 REAL_VALUE_TYPE *dp;
2362 int overflow = 0;
2363
2364 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2365 Consider doing it via real_convert now. */
2366
2367 v = make_node (REAL_CST);
2368 dp = ggc_alloc<real_value> ();
2369 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2370
2371 TREE_TYPE (v) = type;
2372 TREE_REAL_CST_PTR (v) = dp;
2373 TREE_OVERFLOW (v) = overflow;
2374 return v;
2375 }
2376
2377 /* Like build_real, but first truncate D to the type. */
2378
2379 tree
2380 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2381 {
2382 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2383 }
2384
2385 /* Return a new REAL_CST node whose type is TYPE
2386 and whose value is the integer value of the INTEGER_CST node I. */
2387
2388 REAL_VALUE_TYPE
2389 real_value_from_int_cst (const_tree type, const_tree i)
2390 {
2391 REAL_VALUE_TYPE d;
2392
2393 /* Clear all bits of the real value type so that we can later do
2394 bitwise comparisons to see if two values are the same. */
2395 memset (&d, 0, sizeof d);
2396
2397 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2398 TYPE_SIGN (TREE_TYPE (i)));
2399 return d;
2400 }
2401
2402 /* Given a tree representing an integer constant I, return a tree
2403 representing the same value as a floating-point constant of type TYPE. */
2404
2405 tree
2406 build_real_from_int_cst (tree type, const_tree i)
2407 {
2408 tree v;
2409 int overflow = TREE_OVERFLOW (i);
2410
2411 v = build_real (type, real_value_from_int_cst (type, i));
2412
2413 TREE_OVERFLOW (v) |= overflow;
2414 return v;
2415 }
2416
2417 /* Return a new REAL_CST node whose type is TYPE
2418 and whose value is the integer value I which has sign SGN. */
2419
2420 tree
2421 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2422 {
2423 REAL_VALUE_TYPE d;
2424
2425 /* Clear all bits of the real value type so that we can later do
2426 bitwise comparisons to see if two values are the same. */
2427 memset (&d, 0, sizeof d);
2428
2429 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2430 return build_real (type, d);
2431 }
2432
2433 /* Return a newly constructed STRING_CST node whose value is the LEN
2434 characters at STR when STR is nonnull, or all zeros otherwise.
2435 Note that for a C string literal, LEN should include the trailing NUL.
2436 The TREE_TYPE is not initialized. */
2437
2438 tree
2439 build_string (unsigned len, const char *str /*= NULL */)
2440 {
2441 /* Do not waste bytes provided by padding of struct tree_string. */
2442 unsigned size = len + offsetof (struct tree_string, str) + 1;
2443
2444 record_node_allocation_statistics (STRING_CST, size);
2445
2446 tree s = (tree) ggc_internal_alloc (size);
2447
2448 memset (s, 0, sizeof (struct tree_typed));
2449 TREE_SET_CODE (s, STRING_CST);
2450 TREE_CONSTANT (s) = 1;
2451 TREE_STRING_LENGTH (s) = len;
2452 if (str)
2453 memcpy (s->string.str, str, len);
2454 else
2455 memset (s->string.str, 0, len);
2456 s->string.str[len] = '\0';
2457
2458 return s;
2459 }
2460
2461 /* Return a newly constructed COMPLEX_CST node whose value is
2462 specified by the real and imaginary parts REAL and IMAG.
2463 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2464 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2465
2466 tree
2467 build_complex (tree type, tree real, tree imag)
2468 {
2469 gcc_assert (CONSTANT_CLASS_P (real));
2470 gcc_assert (CONSTANT_CLASS_P (imag));
2471
2472 tree t = make_node (COMPLEX_CST);
2473
2474 TREE_REALPART (t) = real;
2475 TREE_IMAGPART (t) = imag;
2476 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2477 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2478 return t;
2479 }
2480
2481 /* Build a complex (inf +- 0i), such as for the result of cproj.
2482 TYPE is the complex tree type of the result. If NEG is true, the
2483 imaginary zero is negative. */
2484
2485 tree
2486 build_complex_inf (tree type, bool neg)
2487 {
2488 REAL_VALUE_TYPE rinf, rzero = dconst0;
2489
2490 real_inf (&rinf);
2491 rzero.sign = neg;
2492 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2493 build_real (TREE_TYPE (type), rzero));
2494 }
2495
2496 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2497 element is set to 1. In particular, this is 1 + i for complex types. */
2498
2499 tree
2500 build_each_one_cst (tree type)
2501 {
2502 if (TREE_CODE (type) == COMPLEX_TYPE)
2503 {
2504 tree scalar = build_one_cst (TREE_TYPE (type));
2505 return build_complex (type, scalar, scalar);
2506 }
2507 else
2508 return build_one_cst (type);
2509 }
2510
2511 /* Return a constant of arithmetic type TYPE which is the
2512 multiplicative identity of the set TYPE. */
2513
2514 tree
2515 build_one_cst (tree type)
2516 {
2517 switch (TREE_CODE (type))
2518 {
2519 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2520 case POINTER_TYPE: case REFERENCE_TYPE:
2521 case OFFSET_TYPE:
2522 return build_int_cst (type, 1);
2523
2524 case REAL_TYPE:
2525 return build_real (type, dconst1);
2526
2527 case FIXED_POINT_TYPE:
2528 /* We can only generate 1 for accum types. */
2529 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2530 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2531
2532 case VECTOR_TYPE:
2533 {
2534 tree scalar = build_one_cst (TREE_TYPE (type));
2535
2536 return build_vector_from_val (type, scalar);
2537 }
2538
2539 case COMPLEX_TYPE:
2540 return build_complex (type,
2541 build_one_cst (TREE_TYPE (type)),
2542 build_zero_cst (TREE_TYPE (type)));
2543
2544 default:
2545 gcc_unreachable ();
2546 }
2547 }
2548
2549 /* Return an integer of type TYPE containing all 1's in as much precision as
2550 it contains, or a complex or vector whose subparts are such integers. */
2551
2552 tree
2553 build_all_ones_cst (tree type)
2554 {
2555 if (TREE_CODE (type) == COMPLEX_TYPE)
2556 {
2557 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2558 return build_complex (type, scalar, scalar);
2559 }
2560 else
2561 return build_minus_one_cst (type);
2562 }
2563
2564 /* Return a constant of arithmetic type TYPE which is the
2565 opposite of the multiplicative identity of the set TYPE. */
2566
2567 tree
2568 build_minus_one_cst (tree type)
2569 {
2570 switch (TREE_CODE (type))
2571 {
2572 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2573 case POINTER_TYPE: case REFERENCE_TYPE:
2574 case OFFSET_TYPE:
2575 return build_int_cst (type, -1);
2576
2577 case REAL_TYPE:
2578 return build_real (type, dconstm1);
2579
2580 case FIXED_POINT_TYPE:
2581 /* We can only generate 1 for accum types. */
2582 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2583 return build_fixed (type,
2584 fixed_from_double_int (double_int_minus_one,
2585 SCALAR_TYPE_MODE (type)));
2586
2587 case VECTOR_TYPE:
2588 {
2589 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2590
2591 return build_vector_from_val (type, scalar);
2592 }
2593
2594 case COMPLEX_TYPE:
2595 return build_complex (type,
2596 build_minus_one_cst (TREE_TYPE (type)),
2597 build_zero_cst (TREE_TYPE (type)));
2598
2599 default:
2600 gcc_unreachable ();
2601 }
2602 }
2603
2604 /* Build 0 constant of type TYPE. This is used by constructor folding
2605 and thus the constant should be represented in memory by
2606 zero(es). */
2607
2608 tree
2609 build_zero_cst (tree type)
2610 {
2611 switch (TREE_CODE (type))
2612 {
2613 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2614 case POINTER_TYPE: case REFERENCE_TYPE:
2615 case OFFSET_TYPE: case NULLPTR_TYPE:
2616 return build_int_cst (type, 0);
2617
2618 case REAL_TYPE:
2619 return build_real (type, dconst0);
2620
2621 case FIXED_POINT_TYPE:
2622 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2623
2624 case VECTOR_TYPE:
2625 {
2626 tree scalar = build_zero_cst (TREE_TYPE (type));
2627
2628 return build_vector_from_val (type, scalar);
2629 }
2630
2631 case COMPLEX_TYPE:
2632 {
2633 tree zero = build_zero_cst (TREE_TYPE (type));
2634
2635 return build_complex (type, zero, zero);
2636 }
2637
2638 default:
2639 if (!AGGREGATE_TYPE_P (type))
2640 return fold_convert (type, integer_zero_node);
2641 return build_constructor (type, NULL);
2642 }
2643 }
2644
2645
/* Build a new, empty TREE_BINFO node with room for BASE_BINFOS base
   binfos in its embedded vector.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The vector of base binfos is embedded at the tail of struct
     tree_binfo, so size the allocation for the fixed part plus
     BASE_BINFOS inline vector elements.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the fixed header; the embedded vector's control data is
     initialized separately below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  /* Mark the embedded vector as having space for BASE_BINFOS elements.  */
  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2667
2668 /* Create a CASE_LABEL_EXPR tree node and return it. */
2669
2670 tree
2671 build_case_label (tree low_value, tree high_value, tree label_decl)
2672 {
2673 tree t = make_node (CASE_LABEL_EXPR);
2674
2675 TREE_TYPE (t) = void_type_node;
2676 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2677
2678 CASE_LOW (t) = low_value;
2679 CASE_HIGH (t) = high_value;
2680 CASE_LABEL (t) = label_decl;
2681 CASE_CHAIN (t) = NULL_TREE;
2682
2683 return t;
2684 }
2685
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_int_cst already contains one HOST_WIDE_INT element;
     allocate space for the remaining EXT_LEN - 1.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2717
2718 /* Build a newly constructed TREE_VEC node of length LEN. */
2719
2720 tree
2721 make_tree_vec (int len MEM_STAT_DECL)
2722 {
2723 tree t;
2724 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2725
2726 record_node_allocation_statistics (TREE_VEC, length);
2727
2728 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2729
2730 TREE_SET_CODE (t, TREE_VEC);
2731 TREE_VEC_LENGTH (t) = len;
2732
2733 return t;
2734 }
2735
2736 /* Grow a TREE_VEC node to new length LEN. */
2737
2738 tree
2739 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2740 {
2741 gcc_assert (TREE_CODE (v) == TREE_VEC);
2742
2743 int oldlen = TREE_VEC_LENGTH (v);
2744 gcc_assert (len > oldlen);
2745
2746 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2747 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2748
2749 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2750
2751 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2752
2753 TREE_VEC_LENGTH (v) = len;
2754
2755 return v;
2756 }
2757 \f
2758 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2759 fixed, and scalar, complex or vector. */
2760
2761 bool
2762 zerop (const_tree expr)
2763 {
2764 return (integer_zerop (expr)
2765 || real_zerop (expr)
2766 || fixed_zerop (expr));
2767 }
2768
2769 /* Return 1 if EXPR is the integer constant zero or a complex constant
2770 of zero, or a location wrapper for such a constant. */
2771
2772 bool
2773 integer_zerop (const_tree expr)
2774 {
2775 STRIP_ANY_LOCATION_WRAPPER (expr);
2776
2777 switch (TREE_CODE (expr))
2778 {
2779 case INTEGER_CST:
2780 return wi::to_wide (expr) == 0;
2781 case COMPLEX_CST:
2782 return (integer_zerop (TREE_REALPART (expr))
2783 && integer_zerop (TREE_IMAGPART (expr)));
2784 case VECTOR_CST:
2785 return (VECTOR_CST_NPATTERNS (expr) == 1
2786 && VECTOR_CST_DUPLICATE_P (expr)
2787 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2788 default:
2789 return false;
2790 }
2791 }
2792
2793 /* Return 1 if EXPR is the integer constant one or the corresponding
2794 complex constant, or a location wrapper for such a constant. */
2795
2796 bool
2797 integer_onep (const_tree expr)
2798 {
2799 STRIP_ANY_LOCATION_WRAPPER (expr);
2800
2801 switch (TREE_CODE (expr))
2802 {
2803 case INTEGER_CST:
2804 return wi::eq_p (wi::to_widest (expr), 1);
2805 case COMPLEX_CST:
2806 return (integer_onep (TREE_REALPART (expr))
2807 && integer_zerop (TREE_IMAGPART (expr)));
2808 case VECTOR_CST:
2809 return (VECTOR_CST_NPATTERNS (expr) == 1
2810 && VECTOR_CST_DUPLICATE_P (expr)
2811 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2812 default:
2813 return false;
2814 }
2815 }
2816
2817 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2818 return 1 if every piece is the integer constant one.
2819 Also return 1 for location wrappers for such a constant. */
2820
2821 bool
2822 integer_each_onep (const_tree expr)
2823 {
2824 STRIP_ANY_LOCATION_WRAPPER (expr);
2825
2826 if (TREE_CODE (expr) == COMPLEX_CST)
2827 return (integer_onep (TREE_REALPART (expr))
2828 && integer_onep (TREE_IMAGPART (expr)));
2829 else
2830 return integer_onep (expr);
2831 }
2832
2833 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2834 it contains, or a complex or vector whose subparts are such integers,
2835 or a location wrapper for such a constant. */
2836
2837 bool
2838 integer_all_onesp (const_tree expr)
2839 {
2840 STRIP_ANY_LOCATION_WRAPPER (expr);
2841
2842 if (TREE_CODE (expr) == COMPLEX_CST
2843 && integer_all_onesp (TREE_REALPART (expr))
2844 && integer_all_onesp (TREE_IMAGPART (expr)))
2845 return true;
2846
2847 else if (TREE_CODE (expr) == VECTOR_CST)
2848 return (VECTOR_CST_NPATTERNS (expr) == 1
2849 && VECTOR_CST_DUPLICATE_P (expr)
2850 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2851
2852 else if (TREE_CODE (expr) != INTEGER_CST)
2853 return false;
2854
2855 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2856 == wi::to_wide (expr));
2857 }
2858
2859 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2860 for such a constant. */
2861
2862 bool
2863 integer_minus_onep (const_tree expr)
2864 {
2865 STRIP_ANY_LOCATION_WRAPPER (expr);
2866
2867 if (TREE_CODE (expr) == COMPLEX_CST)
2868 return (integer_all_onesp (TREE_REALPART (expr))
2869 && integer_zerop (TREE_IMAGPART (expr)));
2870 else
2871 return integer_all_onesp (expr);
2872 }
2873
2874 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2875 one bit on), or a location wrapper for such a constant. */
2876
2877 bool
2878 integer_pow2p (const_tree expr)
2879 {
2880 STRIP_ANY_LOCATION_WRAPPER (expr);
2881
2882 if (TREE_CODE (expr) == COMPLEX_CST
2883 && integer_pow2p (TREE_REALPART (expr))
2884 && integer_zerop (TREE_IMAGPART (expr)))
2885 return true;
2886
2887 if (TREE_CODE (expr) != INTEGER_CST)
2888 return false;
2889
2890 return wi::popcount (wi::to_wide (expr)) == 1;
2891 }
2892
2893 /* Return 1 if EXPR is an integer constant other than zero or a
2894 complex constant other than zero, or a location wrapper for such a
2895 constant. */
2896
2897 bool
2898 integer_nonzerop (const_tree expr)
2899 {
2900 STRIP_ANY_LOCATION_WRAPPER (expr);
2901
2902 return ((TREE_CODE (expr) == INTEGER_CST
2903 && wi::to_wide (expr) != 0)
2904 || (TREE_CODE (expr) == COMPLEX_CST
2905 && (integer_nonzerop (TREE_REALPART (expr))
2906 || integer_nonzerop (TREE_IMAGPART (expr)))));
2907 }
2908
2909 /* Return 1 if EXPR is the integer constant one. For vector,
2910 return 1 if every piece is the integer constant minus one
2911 (representing the value TRUE).
2912 Also return 1 for location wrappers for such a constant. */
2913
2914 bool
2915 integer_truep (const_tree expr)
2916 {
2917 STRIP_ANY_LOCATION_WRAPPER (expr);
2918
2919 if (TREE_CODE (expr) == VECTOR_CST)
2920 return integer_all_onesp (expr);
2921 return integer_onep (expr);
2922 }
2923
2924 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2925 for such a constant. */
2926
2927 bool
2928 fixed_zerop (const_tree expr)
2929 {
2930 STRIP_ANY_LOCATION_WRAPPER (expr);
2931
2932 return (TREE_CODE (expr) == FIXED_CST
2933 && TREE_FIXED_CST (expr).data.is_zero ());
2934 }
2935
2936 /* Return the power of two represented by a tree node known to be a
2937 power of two. */
2938
2939 int
2940 tree_log2 (const_tree expr)
2941 {
2942 if (TREE_CODE (expr) == COMPLEX_CST)
2943 return tree_log2 (TREE_REALPART (expr));
2944
2945 return wi::exact_log2 (wi::to_wide (expr));
2946 }
2947
2948 /* Similar, but return the largest integer Y such that 2 ** Y is less
2949 than or equal to EXPR. */
2950
2951 int
2952 tree_floor_log2 (const_tree expr)
2953 {
2954 if (TREE_CODE (expr) == COMPLEX_CST)
2955 return tree_log2 (TREE_REALPART (expr));
2956
2957 return wi::floor_log2 (wi::to_wide (expr));
2958 }
2959
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Trailing-zero tracking only applies to integral and pointer
     values.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bits mask recorded for the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* The result keeps at least the smaller of the two operands'
	 trailing-zero counts.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so take the larger of the two
	 counts.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros add under multiplication.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* A known, in-range shift count adds that many trailing zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A known, in-range shift count removes that many trailing
	 zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of 2 behaves like a right shift
	 for this purpose.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the operand was known to be zero (trailing zeros equal to
	 its full precision), the converted value is zero too.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Take the minimum over both arms.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* Only the second operand's value is the result.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
3070
3071 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3072 decimal float constants, so don't return 1 for them.
3073 Also return 1 for location wrappers around such a constant. */
3074
3075 bool
3076 real_zerop (const_tree expr)
3077 {
3078 STRIP_ANY_LOCATION_WRAPPER (expr);
3079
3080 switch (TREE_CODE (expr))
3081 {
3082 case REAL_CST:
3083 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3084 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3085 case COMPLEX_CST:
3086 return real_zerop (TREE_REALPART (expr))
3087 && real_zerop (TREE_IMAGPART (expr));
3088 case VECTOR_CST:
3089 {
3090 /* Don't simply check for a duplicate because the predicate
3091 accepts both +0.0 and -0.0. */
3092 unsigned count = vector_cst_encoded_nelts (expr);
3093 for (unsigned int i = 0; i < count; ++i)
3094 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3095 return false;
3096 return true;
3097 }
3098 default:
3099 return false;
3100 }
3101 }
3102
3103 /* Return 1 if EXPR is the real constant one in real or complex form.
3104 Trailing zeroes matter for decimal float constants, so don't return
3105 1 for them.
3106 Also return 1 for location wrappers around such a constant. */
3107
3108 bool
3109 real_onep (const_tree expr)
3110 {
3111 STRIP_ANY_LOCATION_WRAPPER (expr);
3112
3113 switch (TREE_CODE (expr))
3114 {
3115 case REAL_CST:
3116 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3117 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3118 case COMPLEX_CST:
3119 return real_onep (TREE_REALPART (expr))
3120 && real_zerop (TREE_IMAGPART (expr));
3121 case VECTOR_CST:
3122 return (VECTOR_CST_NPATTERNS (expr) == 1
3123 && VECTOR_CST_DUPLICATE_P (expr)
3124 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3125 default:
3126 return false;
3127 }
3128 }
3129
3130 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3131 matter for decimal float constants, so don't return 1 for them.
3132 Also return 1 for location wrappers around such a constant. */
3133
3134 bool
3135 real_minus_onep (const_tree expr)
3136 {
3137 STRIP_ANY_LOCATION_WRAPPER (expr);
3138
3139 switch (TREE_CODE (expr))
3140 {
3141 case REAL_CST:
3142 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3143 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3144 case COMPLEX_CST:
3145 return real_minus_onep (TREE_REALPART (expr))
3146 && real_zerop (TREE_IMAGPART (expr));
3147 case VECTOR_CST:
3148 return (VECTOR_CST_NPATTERNS (expr) == 1
3149 && VECTOR_CST_DUPLICATE_P (expr)
3150 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3151 default:
3152 return false;
3153 }
3154 }
3155
3156 /* Nonzero if EXP is a constant or a cast of a constant. */
3157
3158 bool
3159 really_constant_p (const_tree exp)
3160 {
3161 /* This is not quite the same as STRIP_NOPS. It does more. */
3162 while (CONVERT_EXPR_P (exp)
3163 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3164 exp = TREE_OPERAND (exp, 0);
3165 return TREE_CONSTANT (exp);
3166 }
3167
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Check every coefficient before writing any of them, so that
	 *VALUE is not partially written on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
3198
/* Return T as a signed poly_int64.  T must satisfy
   tree_fits_poly_int64_p; this is asserted.  */

poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  return TREE_INT_CST_LOW (t);
}
3207
/* Return T as an unsigned poly_uint64.  T must satisfy
   tree_fits_poly_uint64_p; this is asserted.  */

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  return TREE_INT_CST_LOW (t);
}
3216 \f
3217 /* Return first list element whose TREE_VALUE is ELEM.
3218 Return 0 if ELEM is not in LIST. */
3219
3220 tree
3221 value_member (tree elem, tree list)
3222 {
3223 while (list)
3224 {
3225 if (elem == TREE_VALUE (list))
3226 return list;
3227 list = TREE_CHAIN (list);
3228 }
3229 return NULL_TREE;
3230 }
3231
3232 /* Return first list element whose TREE_PURPOSE is ELEM.
3233 Return 0 if ELEM is not in LIST. */
3234
3235 tree
3236 purpose_member (const_tree elem, tree list)
3237 {
3238 while (list)
3239 {
3240 if (elem == TREE_PURPOSE (list))
3241 return list;
3242 list = TREE_CHAIN (list);
3243 }
3244 return NULL_TREE;
3245 }
3246
3247 /* Return true if ELEM is in V. */
3248
3249 bool
3250 vec_member (const_tree elem, vec<tree, va_gc> *v)
3251 {
3252 unsigned ix;
3253 tree t;
3254 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3255 if (elem == t)
3256 return true;
3257 return false;
3258 }
3259
3260 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3261 NULL_TREE. */
3262
3263 tree
3264 chain_index (int idx, tree chain)
3265 {
3266 for (; chain && idx > 0; --idx)
3267 chain = TREE_CHAIN (chain);
3268 return chain;
3269 }
3270
3271 /* Return nonzero if ELEM is part of the chain CHAIN. */
3272
3273 bool
3274 chain_member (const_tree elem, const_tree chain)
3275 {
3276 while (chain)
3277 {
3278 if (elem == chain)
3279 return true;
3280 chain = DECL_CHAIN (chain);
3281 }
3282
3283 return false;
3284 }
3285
3286 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3287 We expect a null pointer to mark the end of the chain.
3288 This is the Lisp primitive `length'. */
3289
3290 int
3291 list_length (const_tree t)
3292 {
3293 const_tree p = t;
3294 #ifdef ENABLE_TREE_CHECKING
3295 const_tree q = t;
3296 #endif
3297 int len = 0;
3298
3299 while (p)
3300 {
3301 p = TREE_CHAIN (p);
3302 #ifdef ENABLE_TREE_CHECKING
3303 if (len % 2)
3304 q = TREE_CHAIN (q);
3305 gcc_assert (p != q);
3306 #endif
3307 len++;
3308 }
3309
3310 return len;
3311 }
3312
3313 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3314 UNION_TYPE TYPE, or NULL_TREE if none. */
3315
3316 tree
3317 first_field (const_tree type)
3318 {
3319 tree t = TYPE_FIELDS (type);
3320 while (t && TREE_CODE (t) != FIELD_DECL)
3321 t = TREE_CHAIN (t);
3322 return t;
3323 }
3324
3325 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3326 UNION_TYPE TYPE, or NULL_TREE if none. */
3327
3328 tree
3329 last_field (const_tree type)
3330 {
3331 tree last = NULL_TREE;
3332
3333 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3334 {
3335 if (TREE_CODE (fld) != FIELD_DECL)
3336 continue;
3337
3338 last = fld;
3339 }
3340
3341 return last;
3342 }
3343
3344 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3345 by modifying the last node in chain 1 to point to chain 2.
3346 This is the Lisp primitive `nconc'. */
3347
3348 tree
3349 chainon (tree op1, tree op2)
3350 {
3351 tree t1;
3352
3353 if (!op1)
3354 return op2;
3355 if (!op2)
3356 return op1;
3357
3358 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3359 continue;
3360 TREE_CHAIN (t1) = op2;
3361
3362 #ifdef ENABLE_TREE_CHECKING
3363 {
3364 tree t2;
3365 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3366 gcc_assert (t2 != t1);
3367 }
3368 #endif
3369
3370 return op1;
3371 }
3372
3373 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3374
3375 tree
3376 tree_last (tree chain)
3377 {
3378 tree next;
3379 if (chain)
3380 while ((next = TREE_CHAIN (chain)))
3381 chain = next;
3382 return chain;
3383 }
3384
3385 /* Reverse the order of elements in the chain T,
3386 and return the new head of the chain (old last element). */
3387
3388 tree
3389 nreverse (tree t)
3390 {
3391 tree prev = 0, decl, next;
3392 for (decl = t; decl; decl = next)
3393 {
3394 /* We shouldn't be using this function to reverse BLOCK chains; we
3395 have blocks_nreverse for that. */
3396 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3397 next = TREE_CHAIN (decl);
3398 TREE_CHAIN (decl) = prev;
3399 prev = decl;
3400 }
3401 return prev;
3402 }
3403 \f
3404 /* Return a newly created TREE_LIST node whose
3405 purpose and value fields are PARM and VALUE. */
3406
3407 tree
3408 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3409 {
3410 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3411 TREE_PURPOSE (t) = parm;
3412 TREE_VALUE (t) = value;
3413 return t;
3414 }
3415
3416 /* Build a chain of TREE_LIST nodes from a vector. */
3417
3418 tree
3419 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3420 {
3421 tree ret = NULL_TREE;
3422 tree *pp = &ret;
3423 unsigned int i;
3424 tree t;
3425 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3426 {
3427 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3428 pp = &TREE_CHAIN (*pp);
3429 }
3430 return ret;
3431 }
3432
3433 /* Return a newly created TREE_LIST node whose
3434 purpose and value fields are PURPOSE and VALUE
3435 and whose TREE_CHAIN is CHAIN. */
3436
3437 tree
3438 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3439 {
3440 tree node;
3441
3442 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3443 memset (node, 0, sizeof (struct tree_common));
3444
3445 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3446
3447 TREE_SET_CODE (node, TREE_LIST);
3448 TREE_CHAIN (node) = chain;
3449 TREE_PURPOSE (node) = purpose;
3450 TREE_VALUE (node) = value;
3451 return node;
3452 }
3453
3454 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3455 trees. */
3456
3457 vec<tree, va_gc> *
3458 ctor_to_vec (tree ctor)
3459 {
3460 vec<tree, va_gc> *vec;
3461 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3462 unsigned int ix;
3463 tree val;
3464
3465 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3466 vec->quick_push (val);
3467
3468 return vec;
3469 }
3470 \f
3471 /* Return the size nominally occupied by an object of type TYPE
3472 when it resides in memory. The value is measured in units of bytes,
3473 and its data type is that normally used for type sizes
3474 (which is the first type created by make_signed_type or
3475 make_unsigned_type). */
3476
3477 tree
3478 size_in_bytes_loc (location_t loc, const_tree type)
3479 {
3480 tree t;
3481
3482 if (type == error_mark_node)
3483 return integer_zero_node;
3484
3485 type = TYPE_MAIN_VARIANT (type);
3486 t = TYPE_SIZE_UNIT (type);
3487
3488 if (t == 0)
3489 {
3490 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3491 return size_zero_node;
3492 }
3493
3494 return t;
3495 }
3496
3497 /* Return the size of TYPE (in bytes) as a wide integer
3498 or return -1 if the size can vary or is larger than an integer. */
3499
3500 HOST_WIDE_INT
3501 int_size_in_bytes (const_tree type)
3502 {
3503 tree t;
3504
3505 if (type == error_mark_node)
3506 return 0;
3507
3508 type = TYPE_MAIN_VARIANT (type);
3509 t = TYPE_SIZE_UNIT (type);
3510
3511 if (t && tree_fits_uhwi_p (t))
3512 return TREE_INT_CST_LOW (t);
3513 else
3514 return -1;
3515 }
3516
3517 /* Return the maximum size of TYPE (in bytes) as a wide integer
3518 or return -1 if the size can vary or is larger than an integer. */
3519
3520 HOST_WIDE_INT
3521 max_int_size_in_bytes (const_tree type)
3522 {
3523 HOST_WIDE_INT size = -1;
3524 tree size_tree;
3525
3526 /* If this is an array type, check for a possible MAX_SIZE attached. */
3527
3528 if (TREE_CODE (type) == ARRAY_TYPE)
3529 {
3530 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3531
3532 if (size_tree && tree_fits_uhwi_p (size_tree))
3533 size = tree_to_uhwi (size_tree);
3534 }
3535
3536 /* If we still haven't been able to get a size, see if the language
3537 can compute a maximum size. */
3538
3539 if (size == -1)
3540 {
3541 size_tree = lang_hooks.types.max_size (type);
3542
3543 if (size_tree && tree_fits_uhwi_p (size_tree))
3544 size = tree_to_uhwi (size_tree);
3545 }
3546
3547 return size;
3548 }
3549 \f
3550 /* Return the bit position of FIELD, in bits from the start of the record.
3551 This is a tree of type bitsizetype. */
3552
3553 tree
3554 bit_position (const_tree field)
3555 {
3556 return bit_from_pos (DECL_FIELD_OFFSET (field),
3557 DECL_FIELD_BIT_OFFSET (field));
3558 }
3559 \f
3560 /* Return the byte position of FIELD, in bytes from the start of the record.
3561 This is a tree of type sizetype. */
3562
3563 tree
3564 byte_position (const_tree field)
3565 {
3566 return byte_from_pos (DECL_FIELD_OFFSET (field),
3567 DECL_FIELD_BIT_OFFSET (field));
3568 }
3569
3570 /* Likewise, but return as an integer. It must be representable in
3571 that way (since it could be a signed value, we don't have the
3572 option of returning -1 like int_size_in_byte can. */
3573
3574 HOST_WIDE_INT
3575 int_byte_position (const_tree field)
3576 {
3577 return tree_to_shwi (byte_position (field));
3578 }
3579 \f
3580 /* Return, as a tree node, the number of elements for TYPE (which is an
3581 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3582
3583 tree
3584 array_type_nelts (const_tree type)
3585 {
3586 tree index_type, min, max;
3587
3588 /* If they did it with unspecified bounds, then we should have already
3589 given an error about it before we got here. */
3590 if (! TYPE_DOMAIN (type))
3591 return error_mark_node;
3592
3593 index_type = TYPE_DOMAIN (type);
3594 min = TYPE_MIN_VALUE (index_type);
3595 max = TYPE_MAX_VALUE (index_type);
3596
3597 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3598 if (!max)
3599 {
3600 /* zero sized arrays are represented from C FE as complete types with
3601 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3602 them as min 0, max -1. */
3603 if (COMPLETE_TYPE_P (type)
3604 && integer_zerop (TYPE_SIZE (type))
3605 && integer_zerop (min))
3606 return build_int_cst (TREE_TYPE (min), -1);
3607
3608 return error_mark_node;
3609 }
3610
3611 return (integer_zerop (min)
3612 ? max
3613 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3614 }
3615 \f
/* If ARG is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If ARG isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* A variable is static only if it lives in static storage and its
	 address is link-time constant: thread-local and dllimport'd
	 variables need a run-time address computation.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise the reference is static iff its base object is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* An array element is static if the array is and both the element
	 size and the index are compile-time constants.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3681
3682 \f
3683
3684
3685 /* Return whether OP is a DECL whose address is function-invariant. */
3686
3687 bool
3688 decl_address_invariant_p (const_tree op)
3689 {
3690 /* The conditions below are slightly less strict than the one in
3691 staticp. */
3692
3693 switch (TREE_CODE (op))
3694 {
3695 case PARM_DECL:
3696 case RESULT_DECL:
3697 case LABEL_DECL:
3698 case FUNCTION_DECL:
3699 return true;
3700
3701 case VAR_DECL:
3702 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3703 || DECL_THREAD_LOCAL_P (op)
3704 || DECL_CONTEXT (op) == current_function_decl
3705 || decl_function_context (op) == current_function_decl)
3706 return true;
3707 break;
3708
3709 case CONST_DECL:
3710 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3711 || decl_function_context (op) == current_function_decl)
3712 return true;
3713 break;
3714
3715 default:
3716 break;
3717 }
3718
3719 return false;
3720 }
3721
3722 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3723
3724 bool
3725 decl_address_ip_invariant_p (const_tree op)
3726 {
3727 /* The conditions below are slightly less strict than the one in
3728 staticp. */
3729
3730 switch (TREE_CODE (op))
3731 {
3732 case LABEL_DECL:
3733 case FUNCTION_DECL:
3734 case STRING_CST:
3735 return true;
3736
3737 case VAR_DECL:
3738 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3739 && !DECL_DLLIMPORT_P (op))
3740 || DECL_THREAD_LOCAL_P (op))
3741 return true;
3742 break;
3743
3744 case CONST_DECL:
3745 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3746 return true;
3747 break;
3748
3749 default:
3750 break;
3751 }
3752
3753 return false;
3754 }
3755
3756
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and read-only trees without side effects are trivially
     invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* The address of a component reference is invariant when every
	 step of the reference chain is: array indices must themselves
	 be invariant and no variable offset/size operands may be
	 present, and the ultimate base must have an invariant
	 address.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* Operands 2 and 3, when present, are variable lower
		 bound and element size; either makes the address
		 non-invariant.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2, when present, is a variable field offset.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3807
3808 /* Return true if T is function-invariant. */
3809
3810 bool
3811 tree_invariant_p (tree t)
3812 {
3813 tree inner = skip_simple_arithmetic (t);
3814 return tree_invariant_p_1 (inner);
3815 }
3816
3817 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3818 Do this to any expression which may be used in more than one place,
3819 but must be evaluated only once.
3820
3821 Normally, expand_expr would reevaluate the expression each time.
3822 Calling save_expr produces something that is evaluated and recorded
3823 the first time expand_expr is called on it. Subsequent calls to
3824 expand_expr just reuse the recorded value.
3825
3826 The call to expand_expr that generates code that actually computes
3827 the value is the first call *at compile time*. Subsequent calls
3828 *at compile time* generate code to use the saved value.
3829 This produces correct result provided that *at run time* control
3830 always flows through the insns made by the first expand_expr
3831 before reaching the other places where the save_expr was evaluated.
3832 You, the caller of save_expr, must make sure this is so.
3833
3834 Constants, and certain read-only nodes, are returned with no
3835 SAVE_EXPR because that is safe. Expressions containing placeholders
3836 are not touched; see tree.def for an explanation of what these
3837 are used for. */
3838
3839 tree
3840 save_expr (tree expr)
3841 {
3842 tree inner;
3843
3844 /* If the tree evaluates to a constant, then we don't want to hide that
3845 fact (i.e. this allows further folding, and direct checks for constants).
3846 However, a read-only object that has side effects cannot be bypassed.
3847 Since it is no problem to reevaluate literals, we just return the
3848 literal node. */
3849 inner = skip_simple_arithmetic (expr);
3850 if (TREE_CODE (inner) == ERROR_MARK)
3851 return inner;
3852
3853 if (tree_invariant_p_1 (inner))
3854 return expr;
3855
3856 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3857 it means that the size or offset of some field of an object depends on
3858 the value within another field.
3859
3860 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3861 and some variable since it would then need to be both evaluated once and
3862 evaluated more than once. Front-ends must assure this case cannot
3863 happen by surrounding any such subexpressions in their own SAVE_EXPR
3864 and forcing evaluation at the proper time. */
3865 if (contains_placeholder_p (inner))
3866 return expr;
3867
3868 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3869
3870 /* This expression might be placed ahead of a jump to ensure that the
3871 value was computed on both sides of the jump. So make sure it isn't
3872 eliminated as dead. */
3873 TREE_SIDE_EFFECTS (expr) = 1;
3874 return expr;
3875 }
3876
3877 /* Look inside EXPR into any simple arithmetic operations. Return the
3878 outermost non-arithmetic or non-invariant node. */
3879
3880 tree
3881 skip_simple_arithmetic (tree expr)
3882 {
3883 /* We don't care about whether this can be used as an lvalue in this
3884 context. */
3885 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3886 expr = TREE_OPERAND (expr, 0);
3887
3888 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3889 a constant, it will be more efficient to not make another SAVE_EXPR since
3890 it will allow better simplification and GCSE will be able to merge the
3891 computations if they actually occur. */
3892 while (true)
3893 {
3894 if (UNARY_CLASS_P (expr))
3895 expr = TREE_OPERAND (expr, 0);
3896 else if (BINARY_CLASS_P (expr))
3897 {
3898 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3899 expr = TREE_OPERAND (expr, 0);
3900 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3901 expr = TREE_OPERAND (expr, 1);
3902 else
3903 break;
3904 }
3905 else
3906 break;
3907 }
3908
3909 return expr;
3910 }
3911
3912 /* Look inside EXPR into simple arithmetic operations involving constants.
3913 Return the outermost non-arithmetic or non-constant node. */
3914
3915 tree
3916 skip_simple_constant_arithmetic (tree expr)
3917 {
3918 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3919 expr = TREE_OPERAND (expr, 0);
3920
3921 while (true)
3922 {
3923 if (UNARY_CLASS_P (expr))
3924 expr = TREE_OPERAND (expr, 0);
3925 else if (BINARY_CLASS_P (expr))
3926 {
3927 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3928 expr = TREE_OPERAND (expr, 0);
3929 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3930 expr = TREE_OPERAND (expr, 1);
3931 else
3932 break;
3933 }
3934 else
3935 break;
3936 }
3937
3938 return expr;
3939 }
3940
3941 /* Return which tree structure is used by T. */
3942
3943 enum tree_node_structure_enum
3944 tree_node_structure (const_tree t)
3945 {
3946 const enum tree_code code = TREE_CODE (t);
3947 return tree_node_structure_for_code (code);
3948 }
3949
/* Set various status flags when building a CALL_EXPR object T.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Combine the flags of the operands.  Operand 0 of a CALL_EXPR holds
     the operand count, hence the loop starting at 1; I is reused here
     as the loop index.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3979 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations, since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which we assume here
	 will be valid.  Only the base object (operand 0) matters.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* Generic unary/binary case: check each operand.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* Only the argument list of a call is examined; the callee
	       and static chain are not.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
4066
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* A field makes the aggregate placeholder-dependent if its
	   offset, its qualifier (QUAL_UNION_TYPE only) or its own type
	   does.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
4135
4136 /* Wrapper around above function used to cache its result. */
4137
4138 bool
4139 type_contains_placeholder_p (tree type)
4140 {
4141 bool result;
4142
4143 /* If the contains_placeholder_bits field has been initialized,
4144 then we know the answer. */
4145 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4146 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4147
4148 /* Indicate that we've seen this type node, and the answer is false.
4149 This is what we want to return if we run into recursion via fields. */
4150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4151
4152 /* Compute the real value. */
4153 result = type_contains_placeholder_1 (type);
4154
4155 /* Store the real value. */
4156 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4157
4158 return result;
4159 }
4160 \f
/* Push tree EXP onto vector QUEUE if it is not already present.  */

static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Duplicates are detected with simple_cst_equal, not pointer
     identity.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  /* ITER is NULL here iff the loop ran off the end without finding a
     match (FOR_EACH_VEC_ELT leaves it NULL on normal exit).  */
  if (!iter)
    queue->safe_push (exp);
}
4176
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down to the ultimate base of the reference chain.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* A COMPONENT_REF rooted at a PLACEHOLDER_EXPR is itself one of
	 the references we are collecting; otherwise recurse into the
	 base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Operand 0 of a variable-length expression is its operand
	   count, so start at 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4250
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.

   Subtrees in which nothing was substituted are returned unchanged
   (pointer-identical), so callers can cheaply detect whether any
   replacement happened.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Generic case: substitute in each operand and rebuild (through
	   fold_buildN so the result can simplify) only if something
	   changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Substitute in every operand, copying the node lazily on the
	     first change; operand 0 holds the operand count.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* The only operand we care about is the one of the ARRAY_REFs.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4446
/* Similar to substitute_in_expr, but look for a PLACEHOLDER_EXPR in EXP
   and find a replacement for it within OBJ, a tree that is an object or
   a chain of references.  Returns EXP itself when nothing needed to be
   substituted, otherwise a folded copy with the placeholder replaced.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First walk: descend through OBJ (operand 1 for COMPOUND_EXPR and
	 COND_EXPR, operand 0 for other expression-like nodes) looking for
	 a subobject whose main variant type matches the placeholder's.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second walk: also accept a pointer to the needed type, returning
	 a dereference of it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	/* Constants and declarations cannot contain a placeholder.  */
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Substitute in each operand; rebuild (folded) only when at least
	   one operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Substitute in each operand past the opcode; copy EXP lazily,
	     only once some operand actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* The result stays read-only if EXP was, and a dereference stays
     no-trap if EXP's was.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4617 \f
4618
4619 /* Subroutine of stabilize_reference; this is called for subtrees of
4620 references. Any expression with side-effects must be put in a SAVE_EXPR
4621 to ensure that it is only evaluated once.
4622
4623 We don't put SAVE_EXPR nodes around everything, because assigning very
4624 simple expressions to temporaries causes us to miss good opportunities
4625 for optimizations. Among other things, the opportunity to fold in the
4626 addition of a constant into an addressing mode often gets lost, e.g.
4627 "y[i+1] += x;". In general, we take the approach that we should not make
4628 an assignment unless we are forced into it - i.e., that any non-side effect
4629 operator should be allowed, and that cse should take care of coalescing
4630 multiple utterances of the same expression should that prove fruitful. */
4631
4632 static tree
4633 stabilize_reference_1 (tree e)
4634 {
4635 tree result;
4636 enum tree_code code = TREE_CODE (e);
4637
4638 /* We cannot ignore const expressions because it might be a reference
4639 to a const array but whose index contains side-effects. But we can
4640 ignore things that are actual constant or that already have been
4641 handled by this function. */
4642
4643 if (tree_invariant_p (e))
4644 return e;
4645
4646 switch (TREE_CODE_CLASS (code))
4647 {
4648 case tcc_exceptional:
4649 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4650 have side-effects. */
4651 if (code == STATEMENT_LIST)
4652 return save_expr (e);
4653 /* FALLTHRU */
4654 case tcc_type:
4655 case tcc_declaration:
4656 case tcc_comparison:
4657 case tcc_statement:
4658 case tcc_expression:
4659 case tcc_reference:
4660 case tcc_vl_exp:
4661 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4662 so that it will only be evaluated once. */
4663 /* The reference (r) and comparison (<) classes could be handled as
4664 below, but it is generally faster to only evaluate them once. */
4665 if (TREE_SIDE_EFFECTS (e))
4666 return save_expr (e);
4667 return e;
4668
4669 case tcc_constant:
4670 /* Constants need no processing. In fact, we should never reach
4671 here. */
4672 return e;
4673
4674 case tcc_binary:
4675 /* Division is slow and tends to be compiled with jumps,
4676 especially the division by powers of 2 that is often
4677 found inside of an array reference. So do it just once. */
4678 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4679 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4680 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4681 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4682 return save_expr (e);
4683 /* Recursively stabilize each operand. */
4684 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4685 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4686 break;
4687
4688 case tcc_unary:
4689 /* Recursively stabilize each operand. */
4690 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4691 break;
4692
4693 default:
4694 gcc_unreachable ();
4695 }
4696
4697 TREE_TYPE (result) = TREE_TYPE (e);
4698 TREE_READONLY (result) = TREE_READONLY (e);
4699 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4700 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4701
4702 return result;
4703 }
4704
4705 /* Stabilize a reference so that we can use it any number of times
4706 without causing its operands to be evaluated more than once.
4707 Returns the stabilized reference. This works by means of save_expr,
4708 so see the caveats in the comments about save_expr.
4709
4710 Also allows conversion expressions whose operands are references.
4711 Any other kind of expression is returned unchanged. */
4712
4713 tree
4714 stabilize_reference (tree ref)
4715 {
4716 tree result;
4717 enum tree_code code = TREE_CODE (ref);
4718
4719 switch (code)
4720 {
4721 case VAR_DECL:
4722 case PARM_DECL:
4723 case RESULT_DECL:
4724 /* No action is needed in this case. */
4725 return ref;
4726
4727 CASE_CONVERT:
4728 case FLOAT_EXPR:
4729 case FIX_TRUNC_EXPR:
4730 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4731 break;
4732
4733 case INDIRECT_REF:
4734 result = build_nt (INDIRECT_REF,
4735 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4736 break;
4737
4738 case COMPONENT_REF:
4739 result = build_nt (COMPONENT_REF,
4740 stabilize_reference (TREE_OPERAND (ref, 0)),
4741 TREE_OPERAND (ref, 1), NULL_TREE);
4742 break;
4743
4744 case BIT_FIELD_REF:
4745 result = build_nt (BIT_FIELD_REF,
4746 stabilize_reference (TREE_OPERAND (ref, 0)),
4747 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4748 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4749 break;
4750
4751 case ARRAY_REF:
4752 result = build_nt (ARRAY_REF,
4753 stabilize_reference (TREE_OPERAND (ref, 0)),
4754 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4755 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4756 break;
4757
4758 case ARRAY_RANGE_REF:
4759 result = build_nt (ARRAY_RANGE_REF,
4760 stabilize_reference (TREE_OPERAND (ref, 0)),
4761 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4762 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4763 break;
4764
4765 case COMPOUND_EXPR:
4766 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4767 it wouldn't be ignored. This matters when dealing with
4768 volatiles. */
4769 return stabilize_reference_1 (ref);
4770
4771 /* If arg isn't a kind of lvalue we recognize, make no change.
4772 Caller should recognize the error for an invalid lvalue. */
4773 default:
4774 return ref;
4775
4776 case ERROR_MARK:
4777 return error_mark_node;
4778 }
4779
4780 TREE_TYPE (result) = TREE_TYPE (ref);
4781 TREE_READONLY (result) = TREE_READONLY (ref);
4782 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4783 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4784
4785 return result;
4786 }
4787 \f
4788 /* Low-level constructors for expressions. */
4789
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT
   and TREE_SIDE_EFFECTS on T, an ADDR_EXPR, based on the properties of
   the object whose address is being taken.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

  /* Clear TC if NODE is not constant; set SE if NODE has side effects.
     A null NODE is ignored.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  /* Walk down the chain of handled components, merging the flags of any
     variable index/offset operands met on the way.  */
  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Operand 1 is the index; operands 2 and 3 are the optional
	     lower bound and element size.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  /* Operand 2 is the optional field offset.  */
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Give the language a chance to map the innermost expression to a
     declaration and adjust the flags.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4866
4867 /* Build an expression of code CODE, data type TYPE, and operands as
4868 specified. Expressions and reference nodes can be created this way.
4869 Constants, decls, types and misc nodes cannot be.
4870
4871 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4872 enough for all extant tree codes. */
4873
4874 tree
4875 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4876 {
4877 tree t;
4878
4879 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4880
4881 t = make_node (code PASS_MEM_STAT);
4882 TREE_TYPE (t) = tt;
4883
4884 return t;
4885 }
4886
/* Build a single-operand expression of code CODE with type TYPE and
   operand NODE.  The node is allocated directly (all one-operand
   expressions share the tree_exp layout) and TREE_SIDE_EFFECTS,
   TREE_READONLY, TREE_CONSTANT and TREE_THIS_VOLATILE are derived from
   CODE and the operand.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common header needs clearing; every other field is
     explicitly initialized below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  /* Start by inheriting side effects and read-only-ness from a
     non-type operand; the switch below may override these.  */
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      /* Statements have side effects, except for debug markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      /* A unary operator or VIEW_CONVERT_EXPR of a constant operand is
	 itself constant; a reference to a volatile operand is itself
	 volatile.  */
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4950
/* Helper for build2 through build5: store ARG##N as operand N of T and
   fold that argument's flags into the caller's local variables of the
   same names -- SIDE_EFFECTS is set when the argument has side effects,
   READ_ONLY is cleared when it is neither read-only nor a constant, and
   CONSTANT is cleared when it is not constant.  Type arguments are
   skipped because their flag bits have different meanings.  */

#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4965
/* Build a two-operand expression of code CODE with type TT and operands
   ARG0 and ARG1, deriving TREE_SIDE_EFFECTS, TREE_READONLY,
   TREE_CONSTANT and TREE_THIS_VOLATILE via PROCESS_ARG.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Plain arithmetic on pointer-typed results is only allowed for
     constant operands; real pointer arithmetic must go through
     POINTER_PLUS_EXPR.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  /* POINTER_PLUS_EXPR requires a pointer first operand and an offset
     second operand of sizetype-compatible type.  */
  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  /* Remember whether this is a division or modulus by literal zero so
     the result is not marked constant below.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &OBJ takes its read-only and volatile flags from
	 OBJ itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
5044
5045
/* Build a three-operand expression of code CODE with type TT and operands
   ARG0, ARG1 and ARG2.  CONSTANT is written by PROCESS_ARG but
   intentionally unused here; READ_ONLY is applied only to COND_EXPR.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5086
/* Build a four-operand expression of code CODE with type TT and operands
   ARG0 through ARG3.  CONSTANT and READ_ONLY are written by PROCESS_ARG
   but intentionally unused here; only SIDE_EFFECTS is applied.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5113
/* Build a five-operand expression of code CODE with type TT and operands
   ARG0 through ARG4.  CONSTANT and READ_ONLY are written by PROCESS_ARG
   but intentionally unused here, except that a TARGET_MEM_REF of &OBJ
   copies its read-only and volatile flags from OBJ.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* A TARGET_MEM_REF of &OBJ takes its read-only and volatile flags
	 from OBJ itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5151
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR, with source location LOC.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      /* A base that is itself a MEM_REF is folded into the accumulated
	 offset; otherwise retake the address of the base.  */
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Operand 1 of a MEM_REF carries the constant offset; its type records
     the pointer type used for the access.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
5183
5184 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5185
5186 poly_offset_int
5187 mem_ref_offset (const_tree t)
5188 {
5189 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5190 SIGNED);
5191 }
5192
5193 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5194 offsetted by OFFSET units. */
5195
5196 tree
5197 build_invariant_address (tree type, tree base, poly_int64 offset)
5198 {
5199 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5200 build_fold_addr_expr (base),
5201 build_int_cst (ptr_type_node, offset));
5202 tree addr = build1 (ADDR_EXPR, type, ref);
5203 recompute_tree_invariant_for_addr_expr (addr);
5204 return addr;
5205 }
5206
5207 /* Similar except don't specify the TREE_TYPE
5208 and leave the TREE_SIDE_EFFECTS as 0.
5209 It is permissible for arguments to be null,
5210 or even garbage if their values do not matter. */
5211
5212 tree
5213 build_nt (enum tree_code code, ...)
5214 {
5215 tree t;
5216 int length;
5217 int i;
5218 va_list p;
5219
5220 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5221
5222 va_start (p, code);
5223
5224 t = make_node (code);
5225 length = TREE_CODE_LENGTH (code);
5226
5227 for (i = 0; i < length; i++)
5228 TREE_OPERAND (t, i) = va_arg (p, tree);
5229
5230 va_end (p);
5231 return t;
5232 }
5233
5234 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5235 tree vec. */
5236
5237 tree
5238 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5239 {
5240 tree ret, t;
5241 unsigned int ix;
5242
5243 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5244 CALL_EXPR_FN (ret) = fn;
5245 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5246 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5247 CALL_EXPR_ARG (ret, ix) = t;
5248 return ret;
5249 }
5250 \f
5251 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5252 and data type TYPE.
5253 We do NOT enter this node in any sort of symbol table.
5254
5255 LOC is the location of the decl.
5256
5257 layout_decl is used to set up the decl's storage layout.
5258 Other slots are initialized to 0 or null pointers. */
5259
5260 tree
5261 build_decl (location_t loc, enum tree_code code, tree name,
5262 tree type MEM_STAT_DECL)
5263 {
5264 tree t;
5265
5266 t = make_node (code PASS_MEM_STAT);
5267 DECL_SOURCE_LOCATION (t) = loc;
5268
5269 /* if (type == error_mark_node)
5270 type = integer_type_node; */
5271 /* That is not done, deliberately, so that having error_mark_node
5272 as the type can suppress useless errors in the use of this variable. */
5273
5274 DECL_NAME (t) = name;
5275 TREE_TYPE (t) = type;
5276
5277 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5278 layout_decl (t, 0);
5279
5280 return t;
5281 }
5282
5283 /* Builds and returns function declaration with NAME and TYPE. */
5284
5285 tree
5286 build_fn_decl (const char *name, tree type)
5287 {
5288 tree id = get_identifier (name);
5289 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5290
5291 DECL_EXTERNAL (decl) = 1;
5292 TREE_PUBLIC (decl) = 1;
5293 DECL_ARTIFICIAL (decl) = 1;
5294 TREE_NOTHROW (decl) = 1;
5295
5296 return decl;
5297 }
5298
5299 vec<tree, va_gc> *all_translation_units;
5300
5301 /* Builds a new translation-unit decl with name NAME, queues it in the
5302 global list of translation-unit decls and returns it. */
5303
5304 tree
5305 build_translation_unit_decl (tree name)
5306 {
5307 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5308 name, NULL_TREE);
5309 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5310 vec_safe_push (all_translation_units, tu);
5311 return tu;
5312 }
5313
5314 \f
5315 /* BLOCK nodes are used to represent the structure of binding contours
5316 and declarations, once those contours have been exited and their contents
5317 compiled. This information is used for outputting debugging info. */
5318
5319 tree
5320 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5321 {
5322 tree block = make_node (BLOCK);
5323
5324 BLOCK_VARS (block) = vars;
5325 BLOCK_SUBBLOCKS (block) = subblocks;
5326 BLOCK_SUPERCONTEXT (block) = supercontext;
5327 BLOCK_CHAIN (block) = chain;
5328 return block;
5329 }
5330
5331 \f
5332 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5333
5334 LOC is the location to use in tree T. */
5335
5336 void
5337 protected_set_expr_location (tree t, location_t loc)
5338 {
5339 if (CAN_HAVE_LOCATION_P (t))
5340 SET_EXPR_LOCATION (t, loc);
5341 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5342 {
5343 t = expr_single (t);
5344 if (t && CAN_HAVE_LOCATION_P (t))
5345 SET_EXPR_LOCATION (t, loc);
5346 }
5347 }
5348
5349 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5350 UNKNOWN_LOCATION. */
5351
5352 void
5353 protected_set_expr_location_if_unset (tree t, location_t loc)
5354 {
5355 t = expr_single (t);
5356 if (t && !EXPR_HAS_LOCATION (t))
5357 protected_set_expr_location (t, loc);
5358 }
5359 \f
5360 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5361 of the various TYPE_QUAL values. */
5362
5363 static void
5364 set_type_quals (tree type, int type_quals)
5365 {
5366 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5367 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5368 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5369 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5370 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5371 }
5372
5373 /* Returns true iff CAND and BASE have equivalent language-specific
5374 qualifiers. */
5375
5376 bool
5377 check_lang_type (const_tree cand, const_tree base)
5378 {
5379 if (lang_hooks.types.type_hash_eq == NULL)
5380 return true;
5381 /* type_hash_eq currently only applies to these types. */
5382 if (TREE_CODE (cand) != FUNCTION_TYPE
5383 && TREE_CODE (cand) != METHOD_TYPE)
5384 return true;
5385 return lang_hooks.types.type_hash_eq (cand, base);
5386 }
5387
5388 /* This function checks to see if TYPE matches the size one of the built-in
5389 atomic types, and returns that core atomic type. */
5390
5391 static tree
5392 find_atomic_core_type (const_tree type)
5393 {
5394 tree base_atomic_type;
5395
5396 /* Only handle complete types. */
5397 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5398 return NULL_TREE;
5399
5400 switch (tree_to_uhwi (TYPE_SIZE (type)))
5401 {
5402 case 8:
5403 base_atomic_type = atomicQI_type_node;
5404 break;
5405
5406 case 16:
5407 base_atomic_type = atomicHI_type_node;
5408 break;
5409
5410 case 32:
5411 base_atomic_type = atomicSI_type_node;
5412 break;
5413
5414 case 64:
5415 base_atomic_type = atomicDI_type_node;
5416 break;
5417
5418 case 128:
5419 base_atomic_type = atomicTI_type_node;
5420 break;
5421
5422 default:
5423 base_atomic_type = NULL_TREE;
5424 }
5425
5426 return base_atomic_type;
5427 }
5428
5429 /* Returns true iff unqualified CAND and BASE are equivalent. */
5430
5431 bool
5432 check_base_type (const_tree cand, const_tree base)
5433 {
5434 if (TYPE_NAME (cand) != TYPE_NAME (base)
5435 /* Apparently this is needed for Objective-C. */
5436 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5437 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5438 TYPE_ATTRIBUTES (base)))
5439 return false;
5440 /* Check alignment. */
5441 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5442 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5443 return true;
5444 /* Atomic types increase minimal alignment. We must to do so as well
5445 or we get duplicated canonical types. See PR88686. */
5446 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5447 {
5448 /* See if this object can map to a basic atomic type. */
5449 tree atomic_type = find_atomic_core_type (cand);
5450 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5451 return true;
5452 }
5453 return false;
5454 }
5455
5456 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5457
5458 bool
5459 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5460 {
5461 return (TYPE_QUALS (cand) == type_quals
5462 && check_base_type (cand, base)
5463 && check_lang_type (cand, base));
5464 }
5465
5466 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5467
5468 static bool
5469 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5470 {
5471 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5472 && TYPE_NAME (cand) == TYPE_NAME (base)
5473 /* Apparently this is needed for Objective-C. */
5474 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5475 /* Check alignment. */
5476 && TYPE_ALIGN (cand) == align
5477 /* Check this is a user-aligned type as build_aligned_type
5478 would create. */
5479 && TYPE_USER_ALIGN (cand)
5480 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5481 TYPE_ATTRIBUTES (base))
5482 && check_lang_type (cand, base));
5483 }
5484
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  /* TYPE itself already carries exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* Try the main variant first; it is a common match.  */
  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	/* Unlink T from its current position (TP points at the chain
	   slot holding it) ...  */
	tree t = *tp;
	*tp = TYPE_NEXT_VARIANT (t);
	/* ... and re-link it immediately after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
5517
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      /* Atomic types may need a stricter alignment; cf. the matching
	 logic in check_base_type (PR88686).  */
      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
5566
5567 /* Create a variant of type T with alignment ALIGN. */
5568
5569 tree
5570 build_aligned_type (tree type, unsigned int align)
5571 {
5572 tree t;
5573
5574 if (TYPE_PACKED (type)
5575 || TYPE_ALIGN (type) == align)
5576 return type;
5577
5578 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5579 if (check_aligned_type (t, type, align))
5580 return t;
5581
5582 t = build_variant_type_copy (type);
5583 SET_TYPE_ALIGN (t, align);
5584 TYPE_USER_ALIGN (t) = 1;
5585
5586 return t;
5587 }
5588
5589 /* Create a new distinct copy of TYPE. The new type is made its own
5590 MAIN_VARIANT. If TYPE requires structural equality checks, the
5591 resulting type requires structural equality checks; otherwise, its
5592 TYPE_CANONICAL points to itself. */
5593
5594 tree
5595 build_distinct_type_copy (tree type MEM_STAT_DECL)
5596 {
5597 tree t = copy_node (type PASS_MEM_STAT);
5598
5599 TYPE_POINTER_TO (t) = 0;
5600 TYPE_REFERENCE_TO (t) = 0;
5601
5602 /* Set the canonical type either to a new equivalence class, or
5603 propagate the need for structural equality checks. */
5604 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5605 SET_TYPE_STRUCTURAL_EQUALITY (t);
5606 else
5607 TYPE_CANONICAL (t) = t;
5608
5609 /* Make it its own variant. */
5610 TYPE_MAIN_VARIANT (t) = t;
5611 TYPE_NEXT_VARIANT (t) = 0;
5612
5613 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5614 whose TREE_TYPE is not t. This can also happen in the Ada
5615 frontend when using subtypes. */
5616
5617 return t;
5618 }
5619
5620 /* Create a new variant of TYPE, equivalent but distinct. This is so
5621 the caller can modify it. TYPE_CANONICAL for the return type will
5622 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5623 are considered equal by the language itself (or that both types
5624 require structural equality checks). */
5625
5626 tree
5627 build_variant_type_copy (tree type MEM_STAT_DECL)
5628 {
5629 tree t, m = TYPE_MAIN_VARIANT (type);
5630
5631 t = build_distinct_type_copy (type PASS_MEM_STAT);
5632
5633 /* Since we're building a variant, assume that it is a non-semantic
5634 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5635 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5636 /* Type variants have no alias set defined. */
5637 TYPE_ALIAS_SET (t) = -1;
5638
5639 /* Add the new type to the chain of variants of TYPE. */
5640 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5641 TYPE_NEXT_VARIANT (m) = t;
5642 TYPE_MAIN_VARIANT (t) = m;
5643
5644 return t;
5645 }
5646 \f
5647 /* Return true if the from tree in both tree maps are equal. */
5648
5649 int
5650 tree_map_base_eq (const void *va, const void *vb)
5651 {
5652 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5653 *const b = (const struct tree_map_base *) vb;
5654 return (a->from == b->from);
5655 }
5656
5657 /* Hash a from tree in a tree_base_map. */
5658
5659 unsigned int
5660 tree_map_base_hash (const void *item)
5661 {
5662 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5663 }
5664
5665 /* Return true if this tree map structure is marked for garbage collection
5666 purposes. We simply return true if the from tree is marked, so that this
5667 structure goes away when the from tree goes away. */
5668
5669 int
5670 tree_map_base_marked_p (const void *p)
5671 {
5672 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5673 }
5674
5675 /* Hash a from tree in a tree_map. */
5676
5677 unsigned int
5678 tree_map_hash (const void *item)
5679 {
5680 return (((const struct tree_map *) item)->hash);
5681 }
5682
5683 /* Hash a from tree in a tree_decl_map. */
5684
5685 unsigned int
5686 tree_decl_map_hash (const void *item)
5687 {
5688 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5689 }
5690
5691 /* Return the initialization priority for DECL. */
5692
5693 priority_type
5694 decl_init_priority_lookup (tree decl)
5695 {
5696 symtab_node *snode = symtab_node::get (decl);
5697
5698 if (!snode)
5699 return DEFAULT_INIT_PRIORITY;
5700 return
5701 snode->get_init_priority ();
5702 }
5703
5704 /* Return the finalization priority for DECL. */
5705
5706 priority_type
5707 decl_fini_priority_lookup (tree decl)
5708 {
5709 cgraph_node *node = cgraph_node::get (decl);
5710
5711 if (!node)
5712 return DEFAULT_INIT_PRIORITY;
5713 return
5714 node->get_fini_priority ();
5715 }
5716
5717 /* Set the initialization priority for DECL to PRIORITY. */
5718
5719 void
5720 decl_init_priority_insert (tree decl, priority_type priority)
5721 {
5722 struct symtab_node *snode;
5723
5724 if (priority == DEFAULT_INIT_PRIORITY)
5725 {
5726 snode = symtab_node::get (decl);
5727 if (!snode)
5728 return;
5729 }
5730 else if (VAR_P (decl))
5731 snode = varpool_node::get_create (decl);
5732 else
5733 snode = cgraph_node::get_create (decl);
5734 snode->set_init_priority (priority);
5735 }
5736
5737 /* Set the finalization priority for DECL to PRIORITY. */
5738
5739 void
5740 decl_fini_priority_insert (tree decl, priority_type priority)
5741 {
5742 struct cgraph_node *node;
5743
5744 if (priority == DEFAULT_INIT_PRIORITY)
5745 {
5746 node = cgraph_node::get (decl);
5747 if (!node)
5748 return;
5749 }
5750 else
5751 node = cgraph_node::get_create (decl);
5752 node->set_fini_priority (priority);
5753 }
5754
5755 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5756
5757 static void
5758 print_debug_expr_statistics (void)
5759 {
5760 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5761 (long) debug_expr_for_decl->size (),
5762 (long) debug_expr_for_decl->elements (),
5763 debug_expr_for_decl->collisions ());
5764 }
5765
5766 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5767
5768 static void
5769 print_value_expr_statistics (void)
5770 {
5771 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5772 (long) value_expr_for_decl->size (),
5773 (long) value_expr_for_decl->elements (),
5774 value_expr_for_decl->collisions ());
5775 }
5776
5777 /* Lookup a debug expression for FROM, and return it if we find one. */
5778
5779 tree
5780 decl_debug_expr_lookup (tree from)
5781 {
5782 struct tree_decl_map *h, in;
5783 in.base.from = from;
5784
5785 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5786 if (h)
5787 return h->to;
5788 return NULL_TREE;
5789 }
5790
5791 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5792
5793 void
5794 decl_debug_expr_insert (tree from, tree to)
5795 {
5796 struct tree_decl_map *h;
5797
5798 h = ggc_alloc<tree_decl_map> ();
5799 h->base.from = from;
5800 h->to = to;
5801 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5802 }
5803
5804 /* Lookup a value expression for FROM, and return it if we find one. */
5805
5806 tree
5807 decl_value_expr_lookup (tree from)
5808 {
5809 struct tree_decl_map *h, in;
5810 in.base.from = from;
5811
5812 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5813 if (h)
5814 return h->to;
5815 return NULL_TREE;
5816 }
5817
5818 /* Insert a mapping FROM->TO in the value expression hashtable. */
5819
5820 void
5821 decl_value_expr_insert (tree from, tree to)
5822 {
5823 struct tree_decl_map *h;
5824
5825 h = ggc_alloc<tree_decl_map> ();
5826 h->base.from = from;
5827 h->to = to;
5828 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5829 }
5830
5831 /* Lookup a vector of debug arguments for FROM, and return it if we
5832 find one. */
5833
5834 vec<tree, va_gc> **
5835 decl_debug_args_lookup (tree from)
5836 {
5837 struct tree_vec_map *h, in;
5838
5839 if (!DECL_HAS_DEBUG_ARGS_P (from))
5840 return NULL;
5841 gcc_checking_assert (debug_args_for_decl != NULL);
5842 in.base.from = from;
5843 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5844 if (h)
5845 return &h->to;
5846 return NULL;
5847 }
5848
5849 /* Insert a mapping FROM->empty vector of debug arguments in the value
5850 expression hashtable. */
5851
5852 vec<tree, va_gc> **
5853 decl_debug_args_insert (tree from)
5854 {
5855 struct tree_vec_map *h;
5856 tree_vec_map **loc;
5857
5858 if (DECL_HAS_DEBUG_ARGS_P (from))
5859 return decl_debug_args_lookup (from);
5860 if (debug_args_for_decl == NULL)
5861 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5862 h = ggc_alloc<tree_vec_map> ();
5863 h->base.from = from;
5864 h->to = NULL;
5865 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5866 *loc = h;
5867 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5868 return &h->to;
5869 }
5870
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  /* The tree code always participates.  */
  hstate.add_int (TREE_CODE (type));

  /* Mix in the hash of the derived-from type (element, pointee,
     return type, ...), if there is one.  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  /* Per-code extra fields that distinguish otherwise-similar types.  */
  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Argument types; error_mark_node entries carry no usable hash.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE participates only for non-aggregate
	   element types; see the matching logic in
	   type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bounds; prefer the max value, falling back to min.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
5946
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  switch (TREE_CODE (a->type))
    {
    /* These codes are fully determined by the checks already done
       above (code, TREE_TYPE, attributes).  */
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* Value lists must be pointer-identical, or both be TREE_LISTs
	 with equal contents.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds compare equal when pointer-identical or numerically
	 equal (tree_int_cst_equal also accepts two NULLs via t1 == t2).  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;		/* Give the language a chance to veto below.  */
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;		/* Give the language a chance to veto below.  */
      return 0;

    default:
      return 0;
    }

  /* Only METHOD_TYPE and FUNCTION_TYPE reach here (via `break' above);
     let the frontend apply any extra equality criteria.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
6070
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type is already registered: discard TYPE and
	 return the existing one.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* If TYPE was the most recently allocated type, recycle its UID.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  /* Only free bounds that belong to TYPE itself (their
	     TREE_TYPE is TYPE); shared constants stay alive.  */
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* TYPE is new: record it in the table as the canonical object.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
6142
6143 static void
6144 print_type_hash_statistics (void)
6145 {
6146 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6147 (long) type_hash_table->size (),
6148 (long) type_hash_table->elements (),
6149 type_hash_table->collisions ());
6150 }
6151
6152 /* Given two lists of types
6153 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6154 return 1 if the lists contain the same types in the same order.
6155 Also, the TREE_PURPOSEs must match. */
6156
6157 bool
6158 type_list_equal (const_tree l1, const_tree l2)
6159 {
6160 const_tree t1, t2;
6161
6162 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6163 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6164 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6165 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6166 && (TREE_TYPE (TREE_PURPOSE (t1))
6167 == TREE_TYPE (TREE_PURPOSE (t2))))))
6168 return false;
6169
6170 return t1 == t2;
6171 }
6172
6173 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6174 given by TYPE. If the argument list accepts variable arguments,
6175 then this function counts only the ordinary arguments. */
6176
6177 int
6178 type_num_arguments (const_tree fntype)
6179 {
6180 int i = 0;
6181
6182 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6183 /* If the function does not take a variable number of arguments,
6184 the last element in the list will have type `void'. */
6185 if (VOID_TYPE_P (TREE_VALUE (t)))
6186 break;
6187 else
6188 ++i;
6189
6190 return i;
6191 }
6192
6193 /* Return the type of the function TYPE's argument ARGNO if known.
6194 For vararg function's where ARGNO refers to one of the variadic
6195 arguments return null. Otherwise, return a void_type_node for
6196 out-of-bounds ARGNO. */
6197
6198 tree
6199 type_argument_type (const_tree fntype, unsigned argno)
6200 {
6201 /* Treat zero the same as an out-of-bounds argument number. */
6202 if (!argno)
6203 return void_type_node;
6204
6205 function_args_iterator iter;
6206
6207 tree argtype;
6208 unsigned i = 1;
6209 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6210 {
6211 /* A vararg function's argument list ends in a null. Otherwise,
6212 an ordinary function's argument list ends with void. Return
6213 null if ARGNO refers to a vararg argument, void_type_node if
6214 it's out of bounds, and the formal argument type otherwise. */
6215 if (!argtype)
6216 break;
6217
6218 if (i == argno || VOID_TYPE_P (argtype))
6219 return argtype;
6220
6221 ++i;
6222 }
6223
6224 return NULL_TREE;
6225 }
6226
6227 /* Nonzero if integer constants T1 and T2
6228 represent the same constant value. */
6229
6230 int
6231 tree_int_cst_equal (const_tree t1, const_tree t2)
6232 {
6233 if (t1 == t2)
6234 return 1;
6235
6236 if (t1 == 0 || t2 == 0)
6237 return 0;
6238
6239 STRIP_ANY_LOCATION_WRAPPER (t1);
6240 STRIP_ANY_LOCATION_WRAPPER (t2);
6241
6242 if (TREE_CODE (t1) == INTEGER_CST
6243 && TREE_CODE (t2) == INTEGER_CST
6244 && wi::to_widest (t1) == wi::to_widest (t2))
6245 return 1;
6246
6247 return 0;
6248 }
6249
6250 /* Return true if T is an INTEGER_CST whose numerical value (extended
6251 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6252
6253 bool
6254 tree_fits_shwi_p (const_tree t)
6255 {
6256 return (t != NULL_TREE
6257 && TREE_CODE (t) == INTEGER_CST
6258 && wi::fits_shwi_p (wi::to_widest (t)));
6259 }
6260
6261 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6262 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6263
6264 bool
6265 tree_fits_poly_int64_p (const_tree t)
6266 {
6267 if (t == NULL_TREE)
6268 return false;
6269 if (POLY_INT_CST_P (t))
6270 {
6271 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6272 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6273 return false;
6274 return true;
6275 }
6276 return (TREE_CODE (t) == INTEGER_CST
6277 && wi::fits_shwi_p (wi::to_widest (t)));
6278 }
6279
6280 /* Return true if T is an INTEGER_CST whose numerical value (extended
6281 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6282
6283 bool
6284 tree_fits_uhwi_p (const_tree t)
6285 {
6286 return (t != NULL_TREE
6287 && TREE_CODE (t) == INTEGER_CST
6288 && wi::fits_uhwi_p (wi::to_widest (t)));
6289 }
6290
6291 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6292 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6293
6294 bool
6295 tree_fits_poly_uint64_p (const_tree t)
6296 {
6297 if (t == NULL_TREE)
6298 return false;
6299 if (POLY_INT_CST_P (t))
6300 {
6301 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6302 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6303 return false;
6304 return true;
6305 }
6306 return (TREE_CODE (t) == INTEGER_CST
6307 && wi::fits_uhwi_p (wi::to_widest (t)));
6308 }
6309
6310 /* T is an INTEGER_CST whose numerical value (extended according to
6311 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6312 HOST_WIDE_INT. */
6313
6314 HOST_WIDE_INT
6315 tree_to_shwi (const_tree t)
6316 {
6317 gcc_assert (tree_fits_shwi_p (t));
6318 return TREE_INT_CST_LOW (t);
6319 }
6320
6321 /* T is an INTEGER_CST whose numerical value (extended according to
6322 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6323 HOST_WIDE_INT. */
6324
6325 unsigned HOST_WIDE_INT
6326 tree_to_uhwi (const_tree t)
6327 {
6328 gcc_assert (tree_fits_uhwi_p (t));
6329 return TREE_INT_CST_LOW (t);
6330 }
6331
6332 /* Return the most significant (sign) bit of T. */
6333
6334 int
6335 tree_int_cst_sign_bit (const_tree t)
6336 {
6337 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6338
6339 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6340 }
6341
6342 /* Return an indication of the sign of the integer constant T.
6343 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6344 Note that -1 will never be returned if T's type is unsigned. */
6345
6346 int
6347 tree_int_cst_sgn (const_tree t)
6348 {
6349 if (wi::to_wide (t) == 0)
6350 return 0;
6351 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6352 return 1;
6353 else if (wi::neg_p (wi::to_wide (t)))
6354 return -1;
6355 else
6356 return 1;
6357 }
6358
6359 /* Return the minimum number of bits needed to represent VALUE in a
6360 signed or unsigned type, UNSIGNEDP says which. */
6361
6362 unsigned int
6363 tree_int_cst_min_precision (tree value, signop sgn)
6364 {
6365 /* If the value is negative, compute its negative minus 1. The latter
6366 adjustment is because the absolute value of the largest negative value
6367 is one larger than the largest positive value. This is equivalent to
6368 a bit-wise negation, so use that operation instead. */
6369
6370 if (tree_int_cst_sgn (value) < 0)
6371 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6372
6373 /* Return the number of bits needed, taking into account the fact
6374 that we need one more bit for a signed than unsigned type.
6375 If value is 0 or -1, the minimum precision is 1 no matter
6376 whether unsignedp is true or false. */
6377
6378 if (integer_zerop (value))
6379 return 1;
6380 else
6381 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
6382 }
6383
6384 /* Return truthvalue of whether T1 is the same tree structure as T2.
6385 Return 1 if they are the same.
6386 Return 0 if they are understandably different.
6387 Return -1 if either contains tree structure not understood by
6388 this function. */
6389
6390 int
6391 simple_cst_equal (const_tree t1, const_tree t2)
6392 {
6393 enum tree_code code1, code2;
6394 int cmp;
6395 int i;
6396
6397 if (t1 == t2)
6398 return 1;
6399 if (t1 == 0 || t2 == 0)
6400 return 0;
6401
6402 /* For location wrappers to be the same, they must be at the same
6403 source location (and wrap the same thing). */
6404 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6405 {
6406 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6407 return 0;
6408 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6409 }
6410
6411 code1 = TREE_CODE (t1);
6412 code2 = TREE_CODE (t2);
6413
6414 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6415 {
6416 if (CONVERT_EXPR_CODE_P (code2)
6417 || code2 == NON_LVALUE_EXPR)
6418 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6419 else
6420 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6421 }
6422
6423 else if (CONVERT_EXPR_CODE_P (code2)
6424 || code2 == NON_LVALUE_EXPR)
6425 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6426
6427 if (code1 != code2)
6428 return 0;
6429
6430 switch (code1)
6431 {
6432 case INTEGER_CST:
6433 return wi::to_widest (t1) == wi::to_widest (t2);
6434
6435 case REAL_CST:
6436 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6437
6438 case FIXED_CST:
6439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6440
6441 case STRING_CST:
6442 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6443 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6444 TREE_STRING_LENGTH (t1)));
6445
6446 case CONSTRUCTOR:
6447 {
6448 unsigned HOST_WIDE_INT idx;
6449 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6450 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6451
6452 if (vec_safe_length (v1) != vec_safe_length (v2))
6453 return false;
6454
6455 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6456 /* ??? Should we handle also fields here? */
6457 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6458 return false;
6459 return true;
6460 }
6461
6462 case SAVE_EXPR:
6463 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6464
6465 case CALL_EXPR:
6466 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6467 if (cmp <= 0)
6468 return cmp;
6469 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6470 return 0;
6471 {
6472 const_tree arg1, arg2;
6473 const_call_expr_arg_iterator iter1, iter2;
6474 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6475 arg2 = first_const_call_expr_arg (t2, &iter2);
6476 arg1 && arg2;
6477 arg1 = next_const_call_expr_arg (&iter1),
6478 arg2 = next_const_call_expr_arg (&iter2))
6479 {
6480 cmp = simple_cst_equal (arg1, arg2);
6481 if (cmp <= 0)
6482 return cmp;
6483 }
6484 return arg1 == arg2;
6485 }
6486
6487 case TARGET_EXPR:
6488 /* Special case: if either target is an unallocated VAR_DECL,
6489 it means that it's going to be unified with whatever the
6490 TARGET_EXPR is really supposed to initialize, so treat it
6491 as being equivalent to anything. */
6492 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6493 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6494 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6495 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6496 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6497 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6498 cmp = 1;
6499 else
6500 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6501
6502 if (cmp <= 0)
6503 return cmp;
6504
6505 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6506
6507 case WITH_CLEANUP_EXPR:
6508 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6509 if (cmp <= 0)
6510 return cmp;
6511
6512 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6513
6514 case COMPONENT_REF:
6515 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6516 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6517
6518 return 0;
6519
6520 case VAR_DECL:
6521 case PARM_DECL:
6522 case CONST_DECL:
6523 case FUNCTION_DECL:
6524 return 0;
6525
6526 default:
6527 if (POLY_INT_CST_P (t1))
6528 /* A false return means maybe_ne rather than known_ne. */
6529 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6530 TYPE_SIGN (TREE_TYPE (t1))),
6531 poly_widest_int::from (poly_int_cst_value (t2),
6532 TYPE_SIGN (TREE_TYPE (t2))));
6533 break;
6534 }
6535
6536 /* This general rule works for most tree codes. All exceptions should be
6537 handled above. If this is a language-specific tree code, we can't
6538 trust what might be in the operand, so say we don't know
6539 the situation. */
6540 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6541 return -1;
6542
6543 switch (TREE_CODE_CLASS (code1))
6544 {
6545 case tcc_unary:
6546 case tcc_binary:
6547 case tcc_comparison:
6548 case tcc_expression:
6549 case tcc_reference:
6550 case tcc_statement:
6551 cmp = 1;
6552 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6553 {
6554 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6555 if (cmp <= 0)
6556 return cmp;
6557 }
6558
6559 return cmp;
6560
6561 default:
6562 return -1;
6563 }
6564 }
6565
6566 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6567 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6568 than U, respectively. */
6569
6570 int
6571 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6572 {
6573 if (tree_int_cst_sgn (t) < 0)
6574 return -1;
6575 else if (!tree_fits_uhwi_p (t))
6576 return 1;
6577 else if (TREE_INT_CST_LOW (t) == u)
6578 return 0;
6579 else if (TREE_INT_CST_LOW (t) < u)
6580 return -1;
6581 else
6582 return 1;
6583 }
6584
6585 /* Return true if SIZE represents a constant size that is in bounds of
6586 what the middle-end and the backend accepts (covering not more than
6587 half of the address-space).
6588 When PERR is non-null, set *PERR on failure to the description of
6589 why SIZE is not valid. */
6590
6591 bool
6592 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6593 {
6594 if (POLY_INT_CST_P (size))
6595 {
6596 if (TREE_OVERFLOW (size))
6597 return false;
6598 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6599 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6600 return false;
6601 return true;
6602 }
6603
6604 cst_size_error error;
6605 if (!perr)
6606 perr = &error;
6607
6608 if (TREE_CODE (size) != INTEGER_CST)
6609 {
6610 *perr = cst_size_not_constant;
6611 return false;
6612 }
6613
6614 if (TREE_OVERFLOW_P (size))
6615 {
6616 *perr = cst_size_overflow;
6617 return false;
6618 }
6619
6620 if (tree_int_cst_sgn (size) < 0)
6621 {
6622 *perr = cst_size_negative;
6623 return false;
6624 }
6625 if (!tree_fits_uhwi_p (size)
6626 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6627 < wi::to_widest (size) * 2))
6628 {
6629 *perr = cst_size_too_big;
6630 return false;
6631 }
6632
6633 return true;
6634 }
6635
6636 /* Return the precision of the type, or for a complex or vector type the
6637 precision of the type of its elements. */
6638
6639 unsigned int
6640 element_precision (const_tree type)
6641 {
6642 if (!TYPE_P (type))
6643 type = TREE_TYPE (type);
6644 enum tree_code code = TREE_CODE (type);
6645 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6646 type = TREE_TYPE (type);
6647
6648 return TYPE_PRECISION (type);
6649 }
6650
6651 /* Return true if CODE represents an associative tree code. Otherwise
6652 return false. */
6653 bool
6654 associative_tree_code (enum tree_code code)
6655 {
6656 switch (code)
6657 {
6658 case BIT_IOR_EXPR:
6659 case BIT_AND_EXPR:
6660 case BIT_XOR_EXPR:
6661 case PLUS_EXPR:
6662 case MULT_EXPR:
6663 case MIN_EXPR:
6664 case MAX_EXPR:
6665 return true;
6666
6667 default:
6668 break;
6669 }
6670 return false;
6671 }
6672
6673 /* Return true if CODE represents a commutative tree code. Otherwise
6674 return false. */
6675 bool
6676 commutative_tree_code (enum tree_code code)
6677 {
6678 switch (code)
6679 {
6680 case PLUS_EXPR:
6681 case MULT_EXPR:
6682 case MULT_HIGHPART_EXPR:
6683 case MIN_EXPR:
6684 case MAX_EXPR:
6685 case BIT_IOR_EXPR:
6686 case BIT_XOR_EXPR:
6687 case BIT_AND_EXPR:
6688 case NE_EXPR:
6689 case EQ_EXPR:
6690 case UNORDERED_EXPR:
6691 case ORDERED_EXPR:
6692 case UNEQ_EXPR:
6693 case LTGT_EXPR:
6694 case TRUTH_AND_EXPR:
6695 case TRUTH_XOR_EXPR:
6696 case TRUTH_OR_EXPR:
6697 case WIDEN_MULT_EXPR:
6698 case VEC_WIDEN_MULT_HI_EXPR:
6699 case VEC_WIDEN_MULT_LO_EXPR:
6700 case VEC_WIDEN_MULT_EVEN_EXPR:
6701 case VEC_WIDEN_MULT_ODD_EXPR:
6702 return true;
6703
6704 default:
6705 break;
6706 }
6707 return false;
6708 }
6709
6710 /* Return true if CODE represents a ternary tree code for which the
6711 first two operands are commutative. Otherwise return false. */
6712 bool
6713 commutative_ternary_tree_code (enum tree_code code)
6714 {
6715 switch (code)
6716 {
6717 case WIDEN_MULT_PLUS_EXPR:
6718 case WIDEN_MULT_MINUS_EXPR:
6719 case DOT_PROD_EXPR:
6720 return true;
6721
6722 default:
6723 break;
6724 }
6725 return false;
6726 }
6727
6728 /* Returns true if CODE can overflow. */
6729
6730 bool
6731 operation_can_overflow (enum tree_code code)
6732 {
6733 switch (code)
6734 {
6735 case PLUS_EXPR:
6736 case MINUS_EXPR:
6737 case MULT_EXPR:
6738 case LSHIFT_EXPR:
6739 /* Can overflow in various ways. */
6740 return true;
6741 case TRUNC_DIV_EXPR:
6742 case EXACT_DIV_EXPR:
6743 case FLOOR_DIV_EXPR:
6744 case CEIL_DIV_EXPR:
6745 /* For INT_MIN / -1. */
6746 return true;
6747 case NEGATE_EXPR:
6748 case ABS_EXPR:
6749 /* For -INT_MIN. */
6750 return true;
6751 default:
6752 /* These operators cannot overflow. */
6753 return false;
6754 }
6755 }
6756
6757 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6758 ftrapv doesn't generate trapping insns for CODE. */
6759
6760 bool
6761 operation_no_trapping_overflow (tree type, enum tree_code code)
6762 {
6763 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6764
6765 /* We don't generate instructions that trap on overflow for complex or vector
6766 types. */
6767 if (!INTEGRAL_TYPE_P (type))
6768 return true;
6769
6770 if (!TYPE_OVERFLOW_TRAPS (type))
6771 return true;
6772
6773 switch (code)
6774 {
6775 case PLUS_EXPR:
6776 case MINUS_EXPR:
6777 case MULT_EXPR:
6778 case NEGATE_EXPR:
6779 case ABS_EXPR:
6780 /* These operators can overflow, and -ftrapv generates trapping code for
6781 these. */
6782 return false;
6783 case TRUNC_DIV_EXPR:
6784 case EXACT_DIV_EXPR:
6785 case FLOOR_DIV_EXPR:
6786 case CEIL_DIV_EXPR:
6787 case LSHIFT_EXPR:
6788 /* These operators can overflow, but -ftrapv does not generate trapping
6789 code for these. */
6790 return true;
6791 default:
6792 /* These operators cannot overflow. */
6793 return true;
6794 }
6795 }
6796
6797 /* Constructors for pointer, array and function types.
6798 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6799 constructed by language-dependent code, not here.) */
6800
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If MODE is VOIDmode, a pointer mode for the address
   space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
   indicate this type can reference all of memory.  If such a type has
   already been constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request; the may_alias attribute
     below may force CAN_ALIAS_ALL, but the canonical-type decision at
     the end is based on what the caller asked for.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* Pick the pointer mode for TO_TYPE's address space when the caller
     left the choice to us.  */
  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type into TO_TYPE's chain of pointer types so the
     loop above can find it next time.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
6868
6869 /* By default build pointers in ptr_mode. */
6870
6871 tree
6872 build_pointer_type (tree to_type)
6873 {
6874 return build_pointer_type_for_mode (to_type, VOIDmode, false);
6875 }
6876
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request; the may_alias attribute
     below may force CAN_ALIAS_ALL, but the canonical-type decision at
     the end is based on what the caller asked for.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* Pick the pointer mode for TO_TYPE's address space when the caller
     left the choice to us.  */
  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type into TO_TYPE's chain of reference types so the
     loop above can find it next time.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
6938
6939
6940 /* Build the node for the type of references-to-TO_TYPE by default
6941 in ptr_mode. */
6942
6943 tree
6944 build_reference_type (tree to_type)
6945 {
6946 return build_reference_type_for_mode (to_type, VOIDmode, false);
6947 }
6948
6949 #define MAX_INT_CACHED_PREC \
6950 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6951 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6952
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache keeps signed types at indices [0, MAX_INT_CACHED_PREC]
     and unsigned types at [MAX_INT_CACHED_PREC + 1,
     2 * MAX_INT_CACHED_PREC + 1]; reuse UNSIGNEDP as the index offset
     that selects the unsigned half.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Derive bounds, size and mode from the precision and signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Share identical types through the type hash table; the max value
     encodes both precision and signedness.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
6988
6989 #define MAX_BOOL_CACHED_PREC \
6990 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6991 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
6992
6993 /* Builds a boolean type of precision PRECISION.
6994 Used for boolean vectors to choose proper vector element size. */
6995 tree
6996 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
6997 {
6998 tree type;
6999
7000 if (precision <= MAX_BOOL_CACHED_PREC)
7001 {
7002 type = nonstandard_boolean_type_cache[precision];
7003 if (type)
7004 return type;
7005 }
7006
7007 type = make_node (BOOLEAN_TYPE);
7008 TYPE_PRECISION (type) = precision;
7009 fixup_signed_type (type);
7010
7011 if (precision <= MAX_INT_CACHED_PREC)
7012 nonstandard_boolean_type_cache[precision] = type;
7013
7014 return type;
7015 }
7016
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type; HIGHVAL may be absent.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits its representation from the base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Both bounds are constants, so the type can be merged with any
     previously created identical range type.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
7058
7059 /* Wrapper around build_range_type_1 with SHARED set to true. */
7060
7061 tree
7062 build_range_type (tree type, tree lowval, tree highval)
7063 {
7064 return build_range_type_1 (type, lowval, highval, true);
7065 }
7066
7067 /* Wrapper around build_range_type_1 with SHARED set to false. */
7068
7069 tree
7070 build_nonshared_range_type (tree type, tree lowval, tree highval)
7071 {
7072 return build_range_type_1 (type, lowval, highval, false);
7073 }
7074
7075 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7076 MAXVAL should be the maximum value in the domain
7077 (one less than the length of the array).
7078
7079 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7080 We don't enforce this limit, that is up to caller (e.g. language front end).
7081 The limit exists because the result is a signed type and we don't handle
7082 sizes that use more than one HOST_WIDE_INT. */
7083
7084 tree
7085 build_index_type (tree maxval)
7086 {
7087 return build_range_type (sizetype, size_zero_node, maxval);
7088 }
7089
7090 /* Return true if the debug information for TYPE, a subtype, should be emitted
7091 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7092 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7093 debug info and doesn't reflect the source code. */
7094
7095 bool
7096 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7097 {
7098 tree base_type = TREE_TYPE (type), low, high;
7099
7100 /* Subrange types have a base type which is an integral type. */
7101 if (!INTEGRAL_TYPE_P (base_type))
7102 return false;
7103
7104 /* Get the real bounds of the subtype. */
7105 if (lang_hooks.types.get_subrange_bounds)
7106 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7107 else
7108 {
7109 low = TYPE_MIN_VALUE (type);
7110 high = TYPE_MAX_VALUE (type);
7111 }
7112
7113 /* If the type and its base type have the same representation and the same
7114 name, then the type is not a subrange but a copy of the base type. */
7115 if ((TREE_CODE (base_type) == INTEGER_TYPE
7116 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7117 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7118 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7119 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7120 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7121 return false;
7122
7123 if (lowval)
7124 *lowval = low;
7125 if (highval)
7126 *highval = high;
7127 return true;
7128 }
7129
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  /* Arrays of functions are invalid; degrade to int elements after
     diagnosing, so compilation can continue.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* Merge with a previously created identical array type if asked.  */
  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only compute a canonical type when T itself became the canonical
     representative (type_hash_canon may have returned an old type that
     already has one).  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Recurse with the canonical element and index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
7178
7179 /* Wrapper around build_array_type_1 with SHARED set to true. */
7180
7181 tree
7182 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7183 {
7184 return
7185 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7186 }
7187
7188 /* Wrapper around build_array_type_1 with SHARED set to false. */
7189
7190 tree
7191 build_nonshared_array_type (tree elt_type, tree index_type)
7192 {
7193 return build_array_type_1 (elt_type, index_type, false, false, true);
7194 }
7195
7196 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7197 sizetype. */
7198
7199 tree
7200 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7201 {
7202 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7203 }
7204
7205 /* Recursively examines the array elements of TYPE, until a non-array
7206 element type is found. */
7207
7208 tree
7209 strip_array_types (tree type)
7210 {
7211 while (TREE_CODE (type) == ARRAY_TYPE)
7212 type = TREE_TYPE (type);
7213
7214 return type;
7215 }
7216
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the list.  Stop early once a structural type
     is seen, since structural equality makes canonicalization moot.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      /* The list was built in reverse; restore source order and put
	 back the void terminator if the original list had one.  */
      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
7288
7289 /* Construct, lay out and return
7290 the type of functions returning type VALUE_TYPE
7291 given arguments of types ARG_TYPES.
7292 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7293 are data type nodes for the arguments of the function.
7294 If such a type has already been constructed, reuse it. */
7295
7296 tree
7297 build_function_type (tree value_type, tree arg_types)
7298 {
7299 tree t;
7300 inchash::hash hstate;
7301 bool any_structural_p, any_noncanonical_p;
7302 tree canon_argtypes;
7303
7304 gcc_assert (arg_types != error_mark_node);
7305
7306 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7307 {
7308 error ("function return type cannot be function");
7309 value_type = integer_type_node;
7310 }
7311
7312 /* Make a node of the sort we want. */
7313 t = make_node (FUNCTION_TYPE);
7314 TREE_TYPE (t) = value_type;
7315 TYPE_ARG_TYPES (t) = arg_types;
7316
7317 /* If we already have such a type, use the old one. */
7318 hashval_t hash = type_hash_canon_hash (t);
7319 t = type_hash_canon (hash, t);
7320
7321 /* Set up the canonical type. */
7322 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7323 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7324 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7325 &any_structural_p,
7326 &any_noncanonical_p);
7327 if (any_structural_p)
7328 SET_TYPE_STRUCTURAL_EQUALITY (t);
7329 else if (any_noncanonical_p)
7330 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7331 canon_argtypes);
7332
7333 if (!COMPLETE_TYPE_P (t))
7334 layout_type (t);
7335 return t;
7336 }
7337
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must be always be terminated be a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types; ARGS ends up in reverse order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* LAST is the head of the reversed list, i.e. the node holding
	 the final supplied argument type.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      /* A varargs list must not be terminated with void.  */
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    args = void_list_node;
  else
    {
      /* Restore source order and terminate with void to mark the
	 function as taking a fixed number of arguments.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
7370
7371 /* Build a function type. The RETURN_TYPE is the type returned by the
7372 function. If additional arguments are provided, they are
7373 additional argument types. The list of argument types must always
7374 be terminated by NULL_TREE. */
7375
7376 tree
7377 build_function_type_list (tree return_type, ...)
7378 {
7379 tree args;
7380 va_list p;
7381
7382 va_start (p, return_type);
7383 args = build_function_type_list_1 (false, return_type, p);
7384 va_end (p);
7385 return args;
7386 }
7387
7388 /* Build a variable argument function type. The RETURN_TYPE is the
7389 type returned by the function. If additional arguments are provided,
7390 they are additional argument types. The list of argument types must
7391 always be terminated by NULL_TREE. */
7392
7393 tree
7394 build_varargs_function_type_list (tree return_type, ...)
7395 {
7396 tree args;
7397 va_list p;
7398
7399 va_start (p, return_type);
7400 args = build_function_type_list_1 (true, return_type, p);
7401 va_end (p);
7402
7403 return args;
7404 }
7405
7406 /* Build a function type. RETURN_TYPE is the type returned by the
7407 function; VAARGS indicates whether the function takes varargs. The
7408 function takes N named arguments, the types of which are provided in
7409 ARG_TYPES. */
7410
7411 static tree
7412 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7413 tree *arg_types)
7414 {
7415 int i;
7416 tree t = vaargs ? NULL_TREE : void_list_node;
7417
7418 for (i = n - 1; i >= 0; i--)
7419 t = tree_cons (NULL_TREE, arg_types[i], t);
7420
7421 return build_function_type (return_type, t);
7422 }
7423
7424 /* Build a function type. RETURN_TYPE is the type returned by the
7425 function. The function takes N named arguments, the types of which
7426 are provided in ARG_TYPES. */
7427
7428 tree
7429 build_function_type_array (tree return_type, int n, tree *arg_types)
7430 {
7431 return build_function_type_array_1 (false, return_type, n, arg_types);
7432 }
7433
7434 /* Build a variable argument function type. RETURN_TYPE is the type
7435 returned by the function. The function takes N named arguments, the
7436 types of which are provided in ARG_TYPES. */
7437
7438 tree
7439 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7440 {
7441 return build_function_type_array_1 (true, return_type, n, arg_types);
7442 }
7443
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the implicit "this" argument (TREE_CHAIN) when
     canonicalizing; the recursive call below rebuilds it from the
     canonical basetype.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
7497
7498 /* Construct, lay out and return the type of methods belonging to class
7499 BASETYPE and whose arguments and values are described by TYPE.
7500 If that type exists already, reuse it.
7501 TYPE must be a FUNCTION_TYPE node. */
7502
7503 tree
7504 build_method_type (tree basetype, tree type)
7505 {
7506 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7507
7508 return build_method_type_directly (basetype,
7509 TREE_TYPE (type),
7510 TYPE_ARG_TYPES (type));
7511 }
7512
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute a canonical type when T itself became the canonical
     representative (type_hash_canon may have returned an old type that
     already has one).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
7549
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Only integral, scalar-float and fixed-point components are valid.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  The complex type is built over
     the main variant; qualifiers are re-applied at the end.  */
  tree probe = make_node (COMPLEX_TYPE);

  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Re-apply the qualifiers of the requested component type.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
7624
7625 /* If TYPE is a real or complex floating-point type and the target
7626 does not directly support arithmetic on TYPE then return the wider
7627 type to be used for arithmetic on TYPE. Otherwise, return
7628 NULL_TREE. */
7629
7630 tree
7631 excess_precision_type (tree type)
7632 {
7633 /* The target can give two different responses to the question of
7634 which excess precision mode it would like depending on whether we
7635 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7636
7637 enum excess_precision_type requested_type
7638 = (flag_excess_precision == EXCESS_PRECISION_FAST
7639 ? EXCESS_PRECISION_TYPE_FAST
7640 : EXCESS_PRECISION_TYPE_STANDARD);
7641
7642 enum flt_eval_method target_flt_eval_method
7643 = targetm.c.excess_precision (requested_type);
7644
7645 /* The target should not ask for unpredictable float evaluation (though
7646 it might advertise that implicitly the evaluation is unpredictable,
7647 but we don't care about that here, it will have been reported
7648 elsewhere). If it does ask for unpredictable evaluation, we have
7649 nothing to do here. */
7650 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7651
7652 /* Nothing to do. The target has asked for all types we know about
7653 to be computed with their native precision and range. */
7654 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7655 return NULL_TREE;
7656
7657 /* The target will promote this type in a target-dependent way, so excess
7658 precision ought to leave it alone. */
7659 if (targetm.promoted_type (type) != NULL_TREE)
7660 return NULL_TREE;
7661
7662 machine_mode float16_type_mode = (float16_type_node
7663 ? TYPE_MODE (float16_type_node)
7664 : VOIDmode);
7665 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7666 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7667
7668 switch (TREE_CODE (type))
7669 {
7670 case REAL_TYPE:
7671 {
7672 machine_mode type_mode = TYPE_MODE (type);
7673 switch (target_flt_eval_method)
7674 {
7675 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7676 if (type_mode == float16_type_mode)
7677 return float_type_node;
7678 break;
7679 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7680 if (type_mode == float16_type_mode
7681 || type_mode == float_type_mode)
7682 return double_type_node;
7683 break;
7684 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7685 if (type_mode == float16_type_mode
7686 || type_mode == float_type_mode
7687 || type_mode == double_type_mode)
7688 return long_double_type_node;
7689 break;
7690 default:
7691 gcc_unreachable ();
7692 }
7693 break;
7694 }
7695 case COMPLEX_TYPE:
7696 {
7697 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7698 return NULL_TREE;
7699 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7700 switch (target_flt_eval_method)
7701 {
7702 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7703 if (type_mode == float16_type_mode)
7704 return complex_float_type_node;
7705 break;
7706 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7707 if (type_mode == float16_type_mode
7708 || type_mode == float_type_mode)
7709 return complex_double_type_node;
7710 break;
7711 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7712 if (type_mode == float16_type_mode
7713 || type_mode == float_type_mode
7714 || type_mode == double_type_mode)
7715 return complex_long_double_type_node;
7716 break;
7717 default:
7718 gcc_unreachable ();
7719 }
7720 break;
7721 }
7722 default:
7723 break;
7724 }
7725
7726 return NULL_TREE;
7727 }
7728 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN tracks the innermost expression proven safe to use so far.  */
  tree win = op;

  /* Peel nested conversions one at a time, deciding for each whether
     it may be stripped.  */
  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion widened its operand;
	 negative means it truncated.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      /* Minimum number of bits needed to represent the constant under
	 its type's signedness.  */
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  /* The frontend may not provide a type of exactly PREC bits;
	     only narrow when what came back really is smaller.  */
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
7825 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  /* FIRST is true until the first extension has been stripped; the
     kind of that first extension constrains all later ones.  */
  int first = 1;
  /* WIN is the narrowest safe replacement found so far.  */
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow only the value operand (the rightmost one), then rebuild
	 the chain of COMPOUND_EXPRs around the narrowed value so the
	 side effects keep their original order.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive BITSCHANGE means OP widens its operand; negative means
	 it truncates; zero means same-precision type change.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
7938 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

  /* We come back here when TYPE has been replaced by its base type.  */
retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types,  */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
8035
8036 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8037 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8038 represented (assuming two's-complement arithmetic) within the bit
8039 precision of the type are returned instead. */
8040
8041 void
8042 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8043 {
8044 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8045 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8046 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8047 else
8048 {
8049 if (TYPE_UNSIGNED (type))
8050 mpz_set_ui (min, 0);
8051 else
8052 {
8053 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8054 wi::to_mpz (mn, min, SIGNED);
8055 }
8056 }
8057
8058 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8059 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8060 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8061 else
8062 {
8063 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8064 wi::to_mpz (mn, max, TYPE_SIGN (type));
8065 }
8066 }
8067
8068 /* Return true if VAR is an automatic variable. */
8069
8070 bool
8071 auto_var_p (const_tree var)
8072 {
8073 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8074 || TREE_CODE (var) == PARM_DECL)
8075 && ! TREE_STATIC (var))
8076 || TREE_CODE (var) == RESULT_DECL);
8077 }
8078
8079 /* Return true if VAR is an automatic variable defined in function FN. */
8080
8081 bool
8082 auto_var_in_fn_p (const_tree var, const_tree fn)
8083 {
8084 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8085 && (auto_var_p (var)
8086 || TREE_CODE (var) == LABEL_DECL));
8087 }
8088
8089 /* Subprogram of following function. Called by walk_tree.
8090
8091 Return *TP if it is an automatic variable or parameter of the
8092 function passed in as DATA. */
8093
8094 static tree
8095 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8096 {
8097 tree fn = (tree) data;
8098
8099 if (TYPE_P (*tp))
8100 *walk_subtrees = 0;
8101
8102 else if (DECL_P (*tp)
8103 && auto_var_in_fn_p (*tp, fn))
8104 return *tp;
8105
8106 return NULL_TREE;
8107 }
8108
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  If TYPE isn't gimplified, return true also if
     gimplify_one_sizepos would gimplify the expression into a local
     variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly,
	 so use TREE_VISITED as a cycle guard around the recursion.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  /* Clear the guard on both exits from the recursion.  */
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
8234
8235 /* Given a DECL or TYPE, return the scope in which it was declared, or
8236 NULL_TREE if there is no containing scope. */
8237
8238 tree
8239 get_containing_scope (const_tree t)
8240 {
8241 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8242 }
8243
8244 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8245
8246 const_tree
8247 get_ultimate_context (const_tree decl)
8248 {
8249 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8250 {
8251 if (TREE_CODE (decl) == BLOCK)
8252 decl = BLOCK_SUPERCONTEXT (decl);
8253 else
8254 decl = get_containing_scope (decl);
8255 }
8256 return decl;
8257 }
8258
8259 /* Return the innermost context enclosing DECL that is
8260 a FUNCTION_DECL, or zero if none. */
8261
8262 tree
8263 decl_function_context (const_tree decl)
8264 {
8265 tree context;
8266
8267 if (TREE_CODE (decl) == ERROR_MARK)
8268 return 0;
8269
8270 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8271 where we look up the function at runtime. Such functions always take
8272 a first argument of type 'pointer to real context'.
8273
8274 C++ should really be fixed to use DECL_CONTEXT for the real context,
8275 and use something else for the "virtual context". */
8276 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8277 context
8278 = TYPE_MAIN_VARIANT
8279 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8280 else
8281 context = DECL_CONTEXT (decl);
8282
8283 while (context && TREE_CODE (context) != FUNCTION_DECL)
8284 {
8285 if (TREE_CODE (context) == BLOCK)
8286 context = BLOCK_SUPERCONTEXT (context);
8287 else
8288 context = get_containing_scope (context);
8289 }
8290
8291 return context;
8292 }
8293
8294 /* Return the innermost context enclosing DECL that is
8295 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8296 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8297
8298 tree
8299 decl_type_context (const_tree decl)
8300 {
8301 tree context = DECL_CONTEXT (decl);
8302
8303 while (context)
8304 switch (TREE_CODE (context))
8305 {
8306 case NAMESPACE_DECL:
8307 case TRANSLATION_UNIT_DECL:
8308 return NULL_TREE;
8309
8310 case RECORD_TYPE:
8311 case UNION_TYPE:
8312 case QUAL_UNION_TYPE:
8313 return context;
8314
8315 case TYPE_DECL:
8316 case FUNCTION_DECL:
8317 context = DECL_CONTEXT (context);
8318 break;
8319
8320 case BLOCK:
8321 context = BLOCK_SUPERCONTEXT (context);
8322 break;
8323
8324 default:
8325 gcc_unreachable ();
8326 }
8327
8328 return NULL_TREE;
8329 }
8330
8331 /* CALL is a CALL_EXPR. Return the declaration for the function
8332 called, or NULL_TREE if the called function cannot be
8333 determined. */
8334
8335 tree
8336 get_callee_fndecl (const_tree call)
8337 {
8338 tree addr;
8339
8340 if (call == error_mark_node)
8341 return error_mark_node;
8342
8343 /* It's invalid to call this function with anything but a
8344 CALL_EXPR. */
8345 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8346
8347 /* The first operand to the CALL is the address of the function
8348 called. */
8349 addr = CALL_EXPR_FN (call);
8350
8351 /* If there is no function, return early. */
8352 if (addr == NULL_TREE)
8353 return NULL_TREE;
8354
8355 STRIP_NOPS (addr);
8356
8357 /* If this is a readonly function pointer, extract its initial value. */
8358 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8359 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8360 && DECL_INITIAL (addr))
8361 addr = DECL_INITIAL (addr);
8362
8363 /* If the address is just `&f' for some function `f', then we know
8364 that `f' is being called. */
8365 if (TREE_CODE (addr) == ADDR_EXPR
8366 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8367 return TREE_OPERAND (addr, 0);
8368
8369 /* We couldn't figure out what was being called. */
8370 return NULL_TREE;
8371 }
8372
8373 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8374 return the associated function code, otherwise return CFN_LAST. */
8375
8376 combined_fn
8377 get_call_combined_fn (const_tree call)
8378 {
8379 /* It's invalid to call this function with anything but a CALL_EXPR. */
8380 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8381
8382 if (!CALL_EXPR_FN (call))
8383 return as_combined_fn (CALL_EXPR_IFN (call));
8384
8385 tree fndecl = get_callee_fndecl (call);
8386 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8387 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8388
8389 return CFN_LAST;
8390 }
8391
8392 /* Comparator of indices based on tree_node_counts. */
8393
8394 static int
8395 tree_nodes_cmp (const void *p1, const void *p2)
8396 {
8397 const unsigned *n1 = (const unsigned *)p1;
8398 const unsigned *n2 = (const unsigned *)p2;
8399
8400 return tree_node_counts[*n1] - tree_node_counts[*n2];
8401 }
8402
8403 /* Comparator of indices based on tree_code_counts. */
8404
8405 static int
8406 tree_codes_cmp (const void *p1, const void *p2)
8407 {
8408 const unsigned *n1 = (const unsigned *)p1;
8409 const unsigned *n2 = (const unsigned *)p2;
8410
8411 return tree_code_counts[*n1] - tree_code_counts[*n2];
8412 }
8413
8414 #define TREE_MEM_USAGE_SPACES 40
8415
8416 /* Print debugging information about tree nodes generated during the compile,
8417 and any language-specific information. */
8418
8419 void
8420 dump_tree_statistics (void)
8421 {
8422 if (GATHER_STATISTICS)
8423 {
8424 uint64_t total_nodes, total_bytes;
8425 fprintf (stderr, "\nKind Nodes Bytes\n");
8426 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8427 total_nodes = total_bytes = 0;
8428
8429 {
8430 auto_vec<unsigned> indices (all_kinds);
8431 for (unsigned i = 0; i < all_kinds; i++)
8432 indices.quick_push (i);
8433 indices.qsort (tree_nodes_cmp);
8434
8435 for (unsigned i = 0; i < (int) all_kinds; i++)
8436 {
8437 unsigned j = indices[i];
8438 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8439 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8440 SIZE_AMOUNT (tree_node_sizes[j]));
8441 total_nodes += tree_node_counts[j];
8442 total_bytes += tree_node_sizes[j];
8443 }
8444 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8445 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8446 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8447 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8448 }
8449
8450 {
8451 fprintf (stderr, "Code Nodes\n");
8452 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8453
8454 auto_vec<unsigned> indices (MAX_TREE_CODES);
8455 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8456 indices.quick_push (i);
8457 indices.qsort (tree_codes_cmp);
8458
8459 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8460 {
8461 unsigned j = indices[i];
8462 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8463 get_tree_code_name ((enum tree_code) j),
8464 SIZE_AMOUNT (tree_code_counts[j]));
8465 }
8466 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8467 fprintf (stderr, "\n");
8468 ssanames_print_statistics ();
8469 fprintf (stderr, "\n");
8470 phinodes_print_statistics ();
8471 fprintf (stderr, "\n");
8472 }
8473 }
8474 else
8475 fprintf (stderr, "(No per-node statistics)\n");
8476
8477 print_type_hash_statistics ();
8478 print_debug_expr_statistics ();
8479 print_value_expr_statistics ();
8480 lang_hooks.print_statistics ();
8481 }
8482 \f
8483 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8484
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* CRC-32 with polynomial 0x04c11db7, computed a nibble at a time via
     a 16-entry syndrome table.  This relies on the raw feedback's top
     4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned nibble_syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes, then consume two nibbles per
     byte, most significant nibble first.  */
  value <<= (32 - bytes * 8);
  unsigned nibbles = bytes * 2;
  while (nibbles--)
    {
      unsigned index = ((value ^ chksum) >> 28) & 0xf;
      chksum = (chksum << 4) ^ nibble_syndromes[index];
      value <<= 4;
    }

  return chksum;
}
8514
8515 /* Generate a crc32 of a string. */
8516
8517 unsigned
8518 crc32_string (unsigned chksum, const char *string)
8519 {
8520 do
8521 chksum = crc32_byte (chksum, *string);
8522 while (*string++);
8523 return chksum;
8524 }
8525
8526 /* P is a string that will be used in a symbol. Mask out any characters
8527 that are not valid in that context. */
8528
8529 void
8530 clean_symbol_name (char *p)
8531 {
8532 for (; *p; p++)
8533 if (! (ISALNUM (*p)
8534 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8535 || *p == '$'
8536 #endif
8537 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8538 || *p == '.'
8539 #endif
8540 ))
8541 *p = '_';
8542 }
8543
/* Count of anonymous identifiers handed out so far; GTY so it survives
   precompiled-header save/restore and names stay unique.  */
static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */

/* Create a unique anonymous identifier.  The identifier is still a
   valid assembly label.  */

tree
make_anon_name ()
{
  /* Prefix with '.' or '$' when the target's label syntax allows it
     (those cannot collide with user-level identifiers), otherwise
     fall back to '_'.  */
  const char *fmt =
#if !defined (NO_DOT_IN_LABEL)
    "."
#elif !defined (NO_DOLLAR_IN_LABEL)
    "$"
#else
    "_"
#endif
    "_anon_%d";

  char buf[24];
  int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
  gcc_checking_assert (len < int (sizeof (buf)));

  tree id = get_identifier_with_length (buf, len);
  /* Mark the identifier so the rest of the compiler can recognize
     anonymous names.  */
  IDENTIFIER_ANON_P (id) = true;

  return id;
}
8571
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (startswith (type, "sub_")
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 covers "_%08X_"; 19 covers a 64-bit value printed in hex.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace any characters not valid in a symbol with '_'.  */
  clean_symbol_name (q);

  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
8646 \f
8647 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8648
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: total the space needed for each code
     name plus 4 bytes for a " or " separator.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build "expected A or B or ..." in a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No expected codes were supplied; issue a vaguer message.  */
    buffer = "unexpected node";

  /* internal_error does not return.  */
  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8692
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass: total the space needed for each forbidden code name
     plus 4 bytes for a " or " separator.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build "A or B or ...".  NOTE(review): unlike
     tree_check_failed this has no empty-list fallback, so it assumes
     callers always pass at least one code.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  /* internal_error does not return.  */
  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8729
/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  /* Report both the expected class and the actual class/code of NODE;
     does not return.  */
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
8743
/* Similar to tree_check_failed, except that instead of specifying a
   dozen codes, use the knowledge that they're all sequential.  */

void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* Total the space needed: each code name in [C1, C2] plus 4 bytes
     for a " or " separator.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Build "expected C1 or ... or C2".  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  /* internal_error does not return.  */
  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8777
8778
/* Similar to tree_check_failed, except that we check that a tree does
   not have the specified code, given in CL.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  /* Report the forbidden class and NODE's actual class/code; does not
     return.  */
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
8792
8793
/* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  /* Report the expected clause code and NODE's actual tree code; does
     not return.  */
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
		  "in %s, at %s:%d",
		  omp_clause_code_name[code],
		  get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8806
8807
8808 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8809
8810 void
8811 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8812 const char *function, enum omp_clause_code c1,
8813 enum omp_clause_code c2)
8814 {
8815 char *buffer;
8816 unsigned length = 0;
8817 unsigned int c;
8818
8819 for (c = c1; c <= c2; ++c)
8820 length += 4 + strlen (omp_clause_code_name[c]);
8821
8822 length += strlen ("expected ");
8823 buffer = (char *) alloca (length);
8824 length = 0;
8825
8826 for (c = c1; c <= c2; ++c)
8827 {
8828 const char *prefix = length ? " or " : "expected ";
8829
8830 strcpy (buffer + length, prefix);
8831 length += strlen (prefix);
8832 strcpy (buffer + length, omp_clause_code_name[c]);
8833 length += strlen (omp_clause_code_name[c]);
8834 }
8835
8836 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8837 buffer, omp_clause_code_name[TREE_CODE (node)],
8838 function, trim_filename (file), line);
8839 }
8840
8841
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names for the tree_node_structure_enum values, generated
   by expanding each DEFTREESTRUCT entry in treestruct.def to its NAME
   string.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value EN to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8851
/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  /* Report the required structure and NODE's actual code; does not
     return.  */
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
8866
8867
/* Similar to above, except that the check is for the bounds of the
   (dynamically sized) extension array of a TREE_INT_CST.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  /* IDX is zero-based internally; report it one-based for the user.
     Does not return.  */
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
8880
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  /* IDX is zero-based internally; report it one-based.  Does not
     return.  */
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
8892
/* Similar to above, except that the check is for the bounds of the operand
   vector of an expression node EXP.  */

void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  /* IDX is zero-based internally; report it one-based.  Does not
     return.  */
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}
8906
/* Similar to above, except that the check is for the number of
   operands of an OMP_CLAUSE node.  */

void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
			         int line, const char *function)
{
  /* IDX is zero-based internally; report it one-based.  Does not
     return.  */
  internal_error
    ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
8920 #endif /* ENABLE_TREE_CHECKING */
8921 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Base the main variant on the element type's main variant so that
     all qualified/attributed variants share one underlying node.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Build the canonical type recursively from the canonical element
       type; VOIDmode is passed on that recursive call.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Hash-cons the node so structurally identical vector types are
     represented by a single tree.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
8960
/* Return an integer type node with SIZE bits of precision, unsigned
   if UNSIGNEDP.  Reuse one of the standard C type nodes (or an __intN
   node) when SIZE matches, creating a fresh type only as a last
   resort.  */

static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  /* NOTE(review): when several *_TYPE_SIZE macros coincide the first
     match below wins, so the check order matters.  */
  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  /* Try the target's enabled __intN types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  /* No standard node of that width; build one.  */
  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
8989
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  Reuses
   one of the prebuilt fixed-point nodes when SIZE matches a standard
   width; otherwise builds a fresh type.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      /* Saturating variants.  */
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      /* Non-saturating variants.  */
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No prebuilt node of that width; make one.  */
  return make_fract_type (size, unsignedp, satp);
}
9026
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  Mirrors
   make_or_reuse_fract_type for the _Accum family.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      /* Saturating variants.  */
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      /* Non-saturating variants.  */
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No prebuilt node of that width; make one.  */
  return make_accum_type (size, unsignedp, satp);
}
9063
9064
/* Create an atomic variant node for TYPE.  This routine is called
   during initialization of data types to create the 5 basic atomic
   types.  The generic build_variant_type function requires these to
   already be set up in order to function properly, so cannot be
   called from there.  If ALIGN is non-zero, then ensure alignment is
   overridden to this value.  */

static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  /* Apply the target-requested alignment override, if any.  */
  if (align)
    SET_TYPE_ALIGN (t, align);

  return t;
}
9089
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry is
   { N, extended }: the plain _FloatN types first, then the _FloatNx
   extended types.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },  /* _Float16 */
    { 32, false },  /* _Float32 */
    { 64, false },  /* _Float64 */
    { 128, false }, /* _Float128 */
    { 32, true },   /* _Float32x */
    { 64, true },   /* _Float64x */
    { 128, true },  /* _Float128x */
  };
9102
9103
/* Create nodes for all integer types (and error_mark_node) using the sizes
   of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */

void
build_common_tree_nodes (bool signed_char)
{
  int i;

  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build the __intN types, and register the target-enabled ones in
     the integer_types table.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      /* SIZE_TYPE may also spell an unsigned __intN type.  */
      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      /* PTRDIFF_TYPE may also spell a signed __intN type.  */
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* _FloatN and _FloatNx types supported by the target.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);
  if (targetm.scalar_mode_supported_p (TImode))
    uint128_type_node = make_or_reuse_type (128, 1);

  /* Decimal float types.  */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
      (GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
9443
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  /* Translate each ECF_* bit into either a flag on DECL itself or an
     attribute chained onto DECL_ATTRIBUTES.  The attribute additions
     below prepend to the list, so relative order is preserved only
     among the flags tested here.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* Noreturn is historically represented as TREE_THIS_VOLATILE on the
     FUNCTION_DECL.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* ECF_RET1 means the function returns its first argument; encode that
     as a "fn spec" attribute with the "1 " specification string.  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
		   DECL_ATTRIBUTES (decl));
  /* TM purity lives on the type, not the decl; only meaningful when
     transactional memory support is enabled.  */
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
9485
9486
9487 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9488
9489 static void
9490 local_define_builtin (const char *name, tree type, enum built_in_function code,
9491 const char *library_name, int ecf_flags)
9492 {
9493 tree decl;
9494
9495 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9496 library_name, NULL_TREE);
9497 set_call_expr_flags (decl, ecf_flags);
9498
9499 set_builtin_decl (code, decl, true);
9500 }
9501
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* __builtin_unreachable / abort: define whichever ones the front end
     has not already provided.  Both are noreturn and cold.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Memory builtins the optimizers synthesize calls to.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  /* Internal-only alloca variants used for VLA lowering; always defined
     here since no front end provides them.  */
  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline / function-descriptor support for nested functions.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
			  BUILT_IN_CLEAR_CACHE,
			  "__clear_cache",
			  ECF_NOTHROW);

  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  /* SJLJ setjmp/longjmp exception support helpers.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Equality-only comparison variants; same signature as memcmp, shared
     FTYPE for all three.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* Resume unwinding; the library entry point depends on whether the
     target uses SJLJ or table-driven unwinding.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions profiling hooks.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	/* Skip complex modes the front end has no type for.  */
	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	/* The libcalls take the real and imaginary parts of both
	   operands as four separate scalar arguments.  */
	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to form e.g. "__mulsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
9783
9784 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9785 better way.
9786
9787 If we requested a pointer to a vector, build up the pointers that
9788 we stripped off while looking for the inner type. Similarly for
9789 return values from functions.
9790
9791 The argument TYPE is the top of the chain, and BOTTOM is the
9792 new type which we will point to. */
9793
9794 tree
9795 reconstruct_complex_type (tree type, tree bottom)
9796 {
9797 tree inner, outer;
9798
9799 if (TREE_CODE (type) == POINTER_TYPE)
9800 {
9801 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9802 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9803 TYPE_REF_CAN_ALIAS_ALL (type));
9804 }
9805 else if (TREE_CODE (type) == REFERENCE_TYPE)
9806 {
9807 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9808 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9809 TYPE_REF_CAN_ALIAS_ALL (type));
9810 }
9811 else if (TREE_CODE (type) == ARRAY_TYPE)
9812 {
9813 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9814 outer = build_array_type (inner, TYPE_DOMAIN (type));
9815 }
9816 else if (TREE_CODE (type) == FUNCTION_TYPE)
9817 {
9818 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9819 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9820 }
9821 else if (TREE_CODE (type) == METHOD_TYPE)
9822 {
9823 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9824 /* The build_method_type_directly() routine prepends 'this' to argument list,
9825 so we must compensate by getting rid of it. */
9826 outer
9827 = build_method_type_directly
9828 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9829 inner,
9830 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9831 }
9832 else if (TREE_CODE (type) == OFFSET_TYPE)
9833 {
9834 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9835 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9836 }
9837 else
9838 return bottom;
9839
9840 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9841 TYPE_QUALS (type));
9842 }
9843
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    /* True vector modes carry their own element count.  */
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      /* An integer mode represents the vector as one scalar; derive the
	 element count from the size ratio.  */
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
9877
9878 /* Similarly, but takes the inner type and number of units, which must be
9879 a power of two. */
9880
9881 tree
9882 build_vector_type (tree innertype, poly_int64 nunits)
9883 {
9884 return make_vector_type (innertype, nunits, VOIDmode);
9885 }
9886
9887 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9888
9889 tree
9890 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9891 {
9892 gcc_assert (mask_mode != BLKmode);
9893
9894 unsigned HOST_WIDE_INT esize;
9895 if (VECTOR_MODE_P (mask_mode))
9896 {
9897 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9898 esize = vector_element_size (vsize, nunits);
9899 }
9900 else
9901 esize = 1;
9902
9903 tree bool_type = build_nonstandard_boolean_type (esize);
9904
9905 return make_vector_type (bool_type, nunits, mask_mode);
9906 }
9907
9908 /* Build a vector type that holds one boolean result for each element of
9909 vector type VECTYPE. The public interface for this operation is
9910 truth_type_for. */
9911
9912 static tree
9913 build_truth_vector_type_for (tree vectype)
9914 {
9915 machine_mode vector_mode = TYPE_MODE (vectype);
9916 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
9917
9918 machine_mode mask_mode;
9919 if (VECTOR_MODE_P (vector_mode)
9920 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
9921 return build_truth_vector_type_for_mode (nunits, mask_mode);
9922
9923 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
9924 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
9925 tree bool_type = build_nonstandard_boolean_type (esize);
9926
9927 return make_vector_type (bool_type, nunits, VOIDmode);
9928 }
9929
/* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
   set.  */

tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Othewise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  /* Share T's canonical type and splice CAND into T's variant list
     immediately after T, preserving the invariant noted above.  */
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
9955
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

static poly_wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
  /* V2 is the pattern's element with COUNT == 2 (the last encoded one),
     so element COUNT lies COUNT - 2 steps of DIFF beyond it.  */
  return wi::to_poly_wide (v2) + (count - 2) * diff;
}
9983
9984 /* Return the value of element I of VECTOR_CST T. */
9985
9986 tree
9987 vector_cst_elt (const_tree t, unsigned int i)
9988 {
9989 /* First handle elements that are directly encoded. */
9990 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
9991 if (i < encoded_nelts)
9992 return VECTOR_CST_ENCODED_ELT (t, i);
9993
9994 /* If there are no steps, the final encoded value is the right one. */
9995 if (!VECTOR_CST_STEPPED_P (t))
9996 {
9997 /* Identify the pattern that contains element I and work out the index of
9998 the last encoded element for that pattern. */
9999 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10000 unsigned int pattern = i % npatterns;
10001 unsigned int final_i = encoded_nelts - npatterns + pattern;
10002 return VECTOR_CST_ENCODED_ELT (t, final_i);
10003 }
10004
10005 /* Otherwise work out the value from the last two encoded elements. */
10006 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10007 vector_cst_int_elt (t, i));
10008 }
10009
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  /* Point NONZERO at a local so the rest of the function can store
     through it unconditionally.  */
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a STRING_CST, set by the MEM_REF case before it
     falls through to the STRING_CST case.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Both the real and imaginary parts must be +0.0 (not -0.0).  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* A vector is all zeros if it is a duplicate of a zero element.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber carries no value at all, so it is neither known
	   zero nor known nonzero.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* Recurse so NONZERO is also propagated from the elements.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Handle loads from string literals: &STRING_CST + OFFSET.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
10137
10138 /* Return true if EXPR is an initializer expression in which every element
10139 is a constant that is numerically equal to 0 or 1. The elements do not
10140 need to be equal to each other. */
10141
10142 bool
10143 initializer_each_zero_or_onep (const_tree expr)
10144 {
10145 STRIP_ANY_LOCATION_WRAPPER (expr);
10146
10147 switch (TREE_CODE (expr))
10148 {
10149 case INTEGER_CST:
10150 return integer_zerop (expr) || integer_onep (expr);
10151
10152 case REAL_CST:
10153 return real_zerop (expr) || real_onep (expr);
10154
10155 case VECTOR_CST:
10156 {
10157 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10158 if (VECTOR_CST_STEPPED_P (expr)
10159 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10160 return false;
10161
10162 for (unsigned int i = 0; i < nelts; ++i)
10163 {
10164 tree elt = vector_cst_elt (expr, i);
10165 if (!initializer_each_zero_or_onep (elt))
10166 return false;
10167 }
10168
10169 return true;
10170 }
10171
10172 default:
10173 return false;
10174 }
10175 }
10176
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* A one-pattern duplicate encoding repeats a single element.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* I is left one past the last element; a short CONSTRUCTOR does
	 not cover the whole vector and so is not uniform.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
10225
10226 /* If the argument is INTEGER_CST, return it. If the argument is vector
10227 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10228 return NULL_TREE.
10229 Look through location wrappers. */
10230
10231 tree
10232 uniform_integer_cst_p (tree t)
10233 {
10234 STRIP_ANY_LOCATION_WRAPPER (t);
10235
10236 if (TREE_CODE (t) == INTEGER_CST)
10237 return t;
10238
10239 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10240 {
10241 t = uniform_vector_p (t);
10242 if (t && TREE_CODE (t) == INTEGER_CST)
10243 return t;
10244 }
10245
10246 return NULL_TREE;
10247 }
10248
/* If VECTOR_CST T has a single nonzero element, return the index of that
   element, otherwise return -1.  */

int
single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      /* Variable-length vector with a repeating (non-stepped) tail:
	 the encoded elements describe the whole vector, and any index
	 at or beyond REPEAT_NELTS repeats forever.  */
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    /* Variable-length stepped encodings can't be proven to have a
       single nonzero element.  */
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* Fail on a second nonzero element, or on one that would be
	     repeated in the unseen tail of a variable-length vector.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
10280
10281 /* Build an empty statement at location LOC. */
10282
10283 tree
10284 build_empty_stmt (location_t loc)
10285 {
10286 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10287 SET_EXPR_LOCATION (t, loc);
10288 return t;
10289 }
10290
10291
10292 /* Build an OpenMP clause with code CODE. LOC is the location of the
10293 clause. */
10294
10295 tree
10296 build_omp_clause (location_t loc, enum omp_clause_code code)
10297 {
10298 tree t;
10299 int size, length;
10300
10301 length = omp_clause_num_ops[code];
10302 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10303
10304 record_node_allocation_statistics (OMP_CLAUSE, size);
10305
10306 t = (tree) ggc_internal_alloc (size);
10307 memset (t, 0, size);
10308 TREE_SET_CODE (t, OMP_CLAUSE);
10309 OMP_CLAUSE_SET_CODE (t, code);
10310 OMP_CLAUSE_LOCATION (t) = loc;
10311
10312 return t;
10313 }
10314
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* Size of a tree_exp with LEN operands; tree_exp already contains
     room for one operand.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10341
10342 /* Helper function for build_call_* functions; build a CALL_EXPR with
10343 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10344 the argument slots. */
10345
10346 static tree
10347 build_call_1 (tree return_type, tree fn, int nargs)
10348 {
10349 tree t;
10350
10351 t = build_vl_exp (CALL_EXPR, nargs + 3);
10352 TREE_TYPE (t) = return_type;
10353 CALL_EXPR_FN (t) = fn;
10354 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10355
10356 return t;
10357 }
10358
10359 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10360 FN and a null static chain slot. NARGS is the number of call arguments
10361 which are specified as "..." arguments. */
10362
10363 tree
10364 build_call_nary (tree return_type, tree fn, int nargs, ...)
10365 {
10366 tree ret;
10367 va_list args;
10368 va_start (args, nargs);
10369 ret = build_call_valist (return_type, fn, nargs, args);
10370 va_end (args);
10371 return ret;
10372 }
10373
10374 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10375 FN and a null static chain slot. NARGS is the number of call arguments
10376 which are specified as a va_list ARGS. */
10377
10378 tree
10379 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10380 {
10381 tree t;
10382 int i;
10383
10384 t = build_call_1 (return_type, fn, nargs);
10385 for (i = 0; i < nargs; i++)
10386 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10387 process_call_operands (t);
10388 return t;
10389 }
10390
10391 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10392 FN and a null static chain slot. NARGS is the number of call arguments
10393 which are specified as a tree array ARGS. */
10394
10395 tree
10396 build_call_array_loc (location_t loc, tree return_type, tree fn,
10397 int nargs, const tree *args)
10398 {
10399 tree t;
10400 int i;
10401
10402 t = build_call_1 (return_type, fn, nargs);
10403 for (i = 0; i < nargs; i++)
10404 CALL_EXPR_ARG (t, i) = args[i];
10405 process_call_operands (t);
10406 SET_EXPR_LOCATION (t, loc);
10407 return t;
10408 }
10409
10410 /* Like build_call_array, but takes a vec. */
10411
10412 tree
10413 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10414 {
10415 tree ret, t;
10416 unsigned int ix;
10417
10418 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10419 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10420 CALL_EXPR_ARG (ret, ix) = t;
10421 process_call_operands (ret);
10422 return ret;
10423 }
10424
10425 /* Conveniently construct a function call expression. FNDECL names the
10426 function to be called and N arguments are passed in the array
10427 ARGARRAY. */
10428
10429 tree
10430 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10431 {
10432 tree fntype = TREE_TYPE (fndecl);
10433 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10434
10435 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10436 }
10437
10438 /* Conveniently construct a function call expression. FNDECL names the
10439 function to be called and the arguments are passed in the vector
10440 VEC. */
10441
10442 tree
10443 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10444 {
10445 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10446 vec_safe_address (vec));
10447 }
10448
10449
10450 /* Conveniently construct a function call expression. FNDECL names the
10451 function to be called, N is the number of arguments, and the "..."
10452 parameters are the argument expressions. */
10453
10454 tree
10455 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10456 {
10457 va_list ap;
10458 tree *argarray = XALLOCAVEC (tree, n);
10459 int i;
10460
10461 va_start (ap, n);
10462 for (i = 0; i < n; i++)
10463 argarray[i] = va_arg (ap, tree);
10464 va_end (ap);
10465 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10466 }
10467
10468 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10469 varargs macros aren't supported by all bootstrap compilers. */
10470
10471 tree
10472 build_call_expr (tree fndecl, int n, ...)
10473 {
10474 va_list ap;
10475 tree *argarray = XALLOCAVEC (tree, n);
10476 int i;
10477
10478 va_start (ap, n);
10479 for (i = 0; i < n; i++)
10480 argarray[i] = va_arg (ap, tree);
10481 va_end (ap);
10482 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10483 }
10484
10485 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10486 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10487 It will get gimplified later into an ordinary internal function. */
10488
10489 tree
10490 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10491 tree type, int n, const tree *args)
10492 {
10493 tree t = build_call_1 (type, NULL_TREE, n);
10494 for (int i = 0; i < n; ++i)
10495 CALL_EXPR_ARG (t, i) = args[i];
10496 SET_EXPR_LOCATION (t, loc);
10497 CALL_EXPR_IFN (t) = ifn;
10498 process_call_operands (t);
10499 return t;
10500 }
10501
10502 /* Build internal call expression. This is just like CALL_EXPR, except
10503 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10504 internal function. */
10505
10506 tree
10507 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10508 tree type, int n, ...)
10509 {
10510 va_list ap;
10511 tree *argarray = XALLOCAVEC (tree, n);
10512 int i;
10513
10514 va_start (ap, n);
10515 for (i = 0; i < n; i++)
10516 argarray[i] = va_arg (ap, tree);
10517 va_end (ap);
10518 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10519 }
10520
10521 /* Return a function call to FN, if the target is guaranteed to support it,
10522 or null otherwise.
10523
10524 N is the number of arguments, passed in the "...", and TYPE is the
10525 type of the return value. */
10526
10527 tree
10528 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10529 int n, ...)
10530 {
10531 va_list ap;
10532 tree *argarray = XALLOCAVEC (tree, n);
10533 int i;
10534
10535 va_start (ap, n);
10536 for (i = 0; i < n; i++)
10537 argarray[i] = va_arg (ap, tree);
10538 va_end (ap);
10539 if (internal_fn_p (fn))
10540 {
10541 internal_fn ifn = as_internal_fn (fn);
10542 if (direct_internal_fn_p (ifn))
10543 {
10544 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10545 if (!direct_internal_fn_supported_p (ifn, types,
10546 OPTIMIZE_FOR_BOTH))
10547 return NULL_TREE;
10548 }
10549 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10550 }
10551 else
10552 {
10553 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10554 if (!fndecl)
10555 return NULL_TREE;
10556 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10557 }
10558 }
10559
10560 /* Return a function call to the appropriate builtin alloca variant.
10561
10562 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10563 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10564 bound for SIZE in case it is not a fixed value. */
10565
10566 tree
10567 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10568 {
10569 if (max_size >= 0)
10570 {
10571 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10572 return
10573 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10574 }
10575 else if (align > 0)
10576 {
10577 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10578 return build_call_expr (t, 2, size, size_int (align));
10579 }
10580 else
10581 {
10582 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10583 return build_call_expr (t, 1, size);
10584 }
10585 }
10586
10587 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10588 if SIZE == -1) and return a tree node representing char* pointer to
10589 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10590 the STRING_CST value is the LEN bytes at STR (the representation
10591 of the string, which may be wide). Otherwise it's all zeros. */
10592
10593 tree
10594 build_string_literal (unsigned len, const char *str /* = NULL */,
10595 tree eltype /* = char_type_node */,
10596 unsigned HOST_WIDE_INT size /* = -1 */)
10597 {
10598 tree t = build_string (len, str);
10599 /* Set the maximum valid index based on the string length or SIZE. */
10600 unsigned HOST_WIDE_INT maxidx
10601 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10602
10603 tree index = build_index_type (size_int (maxidx));
10604 eltype = build_type_variant (eltype, 1, 0);
10605 tree type = build_array_type (eltype, index);
10606 TREE_TYPE (t) = type;
10607 TREE_CONSTANT (t) = 1;
10608 TREE_READONLY (t) = 1;
10609 TREE_STATIC (t) = 1;
10610
10611 type = build_pointer_type (eltype);
10612 t = build1 (ADDR_EXPR, type,
10613 build4 (ARRAY_REF, eltype,
10614 t, integer_zero_node, NULL_TREE, NULL_TREE));
10615 return t;
10616 }
10617
10618
10619
10620 /* Return true if T (assumed to be a DECL) must be assigned a memory
10621 location. */
10622
10623 bool
10624 needs_to_live_in_memory (const_tree t)
10625 {
10626 return (TREE_ADDRESSABLE (t)
10627 || is_global_var (t)
10628 || (TREE_CODE (t) == RESULT_DECL
10629 && !DECL_BY_REFERENCE (t)
10630 && aggregate_value_p (t, current_function_decl)));
10631 }
10632
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Bit BITS-1 is the sign bit of the BITS-wide value.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      /* HOST_WIDE_INT_M1U << (bits - 1) << 1 is a mask of all bits
	 above the sign bit; the shift is split in two so each count
	 stays strictly below the type width.  Set those bits for a
	 negative value (sign extension), clear them otherwise.  */
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
10655
10656 /* If TYPE is an integral or pointer type, return an integer type with
10657 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10658 if TYPE is already an integer type of signedness UNSIGNEDP.
10659 If TYPE is a floating-point type, return an integer type with the same
10660 bitsize and with the signedness given by UNSIGNEDP; this is useful
10661 when doing bit-level operations on a floating-point value. */
10662
10663 tree
10664 signed_or_unsigned_type_for (int unsignedp, tree type)
10665 {
10666 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10667 return type;
10668
10669 if (TREE_CODE (type) == VECTOR_TYPE)
10670 {
10671 tree inner = TREE_TYPE (type);
10672 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10673 if (!inner2)
10674 return NULL_TREE;
10675 if (inner == inner2)
10676 return type;
10677 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10678 }
10679
10680 if (TREE_CODE (type) == COMPLEX_TYPE)
10681 {
10682 tree inner = TREE_TYPE (type);
10683 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10684 if (!inner2)
10685 return NULL_TREE;
10686 if (inner == inner2)
10687 return type;
10688 return build_complex_type (inner2);
10689 }
10690
10691 unsigned int bits;
10692 if (INTEGRAL_TYPE_P (type)
10693 || POINTER_TYPE_P (type)
10694 || TREE_CODE (type) == OFFSET_TYPE)
10695 bits = TYPE_PRECISION (type);
10696 else if (TREE_CODE (type) == REAL_TYPE)
10697 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10698 else
10699 return NULL_TREE;
10700
10701 return build_nonstandard_integer_type (bits, unsignedp);
10702 }
10703
10704 /* If TYPE is an integral or pointer type, return an integer type with
10705 the same precision which is unsigned, or itself if TYPE is already an
10706 unsigned integer type. If TYPE is a floating-point type, return an
10707 unsigned integer type with the same bitsize as TYPE. */
10708
10709 tree
10710 unsigned_type_for (tree type)
10711 {
10712 return signed_or_unsigned_type_for (1, type);
10713 }
10714
10715 /* If TYPE is an integral or pointer type, return an integer type with
10716 the same precision which is signed, or itself if TYPE is already a
10717 signed integer type. If TYPE is a floating-point type, return a
10718 signed integer type with the same bitsize as TYPE. */
10719
10720 tree
10721 signed_type_for (tree type)
10722 {
10723 return signed_or_unsigned_type_for (0, type);
10724 }
10725
10726 /* If TYPE is a vector type, return a signed integer vector type with the
10727 same width and number of subparts. Otherwise return boolean_type_node. */
10728
10729 tree
10730 truth_type_for (tree type)
10731 {
10732 if (TREE_CODE (type) == VECTOR_TYPE)
10733 {
10734 if (VECTOR_BOOLEAN_TYPE_P (type))
10735 return type;
10736 return build_truth_vector_type_for (type);
10737 }
10738 else
10739 return boolean_type_node;
10740 }
10741
10742 /* Returns the largest value obtainable by casting something in INNER type to
10743 OUTER type. */
10744
10745 tree
10746 upper_bound_in_type (tree outer, tree inner)
10747 {
10748 unsigned int det = 0;
10749 unsigned oprec = TYPE_PRECISION (outer);
10750 unsigned iprec = TYPE_PRECISION (inner);
10751 unsigned prec;
10752
10753 /* Compute a unique number for every combination. */
10754 det |= (oprec > iprec) ? 4 : 0;
10755 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10756 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10757
10758 /* Determine the exponent to use. */
10759 switch (det)
10760 {
10761 case 0:
10762 case 1:
10763 /* oprec <= iprec, outer: signed, inner: don't care. */
10764 prec = oprec - 1;
10765 break;
10766 case 2:
10767 case 3:
10768 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10769 prec = oprec;
10770 break;
10771 case 4:
10772 /* oprec > iprec, outer: signed, inner: signed. */
10773 prec = iprec - 1;
10774 break;
10775 case 5:
10776 /* oprec > iprec, outer: signed, inner: unsigned. */
10777 prec = iprec;
10778 break;
10779 case 6:
10780 /* oprec > iprec, outer: unsigned, inner: signed. */
10781 prec = oprec;
10782 break;
10783 case 7:
10784 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10785 prec = iprec;
10786 break;
10787 default:
10788 gcc_unreachable ();
10789 }
10790
10791 return wide_int_to_tree (outer,
10792 wi::mask (prec, false, TYPE_PRECISION (outer)));
10793 }
10794
10795 /* Returns the smallest value obtainable by casting something in INNER type to
10796 OUTER type. */
10797
10798 tree
10799 lower_bound_in_type (tree outer, tree inner)
10800 {
10801 unsigned oprec = TYPE_PRECISION (outer);
10802 unsigned iprec = TYPE_PRECISION (inner);
10803
10804 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10805 and obtain 0. */
10806 if (TYPE_UNSIGNED (outer)
10807 /* If we are widening something of an unsigned type, OUTER type
10808 contains all values of INNER type. In particular, both INNER
10809 and OUTER types have zero in common. */
10810 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10811 return build_int_cst (outer, 0);
10812 else
10813 {
10814 /* If we are widening a signed type to another signed type, we
10815 want to obtain -2^^(iprec-1). If we are keeping the
10816 precision or narrowing to a signed type, we want to obtain
10817 -2^(oprec-1). */
10818 unsigned prec = oprec > iprec ? iprec : oprec;
10819 return wide_int_to_tree (outer,
10820 wi::mask (prec - 1, true,
10821 TYPE_PRECISION (outer)));
10822 }
10823 }
10824
10825 /* Return nonzero if two operands that are suitable for PHI nodes are
10826 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10827 SSA_NAME or invariant. Note that this is strictly an optimization.
10828 That is, callers of this function can directly call operand_equal_p
10829 and get the same result, only slower. */
10830
10831 int
10832 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10833 {
10834 if (arg0 == arg1)
10835 return 1;
10836 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10837 return 0;
10838 return operand_equal_p (arg0, arg1, 0);
10839 }
10840
10841 /* Returns number of zeros at the end of binary representation of X. */
10842
10843 tree
10844 num_ending_zeros (const_tree x)
10845 {
10846 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10847 }
10848
10849
/* Walk NODE as a subtree and, if the walk produced a non-NULL result,
   propagate it by returning from the enclosing function.  Relies on
   FUNC, DATA, PSET, LH and a local RESULT being in scope at the
   expansion site.  */
#define WALK_SUBTREE(NODE) \
  do \
    { \
      result = walk_tree_1 (&(NODE), func, data, pset, lh); \
      if (result) \
	return result; \
    } \
  while (0)
10858
/* This is a subroutine of walk_tree that walks the fields of TYPE that are
   to be walked whenever a type is seen in the tree.  Rest of operands and
   return value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      /* Walk the basetype, then treat the rest like a FUNCTION_TYPE.  */
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
10938
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Continue the walk at NODE without growing the C stack: point TP at
   NODE and jump back to the top of the function (manual tail call).  */
#define WALK_SUBTREE_TAIL(NODE) \
  do \
    { \
      tp = & (NODE); \
      goto tail_recurse; \
    } \
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  if (lh)
    {
      /* Give the language hook a chance to handle or prune the node.  */
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case VECTOR_CST:
      {
	unsigned len = vector_cst_encoded_nelts (*tp);
	if (len == 0)
	  break;
	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Clauses are grouped by operand count; each group walks its
	 operands then continues with the clause chain as a tail call
	 (via the FALLTHRU cascades below).  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_AFFINITY:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_DEVICE_TYPE:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_NOHOST:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  /* Call the function for the decl so e.g. copy_tree_body_r can
	     replace it with the remapped one.  */
	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
#undef WALK_SUBTREE
11340
11341 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11342
11343 tree
11344 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11345 walk_tree_lh lh)
11346 {
11347 tree result;
11348
11349 hash_set<tree> pset;
11350 result = walk_tree_1 (tp, func, data, &pset, lh);
11351 return result;
11352 }
11353
11354
11355 tree
11356 tree_block (tree t)
11357 {
11358 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11359
11360 if (IS_EXPR_CODE_CLASS (c))
11361 return LOCATION_BLOCK (t->exp.locus);
11362 gcc_unreachable ();
11363 return NULL;
11364 }
11365
11366 void
11367 tree_set_block (tree t, tree b)
11368 {
11369 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11370
11371 if (IS_EXPR_CODE_CLASS (c))
11372 {
11373 t->exp.locus = set_block (t->exp.locus, b);
11374 }
11375 else
11376 gcc_unreachable ();
11377 }
11378
11379 /* Create a nameless artificial label and put it in the current
11380 function context. The label has a location of LOC. Returns the
11381 newly created label. */
11382
11383 tree
11384 create_artificial_label (location_t loc)
11385 {
11386 tree lab = build_decl (loc,
11387 LABEL_DECL, NULL_TREE, void_type_node);
11388
11389 DECL_ARTIFICIAL (lab) = 1;
11390 DECL_IGNORED_P (lab) = 1;
11391 DECL_CONTEXT (lab) = current_function_decl;
11392 return lab;
11393 }
11394
11395 /* Given a tree, try to return a useful variable name that we can use
11396 to prefix a temporary that is being assigned the value of the tree.
11397 I.E. given <temp> = &A, return A. */
11398
11399 const char *
11400 get_name (tree t)
11401 {
11402 tree stripped_decl;
11403
11404 stripped_decl = t;
11405 STRIP_NOPS (stripped_decl);
11406 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11407 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11408 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11409 {
11410 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11411 if (!name)
11412 return NULL;
11413 return IDENTIFIER_POINTER (name);
11414 }
11415 else
11416 {
11417 switch (TREE_CODE (stripped_decl))
11418 {
11419 case ADDR_EXPR:
11420 return get_name (TREE_OPERAND (stripped_decl, 0));
11421 default:
11422 return NULL;
11423 }
11424 }
11425 }
11426
11427 /* Return true if TYPE has a variable argument list. */
11428
11429 bool
11430 stdarg_p (const_tree fntype)
11431 {
11432 function_args_iterator args_iter;
11433 tree n = NULL_TREE, t;
11434
11435 if (!fntype)
11436 return false;
11437
11438 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11439 {
11440 n = t;
11441 }
11442
11443 return n != NULL_TREE && n != void_type_node;
11444 }
11445
11446 /* Return true if TYPE has a prototype. */
11447
11448 bool
11449 prototype_p (const_tree fntype)
11450 {
11451 tree t;
11452
11453 gcc_assert (fntype != NULL_TREE);
11454
11455 t = TYPE_ARG_TYPES (fntype);
11456 return (t != NULL_TREE);
11457 }
11458
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  Returns NULL otherwise.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward through the chain of inlined-scope BLOCKs.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
11490
11491
11492 /* If EXP is inlined from an __attribute__((__artificial__))
11493 function, return the location of the original call expression. */
11494
11495 location_t
11496 tree_nonartificial_location (tree exp)
11497 {
11498 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11499
11500 if (loc)
11501 return *loc;
11502 else
11503 return EXPR_LOCATION (exp);
11504 }
11505
/* Return the location into which EXP has been inlined.  Analogous
   to tree_nonartificial_location() above but not limited to artificial
   functions declared inline.  If SYSTEM_HEADER is true, return
   the macro expansion point of the location if it's in a system header.  */

location_t
tree_inlined_location (tree exp, bool system_header /* = true */)
{
  location_t loc = UNKNOWN_LOCATION;

  tree block = TREE_BLOCK (exp);

  /* Walk outward through the inlined-scope chain, remembering the
     outermost call-site location found on the way.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	loc = BLOCK_SOURCE_LOCATION (block);
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }

  if (loc == UNKNOWN_LOCATION)
    {
      loc = EXPR_LOCATION (exp);
      if (system_header)
	/* Only consider macro expansion when the block traversal failed
	   to find a location.  Otherwise it's not relevant.  */
	return expansion_point_location_if_in_system_header (loc);
    }

  return loc;
}
11541
11542 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11543 nodes. */
11544
11545 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11546
11547 hashval_t
11548 cl_option_hasher::hash (tree x)
11549 {
11550 const_tree const t = x;
11551 const char *p;
11552 size_t i;
11553 size_t len = 0;
11554 hashval_t hash = 0;
11555
11556 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11557 {
11558 p = (const char *)TREE_OPTIMIZATION (t);
11559 len = sizeof (struct cl_optimization);
11560 }
11561
11562 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11563 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11564
11565 else
11566 gcc_unreachable ();
11567
11568 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11569 something else. */
11570 for (i = 0; i < len; i++)
11571 if (p[i])
11572 hash = (hash << 4) ^ ((i << 2) | p[i]);
11573
11574 return hash;
11575 }
11576
11577 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11578 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11579 same. */
11580
11581 bool
11582 cl_option_hasher::equal (tree x, tree y)
11583 {
11584 const_tree const xt = x;
11585 const_tree const yt = y;
11586
11587 if (TREE_CODE (xt) != TREE_CODE (yt))
11588 return 0;
11589
11590 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11591 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11592 TREE_OPTIMIZATION (yt));
11593 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11594 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11595 TREE_TARGET_OPTION (yt));
11596 else
11597 gcc_unreachable ();
11598 }
11599
11600 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11601
11602 tree
11603 build_optimization_node (struct gcc_options *opts,
11604 struct gcc_options *opts_set)
11605 {
11606 tree t;
11607
11608 /* Use the cache of optimization nodes. */
11609
11610 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11611 opts, opts_set);
11612
11613 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11614 t = *slot;
11615 if (!t)
11616 {
11617 /* Insert this one into the hash table. */
11618 t = cl_optimization_node;
11619 *slot = t;
11620
11621 /* Make a new node for next time round. */
11622 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11623 }
11624
11625 return t;
11626 }
11627
11628 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11629
11630 tree
11631 build_target_option_node (struct gcc_options *opts,
11632 struct gcc_options *opts_set)
11633 {
11634 tree t;
11635
11636 /* Use the cache of optimization nodes. */
11637
11638 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11639 opts, opts_set);
11640
11641 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11642 t = *slot;
11643 if (!t)
11644 {
11645 /* Insert this one into the hash table. */
11646 t = cl_target_option_node;
11647 *slot = t;
11648
11649 /* Make a new node for next time round. */
11650 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11651 }
11652
11653 return t;
11654 }
11655
11656 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11657 so that they aren't saved during PCH writing. */
11658
11659 void
11660 prepare_target_option_nodes_for_pch (void)
11661 {
11662 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11663 for (; iter != cl_option_hash_table->end (); ++iter)
11664 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11665 TREE_TARGET_GLOBALS (*iter) = NULL;
11666 }
11667
11668 /* Determine the "ultimate origin" of a block. */
11669
11670 tree
11671 block_ultimate_origin (const_tree block)
11672 {
11673 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11674
11675 if (origin == NULL_TREE)
11676 return NULL_TREE;
11677 else
11678 {
11679 gcc_checking_assert ((DECL_P (origin)
11680 && DECL_ORIGIN (origin) == origin)
11681 || BLOCK_ORIGIN (origin) == origin);
11682 return origin;
11683 }
11684 }
11685
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
11723
11724 /* Return true iff conversion in EXP generates no instruction. Mark
11725 it inline so that we fully inline into the stripping functions even
11726 though we have two uses of this function. */
11727
11728 static inline bool
11729 tree_nop_conversion (const_tree exp)
11730 {
11731 tree outer_type, inner_type;
11732
11733 if (location_wrapper_p (exp))
11734 return true;
11735 if (!CONVERT_EXPR_P (exp)
11736 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11737 return false;
11738
11739 outer_type = TREE_TYPE (exp);
11740 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11741 if (!inner_type || inner_type == error_mark_node)
11742 return false;
11743
11744 return tree_nop_conversion_p (outer_type, inner_type);
11745 }
11746
11747 /* Return true iff conversion in EXP generates no instruction. Don't
11748 consider conversions changing the signedness. */
11749
11750 static bool
11751 tree_sign_nop_conversion (const_tree exp)
11752 {
11753 tree outer_type, inner_type;
11754
11755 if (!tree_nop_conversion (exp))
11756 return false;
11757
11758 outer_type = TREE_TYPE (exp);
11759 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11760
11761 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11762 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11763 }
11764
11765 /* Strip conversions from EXP according to tree_nop_conversion and
11766 return the resulting expression. */
11767
11768 tree
11769 tree_strip_nop_conversions (tree exp)
11770 {
11771 while (tree_nop_conversion (exp))
11772 exp = TREE_OPERAND (exp, 0);
11773 return exp;
11774 }
11775
11776 /* Strip conversions from EXP according to tree_sign_nop_conversion
11777 and return the resulting expression. */
11778
11779 tree
11780 tree_strip_sign_nop_conversions (tree exp)
11781 {
11782 while (tree_sign_nop_conversion (exp))
11783 exp = TREE_OPERAND (exp, 0);
11784 return exp;
11785 }
11786
/* Avoid any floating point extensions from EXP: strip a chain of
   widening float conversions and, for constants, rebuild the constant
   in the narrowest type that holds its value exactly.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  /* Only conversions between floating point types of the same kind
     (binary vs. decimal) that do not lose precision count as
     extensions we can strip.  */
  if (!FLOAT_TYPE_P (subt))
    return exp;

  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse: the operand may itself be a widening conversion.  */
  return strip_float_extensions (sub);
}
11832
/* Strip out all handled components that produce invariant
   offsets.  Returns the remaining base, or NULL if any component has
   a variable offset.  */

const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* The index must be constant, and neither a variable lower
	     bound (operand 2) nor a variable element size (operand 3)
	     may be present.  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* Operand 2, if set, is a variable field offset.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
11863
11864 static GTY(()) tree gcc_eh_personality_decl;
11865
11866 /* Return the GCC personality function decl. */
11867
11868 tree
11869 lhd_gcc_personality (void)
11870 {
11871 if (!gcc_eh_personality_decl)
11872 gcc_eh_personality_decl = build_personality_function ("gcc");
11873 return gcc_eh_personality_decl;
11874 }
11875
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing a virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  FOR_DUMP_P is true when being called from
   the dump routines.  */

bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* The target must be a pointer to a METHOD_TYPE; a FUNCTION_TYPE
     here indicates a non-C++ (e.g. objc) use of OBJ_TYPE_REF.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
11902
11903 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11904
11905 static tree
11906 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
11907 {
11908 unsigned int i;
11909 tree base_binfo, b;
11910
11911 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11912 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
11913 && types_same_for_odr (TREE_TYPE (base_binfo), type))
11914 return base_binfo;
11915 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
11916 return b;
11917 return NULL;
11918 }
11919
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return it, otherwise return NULL_TREE.  OFFSET is in bits.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  /* Descend one artificial field per iteration until the expected
     type is reached or no suitable field exists.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial field (base sub-object or vptr) that
	 contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* Fall back to a recursive search when the base is not a
	     direct one.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
11981
11982 /* Returns true if X is a typedef decl. */
11983
11984 bool
11985 is_typedef_decl (const_tree x)
11986 {
11987 return (x && TREE_CODE (x) == TYPE_DECL
11988 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11989 }
11990
11991 /* Returns true iff TYPE is a type variant created for a typedef. */
11992
11993 bool
11994 typedef_variant_p (const_tree type)
11995 {
11996 return is_typedef_decl (TYPE_NAME (type));
11997 }
11998
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any string this object currently owns.  */
  if (m_owned)
    free (m_str);

  /* Start by aliasing the input; a fresh buffer is allocated below
     only if an escape turns out to be needed.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      /* Ordinary characters are copied through (once a buffer
	 exists).  */
      if (!ISCNTRL (c))
	{
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Newlines pass through verbatim while the pretty-printer is
	 wrapping lines (-fmessage-length non-zero).  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Worst case every byte expands to
		 two, hence len * 2 + 1.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default: escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12072
/* Warn about a use of an identifier which was marked deprecated.  Returns
   whether a warning was given.  NODE is the deprecated decl or type;
   ATTR, if non-null, is the attribute list to search for "deprecated",
   otherwise it is located here from NODE.  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* Locate the attribute list when the caller did not supply one.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The optional message is the attribute's first argument; escape any
     control characters it contains (PR 84195).  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* For types, prefer naming the type in the diagnostic when a
	 name is available.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
12156
12157 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12158 somewhere in it. */
12159
12160 bool
12161 contains_bitfld_component_ref_p (const_tree ref)
12162 {
12163 while (handled_component_p (ref))
12164 {
12165 if (TREE_CODE (ref) == COMPONENT_REF
12166 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12167 return true;
12168 ref = TREE_OPERAND (ref, 0);
12169 }
12170
12171 return false;
12172 }
12173
/* Try to determine whether a TRY_CATCH expression STMT can fall through.
   This is a subroutine of block_may_fallthru.  Operand 0 of STMT is the
   try body, operand 1 the handler sequence.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
12221
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block (null STMT) is treated like ERROR_MARK:
     conservatively assume it can fall through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment whose RHS is a call falls through iff the call
	 does; any other assignment always falls through.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
12300
/* True if we are using EH to handle cleanups.  Set once by the front
   end via using_eh_for_cleanups () and never cleared.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
12318
12319 /* Wrapper for tree_code_name to ensure that tree code is valid */
12320 const char *
12321 get_tree_code_name (enum tree_code code)
12322 {
12323 const char *invalid = "<invalid tree code>";
12324
12325 /* The tree_code enum promotes to signed, but we could be getting
12326 invalid values, so force an unsigned comparison. */
12327 if (unsigned (code) >= MAX_TREE_CODES)
12328 {
12329 if ((unsigned)code == 0xa5a5)
12330 return "ggc_freed";
12331 return invalid;
12332 }
12333
12334 return tree_code_name[code];
12335 }
12336
/* Drops the TREE_OVERFLOW flag from T and returns the result, which
   may be a different (cached or freshly copied) node.  T must have
   TREE_OVERFLOW set.  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
12382
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  /* A (TARGET_)MEM_REF whose address operand is &DECL is really a
     reference to DECL itself; look through it.  */
  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  return t;
}
12407
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
12435
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  Guard against
     an erroneous index type.  */
  tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
  return (idxtype == error_mark_node
	  ? integer_zero_node : build_int_cst (idxtype, 0));
}
12458
12459 /* Return a tree representing the upper bound of the array mentioned in
12460 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12461
12462 tree
12463 array_ref_up_bound (tree exp)
12464 {
12465 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12466
12467 /* If there is a domain type and it has an upper bound, use it, substituting
12468 for a PLACEHOLDER_EXPR as needed. */
12469 if (domain_type && TYPE_MAX_VALUE (domain_type))
12470 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12471
12472 /* Otherwise fail. */
12473 return NULL_TREE;
12474 }
12475
12476 /* Returns true if REF is an array reference, component reference,
12477 or memory reference to an array at the end of a structure.
12478 If this is the case, the array may be allocated larger
12479 than its upper bound implies. */
12480
bool
array_at_struct_end_p (tree ref)
{
  /* ATYPE is the type of the array being accessed.  */
  tree atype;

  /* Identify the array and the object it belongs to.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  /* For a MEM_REF only the last member of the record can be
	     an array at struct end.  */
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      /* A last member with a known size in a declared variable
		 cannot extend past that size.  */
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Remember the full reference for the offset computation below.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      /* Skip over non-FIELD_DECL entries in the chain.  */
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
         a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
         gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  ref = get_base_address (ref);
  if (ref
      && DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
         padding.  */
      poly_int64 offset;
      /* Non-constant element size or bounds: be conservative.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
12600
12601 /* Return a tree representing the offset, in bytes, of the field referenced
12602 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12603
12604 tree
12605 component_ref_field_offset (tree exp)
12606 {
12607 tree aligned_offset = TREE_OPERAND (exp, 2);
12608 tree field = TREE_OPERAND (exp, 1);
12609 location_t loc = EXPR_LOCATION (exp);
12610
12611 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12612 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12613 value. */
12614 if (aligned_offset)
12615 {
12616 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12617 sizetype from another type of the same width and signedness. */
12618 if (TREE_TYPE (aligned_offset) != sizetype)
12619 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12620 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12621 size_int (DECL_OFFSET_ALIGN (field)
12622 / BITS_PER_UNIT));
12623 }
12624
12625 /* Otherwise, take the offset from that of the field. Substitute
12626 any PLACEHOLDER_EXPR that we have. */
12627 else
12628 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12629 }
12630
12631 /* Given the initializer INIT, return the initializer for the field
12632 DECL if it exists, otherwise null. Used to obtain the initializer
12633 for a flexible array member and determine its size. */
12634
12635 static tree
12636 get_initializer_for (tree init, tree decl)
12637 {
12638 STRIP_NOPS (init);
12639
12640 tree fld, fld_init;
12641 unsigned HOST_WIDE_INT i;
12642 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12643 {
12644 if (decl == fld)
12645 return fld_init;
12646
12647 if (TREE_CODE (fld) == CONSTRUCTOR)
12648 {
12649 fld_init = get_initializer_for (fld_init, decl);
12650 if (fld_init)
12651 return fld_init;
12652 }
12653 }
12654
12655 return NULL_TREE;
12656 }
12657
12658 /* Determines the size of the member referenced by the COMPONENT_REF
12659 REF, using its initializer expression if necessary in order to
12660 determine the size of an initialized flexible array member.
   If non-null, set *SAM when REF refers to an interior zero-length
12662 array or a trailing one-element array.
12663 Returns the size as sizetype (which might be zero for an object
12664 with an uninitialized flexible array member) or null if the size
12665 cannot be determined. */
12666
tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* Point SAM at a local buffer when the caller passed null so the
     code below can always store through it unconditionally.  */
  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      /* Classify the special array member kind.  */
      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      /* For a trailing array with constant bounds, detect one-element
	 arrays (trail_1) or bail out early for larger ones.  */
      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      /* For an interior zero-length array, strip the COMPONENT_REF
	 chain to find the outermost object and use the member's own
	 byte position as the offset.  */
      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      /* The size above is already relative to the member.  */
	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
12853
12854 /* Return the machine mode of T. For vectors, returns the mode of the
12855 inner type. The main use case is to feed the result to HONOR_NANS,
12856 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12857
12858 machine_mode
12859 element_mode (const_tree t)
12860 {
12861 if (!TYPE_P (t))
12862 t = TREE_TYPE (t);
12863 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12864 t = TREE_TYPE (t);
12865 return TYPE_MODE (t);
12866 }
12867
12868 /* Vector types need to re-check the target flags each time we report
12869 the machine mode. We need to do this because attribute target can
12870 change the result of vector_mode_supported_p and have_regs_of_mode
12871 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12872 change on a per-function basis. */
12873 /* ??? Possibly a better solution is to run through all the types
12874 referenced by a function and re-compute the TYPE_MODE once, rather
12875 than make the TYPE_MODE macro call a function. */
12876
12877 machine_mode
12878 vector_type_mode (const_tree t)
12879 {
12880 machine_mode mode;
12881
12882 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12883
12884 mode = t->type_common.mode;
12885 if (VECTOR_MODE_P (mode)
12886 && (!targetm.vector_mode_supported_p (mode)
12887 || !have_regs_of_mode[mode]))
12888 {
12889 scalar_int_mode innermode;
12890
12891 /* For integers, try mapping it to a same-sized scalar mode. */
12892 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12893 {
12894 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12895 * GET_MODE_BITSIZE (innermode));
12896 scalar_int_mode mode;
12897 if (int_mode_for_size (size, 0).exists (&mode)
12898 && have_regs_of_mode[mode])
12899 return mode;
12900 }
12901
12902 return BLKmode;
12903 }
12904
12905 return mode;
12906 }
12907
12908 /* Return the size in bits of each element of vector type TYPE. */
12909
12910 unsigned int
12911 vector_element_bits (const_tree type)
12912 {
12913 gcc_checking_assert (VECTOR_TYPE_P (type));
12914 if (VECTOR_BOOLEAN_TYPE_P (type))
12915 return TYPE_PRECISION (TREE_TYPE (type));
12916 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
12917 }
12918
12919 /* Calculate the size in bits of each element of vector type TYPE
12920 and return the result as a tree of type bitsizetype. */
12921
12922 tree
12923 vector_element_bits_tree (const_tree type)
12924 {
12925 gcc_checking_assert (VECTOR_TYPE_P (type));
12926 if (VECTOR_BOOLEAN_TYPE_P (type))
12927 return bitsize_int (vector_element_bits (type));
12928 return TYPE_SIZE (TREE_TYPE (type));
12929 }
12930
12931 /* Verify that basic properties of T match TV and thus T can be a variant of
12932 TV. TV should be the more specified variant (i.e. the main variant). */
12933
static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
#if 0
  if (TYPE_ARTIFICIAL (tv))
    verify_variant_match (TYPE_ARTIFICIAL);
#endif
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
#if 0
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
    verify_variant_match (TYPE_FINAL_P);
#endif

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* Sizes containing a PLACEHOLDER_EXPR cannot be compared by
	 pointer identity; skip them.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
#if 0
  if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
    verify_variant_match (TYPE_CONTEXT);
#endif
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dump TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they looks same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
13171
13172
13173 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13174 the middle-end types_compatible_p function. It needs to avoid
13175 claiming types are different for types that should be treated
13176 the same with respect to TBAA. Canonical types are also used
13177 for IL consistency checks via the useless_type_conversion_p
13178 predicate which does not handle all type kinds itself but falls
13179 back to pointer-comparison of TYPE_CANONICAL for aggregates
13180 for example. */
13181
13182 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13183 type calculation because we need to allow inter-operability between signed
13184 and unsigned variants. */
13185
13186 bool
13187 type_with_interoperable_signedness (const_tree type)
13188 {
13189 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13190 signed char and unsigned char. Similarly fortran FE builds
13191 C_SIZE_T as signed type, while C defines it unsigned. */
13192
13193 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13194 == INTEGER_TYPE
13195 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13196 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13197 }
13198
13199 /* Return true iff T1 and T2 are structurally identical for what
13200 TBAA is concerned.
13201 This function is used both by lto.c canonical type merging and by the
13202 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13203 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13204 only for LTO because only in these cases TYPE_CANONICAL equivalence
13205 correspond to one defined by gimple_canonical_types_compatible_p. */
13206
13207 bool
13208 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13209 bool trust_type_canonical)
13210 {
13211 /* Type variants should be same as the main variant. When not doing sanity
13212 checking to verify this fact, go to main variants and save some work. */
13213 if (trust_type_canonical)
13214 {
13215 t1 = TYPE_MAIN_VARIANT (t1);
13216 t2 = TYPE_MAIN_VARIANT (t2);
13217 }
13218
13219 /* Check first for the obvious case of pointer identity. */
13220 if (t1 == t2)
13221 return true;
13222
13223 /* Check that we have two types to compare. */
13224 if (t1 == NULL_TREE || t2 == NULL_TREE)
13225 return false;
13226
13227 /* We consider complete types always compatible with incomplete type.
13228 This does not make sense for canonical type calculation and thus we
13229 need to ensure that we are never called on it.
13230
13231 FIXME: For more correctness the function probably should have three modes
13232 1) mode assuming that types are complete mathcing their structure
13233 2) mode allowing incomplete types but producing equivalence classes
13234 and thus ignoring all info from complete types
13235 3) mode allowing incomplete types to match complete but checking
13236 compatibility between complete types.
13237
13238 1 and 2 can be used for canonical type calculation. 3 is the real
13239 definition of type compatibility that can be used i.e. for warnings during
13240 declaration merging. */
13241
13242 gcc_assert (!trust_type_canonical
13243 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13244
13245 /* If the types have been previously registered and found equal
13246 they still are. */
13247
13248 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13249 && trust_type_canonical)
13250 {
13251 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13252 they are always NULL, but they are set to non-NULL for types
13253 constructed by build_pointer_type and variants. In this case the
13254 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13255 all pointers are considered equal. Be sure to not return false
13256 negatives. */
13257 gcc_checking_assert (canonical_type_used_p (t1)
13258 && canonical_type_used_p (t2));
13259 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13260 }
13261
13262 /* For types where we do ODR based TBAA the canonical type is always
13263 set correctly, so we know that types are different if their
13264 canonical types does not match. */
13265 if (trust_type_canonical
13266 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13267 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13268 return false;
13269
13270 /* Can't be the same type if the types don't have the same code. */
13271 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13272 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13273 return false;
13274
13275 /* Qualifiers do not matter for canonical type comparison purposes. */
13276
13277 /* Void types and nullptr types are always the same. */
13278 if (TREE_CODE (t1) == VOID_TYPE
13279 || TREE_CODE (t1) == NULLPTR_TYPE)
13280 return true;
13281
13282 /* Can't be the same type if they have different mode. */
13283 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13284 return false;
13285
13286 /* Non-aggregate types can be handled cheaply. */
13287 if (INTEGRAL_TYPE_P (t1)
13288 || SCALAR_FLOAT_TYPE_P (t1)
13289 || FIXED_POINT_TYPE_P (t1)
13290 || TREE_CODE (t1) == VECTOR_TYPE
13291 || TREE_CODE (t1) == COMPLEX_TYPE
13292 || TREE_CODE (t1) == OFFSET_TYPE
13293 || POINTER_TYPE_P (t1))
13294 {
13295 /* Can't be the same type if they have different recision. */
13296 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13297 return false;
13298
13299 /* In some cases the signed and unsigned types are required to be
13300 inter-operable. */
13301 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13302 && !type_with_interoperable_signedness (t1))
13303 return false;
13304
13305 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13306 interoperable with "signed char". Unless all frontends are revisited
13307 to agree on these types, we must ignore the flag completely. */
13308
13309 /* Fortran standard define C_PTR type that is compatible with every
13310 C pointer. For this reason we need to glob all pointers into one.
13311 Still pointers in different address spaces are not compatible. */
13312 if (POINTER_TYPE_P (t1))
13313 {
13314 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13315 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13316 return false;
13317 }
13318
13319 /* Tail-recurse to components. */
13320 if (TREE_CODE (t1) == VECTOR_TYPE
13321 || TREE_CODE (t1) == COMPLEX_TYPE)
13322 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13323 TREE_TYPE (t2),
13324 trust_type_canonical);
13325
13326 return true;
13327 }
13328
13329 /* Do type-specific comparisons. */
13330 switch (TREE_CODE (t1))
13331 {
13332 case ARRAY_TYPE:
13333 /* Array types are the same if the element types are the same and
13334 the number of elements are the same. */
13335 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13336 trust_type_canonical)
13337 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13338 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13339 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13340 return false;
13341 else
13342 {
13343 tree i1 = TYPE_DOMAIN (t1);
13344 tree i2 = TYPE_DOMAIN (t2);
13345
13346 /* For an incomplete external array, the type domain can be
13347 NULL_TREE. Check this condition also. */
13348 if (i1 == NULL_TREE && i2 == NULL_TREE)
13349 return true;
13350 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13351 return false;
13352 else
13353 {
13354 tree min1 = TYPE_MIN_VALUE (i1);
13355 tree min2 = TYPE_MIN_VALUE (i2);
13356 tree max1 = TYPE_MAX_VALUE (i1);
13357 tree max2 = TYPE_MAX_VALUE (i2);
13358
13359 /* The minimum/maximum values have to be the same. */
13360 if ((min1 == min2
13361 || (min1 && min2
13362 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13363 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13364 || operand_equal_p (min1, min2, 0))))
13365 && (max1 == max2
13366 || (max1 && max2
13367 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13368 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13369 || operand_equal_p (max1, max2, 0)))))
13370 return true;
13371 else
13372 return false;
13373 }
13374 }
13375
13376 case METHOD_TYPE:
13377 case FUNCTION_TYPE:
13378 /* Function types are the same if the return type and arguments types
13379 are the same. */
13380 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13381 trust_type_canonical))
13382 return false;
13383
13384 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13385 return true;
13386 else
13387 {
13388 tree parms1, parms2;
13389
13390 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13391 parms1 && parms2;
13392 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13393 {
13394 if (!gimple_canonical_types_compatible_p
13395 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13396 trust_type_canonical))
13397 return false;
13398 }
13399
13400 if (parms1 || parms2)
13401 return false;
13402
13403 return true;
13404 }
13405
13406 case RECORD_TYPE:
13407 case UNION_TYPE:
13408 case QUAL_UNION_TYPE:
13409 {
13410 tree f1, f2;
13411
13412 /* Don't try to compare variants of an incomplete type, before
13413 TYPE_FIELDS has been copied around. */
13414 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13415 return true;
13416
13417
13418 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13419 return false;
13420
13421 /* For aggregate types, all the fields must be the same. */
13422 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13423 f1 || f2;
13424 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13425 {
13426 /* Skip non-fields and zero-sized fields. */
13427 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13428 || (DECL_SIZE (f1)
13429 && integer_zerop (DECL_SIZE (f1)))))
13430 f1 = TREE_CHAIN (f1);
13431 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13432 || (DECL_SIZE (f2)
13433 && integer_zerop (DECL_SIZE (f2)))))
13434 f2 = TREE_CHAIN (f2);
13435 if (!f1 || !f2)
13436 break;
13437 /* The fields must have the same name, offset and type. */
13438 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13439 || !gimple_compare_field_offset (f1, f2)
13440 || !gimple_canonical_types_compatible_p
13441 (TREE_TYPE (f1), TREE_TYPE (f2),
13442 trust_type_canonical))
13443 return false;
13444 }
13445
13446 /* If one aggregate has more fields than the other, they
13447 are not the same. */
13448 if (f1 || f2)
13449 return false;
13450
13451 return true;
13452 }
13453
13454 default:
13455 /* Consider all types with language specific trees in them mutually
13456 compatible. This is executed only from verify_type and false
13457 positives can be tolerated. */
13458 gcc_assert (!in_lto_p);
13459 return true;
13460 }
13461 }
13462
/* Verify type T.

   Checks internal consistency of T: its main variant, its canonical
   type, and the type-code-specific uses of TYPE_MIN_VALUE_RAW,
   TYPE_MAX_VALUE_RAW and TYPE_VALUES_RAW (including the small-integer
   value cache).  Each inconsistency is reported with error () and
   debug_tree; if any were found, the function aborts the compiler via
   internal_error at the end.  */

void
verify_type (const_tree t)
{
  bool error_found = false;

  /* Every type must have a main variant, and a main variant must be
     its own main variant.  */
  tree mv = TYPE_MAIN_VARIANT (t);
  if (!mv)
    {
      error ("main variant is not defined");
      error_found = true;
    }
  else if (mv != TYPE_MAIN_VARIANT (mv))
    {
      error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
      debug_tree (mv);
      error_found = true;
    }
  else if (t != mv && !verify_type_variant (t, mv))
    error_found = true;

  tree ct = TYPE_CANONICAL (t);
  if (!ct)
    ;
  /* NOTE(review): CT was just initialized to TYPE_CANONICAL (t), so this
     condition can never be true and the branch is dead.  The intent is
     presumably TYPE_CANONICAL (ct) != ct, mirroring the main-variant
     check above -- confirm against upstream before changing.  */
  else if (TYPE_CANONICAL (t) != ct)
    {
      error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
      debug_tree (ct);
      error_found = true;
    }
  /* Method and function types cannot be used to address memory and thus
     TYPE_CANONICAL really matters only for determining useless conversions.

     FIXME: C++ FE produce declarations of builtin functions that are not
     compatible with main variants.  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    ;
  else if (t != ct
	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
	      with variably sized arrays because their sizes possibly
	      gimplified to different variables.  */
	   && !variably_modified_type_p (ct, NULL)
	   && !gimple_canonical_types_compatible_p (t, ct, false)
	   && COMPLETE_TYPE_P (t))
    {
      error ("%<TYPE_CANONICAL%> is not compatible");
      debug_tree (ct);
      error_found = true;
    }

  /* A type and its canonical type must share a mode, since canonical
     types are used for TBAA/useless-conversion decisions.  */
  if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
      && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
    {
      error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
      debug_tree (ct);
      error_found = true;
    }
  if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
    {
      error ("%<TYPE_CANONICAL%> of main variant is not main variant");
      debug_tree (ct);
      debug_tree (TYPE_MAIN_VARIANT (ct));
      error_found = true;
    }


  /* Check various uses of TYPE_MIN_VALUE_RAW.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
	 and dangle the pointer from time to time.  */
      if (TYPE_VFIELD (t)
	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
	{
	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
	  debug_tree (TYPE_VFIELD (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == POINTER_TYPE)
    {
      if (TYPE_NEXT_PTR_TO (t)
	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
	{
	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
	  debug_tree (TYPE_NEXT_PTR_TO (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == REFERENCE_TYPE)
    {
      if (TYPE_NEXT_REF_TO (t)
	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
	{
	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
	  debug_tree (TYPE_NEXT_REF_TO (t));
	  error_found = true;
	}
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
	   || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
	  useless_type_conversion_p (const_cast <tree> (t),
				     TREE_TYPE (TYPE_MIN_VALUE (t))
	 but does not for C sizetypes in LTO.  */
    }

  /* Check various uses of TYPE_MAXVAL_RAW.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (!TYPE_BINFO (t))
	;
      else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
	{
	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
	  debug_tree (TYPE_BINFO (t));
	  error_found = true;
	}
      else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
	{
	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    {
      if (TYPE_METHOD_BASETYPE (t)
	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
	{
	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
	  debug_tree (TYPE_METHOD_BASETYPE (t));
	  error_found = true;
	}
    }
  else if (TREE_CODE (t) == OFFSET_TYPE)
    {
      if (TYPE_OFFSET_BASETYPE (t)
	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
	{
	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
	  debug_tree (TYPE_OFFSET_BASETYPE (t));
	  error_found = true;
	}
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
	   || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
	  useless_type_conversion_p (const_cast <tree> (t),
				     TREE_TYPE (TYPE_MAX_VALUE (t))
	 but does not for C sizetypes in LTO.  */
    }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_ARRAY_MAX_SIZE (t)
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
	{
	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
	  error_found = true;
	}
    }
  /* For all remaining codes the raw maxval slot must be unused.  */
  else if (TYPE_MAX_VALUE_RAW (t))
    {
      error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
      debug_tree (TYPE_MAX_VALUE_RAW (t));
      error_found = true;
    }

  /* Language-specific slots must have been dropped by LTO streaming.  */
  if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
    {
      error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
      debug_tree (TYPE_LANG_SLOT_1 (t));
      error_found = true;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
      {
	tree value = TREE_VALUE (l);
	tree name = TREE_PURPOSE (l);

	/* C FE produce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
	   CONST_DECL of ENUMERAL TYPE.  */
	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
	  {
	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
	  {
	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
		   "to the enum");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
	if (TREE_CODE (name) != IDENTIFIER_NODE)
	  {
	    error ("enum value name is not %<IDENTIFIER_NODE%>");
	    debug_tree (value);
	    debug_tree (name);
	    error_found = true;
	  }
      }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
	{
	  error ("array %<TYPE_DOMAIN%> is not integer type");
	  debug_tree (TYPE_DOMAIN (t));
	  error_found = true;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
	{
	  error ("%<TYPE_FIELDS%> defined in incomplete type");
	  error_found = true;
	}
      for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
	{
	  /* TODO: verify properties of decls.  */
	  if (TREE_CODE (fld) == FIELD_DECL)
	    ;
	  else if (TREE_CODE (fld) == TYPE_DECL)
	    ;
	  else if (TREE_CODE (fld) == CONST_DECL)
	    ;
	  else if (VAR_P (fld))
	    ;
	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
	    ;
	  else if (TREE_CODE (fld) == USING_DECL)
	    ;
	  else if (TREE_CODE (fld) == FUNCTION_DECL)
	    ;
	  else
	    {
	      error ("wrong tree in %<TYPE_FIELDS%> list");
	      debug_tree (fld);
	      error_found = true;
	    }
	}
    }
  else if (TREE_CODE (t) == INTEGER_TYPE
	   || TREE_CODE (t) == BOOLEAN_TYPE
	   || TREE_CODE (t) == OFFSET_TYPE
	   || TREE_CODE (t) == REFERENCE_TYPE
	   || TREE_CODE (t) == NULLPTR_TYPE
	   || TREE_CODE (t) == POINTER_TYPE)
    {
      /* The flag and the cache vector must agree on whether a cache
	 exists.  */
      if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
	{
	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
		 "is %p",
		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
	  error_found = true;
	}
      else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
	{
	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
	  debug_tree (TYPE_CACHED_VALUES (t));
	  error_found = true;
	}
      /* Verify just enough of cache to ensure that no one copied it to new type.
	 All copying should go by copy_node that should clear it.  */
      else if (TYPE_CACHED_VALUES_P (t))
	{
	  int i;
	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
	      {
		error ("wrong %<TYPE_CACHED_VALUES%> entry");
		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
		error_found = true;
		break;
	      }
	}
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
      {
	/* C++ FE uses TREE_PURPOSE to store initial values.  */
	if (TREE_PURPOSE (l) && in_lto_p)
	  {
	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
	    debug_tree (l);
	    error_found = true;
	  }
	if (!TYPE_P (TREE_VALUE (l)))
	  {
	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
	    debug_tree (l);
	    error_found = true;
	  }
      }
  else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
    {
      error ("%<TYPE_VALUES_RAW%> field is non-NULL");
      debug_tree (TYPE_VALUES_RAW (t));
      error_found = true;
    }
  /* Only the codes listed above may carry a value cache.  */
  if (TREE_CODE (t) != INTEGER_TYPE
      && TREE_CODE (t) != BOOLEAN_TYPE
      && TREE_CODE (t) != OFFSET_TYPE
      && TREE_CODE (t) != REFERENCE_TYPE
      && TREE_CODE (t) != NULLPTR_TYPE
      && TREE_CODE (t) != POINTER_TYPE
      && TYPE_CACHED_VALUES_P (t))
    {
      error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
      error_found = true;
    }

  /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
     of a type.  */
  if (TREE_CODE (t) == METHOD_TYPE
      && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
    {
      error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
      error_found = true;
    }

  if (error_found)
    {
      debug_tree (const_cast <tree> (t));
      internal_error ("%qs failed", __func__);
    }
}
13805
13806
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* Depth guard: both look-through loops below give up after 30 steps.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* Sign-extend the constant to PREC bits and test its sign bit.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through non-widening integral conversions on tree level.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* No usable global range for ARG itself: walk through conversion
     definitions in the SSA web until a range is found or we give up.  */
  value_range r;
  while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* Same zero-extension argument as in the loop above.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
13885
13886
13887
13888
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  ARG must be a PARM_DECL of pointer
   (or OFFSET_TYPE) type belonging to cfun.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  /* A declaration may carry several "nonnull" attributes; the loop
     variable ATTRS is advanced to each successive occurrence by
     lookup_attribute inside the body.  */
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature
	 (positions are 1-based).  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
13951
13952 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13953 information. */
13954
13955 location_t
13956 set_block (location_t loc, tree block)
13957 {
13958 location_t pure_loc = get_pure_location (loc);
13959 source_range src_range = get_range_from_loc (line_table, loc);
13960 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13961 }
13962
13963 location_t
13964 set_source_range (tree expr, location_t start, location_t finish)
13965 {
13966 source_range src_range;
13967 src_range.m_start = start;
13968 src_range.m_finish = finish;
13969 return set_source_range (expr, src_range);
13970 }
13971
13972 location_t
13973 set_source_range (tree expr, source_range src_range)
13974 {
13975 if (!EXPR_P (expr))
13976 return UNKNOWN_LOCATION;
13977
13978 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13979 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13980 pure_loc,
13981 src_range,
13982 NULL);
13983 SET_EXPR_LOCATION (expr, adhoc);
13984 return adhoc;
13985 }
13986
/* Return EXPR, potentially wrapped with a node expression LOC,
   if !CAN_HAVE_LOCATION_P (expr).

   NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
   VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.

   Wrapper nodes can be identified using location_wrapper_p.  */

tree
maybe_wrap_with_location (tree expr, location_t loc)
{
  if (expr == NULL)
    return NULL;
  if (loc == UNKNOWN_LOCATION)
    return expr;
  if (CAN_HAVE_LOCATION_P (expr))
    return expr;
  /* We should only be adding wrappers for constants and for decls,
     or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
  gcc_assert (CONSTANT_CLASS_P (expr)
	      || DECL_P (expr)
	      || EXCEPTIONAL_CLASS_P (expr));

  /* For now, don't add wrappers to exceptional tree nodes, to minimize
     any impact of the wrapper nodes.  */
  if (EXCEPTIONAL_CLASS_P (expr))
    return expr;

  /* Compiler-generated temporary variables don't need a wrapper.  */
  if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
    return expr;

  /* If any auto_suppress_location_wrappers are active, don't create
     wrappers.  */
  if (suppress_location_wrappers > 0)
    return expr;

  /* Choose the wrapper code: NON_LVALUE_EXPR for non-string constants
     and for non-static CONST_DECLs, VIEW_CONVERT_EXPR otherwise.  */
  tree_code code
    = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
       ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
  tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
  /* Mark this node as being a wrapper.  */
  EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
  return wrapper;
}
14033
/* When positive, maybe_wrap_with_location returns EXPR unwrapped;
   incremented/decremented by auto_suppress_location_wrappers.  */
int suppress_location_wrappers;
14035
14036 /* Return the name of combined function FN, for debugging purposes. */
14037
14038 const char *
14039 combined_fn_name (combined_fn fn)
14040 {
14041 if (builtin_fn_p (fn))
14042 {
14043 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14044 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14045 }
14046 else
14047 return internal_fn_name (as_internal_fn (fn));
14048 }
14049
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's argument are nonnull.  The caller
   must free the bitmap.  Bit positions are zero-based; for METHOD_TYPE
   bit 0 corresponds to the implicit "this" pointer.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  Attribute indices are 1-based, hence
	 the "- 1".  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
14106
14107 /* Returns true if TYPE is a type where it and all of its subobjects
14108 (recursively) are of structure, union, or array type. */
14109
14110 bool
14111 is_empty_type (const_tree type)
14112 {
14113 if (RECORD_OR_UNION_TYPE_P (type))
14114 {
14115 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14116 if (TREE_CODE (field) == FIELD_DECL
14117 && !DECL_PADDING_P (field)
14118 && !is_empty_type (TREE_TYPE (field)))
14119 return false;
14120 return true;
14121 }
14122 else if (TREE_CODE (type) == ARRAY_TYPE)
14123 return (integer_minus_onep (array_type_nelts (type))
14124 || TYPE_DOMAIN (type) == NULL_TREE
14125 || is_empty_type (TREE_TYPE (type)));
14126 return false;
14127 }
14128
14129 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14130 that shouldn't be passed via stack. */
14131
14132 bool
14133 default_is_empty_record (const_tree type)
14134 {
14135 if (!abi_version_at_least (12))
14136 return false;
14137
14138 if (type == error_mark_node)
14139 return false;
14140
14141 if (TREE_ADDRESSABLE (type))
14142 return false;
14143
14144 return is_empty_type (TYPE_MAIN_VARIANT (type));
14145 }
14146
14147 /* Determine whether TYPE is a structure with a flexible array member,
14148 or a union containing such a structure (possibly recursively). */
14149
14150 bool
14151 flexible_array_type_p (const_tree type)
14152 {
14153 tree x, last;
14154 switch (TREE_CODE (type))
14155 {
14156 case RECORD_TYPE:
14157 last = NULL_TREE;
14158 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14159 if (TREE_CODE (x) == FIELD_DECL)
14160 last = x;
14161 if (last == NULL_TREE)
14162 return false;
14163 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14164 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14165 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14166 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14167 return true;
14168 return false;
14169 case UNION_TYPE:
14170 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14171 {
14172 if (TREE_CODE (x) == FIELD_DECL
14173 && flexible_array_type_p (TREE_TYPE (x)))
14174 return true;
14175 }
14176 return false;
14177 default:
14178 return false;
14179 }
14180 }
14181
14182 /* Like int_size_in_bytes, but handle empty records specially. */
14183
14184 HOST_WIDE_INT
14185 arg_int_size_in_bytes (const_tree type)
14186 {
14187 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14188 }
14189
14190 /* Like size_in_bytes, but handle empty records specially. */
14191
14192 tree
14193 arg_size_in_bytes (const_tree type)
14194 {
14195 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14196 }
14197
14198 /* Return true if an expression with CODE has to have the same result type as
14199 its first operand. */
14200
14201 bool
14202 expr_type_first_operand_type_p (tree_code code)
14203 {
14204 switch (code)
14205 {
14206 case NEGATE_EXPR:
14207 case ABS_EXPR:
14208 case BIT_NOT_EXPR:
14209 case PAREN_EXPR:
14210 case CONJ_EXPR:
14211
14212 case PLUS_EXPR:
14213 case MINUS_EXPR:
14214 case MULT_EXPR:
14215 case TRUNC_DIV_EXPR:
14216 case CEIL_DIV_EXPR:
14217 case FLOOR_DIV_EXPR:
14218 case ROUND_DIV_EXPR:
14219 case TRUNC_MOD_EXPR:
14220 case CEIL_MOD_EXPR:
14221 case FLOOR_MOD_EXPR:
14222 case ROUND_MOD_EXPR:
14223 case RDIV_EXPR:
14224 case EXACT_DIV_EXPR:
14225 case MIN_EXPR:
14226 case MAX_EXPR:
14227 case BIT_IOR_EXPR:
14228 case BIT_XOR_EXPR:
14229 case BIT_AND_EXPR:
14230
14231 case LSHIFT_EXPR:
14232 case RSHIFT_EXPR:
14233 case LROTATE_EXPR:
14234 case RROTATE_EXPR:
14235 return true;
14236
14237 default:
14238 return false;
14239 }
14240 }
14241
14242 /* Return a typenode for the "standard" C type with a given name. */
14243 tree
14244 get_typenode_from_name (const char *name)
14245 {
14246 if (name == NULL || *name == '\0')
14247 return NULL_TREE;
14248
14249 if (strcmp (name, "char") == 0)
14250 return char_type_node;
14251 if (strcmp (name, "unsigned char") == 0)
14252 return unsigned_char_type_node;
14253 if (strcmp (name, "signed char") == 0)
14254 return signed_char_type_node;
14255
14256 if (strcmp (name, "short int") == 0)
14257 return short_integer_type_node;
14258 if (strcmp (name, "short unsigned int") == 0)
14259 return short_unsigned_type_node;
14260
14261 if (strcmp (name, "int") == 0)
14262 return integer_type_node;
14263 if (strcmp (name, "unsigned int") == 0)
14264 return unsigned_type_node;
14265
14266 if (strcmp (name, "long int") == 0)
14267 return long_integer_type_node;
14268 if (strcmp (name, "long unsigned int") == 0)
14269 return long_unsigned_type_node;
14270
14271 if (strcmp (name, "long long int") == 0)
14272 return long_long_integer_type_node;
14273 if (strcmp (name, "long long unsigned int") == 0)
14274 return long_long_unsigned_type_node;
14275
14276 gcc_unreachable ();
14277 }
14278
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Each entry pairs the node used in builtin signatures with the plain
   pointer node to fall back to, and the struct tag name.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
14292
/* Return the maximum object size, i.e. the largest value representable
   in ptrdiff_t (pointer subtraction within an object must not
   overflow).  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
14301
14302 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14303 parameter default to false and that weeds out error_mark_node. */
14304
14305 bool
14306 verify_type_context (location_t loc, type_context_kind context,
14307 const_tree type, bool silent_p)
14308 {
14309 if (type == error_mark_node)
14310 return true;
14311
14312 gcc_assert (TYPE_P (type));
14313 return (!targetm.verify_type_context
14314 || targetm.verify_type_context (loc, context, type, silent_p));
14315 }
14316
/* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators.  Return false if they may or may not name such
   a pair and, when nonnull, set *PCERTAIN to true if they certainly
   do not.  The names are matched against their Itanium C++ ABI
   mangled forms (e.g. _Znwm vs _ZdlPv).  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  /* Point PCERTAIN at a dummy so the code below can set it
     unconditionally.  */
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  /* Shortest valid forms are _Znwj (5) and _ZdlPv (6).  */
  if (new_len < 5 || delete_len < 6)
    return false;
  /* Strip up to two leading underscores from each name (assembler
     prefixes vary by target).  */
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  /* Re-check lengths after stripping.  */
  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  /* Operator delete's first parameter must be void* (Pv).  */
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      /* Sized delete: the size parameter must use the same size_t
	 mangling as the new operator.  */
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
14407
14408 /* Return the zero-based number corresponding to the argument being
14409 deallocated if FNDECL is a deallocation function or an out-of-bounds
14410 value if it isn't. */
14411
14412 unsigned
14413 fndecl_dealloc_argno (tree fndecl)
14414 {
14415 /* A call to operator delete isn't recognized as one to a built-in. */
14416 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14417 {
14418 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14419 return 0;
14420
14421 /* Avoid placement delete that's not been inlined. */
14422 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14423 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14424 || id_equal (fname, "_ZdaPvS_")) // array form
14425 return UINT_MAX;
14426 return 0;
14427 }
14428
14429 /* TODO: Handle user-defined functions with attribute malloc? Handle
14430 known non-built-ins like fopen? */
14431 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14432 {
14433 switch (DECL_FUNCTION_CODE (fndecl))
14434 {
14435 case BUILT_IN_FREE:
14436 case BUILT_IN_REALLOC:
14437 return 0;
14438 default:
14439 break;
14440 }
14441 return UINT_MAX;
14442 }
14443
14444 tree attrs = DECL_ATTRIBUTES (fndecl);
14445 if (!attrs)
14446 return UINT_MAX;
14447
14448 for (tree atfree = attrs;
14449 (atfree = lookup_attribute ("*dealloc", atfree));
14450 atfree = TREE_CHAIN (atfree))
14451 {
14452 tree alloc = TREE_VALUE (atfree);
14453 if (!alloc)
14454 continue;
14455
14456 tree pos = TREE_CHAIN (alloc);
14457 if (!pos)
14458 return 0;
14459
14460 pos = TREE_VALUE (pos);
14461 return TREE_INT_CST_LOW (pos) - 1;
14462 }
14463
14464 return UINT_MAX;
14465 }
14466
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF
   to the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  /* For an SSA name, look through a simple defining assignment to
     find the underlying decl or reference.  */
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  /* Only these RHS forms directly name an object we can
	     inspect; anything else (arithmetic etc.) is left alone.  */
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	/* Not an assignment (e.g. a PHI or default def); fall back to
	   the variable the SSA name is associated with, if any.  */
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    /* For a member access, check the FIELD_DECL itself.  */
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    /* Recurse on the pointer operand of the memory reference.  */
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
14520
14521 #if CHECKING_P
14522
14523 namespace selftest {
14524
14525 /* Selftests for tree. */
14526
14527 /* Verify that integer constants are sane. */
14528
14529 static void
14530 test_integer_constants ()
14531 {
14532 ASSERT_TRUE (integer_type_node != NULL);
14533 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14534
14535 tree type = integer_type_node;
14536
14537 tree zero = build_zero_cst (type);
14538 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14539 ASSERT_EQ (type, TREE_TYPE (zero));
14540
14541 tree one = build_int_cst (type, 1);
14542 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14543 ASSERT_EQ (type, TREE_TYPE (zero));
14544 }
14545
14546 /* Verify identifiers. */
14547
14548 static void
14549 test_identifiers ()
14550 {
14551 tree identifier = get_identifier ("foo");
14552 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14553 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14554 }
14555
14556 /* Verify LABEL_DECL. */
14557
14558 static void
14559 test_labels ()
14560 {
14561 tree identifier = get_identifier ("err");
14562 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14563 identifier, void_type_node);
14564 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14565 ASSERT_FALSE (FORCED_LABEL (label_decl));
14566 }
14567
14568 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14569 are given by VALS. */
14570
14571 static tree
14572 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
14573 {
14574 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14575 tree_vector_builder builder (type, vals.length (), 1);
14576 builder.splice (vals);
14577 return builder.build ();
14578 }
14579
14580 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14581
14582 static void
14583 check_vector_cst (const vec<tree> &expected, tree actual)
14584 {
14585 ASSERT_KNOWN_EQ (expected.length (),
14586 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14587 for (unsigned int i = 0; i < expected.length (); ++i)
14588 ASSERT_EQ (wi::to_wide (expected[i]),
14589 wi::to_wide (vector_cst_elt (actual, i)));
14590 }
14591
/* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   and that its elements match EXPECTED.  A duplicate encoding stores
   one element per pattern.  */

static void
check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
			    unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  /* One encoded element per pattern for a duplicate.  */
  ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
14606
/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  A "fill" encoding stores two elements per pattern and is
   neither a duplicate nor stepped.  */

static void
check_vector_cst_fill (const vec<tree> &expected, tree actual,
		       unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  /* Two encoded elements per pattern: background + foreground.  */
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
14622
/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  A stepped encoding stores
   three elements per pattern (base0, base1, and the step implied by
   their difference).  */

static void
check_vector_cst_stepped (const vec<tree> &expected, tree actual,
			  unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  /* Three encoded elements per pattern for a stepped series.  */
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
14637
/* Test the creation of VECTOR_CSTs.  Each case builds an 8-element
   vector of unsigned 16-bit ints and checks which compressed encoding
   (stepped, duplicate, or fill) the builder chooses.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series (elements[i] is 80 - i for i >= 1):
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
14721
14722 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14723 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14724 modifying its argument in-place. */
14725
14726 static void
14727 check_strip_nops (tree node, tree expected)
14728 {
14729 STRIP_NOPS (node);
14730 ASSERT_EQ (expected, node);
14731 }
14732
/* Verify location wrappers: wrapping constants and decls, the cases
   where no wrapper should be created, and that STRIP_NOPS removes
   wrappers again.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* Wrapping NULL_TREE is a no-op.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  /* STRING_CST wrappers use VIEW_CONVERT_EXPR (not NON_LVALUE_EXPR).  */
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
14795
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  Each predicate is exercised on integer, real
   (float), and complex constants for the values 0, 1 and -1, plus
   location-wrapped variants of the scalar ones.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants: real part is the value, imaginary part is 0.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  Note complex 1+0i counts as one.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  /* Note: all_onesp rejects complex -1+0i, unlike minus_onep below.  */
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  Unlike integer_onep, complex 1+0i fails
     because the imaginary part is not one.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop (accepts both integer and real zero).  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
15112
/* Check that string escaping works correctly, both with unlimited
   message length and with a small -fmessage-length cutoff (which
   keeps newlines literal instead of escaping them).  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15155
/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  /* Each test below is self-contained; the order mirrors the
     definition order above.  */
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15169
15170 } // namespace selftest
15171
15172 #endif /* CHECKING_P */
15173
15174 #include "gt-tree.h"