]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree.c
Come up with startswith function.
[thirdparty/gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71
/* Tree code classes.

   Expand all-tree.def into a table mapping each tree code to its
   tree_code_class.  The END_OF_BASE_TREE_CODES sentinel slots expand
   to tcc_exceptional.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
83
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  Sentinel slots get length 0.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
97
/* Names of tree components.
   Used for printing out the tree and error messages.
   Sentinel slots get the placeholder name "@dummy".  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
109
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries — the array is
   indexed by enum tree_code_class, so the order here must match the
   enum declaration order exactly.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
127
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  Counters of allocated nodes, indexed
   by tree code (tree_code_counts) or by node kind (the other two).  */

static uint64_t tree_code_counts[MAX_TREE_CODES];
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];
136
/* Human-readable labels for the statistics report.
   Keep in sync with tree.h:enum tree_node_kind — indexed by that enum.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
164
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  Pairs a type with its (stable) hash value.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
175
/* Hasher for the type hash table.  Entries are GC cache entries: an
   entry survives collection only while its type is otherwise live.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  /* Reuse the precomputed hash stored in the entry.  */
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep the entry across GC only if the cached type is still marked.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
187
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.
   int_cst_node is a scratch INTEGER_CST reused for lookups to avoid
   allocating a node per query.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  Lookups compare an existing node against a
   (type, value) pair, hence the asymmetric compare_type.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234
/* General tree->tree mapping structure for use in hash tables.  */

/* Maps a decl to its DECL_DEBUG_EXPR.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

/* Maps a decl to its DECL_VALUE_EXPR.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243
/* Hasher for decl -> tree vector maps.  Keys on the source decl's UID;
   entries are GC cache entries that live only while the key decl does.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  /* Keep the entry across GC only if the key decl is still marked.  */
  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

/* Maps a decl to its vector of debug arguments.  */
static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263
/* Forward declarations of local helpers.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

/* Well-known trees shared across the compiler, indexed by tree_index
   and integer_type_kind respectively.  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

/* Per-entry availability and type records for the __intN extensions.  */
bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[C][S] is true if a node with code C contains
   the TS structure S; filled in by initialize_tree_contains_struct.  */
bool tree_contains_struct[MAX_TREE_CODES][64];
276
/* Number of operands for each OpenMP clause.
   Indexed by enum omp_clause_code; the order here must match the enum
   declaration order exactly (compare omp_clause_code_name below).  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  2, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
};
363
/* Printable name of each OpenMP clause, indexed by enum omp_clause_code.
   Must stay in the same order as omp_clause_num_ops above.  Note that
   "to" appears twice on purpose: OMP_CLAUSE_TO_DECLARE (from
   "declare target to") and OMP_CLAUSE_TO print identically.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "allocate",
  "depend",
  "nontemporal",
  "uniform",
  "to",
  "link",
  "detach",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "from",
  "to",
  "map",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "if_present",
  "finalize",
};
449

/* Return the tree node structure used by tree code CODE.

   Declarations and types map wholesale from their code class; all
   expression-like classes share TS_EXP.  Constants and exceptional
   codes are mapped individually in the second switch.  Aborts on a
   code with no known structure.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      switch (code)
	{
	case CONST_DECL:	return TS_CONST_DECL;
	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
	case FIELD_DECL:	return TS_FIELD_DECL;
	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
	case LABEL_DECL:	return TS_LABEL_DECL;
	case PARM_DECL:		return TS_PARM_DECL;
	case RESULT_DECL:	return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL:		return TS_TYPE_DECL;
	case VAR_DECL:		return TS_VAR_DECL;
	default:		return TS_DECL_NON_COMMON;
	}

    case tcc_type:		return TS_TYPE_NON_COMMON;

    /* All expression classes share the TS_EXP structure.  */
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp:		return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST:		return TS_COMPLEX;
    case FIXED_CST:		return TS_FIXED_CST;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case STRING_CST:		return TS_STRING;
    case VECTOR_CST:		return TS_VECTOR;
    case VOID_CST:		return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case SSA_NAME:		return TS_SSA_NAME;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    case TREE_BINFO:		return TS_BINFO;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
519
520
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.

   For each tree code, mark its own TS structure and then, via the
   switch below, every TS structure it derives from (each case marks
   its immediate base; MARK_TS_* macros recurse up the hierarchy).
   The asserts at the end spot-check relationships that fold relies
   on.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
667
668
/* Init tree.c.  Create the GC-cached hash tables and scratch nodes
   used throughout this file, then set up the tree structure hierarchy
   table and give the front end a chance to extend it.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Scratch INTEGER_CST reused for hash-table lookups.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
699
700 \f
701 /* The name of the object as the assembler will see it (but before any
702 translations made by ASM_OUTPUT_LABELREF). Often this is the same
703 as DECL_NAME. It is an IDENTIFIER_NODE. */
704 tree
705 decl_assembler_name (tree decl)
706 {
707 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
708 lang_hooks.set_decl_assembler_name (decl);
709 return DECL_ASSEMBLER_NAME_RAW (decl);
710 }
711
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713 (either of which may be NULL). Inform the FE, if this changes the
714 name. */
715
716 void
717 overwrite_decl_assembler_name (tree decl, tree name)
718 {
719 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
720 lang_hooks.overwrite_decl_assembler_name (decl, name);
721 }
722
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the canonical TYPE_DECL of a main-variant, non-artificial
	 type with linkage (or an INTEGER_TYPE) qualifies; for C++
	 records/unions additionally only ODR types.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
801
802 /* If T needs an assembler name, have one created for it. */
803
804 void
805 assign_assembler_name_if_needed (tree t)
806 {
807 if (need_assembler_name_p (t))
808 {
809 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
810 diagnostics that use input_location to show locus
811 information. The problem here is that, at this point,
812 input_location is generally anchored to the end of the file
813 (since the parser is long gone), so we don't have a good
814 position to pin it to.
815
816 To alleviate this problem, this uses the location of T's
817 declaration. Examples of this are
818 testsuite/g++.dg/template/cond2.C and
819 testsuite/g++.dg/template/pr35240.C. */
820 location_t saved_location = input_location;
821 input_location = DECL_SOURCE_LOCATION (t);
822
823 decl_assembler_name (t);
824
825 input_location = saved_location;
826 }
827 }
828
829 /* When the target supports COMDAT groups, this indicates which group the
830 DECL is associated with. This can be either an IDENTIFIER_NODE or a
831 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
832 tree
833 decl_comdat_group (const_tree node)
834 {
835 struct symtab_node *snode = symtab_node::get (node);
836 if (!snode)
837 return NULL;
838 return snode->get_comdat_group ();
839 }
840
841 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
842 tree
843 decl_comdat_group_id (const_tree node)
844 {
845 struct symtab_node *snode = symtab_node::get (node);
846 if (!snode)
847 return NULL;
848 return snode->get_comdat_group_id ();
849 }
850
851 /* When the target supports named section, return its name as IDENTIFIER_NODE
852 or NULL if it is in no section. */
853 const char *
854 decl_section_name (const_tree node)
855 {
856 struct symtab_node *snode = symtab_node::get (node);
857 if (!snode)
858 return NULL;
859 return snode->get_section ();
860 }
861
862 /* Set section name of NODE to VALUE (that is expected to be
863 identifier node) */
864 void
865 set_decl_section_name (tree node, const char *value)
866 {
867 struct symtab_node *snode;
868
869 if (value == NULL)
870 {
871 snode = symtab_node::get (node);
872 if (!snode)
873 return;
874 }
875 else if (VAR_P (node))
876 snode = varpool_node::get_create (node);
877 else
878 snode = cgraph_node::get_create (node);
879 snode->set_section (value);
880 }
881
882 /* Set section name of NODE to match the section name of OTHER.
883
884 set_decl_section_name (decl, other) is equivalent to
885 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
886 efficient. */
887 void
888 set_decl_section_name (tree decl, const_tree other)
889 {
890 struct symtab_node *other_node = symtab_node::get (other);
891 if (other_node)
892 {
893 struct symtab_node *decl_node;
894 if (VAR_P (decl))
895 decl_node = varpool_node::get_create (decl);
896 else
897 decl_node = cgraph_node::get_create (decl);
898 decl_node->set_section (*other_node);
899 }
900 else
901 {
902 struct symtab_node *decl_node = symtab_node::get (decl);
903 if (!decl_node)
904 return;
905 decl_node->set_section (NULL);
906 }
907 }
908
909 /* Return TLS model of a variable NODE. */
910 enum tls_model
911 decl_tls_model (const_tree node)
912 {
913 struct varpool_node *snode = varpool_node::get (node);
914 if (!snode)
915 return TLS_MODEL_NONE;
916 return snode->tls_model;
917 }
918
919 /* Set TLS model of variable NODE to MODEL. */
920 void
921 set_decl_tls_model (tree node, enum tls_model model)
922 {
923 struct varpool_node *vnode;
924
925 if (model == TLS_MODEL_NONE)
926 {
927 vnode = varpool_node::get (node);
928 if (!vnode)
929 return;
930 }
931 else
932 vnode = varpool_node::get_create (node);
933 vnode->tls_model = model;
934 }
935
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR (use
   tree_size for an existing node instead).  Codes past NUM_TREE_CODES
   are language-specific and deferred to the lang hook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case OPAQUE_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp already embeds one operand slot; add the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
1047
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes,
   whose trailing arrays are measured from the node itself.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT element is embedded in tree_int_cst.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the trailing NUL beyond TREE_STRING_LENGTH.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
	        * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
1089
1090 /* Return tree node kind based on tree CODE. */
1091
1092 static tree_node_kind
1093 get_stats_node_kind (enum tree_code code)
1094 {
1095 enum tree_code_class type = TREE_CODE_CLASS (code);
1096
1097 switch (type)
1098 {
1099 case tcc_declaration: /* A decl node */
1100 return d_kind;
1101 case tcc_type: /* a type node */
1102 return t_kind;
1103 case tcc_statement: /* an expression with side effects */
1104 return s_kind;
1105 case tcc_reference: /* a reference */
1106 return r_kind;
1107 case tcc_expression: /* an expression */
1108 case tcc_comparison: /* a comparison expression */
1109 case tcc_unary: /* a unary arithmetic expression */
1110 case tcc_binary: /* a binary arithmetic expression */
1111 return e_kind;
1112 case tcc_constant: /* a constant */
1113 return c_kind;
1114 case tcc_exceptional: /* something random, like an identifier. */
1115 switch (code)
1116 {
1117 case IDENTIFIER_NODE:
1118 return id_kind;
1119 case TREE_VEC:
1120 return vec_kind;
1121 case TREE_BINFO:
1122 return binfo_kind;
1123 case SSA_NAME:
1124 return ssa_name_kind;
1125 case BLOCK:
1126 return b_kind;
1127 case CONSTRUCTOR:
1128 return constr_kind;
1129 case OMP_CLAUSE:
1130 return omp_clause_kind;
1131 default:
1132 return x_kind;
1133 }
1134 break;
1135 case tcc_vl_exp:
1136 return e_kind;
1137 default:
1138 gcc_unreachable ();
1139 }
1140 }
1141
1142 /* Record interesting allocation statistics for a tree node with CODE
1143 and LENGTH. */
1144
1145 static void
1146 record_node_allocation_statistics (enum tree_code code, size_t length)
1147 {
1148 if (!GATHER_STATISTICS)
1149 return;
1150
1151 tree_node_kind kind = get_stats_node_kind (code);
1152
1153 tree_code_counts[(int) code]++;
1154 tree_node_counts[(int) kind]++;
1155 tree_node_sizes[(int) kind] += length;
1156 }
1157
1158 /* Allocate and return a new UID from the DECL_UID namespace. */
1159
1160 int
1161 allocate_decl_uid (void)
1162 {
1163 return next_decl_uid++;
1164 }
1165
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* The GC allocation is cleared, so every field starts as zero.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements carry side effects by definition, except for the
	 markers emitted purely for debug information.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* Debug decls draw UIDs from a separate decreasing namespace so
	 they never perturb the DECL_UIDs of real decls.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type starts out as its own main variant and its own
	 canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  /* Option nodes own a separately GC-allocated option struct.  */
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1281
/* Free tree node NODE, returning its storage to the GC allocator and
   releasing any side data the node owns.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      enum tree_node_kind kind = get_stats_node_kind (code);

      /* Undo the bookkeeping done at allocation time; the counters
	 must never underflow.  */
      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  /* Some node kinds own separately allocated payloads; free those
     before freeing the node itself.  */
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  else if (code == OPTIMIZATION_NODE)
    cl_optimization_option_free (TREE_OPTIMIZATION (node));
  else if (code == TARGET_OPTION_NODE)
    cl_target_option_free (TREE_TARGET_OPTION (node));
  ggc_free (node);
}
1312 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs own external structure and cannot be bit-copied.  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  /* Start from a bitwise copy, then fix up fields that must not be
     shared between the original and the copy.  */
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Every decl copy gets a fresh UID; debug decls use their own
	 decreasing namespace.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Keep points-to info coherent by sharing the PT UID.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  /* The copy is not yet registered with the symbol table.  */
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the owned option struct so the copy can diverge.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1401
1402 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1403 For example, this can copy a list made of TREE_LIST nodes. */
1404
1405 tree
1406 copy_list (tree list)
1407 {
1408 tree head;
1409 tree prev, next;
1410
1411 if (list == 0)
1412 return 0;
1413
1414 head = prev = copy_node (list);
1415 next = TREE_CHAIN (list);
1416 while (next)
1417 {
1418 TREE_CHAIN (prev) = copy_node (next);
1419 prev = TREE_CHAIN (prev);
1420 next = TREE_CHAIN (next);
1421 }
1422 return head;
1423 }
1424
1425 \f
1426 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1427 INTEGER_CST with value CST and type TYPE. */
1428
1429 static unsigned int
1430 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1431 {
1432 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1433 /* We need extra HWIs if CST is an unsigned integer with its
1434 upper bit set. */
1435 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1436 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1437 return cst.get_len ();
1438 }
1439
/* Return a new INTEGER_CST with value CST and type TYPE.  The stored
   HOST_WIDE_INT elements are canonicalized (sign- or zero-extended)
   so that hashing and equality work element-wise without masking.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned value with the top bit set: materialize the zero
	 extension.  The topmost element keeps only the bits inside
	 the type's precision; any elements between are all-ones.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Zero-extend the partial top element of an unsigned value.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1471
/* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.
   Each coefficient must be an INTEGER_CST of type TYPE.  */

static tree
build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
			CXX_MEM_STAT_INFO)
{
  size_t length = sizeof (struct tree_poly_int_cst);
  record_node_allocation_statistics (POLY_INT_CST, length);

  tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, POLY_INT_CST);
  TREE_CONSTANT (t) = 1;
  TREE_TYPE (t) = type;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    POLY_INT_CST_COEFF (t, i) = coeffs[i];
  return t;
}
1490
1491 /* Create a constant tree that contains CST sign-extended to TYPE. */
1492
1493 tree
1494 build_int_cst (tree type, poly_int64 cst)
1495 {
1496 /* Support legacy code. */
1497 if (!type)
1498 type = integer_type_node;
1499
1500 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1501 }
1502
1503 /* Create a constant tree that contains CST zero-extended to TYPE. */
1504
1505 tree
1506 build_int_cstu (tree type, poly_uint64 cst)
1507 {
1508 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1509 }
1510
1511 /* Create a constant tree that contains CST sign-extended to TYPE. */
1512
1513 tree
1514 build_int_cst_type (tree type, poly_int64 cst)
1515 {
1516 gcc_assert (type);
1517 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1518 }
1519
1520 /* Constructs tree in type TYPE from with value given by CST. Signedness
1521 of CST is assumed to be the same as the signedness of TYPE. */
1522
1523 tree
1524 double_int_to_tree (tree type, double_int cst)
1525 {
1526 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1527 }
1528
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision first.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Polynomial value: mark every coefficient as overflowed
		 as well as the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1580
1581 /* These are the hash table functions for the hash table of INTEGER_CST
1582 nodes of a sizetype. */
1583
1584 /* Return the hash code X, an INTEGER_CST. */
1585
1586 hashval_t
1587 int_cst_hasher::hash (tree x)
1588 {
1589 const_tree const t = x;
1590 hashval_t code = TYPE_UID (TREE_TYPE (t));
1591 int i;
1592
1593 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1594 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1595
1596 return code;
1597 }
1598
/* Return nonzero if the value represented by X (an INTEGER_CST tree node)
   is the same as that represented by Y: same type and identical stored
   HOST_WIDE_INT elements.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  /* Types are compared by pointer identity; constants are only shared
     within a type.  */
  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  /* Elements are stored canonicalized, so element-wise comparison of
     the value part suffices.  */
  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}
1619
/* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
   SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
   number of slots that can be cached for the type.  Returns the cached
   constant, creating and caching it on first use.  */

static inline tree
cache_wide_int_in_type_cache (tree type, const wide_int &cst,
			      int slot, int max_slots)
{
  gcc_checking_assert (slot >= 0);
  /* Initialize cache.  */
  if (!TYPE_CACHED_VALUES_P (type))
    {
      TYPE_CACHED_VALUES_P (type) = 1;
      /* The cache is a TREE_VEC with MAX_SLOTS entries, lazily filled.  */
      TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
    }
  tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
  if (!t)
    {
      /* Create a new shared int.  */
      t = build_new_int_cst (type, cst);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
    }
  return t;
}
1644
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;		/* Slot index into the per-type cache, or -1.  */
  int limit = 0;	/* Number of slots in the per-type cache.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      /* A redundant all-zero or all-one top element would indicate a
	 non-canonical wide_int.  */
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether this value falls in the type's small-value
	 cache; the indices chosen here must match cache_integer_cst.  */
      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  Slot 0 holds -1, so shift by one.  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal constant already exists; discard the new node.  */
	ggc_free (nt);
    }

  return t;
}
1803
1804 hashval_t
1805 poly_int_cst_hasher::hash (tree t)
1806 {
1807 inchash::hash hstate;
1808
1809 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1810 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1811 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1812
1813 return hstate.end ();
1814 }
1815
1816 bool
1817 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1818 {
1819 if (TREE_TYPE (x) != y.first)
1820 return false;
1821 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1822 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1823 return false;
1824 return true;
1825 }
1826
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  The returned node is shared
   via a hash table, like INTEGER_CSTs.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  /* Canonicalize the coefficients to the type's precision.  */
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  /* Hash must match poly_int_cst_hasher::hash on an existing node.  */
  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  if (*slot == NULL_TREE)
    {
      /* Not previously shared: build the coefficient INTEGER_CSTs and
	 the POLY_INT_CST node, and remember it.  */
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1853
1854 /* Create a constant tree with value VALUE in type TYPE. */
1855
1856 tree
1857 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1858 {
1859 if (value.is_constant ())
1860 return wide_int_to_tree_1 (type, value.coeffs[0]);
1861 return build_poly_int_cst (type, value);
1862 }
1863
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;		/* Slot index into the per-type cache, or -1.  */
  int limit = 0;	/* Number of slots in the per-type cache.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* Slot 0 is the null pointer, slot 2 the value 1 (slot 1, the
	   type's maximum value, is handled only by wide_int_to_tree_1).  */
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = param_integer_share_limit + 1;

	  /* Slot 0 holds -1, so nonnegative values shift up by one.  */
	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  /* An entry already exists; that is only legitimate when the
	     caller announced it via MIGHT_DUPLICATE.  */
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
1993
1994
1995 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1996 and the rest are zeros. */
1997
1998 tree
1999 build_low_bits_mask (tree type, unsigned bits)
2000 {
2001 gcc_assert (bits <= TYPE_PRECISION (type));
2002
2003 return wide_int_to_tree (type, wi::mask (bits, false,
2004 TYPE_PRECISION (type)));
2005 }
2006
2007 /* Checks that X is integer constant that can be expressed in (unsigned)
2008 HOST_WIDE_INT without loss of precision. */
2009
2010 bool
2011 cst_and_fits_in_hwi (const_tree x)
2012 {
2013 return (TREE_CODE (x) == INTEGER_CST
2014 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2015 }
2016
/* Build a newly constructed VECTOR_CST with the given values of
   (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.
   The element slots are allocated but left zeroed for the caller
   to fill in.  */

tree
make_vector (unsigned log2_npatterns,
	     unsigned int nelts_per_pattern MEM_STAT_DECL)
{
  /* The encoding scheme only supports 1-3 elements per pattern.  */
  gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
  tree t;
  unsigned npatterns = 1 << log2_npatterns;
  unsigned encoded_nelts = npatterns * nelts_per_pattern;
  /* One element is already counted in struct tree_vector.  */
  unsigned length = (sizeof (struct tree_vector)
		     + (encoded_nelts - 1) * sizeof (tree));

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;
  VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
  VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;

  return t;
}
2042
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  /* An empty CONSTRUCTOR means a zero vector.  */
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* Flatten nested VECTOR_CST elements into scalars.
	     If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* A CONSTRUCTOR may supply fewer elements than the vector holds;
     pad the remainder with zeros.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
2075
/* Build a vector of type VECTYPE where all the elements are SCs.
   Returns a VECTOR_CST for constant SC, a VEC_DUPLICATE_EXPR for a
   variable-length vector, and a CONSTRUCTOR otherwise.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A duplicated constant encodes as a single pattern with one
	 element per pattern.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Fixed-length, non-constant element: spell out a CONSTRUCTOR
	 repeating SC for every lane.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
2111
2112 /* If TYPE is not a vector type, just return SC, otherwise return
2113 build_vector_from_val (TYPE, SC). */
2114
2115 tree
2116 build_uniform_cst (tree type, tree sc)
2117 {
2118 if (!VECTOR_TYPE_P (type))
2119 return sc;
2120
2121 return build_vector_from_val (type, sc);
2122 }
2123
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  /* A zero step degenerates to a uniform vector.  */
  if (integer_zerop (step))
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      /* A linear series is encoded as one pattern of three elements:
	 BASE, BASE + STEP, BASE + 2 * STEP; the builder extrapolates
	 the remaining lanes.  */
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
2147
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      /* Switch to an unsigned integer element of the same width so the
	 indices are well defined.  */
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Encode the series with three elements of one pattern; the builder
     extrapolates the remaining lanes.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
2170
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* Encode as COUNT patterns of 2 elements each; pushing COUNT * 2
     values supplies every element the encoding requires.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
2187
2188 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2189 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2190
2191 void
2192 recompute_constructor_flags (tree c)
2193 {
2194 unsigned int i;
2195 tree val;
2196 bool constant_p = true;
2197 bool side_effects_p = false;
2198 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2199
2200 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2201 {
2202 /* Mostly ctors will have elts that don't have side-effects, so
2203 the usual case is to scan all the elements. Hence a single
2204 loop for both const and side effects, rather than one loop
2205 each (with early outs). */
2206 if (!TREE_CONSTANT (val))
2207 constant_p = false;
2208 if (TREE_SIDE_EFFECTS (val))
2209 side_effects_p = true;
2210 }
2211
2212 TREE_SIDE_EFFECTS (c) = side_effects_p;
2213 TREE_CONSTANT (c) = constant_p;
2214 }
2215
2216 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2217 CONSTRUCTOR C. */
2218
2219 void
2220 verify_constructor_flags (tree c)
2221 {
2222 unsigned int i;
2223 tree val;
2224 bool constant_p = TREE_CONSTANT (c);
2225 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2226 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2227
2228 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2229 {
2230 if (constant_p && !TREE_CONSTANT (val))
2231 internal_error ("non-constant element in constant CONSTRUCTOR");
2232 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2233 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2234 }
2235 }
2236
2237 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2238 are in the vec pointed to by VALS. */
2239 tree
2240 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2241 {
2242 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2243
2244 TREE_TYPE (c) = type;
2245 CONSTRUCTOR_ELTS (c) = vals;
2246
2247 recompute_constructor_flags (c);
2248
2249 return c;
2250 }
2251
2252 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2253 INDEX and VALUE. */
2254 tree
2255 build_constructor_single (tree type, tree index, tree value)
2256 {
2257 vec<constructor_elt, va_gc> *v;
2258 constructor_elt elt = {index, value};
2259
2260 vec_alloc (v, 1);
2261 v->quick_push (elt);
2262
2263 return build_constructor (type, v);
2264 }
2265
2266
2267 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2268 are in a list pointed to by VALS. */
2269 tree
2270 build_constructor_from_list (tree type, tree vals)
2271 {
2272 tree t;
2273 vec<constructor_elt, va_gc> *v = NULL;
2274
2275 if (vals)
2276 {
2277 vec_alloc (v, list_length (vals));
2278 for (t = vals; t; t = TREE_CHAIN (t))
2279 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2280 }
2281
2282 return build_constructor (type, v);
2283 }
2284
2285 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2286 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2287 fields in the constructor remain null. */
2288
2289 tree
2290 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2291 {
2292 vec<constructor_elt, va_gc> *v = NULL;
2293
2294 for (tree t : vals)
2295 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2296
2297 return build_constructor (type, v);
2298 }
2299
2300 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2301 of elements, provided as index/value pairs. */
2302
2303 tree
2304 build_constructor_va (tree type, int nelts, ...)
2305 {
2306 vec<constructor_elt, va_gc> *v = NULL;
2307 va_list p;
2308
2309 va_start (p, nelts);
2310 vec_alloc (v, nelts);
2311 while (nelts--)
2312 {
2313 tree index = va_arg (p, tree);
2314 tree value = va_arg (p, tree);
2315 CONSTRUCTOR_APPEND_ELT (v, index, value);
2316 }
2317 va_end (p);
2318 return build_constructor (type, v);
2319 }
2320
2321 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2322
2323 tree
2324 build_clobber (tree type)
2325 {
2326 tree clobber = build_constructor (type, NULL);
2327 TREE_THIS_VOLATILE (clobber) = true;
2328 return clobber;
2329 }
2330
2331 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2332
2333 tree
2334 build_fixed (tree type, FIXED_VALUE_TYPE f)
2335 {
2336 tree v;
2337 FIXED_VALUE_TYPE *fp;
2338
2339 v = make_node (FIXED_CST);
2340 fp = ggc_alloc<fixed_value> ();
2341 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2342
2343 TREE_TYPE (v) = type;
2344 TREE_FIXED_CST_PTR (v) = fp;
2345 return v;
2346 }
2347
2348 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2349
2350 tree
2351 build_real (tree type, REAL_VALUE_TYPE d)
2352 {
2353 tree v;
2354 REAL_VALUE_TYPE *dp;
2355 int overflow = 0;
2356
2357 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2358 Consider doing it via real_convert now. */
2359
2360 v = make_node (REAL_CST);
2361 dp = ggc_alloc<real_value> ();
2362 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2363
2364 TREE_TYPE (v) = type;
2365 TREE_REAL_CST_PTR (v) = dp;
2366 TREE_OVERFLOW (v) = overflow;
2367 return v;
2368 }
2369
2370 /* Like build_real, but first truncate D to the type. */
2371
2372 tree
2373 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2374 {
2375 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2376 }
2377
2378 /* Return a new REAL_CST node whose type is TYPE
2379 and whose value is the integer value of the INTEGER_CST node I. */
2380
2381 REAL_VALUE_TYPE
2382 real_value_from_int_cst (const_tree type, const_tree i)
2383 {
2384 REAL_VALUE_TYPE d;
2385
2386 /* Clear all bits of the real value type so that we can later do
2387 bitwise comparisons to see if two values are the same. */
2388 memset (&d, 0, sizeof d);
2389
2390 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2391 TYPE_SIGN (TREE_TYPE (i)));
2392 return d;
2393 }
2394
2395 /* Given a tree representing an integer constant I, return a tree
2396 representing the same value as a floating-point constant of type TYPE. */
2397
2398 tree
2399 build_real_from_int_cst (tree type, const_tree i)
2400 {
2401 tree v;
2402 int overflow = TREE_OVERFLOW (i);
2403
2404 v = build_real (type, real_value_from_int_cst (type, i));
2405
2406 TREE_OVERFLOW (v) |= overflow;
2407 return v;
2408 }
2409
2410 /* Return a new REAL_CST node whose type is TYPE
2411 and whose value is the integer value I which has sign SGN. */
2412
2413 tree
2414 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2415 {
2416 REAL_VALUE_TYPE d;
2417
2418 /* Clear all bits of the real value type so that we can later do
2419 bitwise comparisons to see if two values are the same. */
2420 memset (&d, 0, sizeof d);
2421
2422 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2423 return build_real (type, d);
2424 }
2425
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  tree s = (tree) ggc_internal_alloc (size);

  /* Only the typed-node header needs clearing; the payload is fully
     written just below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  /* Always NUL-terminate, one byte past LEN.  */
  s->string.str[len] = '\0';

  return s;
}
2453
2454 /* Return a newly constructed COMPLEX_CST node whose value is
2455 specified by the real and imaginary parts REAL and IMAG.
2456 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2457 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2458
2459 tree
2460 build_complex (tree type, tree real, tree imag)
2461 {
2462 gcc_assert (CONSTANT_CLASS_P (real));
2463 gcc_assert (CONSTANT_CLASS_P (imag));
2464
2465 tree t = make_node (COMPLEX_CST);
2466
2467 TREE_REALPART (t) = real;
2468 TREE_IMAGPART (t) = imag;
2469 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2470 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2471 return t;
2472 }
2473
2474 /* Build a complex (inf +- 0i), such as for the result of cproj.
2475 TYPE is the complex tree type of the result. If NEG is true, the
2476 imaginary zero is negative. */
2477
2478 tree
2479 build_complex_inf (tree type, bool neg)
2480 {
2481 REAL_VALUE_TYPE rinf, rzero = dconst0;
2482
2483 real_inf (&rinf);
2484 rzero.sign = neg;
2485 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2486 build_real (TREE_TYPE (type), rzero));
2487 }
2488
2489 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2490 element is set to 1. In particular, this is 1 + i for complex types. */
2491
2492 tree
2493 build_each_one_cst (tree type)
2494 {
2495 if (TREE_CODE (type) == COMPLEX_TYPE)
2496 {
2497 tree scalar = build_one_cst (TREE_TYPE (type));
2498 return build_complex (type, scalar, scalar);
2499 }
2500 else
2501 return build_one_cst (type);
2502 }
2503
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector one is the scalar one duplicated into every lane.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The complex multiplicative identity is 1 + 0i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2541
2542 /* Return an integer of type TYPE containing all 1's in as much precision as
2543 it contains, or a complex or vector whose subparts are such integers. */
2544
2545 tree
2546 build_all_ones_cst (tree type)
2547 {
2548 if (TREE_CODE (type) == COMPLEX_TYPE)
2549 {
2550 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2551 return build_complex (type, scalar, scalar);
2552 }
2553 else
2554 return build_minus_one_cst (type);
2555 }
2556
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate -1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector -1 is the scalar -1 duplicated into every lane.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* Complex -1 is (-1) + 0i.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2596
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A zero vector is the scalar zero in every lane.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	/* Complex zero is 0 + 0i.  */
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Aggregates get an empty CONSTRUCTOR, which folds to all-zero
	 memory; anything else is converted from integer zero.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2637
2638
/* Build a BINFO with room for BASE_BINFOS base binfos.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vector is embedded at the tail of the node, so the
     allocation covers the header plus the embedded vec.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only clear the header; the embedded vec is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2660
2661 /* Create a CASE_LABEL_EXPR tree node and return it. */
2662
2663 tree
2664 build_case_label (tree low_value, tree high_value, tree label_decl)
2665 {
2666 tree t = make_node (CASE_LABEL_EXPR);
2667
2668 TREE_TYPE (t) = void_type_node;
2669 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2670
2671 CASE_LOW (t) = low_value;
2672 CASE_HIGH (t) = high_value;
2673 CASE_LABEL (t) = label_decl;
2674 CASE_CHAIN (t) = NULL_TREE;
2675
2676 return t;
2677 }
2678
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* tree_int_cst already contains one HOST_WIDE_INT; EXT_LEN - 1 more
     are appended to the allocation.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2710
2711 /* Build a newly constructed TREE_VEC node of length LEN. */
2712
2713 tree
2714 make_tree_vec (int len MEM_STAT_DECL)
2715 {
2716 tree t;
2717 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2718
2719 record_node_allocation_statistics (TREE_VEC, length);
2720
2721 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2722
2723 TREE_SET_CODE (t, TREE_VEC);
2724 TREE_VEC_LENGTH (t) = len;
2725
2726 return t;
2727 }
2728
2729 /* Grow a TREE_VEC node to new length LEN. */
2730
2731 tree
2732 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2733 {
2734 gcc_assert (TREE_CODE (v) == TREE_VEC);
2735
2736 int oldlen = TREE_VEC_LENGTH (v);
2737 gcc_assert (len > oldlen);
2738
2739 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2740 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2741
2742 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2743
2744 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2745
2746 TREE_VEC_LENGTH (v) = len;
2747
2748 return v;
2749 }
2750 \f
2751 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2752 fixed, and scalar, complex or vector. */
2753
2754 bool
2755 zerop (const_tree expr)
2756 {
2757 return (integer_zerop (expr)
2758 || real_zerop (expr)
2759 || fixed_zerop (expr));
2760 }
2761
2762 /* Return 1 if EXPR is the integer constant zero or a complex constant
2763 of zero, or a location wrapper for such a constant. */
2764
2765 bool
2766 integer_zerop (const_tree expr)
2767 {
2768 STRIP_ANY_LOCATION_WRAPPER (expr);
2769
2770 switch (TREE_CODE (expr))
2771 {
2772 case INTEGER_CST:
2773 return wi::to_wide (expr) == 0;
2774 case COMPLEX_CST:
2775 return (integer_zerop (TREE_REALPART (expr))
2776 && integer_zerop (TREE_IMAGPART (expr)));
2777 case VECTOR_CST:
2778 return (VECTOR_CST_NPATTERNS (expr) == 1
2779 && VECTOR_CST_DUPLICATE_P (expr)
2780 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2781 default:
2782 return false;
2783 }
2784 }
2785
2786 /* Return 1 if EXPR is the integer constant one or the corresponding
2787 complex constant, or a location wrapper for such a constant. */
2788
2789 bool
2790 integer_onep (const_tree expr)
2791 {
2792 STRIP_ANY_LOCATION_WRAPPER (expr);
2793
2794 switch (TREE_CODE (expr))
2795 {
2796 case INTEGER_CST:
2797 return wi::eq_p (wi::to_widest (expr), 1);
2798 case COMPLEX_CST:
2799 return (integer_onep (TREE_REALPART (expr))
2800 && integer_zerop (TREE_IMAGPART (expr)));
2801 case VECTOR_CST:
2802 return (VECTOR_CST_NPATTERNS (expr) == 1
2803 && VECTOR_CST_DUPLICATE_P (expr)
2804 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2805 default:
2806 return false;
2807 }
2808 }
2809
2810 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2811 return 1 if every piece is the integer constant one.
2812 Also return 1 for location wrappers for such a constant. */
2813
2814 bool
2815 integer_each_onep (const_tree expr)
2816 {
2817 STRIP_ANY_LOCATION_WRAPPER (expr);
2818
2819 if (TREE_CODE (expr) == COMPLEX_CST)
2820 return (integer_onep (TREE_REALPART (expr))
2821 && integer_onep (TREE_IMAGPART (expr)));
2822 else
2823 return integer_onep (expr);
2824 }
2825
2826 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2827 it contains, or a complex or vector whose subparts are such integers,
2828 or a location wrapper for such a constant. */
2829
2830 bool
2831 integer_all_onesp (const_tree expr)
2832 {
2833 STRIP_ANY_LOCATION_WRAPPER (expr);
2834
2835 if (TREE_CODE (expr) == COMPLEX_CST
2836 && integer_all_onesp (TREE_REALPART (expr))
2837 && integer_all_onesp (TREE_IMAGPART (expr)))
2838 return true;
2839
2840 else if (TREE_CODE (expr) == VECTOR_CST)
2841 return (VECTOR_CST_NPATTERNS (expr) == 1
2842 && VECTOR_CST_DUPLICATE_P (expr)
2843 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2844
2845 else if (TREE_CODE (expr) != INTEGER_CST)
2846 return false;
2847
2848 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2849 == wi::to_wide (expr));
2850 }
2851
2852 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2853 for such a constant. */
2854
2855 bool
2856 integer_minus_onep (const_tree expr)
2857 {
2858 STRIP_ANY_LOCATION_WRAPPER (expr);
2859
2860 if (TREE_CODE (expr) == COMPLEX_CST)
2861 return (integer_all_onesp (TREE_REALPART (expr))
2862 && integer_zerop (TREE_IMAGPART (expr)));
2863 else
2864 return integer_all_onesp (expr);
2865 }
2866
2867 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2868 one bit on), or a location wrapper for such a constant. */
2869
2870 bool
2871 integer_pow2p (const_tree expr)
2872 {
2873 STRIP_ANY_LOCATION_WRAPPER (expr);
2874
2875 if (TREE_CODE (expr) == COMPLEX_CST
2876 && integer_pow2p (TREE_REALPART (expr))
2877 && integer_zerop (TREE_IMAGPART (expr)))
2878 return true;
2879
2880 if (TREE_CODE (expr) != INTEGER_CST)
2881 return false;
2882
2883 return wi::popcount (wi::to_wide (expr)) == 1;
2884 }
2885
2886 /* Return 1 if EXPR is an integer constant other than zero or a
2887 complex constant other than zero, or a location wrapper for such a
2888 constant. */
2889
2890 bool
2891 integer_nonzerop (const_tree expr)
2892 {
2893 STRIP_ANY_LOCATION_WRAPPER (expr);
2894
2895 return ((TREE_CODE (expr) == INTEGER_CST
2896 && wi::to_wide (expr) != 0)
2897 || (TREE_CODE (expr) == COMPLEX_CST
2898 && (integer_nonzerop (TREE_REALPART (expr))
2899 || integer_nonzerop (TREE_IMAGPART (expr)))));
2900 }
2901
2902 /* Return 1 if EXPR is the integer constant one. For vector,
2903 return 1 if every piece is the integer constant minus one
2904 (representing the value TRUE).
2905 Also return 1 for location wrappers for such a constant. */
2906
2907 bool
2908 integer_truep (const_tree expr)
2909 {
2910 STRIP_ANY_LOCATION_WRAPPER (expr);
2911
2912 if (TREE_CODE (expr) == VECTOR_CST)
2913 return integer_all_onesp (expr);
2914 return integer_onep (expr);
2915 }
2916
2917 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2918 for such a constant. */
2919
2920 bool
2921 fixed_zerop (const_tree expr)
2922 {
2923 STRIP_ANY_LOCATION_WRAPPER (expr);
2924
2925 return (TREE_CODE (expr) == FIXED_CST
2926 && TREE_FIXED_CST (expr).data.is_zero ());
2927 }
2928
/* Return the power of two represented by a tree node known to be a
   power of two.  */

int
tree_log2 (const_tree expr)
{
  /* For complex constants, the power is taken from the real part.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  /* exact_log2 yields -1 when the value is not a power of two.  */
  return wi::exact_log2 (wi::to_wide (expr));
}
2940
2941 /* Similar, but return the largest integer Y such that 2 ** Y is less
2942 than or equal to EXPR. */
2943
2944 int
2945 tree_floor_log2 (const_tree expr)
2946 {
2947 if (TREE_CODE (expr) == COMPLEX_CST)
2948 return tree_log2 (TREE_REALPART (expr));
2949
2950 return wi::floor_log2 (wi::to_wide (expr));
2951 }
2952
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of it's type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have a meaningful trailing-zero
     count here.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bits mask recorded for the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve the smaller of the two operands' counts.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only add trailing zeros, so take the larger count.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing-zero counts add under multiplication.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* A constant in-range shift count adds that many zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A constant in-range shift count removes that many zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the source was known to be entirely zero (count equal to
	 its full precision), the result is zero at the new precision
	 too.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must guarantee the count.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* Only the second operand's value matters.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive the count from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
3063
3064 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3065 decimal float constants, so don't return 1 for them.
3066 Also return 1 for location wrappers around such a constant. */
3067
3068 bool
3069 real_zerop (const_tree expr)
3070 {
3071 STRIP_ANY_LOCATION_WRAPPER (expr);
3072
3073 switch (TREE_CODE (expr))
3074 {
3075 case REAL_CST:
3076 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3077 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3078 case COMPLEX_CST:
3079 return real_zerop (TREE_REALPART (expr))
3080 && real_zerop (TREE_IMAGPART (expr));
3081 case VECTOR_CST:
3082 {
3083 /* Don't simply check for a duplicate because the predicate
3084 accepts both +0.0 and -0.0. */
3085 unsigned count = vector_cst_encoded_nelts (expr);
3086 for (unsigned int i = 0; i < count; ++i)
3087 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3088 return false;
3089 return true;
3090 }
3091 default:
3092 return false;
3093 }
3094 }
3095
3096 /* Return 1 if EXPR is the real constant one in real or complex form.
3097 Trailing zeroes matter for decimal float constants, so don't return
3098 1 for them.
3099 Also return 1 for location wrappers around such a constant. */
3100
3101 bool
3102 real_onep (const_tree expr)
3103 {
3104 STRIP_ANY_LOCATION_WRAPPER (expr);
3105
3106 switch (TREE_CODE (expr))
3107 {
3108 case REAL_CST:
3109 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3110 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3111 case COMPLEX_CST:
3112 return real_onep (TREE_REALPART (expr))
3113 && real_zerop (TREE_IMAGPART (expr));
3114 case VECTOR_CST:
3115 return (VECTOR_CST_NPATTERNS (expr) == 1
3116 && VECTOR_CST_DUPLICATE_P (expr)
3117 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3118 default:
3119 return false;
3120 }
3121 }
3122
3123 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3124 matter for decimal float constants, so don't return 1 for them.
3125 Also return 1 for location wrappers around such a constant. */
3126
3127 bool
3128 real_minus_onep (const_tree expr)
3129 {
3130 STRIP_ANY_LOCATION_WRAPPER (expr);
3131
3132 switch (TREE_CODE (expr))
3133 {
3134 case REAL_CST:
3135 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3136 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3137 case COMPLEX_CST:
3138 return real_minus_onep (TREE_REALPART (expr))
3139 && real_zerop (TREE_IMAGPART (expr));
3140 case VECTOR_CST:
3141 return (VECTOR_CST_NPATTERNS (expr) == 1
3142 && VECTOR_CST_DUPLICATE_P (expr)
3143 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3144 default:
3145 return false;
3146 }
3147 }
3148
3149 /* Nonzero if EXP is a constant or a cast of a constant. */
3150
3151 bool
3152 really_constant_p (const_tree exp)
3153 {
3154 /* This is not quite the same as STRIP_NOPS. It does more. */
3155 while (CONVERT_EXPR_P (exp)
3156 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3157 exp = TREE_OPERAND (exp, 0);
3158 return TREE_CONSTANT (exp);
3159 }
3160
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Validate every coefficient before writing any of them, so that
	 *VALUE is left untouched on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
3191
/* Return T's value as a signed poly_int64; T must satisfy
   tree_fits_poly_int64_p.  */

poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  return TREE_INT_CST_LOW (t);
}
3200
/* Return T's value as an unsigned poly_uint64; T must satisfy
   tree_fits_poly_uint64_p.  */

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  return TREE_INT_CST_LOW (t);
}
3209 \f
3210 /* Return first list element whose TREE_VALUE is ELEM.
3211 Return 0 if ELEM is not in LIST. */
3212
3213 tree
3214 value_member (tree elem, tree list)
3215 {
3216 while (list)
3217 {
3218 if (elem == TREE_VALUE (list))
3219 return list;
3220 list = TREE_CHAIN (list);
3221 }
3222 return NULL_TREE;
3223 }
3224
3225 /* Return first list element whose TREE_PURPOSE is ELEM.
3226 Return 0 if ELEM is not in LIST. */
3227
3228 tree
3229 purpose_member (const_tree elem, tree list)
3230 {
3231 while (list)
3232 {
3233 if (elem == TREE_PURPOSE (list))
3234 return list;
3235 list = TREE_CHAIN (list);
3236 }
3237 return NULL_TREE;
3238 }
3239
3240 /* Return true if ELEM is in V. */
3241
3242 bool
3243 vec_member (const_tree elem, vec<tree, va_gc> *v)
3244 {
3245 unsigned ix;
3246 tree t;
3247 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3248 if (elem == t)
3249 return true;
3250 return false;
3251 }
3252
3253 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3254 NULL_TREE. */
3255
3256 tree
3257 chain_index (int idx, tree chain)
3258 {
3259 for (; chain && idx > 0; --idx)
3260 chain = TREE_CHAIN (chain);
3261 return chain;
3262 }
3263
3264 /* Return nonzero if ELEM is part of the chain CHAIN. */
3265
3266 bool
3267 chain_member (const_tree elem, const_tree chain)
3268 {
3269 while (chain)
3270 {
3271 if (elem == chain)
3272 return true;
3273 chain = DECL_CHAIN (chain);
3274 }
3275
3276 return false;
3277 }
3278
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half the speed of P (tortoise and hare); if the
     chain is circular the two pointers must eventually coincide, and
     the assert below turns that into an ICE instead of a hang.  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      /* Advance Q every other iteration only.  */
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3305
3306 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3307 UNION_TYPE TYPE, or NULL_TREE if none. */
3308
3309 tree
3310 first_field (const_tree type)
3311 {
3312 tree t = TYPE_FIELDS (type);
3313 while (t && TREE_CODE (t) != FIELD_DECL)
3314 t = TREE_CHAIN (t);
3315 return t;
3316 }
3317
3318 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3319 UNION_TYPE TYPE, or NULL_TREE if none. */
3320
3321 tree
3322 last_field (const_tree type)
3323 {
3324 tree last = NULL_TREE;
3325
3326 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3327 {
3328 if (TREE_CODE (fld) != FIELD_DECL)
3329 continue;
3330
3331 last = fld;
3332 }
3333
3334 return last;
3335 }
3336
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  /* Either chain may be empty, in which case the other is the result.  */
  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Walk to the last node of OP1 and splice OP2 onto it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Make sure we did not just create a cycle: no node of OP2 may be
       the node OP2 was spliced onto.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3365
3366 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3367
3368 tree
3369 tree_last (tree chain)
3370 {
3371 tree next;
3372 if (chain)
3373 while ((next = TREE_CHAIN (chain)))
3374 chain = next;
3375 return chain;
3376 }
3377
3378 /* Reverse the order of elements in the chain T,
3379 and return the new head of the chain (old last element). */
3380
3381 tree
3382 nreverse (tree t)
3383 {
3384 tree prev = 0, decl, next;
3385 for (decl = t; decl; decl = next)
3386 {
3387 /* We shouldn't be using this function to reverse BLOCK chains; we
3388 have blocks_nreverse for that. */
3389 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3390 next = TREE_CHAIN (decl);
3391 TREE_CHAIN (decl) = prev;
3392 prev = decl;
3393 }
3394 return prev;
3395 }
3396 \f
3397 /* Return a newly created TREE_LIST node whose
3398 purpose and value fields are PARM and VALUE. */
3399
3400 tree
3401 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3402 {
3403 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3404 TREE_PURPOSE (t) = parm;
3405 TREE_VALUE (t) = value;
3406 return t;
3407 }
3408
3409 /* Build a chain of TREE_LIST nodes from a vector. */
3410
3411 tree
3412 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3413 {
3414 tree ret = NULL_TREE;
3415 tree *pp = &ret;
3416 unsigned int i;
3417 tree t;
3418 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3419 {
3420 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3421 pp = &TREE_CHAIN (*pp);
3422 }
3423 return ret;
3424 }
3425
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; the list-specific fields
     are all assigned explicitly below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
3446
3447 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3448 trees. */
3449
3450 vec<tree, va_gc> *
3451 ctor_to_vec (tree ctor)
3452 {
3453 vec<tree, va_gc> *vec;
3454 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3455 unsigned int ix;
3456 tree val;
3457
3458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3459 vec->quick_push (val);
3460
3461 return vec;
3462 }
3463 \f
3464 /* Return the size nominally occupied by an object of type TYPE
3465 when it resides in memory. The value is measured in units of bytes,
3466 and its data type is that normally used for type sizes
3467 (which is the first type created by make_signed_type or
3468 make_unsigned_type). */
3469
3470 tree
3471 size_in_bytes_loc (location_t loc, const_tree type)
3472 {
3473 tree t;
3474
3475 if (type == error_mark_node)
3476 return integer_zero_node;
3477
3478 type = TYPE_MAIN_VARIANT (type);
3479 t = TYPE_SIZE_UNIT (type);
3480
3481 if (t == 0)
3482 {
3483 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3484 return size_zero_node;
3485 }
3486
3487 return t;
3488 }
3489
3490 /* Return the size of TYPE (in bytes) as a wide integer
3491 or return -1 if the size can vary or is larger than an integer. */
3492
3493 HOST_WIDE_INT
3494 int_size_in_bytes (const_tree type)
3495 {
3496 tree t;
3497
3498 if (type == error_mark_node)
3499 return 0;
3500
3501 type = TYPE_MAIN_VARIANT (type);
3502 t = TYPE_SIZE_UNIT (type);
3503
3504 if (t && tree_fits_uhwi_p (t))
3505 return TREE_INT_CST_LOW (t);
3506 else
3507 return -1;
3508 }
3509
3510 /* Return the maximum size of TYPE (in bytes) as a wide integer
3511 or return -1 if the size can vary or is larger than an integer. */
3512
3513 HOST_WIDE_INT
3514 max_int_size_in_bytes (const_tree type)
3515 {
3516 HOST_WIDE_INT size = -1;
3517 tree size_tree;
3518
3519 /* If this is an array type, check for a possible MAX_SIZE attached. */
3520
3521 if (TREE_CODE (type) == ARRAY_TYPE)
3522 {
3523 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3524
3525 if (size_tree && tree_fits_uhwi_p (size_tree))
3526 size = tree_to_uhwi (size_tree);
3527 }
3528
3529 /* If we still haven't been able to get a size, see if the language
3530 can compute a maximum size. */
3531
3532 if (size == -1)
3533 {
3534 size_tree = lang_hooks.types.max_size (type);
3535
3536 if (size_tree && tree_fits_uhwi_p (size_tree))
3537 size = tree_to_uhwi (size_tree);
3538 }
3539
3540 return size;
3541 }
3542 \f
3543 /* Return the bit position of FIELD, in bits from the start of the record.
3544 This is a tree of type bitsizetype. */
3545
3546 tree
3547 bit_position (const_tree field)
3548 {
3549 return bit_from_pos (DECL_FIELD_OFFSET (field),
3550 DECL_FIELD_BIT_OFFSET (field));
3551 }
3552 \f
3553 /* Return the byte position of FIELD, in bytes from the start of the record.
3554 This is a tree of type sizetype. */
3555
3556 tree
3557 byte_position (const_tree field)
3558 {
3559 return byte_from_pos (DECL_FIELD_OFFSET (field),
3560 DECL_FIELD_BIT_OFFSET (field));
3561 }
3562
3563 /* Likewise, but return as an integer. It must be representable in
3564 that way (since it could be a signed value, we don't have the
3565 option of returning -1 like int_size_in_byte can. */
3566
3567 HOST_WIDE_INT
3568 int_byte_position (const_tree field)
3569 {
3570 return tree_to_shwi (byte_position (field));
3571 }
3572 \f
/* Return, as a tree node, the number of elements for TYPE (which is an
   ARRAY_TYPE) minus one.  This counts only elements of the top array.
   Return error_mark_node when no usable bound exists.  */

tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    {
      /* zero sized arrays are represented from C FE as complete types with
	 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
	 them as min 0, max -1.  Normalize the C form to max - min = -1.  */
      if (COMPLETE_TYPE_P (type)
	  && integer_zerop (TYPE_SIZE (type))
	  && integer_zerop (min))
	return build_int_cst (TREE_TYPE (min), -1);

      return error_mark_node;
    }

  /* When the lower bound is zero the upper bound already is the count
     minus one; otherwise compute max - min.  */
  return (integer_zerop (min)
	  ? max
	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
3608 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-local and dllimport'd variables have addresses that are
	 not link-time constants, so they don't count.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise the reference is static iff its base is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Static only when the element size and the index are both
	 compile-time constants and the array itself is static.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3674
3675 \f
3676
3677
3678 /* Return whether OP is a DECL whose address is function-invariant. */
3679
3680 bool
3681 decl_address_invariant_p (const_tree op)
3682 {
3683 /* The conditions below are slightly less strict than the one in
3684 staticp. */
3685
3686 switch (TREE_CODE (op))
3687 {
3688 case PARM_DECL:
3689 case RESULT_DECL:
3690 case LABEL_DECL:
3691 case FUNCTION_DECL:
3692 return true;
3693
3694 case VAR_DECL:
3695 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3696 || DECL_THREAD_LOCAL_P (op)
3697 || DECL_CONTEXT (op) == current_function_decl
3698 || decl_function_context (op) == current_function_decl)
3699 return true;
3700 break;
3701
3702 case CONST_DECL:
3703 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3704 || decl_function_context (op) == current_function_decl)
3705 return true;
3706 break;
3707
3708 default:
3709 break;
3710 }
3711
3712 return false;
3713 }
3714
3715 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3716
3717 bool
3718 decl_address_ip_invariant_p (const_tree op)
3719 {
3720 /* The conditions below are slightly less strict than the one in
3721 staticp. */
3722
3723 switch (TREE_CODE (op))
3724 {
3725 case LABEL_DECL:
3726 case FUNCTION_DECL:
3727 case STRING_CST:
3728 return true;
3729
3730 case VAR_DECL:
3731 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3732 && !DECL_DLLIMPORT_P (op))
3733 || DECL_THREAD_LOCAL_P (op))
3734 return true;
3735 break;
3736
3737 case CONST_DECL:
3738 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3739 return true;
3740 break;
3741
3742 default:
3743 break;
3744 }
3745
3746 return false;
3747 }
3748
3749
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants, and read-only trees without side effects, are invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      op = TREE_OPERAND (t, 0);
      /* Walk down the component chain; any variable index or
	 non-constant offset operand makes the address variant.  */
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* Operands 2 and 3 (lower bound / element size overrides)
		 must be absent for the address to be invariant.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* Finally the base object's address itself must be invariant.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3800
3801 /* Return true if T is function-invariant. */
3802
3803 bool
3804 tree_invariant_p (tree t)
3805 {
3806 tree inner = skip_simple_arithmetic (t);
3807 return tree_invariant_p_1 (inner);
3808 }
3809
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  /* Invariant cores need no protection; return EXPR unchanged.  */
  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3869
3870 /* Look inside EXPR into any simple arithmetic operations. Return the
3871 outermost non-arithmetic or non-invariant node. */
3872
3873 tree
3874 skip_simple_arithmetic (tree expr)
3875 {
3876 /* We don't care about whether this can be used as an lvalue in this
3877 context. */
3878 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3879 expr = TREE_OPERAND (expr, 0);
3880
3881 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3882 a constant, it will be more efficient to not make another SAVE_EXPR since
3883 it will allow better simplification and GCSE will be able to merge the
3884 computations if they actually occur. */
3885 while (true)
3886 {
3887 if (UNARY_CLASS_P (expr))
3888 expr = TREE_OPERAND (expr, 0);
3889 else if (BINARY_CLASS_P (expr))
3890 {
3891 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3892 expr = TREE_OPERAND (expr, 0);
3893 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3894 expr = TREE_OPERAND (expr, 1);
3895 else
3896 break;
3897 }
3898 else
3899 break;
3900 }
3901
3902 return expr;
3903 }
3904
3905 /* Look inside EXPR into simple arithmetic operations involving constants.
3906 Return the outermost non-arithmetic or non-constant node. */
3907
3908 tree
3909 skip_simple_constant_arithmetic (tree expr)
3910 {
3911 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3912 expr = TREE_OPERAND (expr, 0);
3913
3914 while (true)
3915 {
3916 if (UNARY_CLASS_P (expr))
3917 expr = TREE_OPERAND (expr, 0);
3918 else if (BINARY_CLASS_P (expr))
3919 {
3920 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3921 expr = TREE_OPERAND (expr, 0);
3922 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3923 expr = TREE_OPERAND (expr, 1);
3924 else
3925 break;
3926 }
3927 else
3928 break;
3929 }
3930
3931 return expr;
3932 }
3933
3934 /* Return which tree structure is used by T. */
3935
3936 enum tree_node_structure_enum
3937 tree_node_structure (const_tree t)
3938 {
3939 const enum tree_code code = TREE_CODE (t);
3940 return tree_node_structure_for_code (code);
3941 }
3942
/* Set various status flags when building a CALL_EXPR object T.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  /* I first holds the ECF_* flag mask; it is reused below as an
     operand index.  */
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Scan the operands (skipping operand 0, which is not an ordinary
     operand of a vl_exp) only when the outcome can still change.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3972 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which we assume here
	 will be valid.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* For the remaining codes, simply recurse on the operands.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* A call contains a placeholder iff one of its arguments
	       does.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
4059
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* Fully covered by the size/component checks above.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* A field makes the aggregate depend on a placeholder if its
	   offset, its qualifier (QUAL_UNION_TYPE only) or its type does.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
4128
/* Wrapper around above function used to cache its result.  */

bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  The field stores the answer biased
     by one: 0 = not yet computed, 1 = false, 2 = true.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
4153 \f
4154 /* Push tree EXP onto vector QUEUE if it is not already present. */
4155
4156 static void
4157 push_without_duplicates (tree exp, vec<tree> *queue)
4158 {
4159 unsigned int i;
4160 tree iter;
4161
4162 FOR_EACH_VEC_ELT (*queue, i, iter)
4163 if (simple_cst_equal (iter, exp) == 1)
4164 break;
4165
4166 if (!iter)
4167 queue->safe_push (exp);
4168 }
4169
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Strip the chain of references down to its base object.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* Record the whole reference when it bottoms out in a
	 PLACEHOLDER_EXPR; otherwise keep searching inside it.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Skip operand 0, which is not an ordinary operand of a vl_exp.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4243
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      /* Share the original node when nothing changed.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Recurse on each operand, rebuilding the node (folded) only
	   if at least one operand changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* NOTE(review): the scan starts at operand 3, presumably
		   the first actual argument of the CALL_EXPR.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  /* Copy the node lazily, only once a change is seen.  */
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original expression on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4439
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: walk down the chain of references in OBJ looking for
	 an element whose main variant type matches the placeholder's
	 type exactly.  The step expression descends into operand 1 of
	 COMPOUND_EXPR/COND_EXPR and operand 0 of anything expression-like;
	 it stops (yields 0) on anything else.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: same walk, but this time accept a pointer to the
	 needed type and dereference it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	/* Constants and decls cannot contain a PLACEHOLDER_EXPR.  */
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Recurse on each operand; if nothing changed, share EXP
	   unmodified rather than rebuilding it.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Operand 0 of a vl_exp holds the operand count, so start
	     substituting at operand 1; copy EXP lazily on first change.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* The rebuilt node keeps EXP's readonly and (for dereferences) no-trap
     flags, which folding may have dropped.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4610 \f
4611
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt sets neither the type nor these flags, so copy them from
     the original expression.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4697
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The address is an arbitrary expression, so use the subtree
	 stabilizer which wraps side-effecting operands in SAVE_EXPRs.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* The base is a reference (recurse here) but the index is a general
	 expression (use stabilize_reference_1).  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt sets neither the type nor these flags, so copy them from
     the original reference.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4780 \f
4781 /* Low-level constructors for expressions. */
4782
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Clear TC if NODE is non-constant, set SE if it has side effects.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Operand 1 is the index; operands 2 and 3 are the optional
	     lower bound and element size.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  /* Operand 2 is the optional variable field offset.  */
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4859
4860 /* Build an expression of code CODE, data type TYPE, and operands as
4861 specified. Expressions and reference nodes can be created this way.
4862 Constants, decls, types and misc nodes cannot be.
4863
4864 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4865 enough for all extant tree codes. */
4866
4867 tree
4868 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4869 {
4870 tree t;
4871
4872 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4873
4874 t = make_node (code PASS_MEM_STAT);
4875 TREE_TYPE (t) = tt;
4876
4877 return t;
4878 }
4879
/* Build a one-operand expression of code CODE with type TYPE and operand
   NODE, propagating side-effect, readonly, constant and volatility flags
   from the operand as appropriate for CODE.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common header is cleared; every other field is written
     explicitly below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  /* Types carry different flag semantics, so only inherit flags from a
     non-type operand.  */
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      /* Statements have side effects, except debug begin markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4943
/* Helper for build2 ... build5: store arg##N as operand N of T and, for
   non-type operands, accumulate into the local flags -- set SIDE_EFFECTS
   if the operand has side effects, clear READ_ONLY unless the operand is
   readonly or a constant, and clear CONSTANT unless the operand is
   constant.  The (void) casts silence unused-value warnings in callers
   that never read read_only or constant afterwards.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4958
/* Build a two-operand expression of code CODE with type TT and operands
   ARG0 and ARG1, computing the constant/readonly/side-effect/volatile
   flags from the operands.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Arithmetic on pointer types must go through POINTER_PLUS_EXPR;
     plain PLUS/MINUS/MULT with pointer type is only allowed on
     INTEGER_CST operands (constant folding of addresses).  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &X inherits readonly/volatile from X itself,
	 not from the address expression.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
5037
5038
/* Build a three-operand expression of code CODE with type TT and operands
   ARG0, ARG1 and ARG2.  CONSTANT is written by PROCESS_ARG but not
   otherwise consumed here.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR propagates readonly-ness of its operands.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5079
/* Build a four-operand expression of code CODE with type TT and operands
   ARG0 ... ARG3.  CONSTANT and READ_ONLY are written by PROCESS_ARG but
   not otherwise consumed here.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5106
/* Build a five-operand expression of code CODE with type TT and operands
   ARG0 ... ARG4.  CONSTANT and READ_ONLY are written by PROCESS_ARG but
   not otherwise consumed here (except via the TARGET_MEM_REF case).  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* Like MEM_REF in build2: a reference through &X inherits
	 readonly/volatile from X itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5144
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's constant offset into ours and use
	     its base pointer directly.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Operand 1 of a MEM_REF carries the offset and, through its type,
     the type for alias purposes -- here the original pointer type.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
5176
5177 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5178
5179 poly_offset_int
5180 mem_ref_offset (const_tree t)
5181 {
5182 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5183 SIGNED);
5184 }
5185
/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, poly_int64 offset)
{
  /* Express &BASE + OFFSET as the address of a MEM_REF.  */
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
			  build_fold_addr_expr (base),
			  build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  /* Ensure the TREE_CONSTANT/TREE_SIDE_EFFECTS bits reflect the new
     operand.  */
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}
5199
5200 /* Similar except don't specify the TREE_TYPE
5201 and leave the TREE_SIDE_EFFECTS as 0.
5202 It is permissible for arguments to be null,
5203 or even garbage if their values do not matter. */
5204
5205 tree
5206 build_nt (enum tree_code code, ...)
5207 {
5208 tree t;
5209 int length;
5210 int i;
5211 va_list p;
5212
5213 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5214
5215 va_start (p, code);
5216
5217 t = make_node (code);
5218 length = TREE_CODE_LENGTH (code);
5219
5220 for (i = 0; i < length; i++)
5221 TREE_OPERAND (t, i) = va_arg (p, tree);
5222
5223 va_end (p);
5224 return t;
5225 }
5226
/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  /* A CALL_EXPR reserves three leading operands (count, callee, static
     chain) before the actual arguments, hence the "+ 3".  */
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
5243 \f
/* Create a DECL_... node of code CODE, name NAME (if non-null)
   and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl (location_t loc, enum tree_code code, tree name,
	    tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

  /*  if (type == error_mark_node)
      type = integer_type_node; */
  /* That is not done, deliberately, so that having error_mark_node
     as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  /* Only variables, parameters and results get their layout computed
     eagerly; other decl kinds are laid out by their creators.  */
  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}
5275
5276 /* Builds and returns function declaration with NAME and TYPE. */
5277
5278 tree
5279 build_fn_decl (const char *name, tree type)
5280 {
5281 tree id = get_identifier (name);
5282 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5283
5284 DECL_EXTERNAL (decl) = 1;
5285 TREE_PUBLIC (decl) = 1;
5286 DECL_ARTIFICIAL (decl) = 1;
5287 TREE_NOTHROW (decl) = 1;
5288
5289 return decl;
5290 }
5291
/* Global list of all TRANSLATION_UNIT_DECLs created so far.  */
vec<tree, va_gc> *all_translation_units;
5293
5294 /* Builds a new translation-unit decl with name NAME, queues it in the
5295 global list of translation-unit decls and returns it. */
5296
5297 tree
5298 build_translation_unit_decl (tree name)
5299 {
5300 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5301 name, NULL_TREE);
5302 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5303 vec_safe_push (all_translation_units, tu);
5304 return tu;
5305 }
5306
5307 \f
5308 /* BLOCK nodes are used to represent the structure of binding contours
5309 and declarations, once those contours have been exited and their contents
5310 compiled. This information is used for outputting debugging info. */
5311
5312 tree
5313 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5314 {
5315 tree block = make_node (BLOCK);
5316
5317 BLOCK_VARS (block) = vars;
5318 BLOCK_SUBBLOCKS (block) = subblocks;
5319 BLOCK_SUPERCONTEXT (block) = supercontext;
5320 BLOCK_CHAIN (block) = chain;
5321 return block;
5322 }
5323
5324 \f
/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.

   LOC is the location to use in tree T.  */

void
protected_set_expr_location (tree t, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, loc);
  else if (t && TREE_CODE (t) == STATEMENT_LIST)
    {
      /* A STATEMENT_LIST cannot carry a location itself; if it wraps a
	 single expression, set the location on that expression instead.  */
      t = expr_single (t);
      if (t && CAN_HAVE_LOCATION_P (t))
	SET_EXPR_LOCATION (t, loc);
    }
}
5341
5342 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5343 UNKNOWN_LOCATION. */
5344
5345 void
5346 protected_set_expr_location_if_unset (tree t, location_t loc)
5347 {
5348 t = expr_single (t);
5349 if (t && !EXPR_HAS_LOCATION (t))
5350 protected_set_expr_location (t, loc);
5351 }
5352 \f
/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  */

static void
set_type_quals (tree type, int type_quals)
{
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  /* The address space is encoded in the upper bits of the mask.  */
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}
5365
5366 /* Returns true iff CAND and BASE have equivalent language-specific
5367 qualifiers. */
5368
5369 bool
5370 check_lang_type (const_tree cand, const_tree base)
5371 {
5372 if (lang_hooks.types.type_hash_eq == NULL)
5373 return true;
5374 /* type_hash_eq currently only applies to these types. */
5375 if (TREE_CODE (cand) != FUNCTION_TYPE
5376 && TREE_CODE (cand) != METHOD_TYPE)
5377 return true;
5378 return lang_hooks.types.type_hash_eq (cand, base);
5379 }
5380
5381 /* This function checks to see if TYPE matches the size one of the built-in
5382 atomic types, and returns that core atomic type. */
5383
5384 static tree
5385 find_atomic_core_type (const_tree type)
5386 {
5387 tree base_atomic_type;
5388
5389 /* Only handle complete types. */
5390 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5391 return NULL_TREE;
5392
5393 switch (tree_to_uhwi (TYPE_SIZE (type)))
5394 {
5395 case 8:
5396 base_atomic_type = atomicQI_type_node;
5397 break;
5398
5399 case 16:
5400 base_atomic_type = atomicHI_type_node;
5401 break;
5402
5403 case 32:
5404 base_atomic_type = atomicSI_type_node;
5405 break;
5406
5407 case 64:
5408 base_atomic_type = atomicDI_type_node;
5409 break;
5410
5411 case 128:
5412 base_atomic_type = atomicTI_type_node;
5413 break;
5414
5415 default:
5416 base_atomic_type = NULL_TREE;
5417 }
5418
5419 return base_atomic_type;
5420 }
5421
/* Returns true iff unqualified CAND and BASE are equivalent.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  /* Name, context and attributes must all agree.  */
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
				TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must to do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
	return true;
    }
  return false;
}
5448
5449 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5450
5451 bool
5452 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5453 {
5454 return (TYPE_QUALS (cand) == type_quals
5455 && check_base_type (cand, base)
5456 && check_lang_type (cand, base));
5457 }
5458
/* Returns true iff CAND is equivalent to BASE with ALIGN.  */

static bool
check_aligned_type (const_tree cand, const_tree base, unsigned int align)
{
  return (TYPE_QUALS (cand) == TYPE_QUALS (base)
	  && TYPE_NAME (cand) == TYPE_NAME (base)
	  /* Apparently this is needed for Objective-C.  */
	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
	  /* Check alignment.  */
	  && TYPE_ALIGN (cand) == align
	  /* Check this is a user-aligned type as build_aligned_type
	     would create.  */
	  && TYPE_USER_ALIGN (cand)
	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
				   TYPE_ATTRIBUTES (base))
	  && check_lang_type (cand, base));
}
5477
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  /* TYPE itself may already carry exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	/* Unlink T from its current position in the chain ...  */
	*tp = TYPE_NEXT_VARIANT (t);
	/* ... and re-link it immediately after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
5510
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE.  Note the recursion: the canonical type gets the
	   same qualifiers applied.  */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
5559
5560 /* Create a variant of type T with alignment ALIGN. */
5561
5562 tree
5563 build_aligned_type (tree type, unsigned int align)
5564 {
5565 tree t;
5566
5567 if (TYPE_PACKED (type)
5568 || TYPE_ALIGN (type) == align)
5569 return type;
5570
5571 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5572 if (check_aligned_type (t, type, align))
5573 return t;
5574
5575 t = build_variant_type_copy (type);
5576 SET_TYPE_ALIGN (t, align);
5577 TYPE_USER_ALIGN (t) = 1;
5578
5579 return t;
5580 }
5581
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself.  */

tree
build_distinct_type_copy (tree type MEM_STAT_DECL)
{
  tree t = copy_node (type PASS_MEM_STAT);

  /* The cached pointer/reference types refer to the original type;
     the copy must build its own.  */
  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
5612
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE, right after
     the main variant M.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
5639 \f
5640 /* Return true if the from tree in both tree maps are equal. */
5641
5642 int
5643 tree_map_base_eq (const void *va, const void *vb)
5644 {
5645 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5646 *const b = (const struct tree_map_base *) vb;
5647 return (a->from == b->from);
5648 }
5649
5650 /* Hash a from tree in a tree_base_map. */
5651
5652 unsigned int
5653 tree_map_base_hash (const void *item)
5654 {
5655 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5656 }
5657
5658 /* Return true if this tree map structure is marked for garbage collection
5659 purposes. We simply return true if the from tree is marked, so that this
5660 structure goes away when the from tree goes away. */
5661
5662 int
5663 tree_map_base_marked_p (const void *p)
5664 {
5665 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5666 }
5667
5668 /* Hash a from tree in a tree_map. */
5669
5670 unsigned int
5671 tree_map_hash (const void *item)
5672 {
5673 return (((const struct tree_map *) item)->hash);
5674 }
5675
5676 /* Hash a from tree in a tree_decl_map. */
5677
5678 unsigned int
5679 tree_decl_map_hash (const void *item)
5680 {
5681 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5682 }
5683
5684 /* Return the initialization priority for DECL. */
5685
5686 priority_type
5687 decl_init_priority_lookup (tree decl)
5688 {
5689 symtab_node *snode = symtab_node::get (decl);
5690
5691 if (!snode)
5692 return DEFAULT_INIT_PRIORITY;
5693 return
5694 snode->get_init_priority ();
5695 }
5696
5697 /* Return the finalization priority for DECL. */
5698
5699 priority_type
5700 decl_fini_priority_lookup (tree decl)
5701 {
5702 cgraph_node *node = cgraph_node::get (decl);
5703
5704 if (!node)
5705 return DEFAULT_INIT_PRIORITY;
5706 return
5707 node->get_fini_priority ();
5708 }
5709
5710 /* Set the initialization priority for DECL to PRIORITY. */
5711
5712 void
5713 decl_init_priority_insert (tree decl, priority_type priority)
5714 {
5715 struct symtab_node *snode;
5716
5717 if (priority == DEFAULT_INIT_PRIORITY)
5718 {
5719 snode = symtab_node::get (decl);
5720 if (!snode)
5721 return;
5722 }
5723 else if (VAR_P (decl))
5724 snode = varpool_node::get_create (decl);
5725 else
5726 snode = cgraph_node::get_create (decl);
5727 snode->set_init_priority (priority);
5728 }
5729
5730 /* Set the finalization priority for DECL to PRIORITY. */
5731
5732 void
5733 decl_fini_priority_insert (tree decl, priority_type priority)
5734 {
5735 struct cgraph_node *node;
5736
5737 if (priority == DEFAULT_INIT_PRIORITY)
5738 {
5739 node = cgraph_node::get (decl);
5740 if (!node)
5741 return;
5742 }
5743 else
5744 node = cgraph_node::get_create (decl);
5745 node->set_fini_priority (priority);
5746 }
5747
5748 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5749
5750 static void
5751 print_debug_expr_statistics (void)
5752 {
5753 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5754 (long) debug_expr_for_decl->size (),
5755 (long) debug_expr_for_decl->elements (),
5756 debug_expr_for_decl->collisions ());
5757 }
5758
5759 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5760
5761 static void
5762 print_value_expr_statistics (void)
5763 {
5764 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5765 (long) value_expr_for_decl->size (),
5766 (long) value_expr_for_decl->elements (),
5767 value_expr_for_decl->collisions ());
5768 }
5769
5770 /* Lookup a debug expression for FROM, and return it if we find one. */
5771
5772 tree
5773 decl_debug_expr_lookup (tree from)
5774 {
5775 struct tree_decl_map *h, in;
5776 in.base.from = from;
5777
5778 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5779 if (h)
5780 return h->to;
5781 return NULL_TREE;
5782 }
5783
5784 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5785
5786 void
5787 decl_debug_expr_insert (tree from, tree to)
5788 {
5789 struct tree_decl_map *h;
5790
5791 h = ggc_alloc<tree_decl_map> ();
5792 h->base.from = from;
5793 h->to = to;
5794 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5795 }
5796
5797 /* Lookup a value expression for FROM, and return it if we find one. */
5798
5799 tree
5800 decl_value_expr_lookup (tree from)
5801 {
5802 struct tree_decl_map *h, in;
5803 in.base.from = from;
5804
5805 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5806 if (h)
5807 return h->to;
5808 return NULL_TREE;
5809 }
5810
5811 /* Insert a mapping FROM->TO in the value expression hashtable. */
5812
5813 void
5814 decl_value_expr_insert (tree from, tree to)
5815 {
5816 struct tree_decl_map *h;
5817
5818 h = ggc_alloc<tree_decl_map> ();
5819 h->base.from = from;
5820 h->to = to;
5821 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5822 }
5823
5824 /* Lookup a vector of debug arguments for FROM, and return it if we
5825 find one. */
5826
5827 vec<tree, va_gc> **
5828 decl_debug_args_lookup (tree from)
5829 {
5830 struct tree_vec_map *h, in;
5831
5832 if (!DECL_HAS_DEBUG_ARGS_P (from))
5833 return NULL;
5834 gcc_checking_assert (debug_args_for_decl != NULL);
5835 in.base.from = from;
5836 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5837 if (h)
5838 return &h->to;
5839 return NULL;
5840 }
5841
5842 /* Insert a mapping FROM->empty vector of debug arguments in the value
5843 expression hashtable. */
5844
5845 vec<tree, va_gc> **
5846 decl_debug_args_insert (tree from)
5847 {
5848 struct tree_vec_map *h;
5849 tree_vec_map **loc;
5850
5851 if (DECL_HAS_DEBUG_ARGS_P (from))
5852 return decl_debug_args_lookup (from);
5853 if (debug_args_for_decl == NULL)
5854 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5855 h = ggc_alloc<tree_vec_map> ();
5856 h->base.from = from;
5857 h->to = NULL;
5858 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5859 *loc = h;
5860 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5861 return &h->to;
5862 }
5863
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  The fields mixed in here must
   mirror the ones compared by type_cache_hasher::equal.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  /* Element/result type participates when present.  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Mix in the argument type hashes; error_mark_node entries are
	 skipped, matching the equality function.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE only distinguishes arrays of
	   non-aggregate elements; see type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bounds: MAX_VALUE if present, else MIN_VALUE.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
5939
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  A and B are entries in
   the canonical type hash table; the precomputed hash is compared
   first as a cheap filter.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* The remaining comparisons are code-specific.  METHOD_TYPE and
     FUNCTION_TYPE `break' out of the switch so the language hook below
     gets a final say; every other case returns directly.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Give the front end a chance to veto equality of function/method
     types it considers distinct.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
6063
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.

   Note: when a duplicate is found, the freshly-built TYPE is freed,
   so callers must not keep pointers into it.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists: discard TYPE and return
	 the canonical one.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* If TYPE was the most recently created type, recycle its UID.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No match: record TYPE as the canonical representative.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
6135
6136 static void
6137 print_type_hash_statistics (void)
6138 {
6139 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6140 (long) type_hash_table->size (),
6141 (long) type_hash_table->elements (),
6142 type_hash_table->collisions ());
6143 }
6144
6145 /* Given two lists of types
6146 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6147 return 1 if the lists contain the same types in the same order.
6148 Also, the TREE_PURPOSEs must match. */
6149
6150 bool
6151 type_list_equal (const_tree l1, const_tree l2)
6152 {
6153 const_tree t1, t2;
6154
6155 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6156 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6157 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6158 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6159 && (TREE_TYPE (TREE_PURPOSE (t1))
6160 == TREE_TYPE (TREE_PURPOSE (t2))))))
6161 return false;
6162
6163 return t1 == t2;
6164 }
6165
6166 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6167 given by TYPE. If the argument list accepts variable arguments,
6168 then this function counts only the ordinary arguments. */
6169
6170 int
6171 type_num_arguments (const_tree fntype)
6172 {
6173 int i = 0;
6174
6175 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6176 /* If the function does not take a variable number of arguments,
6177 the last element in the list will have type `void'. */
6178 if (VOID_TYPE_P (TREE_VALUE (t)))
6179 break;
6180 else
6181 ++i;
6182
6183 return i;
6184 }
6185
/* Return the type of the function TYPE's argument ARGNO if known.
   For vararg function's where ARGNO refers to one of the variadic
   arguments return null.  Otherwise, return a void_type_node for
   out-of-bounds ARGNO.  Note that ARGNO is 1-based.  */

tree
type_argument_type (const_tree fntype, unsigned argno)
{
  /* Treat zero the same as an out-of-bounds argument number.  */
  if (!argno)
    return void_type_node;

  function_args_iterator iter;

  tree argtype;
  unsigned i = 1;
  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
    {
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (!argtype)
	break;

      if (i == argno || VOID_TYPE_P (argtype))
	return argtype;

      ++i;
    }

  /* ARGNO landed in the variadic part of the list.  */
  return NULL_TREE;
}
6219
6220 /* Nonzero if integer constants T1 and T2
6221 represent the same constant value. */
6222
6223 int
6224 tree_int_cst_equal (const_tree t1, const_tree t2)
6225 {
6226 if (t1 == t2)
6227 return 1;
6228
6229 if (t1 == 0 || t2 == 0)
6230 return 0;
6231
6232 STRIP_ANY_LOCATION_WRAPPER (t1);
6233 STRIP_ANY_LOCATION_WRAPPER (t2);
6234
6235 if (TREE_CODE (t1) == INTEGER_CST
6236 && TREE_CODE (t2) == INTEGER_CST
6237 && wi::to_widest (t1) == wi::to_widest (t2))
6238 return 1;
6239
6240 return 0;
6241 }
6242
6243 /* Return true if T is an INTEGER_CST whose numerical value (extended
6244 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6245
6246 bool
6247 tree_fits_shwi_p (const_tree t)
6248 {
6249 return (t != NULL_TREE
6250 && TREE_CODE (t) == INTEGER_CST
6251 && wi::fits_shwi_p (wi::to_widest (t)));
6252 }
6253
6254 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6255 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6256
6257 bool
6258 tree_fits_poly_int64_p (const_tree t)
6259 {
6260 if (t == NULL_TREE)
6261 return false;
6262 if (POLY_INT_CST_P (t))
6263 {
6264 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6265 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6266 return false;
6267 return true;
6268 }
6269 return (TREE_CODE (t) == INTEGER_CST
6270 && wi::fits_shwi_p (wi::to_widest (t)));
6271 }
6272
6273 /* Return true if T is an INTEGER_CST whose numerical value (extended
6274 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6275
6276 bool
6277 tree_fits_uhwi_p (const_tree t)
6278 {
6279 return (t != NULL_TREE
6280 && TREE_CODE (t) == INTEGER_CST
6281 && wi::fits_uhwi_p (wi::to_widest (t)));
6282 }
6283
6284 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6285 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6286
6287 bool
6288 tree_fits_poly_uint64_p (const_tree t)
6289 {
6290 if (t == NULL_TREE)
6291 return false;
6292 if (POLY_INT_CST_P (t))
6293 {
6294 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6295 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6296 return false;
6297 return true;
6298 }
6299 return (TREE_CODE (t) == INTEGER_CST
6300 && wi::fits_uhwi_p (wi::to_widest (t)));
6301 }
6302
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Callers must ensure tree_fits_shwi_p (T); given
   that, the low word holds the entire value.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6313
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Callers must ensure tree_fits_uhwi_p (T); given
   that, the low word holds the entire value.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6324
6325 /* Return the most significant (sign) bit of T. */
6326
6327 int
6328 tree_int_cst_sign_bit (const_tree t)
6329 {
6330 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6331
6332 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6333 }
6334
6335 /* Return an indication of the sign of the integer constant T.
6336 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6337 Note that -1 will never be returned if T's type is unsigned. */
6338
6339 int
6340 tree_int_cst_sgn (const_tree t)
6341 {
6342 if (wi::to_wide (t) == 0)
6343 return 0;
6344 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6345 return 1;
6346 else if (wi::neg_p (wi::to_wide (t)))
6347 return -1;
6348 else
6349 return 1;
6350 }
6351
/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type, SGN says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 regardless
     of SGN.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
}
6376
6377 /* Return truthvalue of whether T1 is the same tree structure as T2.
6378 Return 1 if they are the same.
6379 Return 0 if they are understandably different.
6380 Return -1 if either contains tree structure not understood by
6381 this function. */
6382
6383 int
6384 simple_cst_equal (const_tree t1, const_tree t2)
6385 {
6386 enum tree_code code1, code2;
6387 int cmp;
6388 int i;
6389
6390 if (t1 == t2)
6391 return 1;
6392 if (t1 == 0 || t2 == 0)
6393 return 0;
6394
6395 /* For location wrappers to be the same, they must be at the same
6396 source location (and wrap the same thing). */
6397 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6398 {
6399 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6400 return 0;
6401 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6402 }
6403
6404 code1 = TREE_CODE (t1);
6405 code2 = TREE_CODE (t2);
6406
6407 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6408 {
6409 if (CONVERT_EXPR_CODE_P (code2)
6410 || code2 == NON_LVALUE_EXPR)
6411 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6412 else
6413 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6414 }
6415
6416 else if (CONVERT_EXPR_CODE_P (code2)
6417 || code2 == NON_LVALUE_EXPR)
6418 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6419
6420 if (code1 != code2)
6421 return 0;
6422
6423 switch (code1)
6424 {
6425 case INTEGER_CST:
6426 return wi::to_widest (t1) == wi::to_widest (t2);
6427
6428 case REAL_CST:
6429 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6430
6431 case FIXED_CST:
6432 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6433
6434 case STRING_CST:
6435 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6436 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6437 TREE_STRING_LENGTH (t1)));
6438
6439 case CONSTRUCTOR:
6440 {
6441 unsigned HOST_WIDE_INT idx;
6442 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6443 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6444
6445 if (vec_safe_length (v1) != vec_safe_length (v2))
6446 return false;
6447
6448 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6449 /* ??? Should we handle also fields here? */
6450 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6451 return false;
6452 return true;
6453 }
6454
6455 case SAVE_EXPR:
6456 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6457
6458 case CALL_EXPR:
6459 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6460 if (cmp <= 0)
6461 return cmp;
6462 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6463 return 0;
6464 {
6465 const_tree arg1, arg2;
6466 const_call_expr_arg_iterator iter1, iter2;
6467 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6468 arg2 = first_const_call_expr_arg (t2, &iter2);
6469 arg1 && arg2;
6470 arg1 = next_const_call_expr_arg (&iter1),
6471 arg2 = next_const_call_expr_arg (&iter2))
6472 {
6473 cmp = simple_cst_equal (arg1, arg2);
6474 if (cmp <= 0)
6475 return cmp;
6476 }
6477 return arg1 == arg2;
6478 }
6479
6480 case TARGET_EXPR:
6481 /* Special case: if either target is an unallocated VAR_DECL,
6482 it means that it's going to be unified with whatever the
6483 TARGET_EXPR is really supposed to initialize, so treat it
6484 as being equivalent to anything. */
6485 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6486 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6487 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6488 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6489 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6490 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6491 cmp = 1;
6492 else
6493 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6494
6495 if (cmp <= 0)
6496 return cmp;
6497
6498 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6499
6500 case WITH_CLEANUP_EXPR:
6501 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6502 if (cmp <= 0)
6503 return cmp;
6504
6505 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6506
6507 case COMPONENT_REF:
6508 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6509 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6510
6511 return 0;
6512
6513 case VAR_DECL:
6514 case PARM_DECL:
6515 case CONST_DECL:
6516 case FUNCTION_DECL:
6517 return 0;
6518
6519 default:
6520 if (POLY_INT_CST_P (t1))
6521 /* A false return means maybe_ne rather than known_ne. */
6522 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6523 TYPE_SIGN (TREE_TYPE (t1))),
6524 poly_widest_int::from (poly_int_cst_value (t2),
6525 TYPE_SIGN (TREE_TYPE (t2))));
6526 break;
6527 }
6528
6529 /* This general rule works for most tree codes. All exceptions should be
6530 handled above. If this is a language-specific tree code, we can't
6531 trust what might be in the operand, so say we don't know
6532 the situation. */
6533 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6534 return -1;
6535
6536 switch (TREE_CODE_CLASS (code1))
6537 {
6538 case tcc_unary:
6539 case tcc_binary:
6540 case tcc_comparison:
6541 case tcc_expression:
6542 case tcc_reference:
6543 case tcc_statement:
6544 cmp = 1;
6545 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6546 {
6547 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6548 if (cmp <= 0)
6549 return cmp;
6550 }
6551
6552 return cmp;
6553
6554 default:
6555 return -1;
6556 }
6557 }
6558
6559 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6560 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6561 than U, respectively. */
6562
6563 int
6564 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6565 {
6566 if (tree_int_cst_sgn (t) < 0)
6567 return -1;
6568 else if (!tree_fits_uhwi_p (t))
6569 return 1;
6570 else if (TREE_INT_CST_LOW (t) == u)
6571 return 0;
6572 else if (TREE_INT_CST_LOW (t) < u)
6573 return -1;
6574 else
6575 return 1;
6576 }
6577
/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space).
   When PERR is non-null, set *PERR on failure to the description of
   why SIZE is not valid.  */

bool
valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
{
  /* A POLY_INT_CST is valid iff it does not overflow and each of its
     coefficients is itself a valid constant size.  */
  if (POLY_INT_CST_P (size))
    {
      if (TREE_OVERFLOW (size))
	return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
	  return false;
      return true;
    }

  /* Give the checks below somewhere to record the failure reason even
     when the caller does not care about it.  */
  cst_size_error error;
  if (!perr)
    perr = &error;

  if (TREE_CODE (size) != INTEGER_CST)
    {
      *perr = cst_size_not_constant;
      return false;
    }

  if (TREE_OVERFLOW_P (size))
    {
      *perr = cst_size_overflow;
      return false;
    }

  if (tree_int_cst_sgn (size) < 0)
    {
      *perr = cst_size_negative;
      return false;
    }
  /* Reject sizes covering more than half of the address space, i.e.
     where 2 * SIZE would exceed the maximum value of sizetype.  */
  if (!tree_fits_uhwi_p (size)
      || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
	  < wi::to_widest (size) * 2))
    {
      *perr = cst_size_too_big;
      return false;
    }

  return true;
}
6628
6629 /* Return the precision of the type, or for a complex or vector type the
6630 precision of the type of its elements. */
6631
6632 unsigned int
6633 element_precision (const_tree type)
6634 {
6635 if (!TYPE_P (type))
6636 type = TREE_TYPE (type);
6637 enum tree_code code = TREE_CODE (type);
6638 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6639 type = TREE_TYPE (type);
6640
6641 return TYPE_PRECISION (type);
6642 }
6643
6644 /* Return true if CODE represents an associative tree code. Otherwise
6645 return false. */
6646 bool
6647 associative_tree_code (enum tree_code code)
6648 {
6649 switch (code)
6650 {
6651 case BIT_IOR_EXPR:
6652 case BIT_AND_EXPR:
6653 case BIT_XOR_EXPR:
6654 case PLUS_EXPR:
6655 case MULT_EXPR:
6656 case MIN_EXPR:
6657 case MAX_EXPR:
6658 return true;
6659
6660 default:
6661 break;
6662 }
6663 return false;
6664 }
6665
6666 /* Return true if CODE represents a commutative tree code. Otherwise
6667 return false. */
6668 bool
6669 commutative_tree_code (enum tree_code code)
6670 {
6671 switch (code)
6672 {
6673 case PLUS_EXPR:
6674 case MULT_EXPR:
6675 case MULT_HIGHPART_EXPR:
6676 case MIN_EXPR:
6677 case MAX_EXPR:
6678 case BIT_IOR_EXPR:
6679 case BIT_XOR_EXPR:
6680 case BIT_AND_EXPR:
6681 case NE_EXPR:
6682 case EQ_EXPR:
6683 case UNORDERED_EXPR:
6684 case ORDERED_EXPR:
6685 case UNEQ_EXPR:
6686 case LTGT_EXPR:
6687 case TRUTH_AND_EXPR:
6688 case TRUTH_XOR_EXPR:
6689 case TRUTH_OR_EXPR:
6690 case WIDEN_MULT_EXPR:
6691 case VEC_WIDEN_MULT_HI_EXPR:
6692 case VEC_WIDEN_MULT_LO_EXPR:
6693 case VEC_WIDEN_MULT_EVEN_EXPR:
6694 case VEC_WIDEN_MULT_ODD_EXPR:
6695 return true;
6696
6697 default:
6698 break;
6699 }
6700 return false;
6701 }
6702
6703 /* Return true if CODE represents a ternary tree code for which the
6704 first two operands are commutative. Otherwise return false. */
6705 bool
6706 commutative_ternary_tree_code (enum tree_code code)
6707 {
6708 switch (code)
6709 {
6710 case WIDEN_MULT_PLUS_EXPR:
6711 case WIDEN_MULT_MINUS_EXPR:
6712 case DOT_PROD_EXPR:
6713 return true;
6714
6715 default:
6716 break;
6717 }
6718 return false;
6719 }
6720
6721 /* Returns true if CODE can overflow. */
6722
6723 bool
6724 operation_can_overflow (enum tree_code code)
6725 {
6726 switch (code)
6727 {
6728 case PLUS_EXPR:
6729 case MINUS_EXPR:
6730 case MULT_EXPR:
6731 case LSHIFT_EXPR:
6732 /* Can overflow in various ways. */
6733 return true;
6734 case TRUNC_DIV_EXPR:
6735 case EXACT_DIV_EXPR:
6736 case FLOOR_DIV_EXPR:
6737 case CEIL_DIV_EXPR:
6738 /* For INT_MIN / -1. */
6739 return true;
6740 case NEGATE_EXPR:
6741 case ABS_EXPR:
6742 /* For -INT_MIN. */
6743 return true;
6744 default:
6745 /* These operators cannot overflow. */
6746 return false;
6747 }
6748 }
6749
6750 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6751 ftrapv doesn't generate trapping insns for CODE. */
6752
6753 bool
6754 operation_no_trapping_overflow (tree type, enum tree_code code)
6755 {
6756 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6757
6758 /* We don't generate instructions that trap on overflow for complex or vector
6759 types. */
6760 if (!INTEGRAL_TYPE_P (type))
6761 return true;
6762
6763 if (!TYPE_OVERFLOW_TRAPS (type))
6764 return true;
6765
6766 switch (code)
6767 {
6768 case PLUS_EXPR:
6769 case MINUS_EXPR:
6770 case MULT_EXPR:
6771 case NEGATE_EXPR:
6772 case ABS_EXPR:
6773 /* These operators can overflow, and -ftrapv generates trapping code for
6774 these. */
6775 return false;
6776 case TRUNC_DIV_EXPR:
6777 case EXACT_DIV_EXPR:
6778 case FLOOR_DIV_EXPR:
6779 case CEIL_DIV_EXPR:
6780 case LSHIFT_EXPR:
6781 /* These operators can overflow, but -ftrapv does not generate trapping
6782 code for these. */
6783 return true;
6784 default:
6785 /* These operators cannot overflow. */
6786 return true;
6787 }
6788 }
6789
6790 /* Constructors for pointer, array and function types.
6791 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6792 constructed by language-dependent code, not here.) */
6793
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory.  If such a type has already been
   constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's request; the may_alias override below must not
     influence how the canonical type is constructed.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Pointer types to TO_TYPE are chained
     through TYPE_NEXT_PTR_TO.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type onto the head of TO_TYPE's pointer-type chain.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical pointer type points to the canonical pointed-to type
       and never has the alias-all flag set.  */
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
6854
6855 /* By default build pointers in ptr_mode. */
6856
6857 tree
6858 build_pointer_type (tree to_type)
6859 {
6860 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
6861 : TYPE_ADDR_SPACE (to_type);
6862 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
6863 return build_pointer_type_for_mode (to_type, pointer_mode, false);
6864 }
6865
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's request; the may_alias override below must not
     influence how the canonical type is constructed.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Reference types to TO_TYPE are chained
     through TYPE_NEXT_REF_TO.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type onto the head of TO_TYPE's reference-type chain.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical reference type refers to the canonical pointed-to
       type and never has the alias-all flag set.  */
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
6921
6922
6923 /* Build the node for the type of references-to-TO_TYPE by default
6924 in ptr_mode. */
6925
6926 tree
6927 build_reference_type (tree to_type)
6928 {
6929 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
6930 : TYPE_ADDR_SPACE (to_type);
6931 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
6932 return build_reference_type_for_mode (to_type, pointer_mode, false);
6933 }
6934
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
/* Cache of small-precision integer types.  Slots [0, MAX_INT_CACHED_PREC]
   hold the signed types, the slots above them the unsigned types.  */
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];

/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* Turn UNSIGNEDP into the index offset of the unsigned half of the
     cache, so PRECISION + UNSIGNEDP addresses the right slot below.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set the min/max values, size and mode appropriate for the chosen
     signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Share structurally identical types through the type hash table.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
6974
6975 #define MAX_BOOL_CACHED_PREC \
6976 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6977 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
6978
6979 /* Builds a boolean type of precision PRECISION.
6980 Used for boolean vectors to choose proper vector element size. */
6981 tree
6982 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
6983 {
6984 tree type;
6985
6986 if (precision <= MAX_BOOL_CACHED_PREC)
6987 {
6988 type = nonstandard_boolean_type_cache[precision];
6989 if (type)
6990 return type;
6991 }
6992
6993 type = make_node (BOOLEAN_TYPE);
6994 TYPE_PRECISION (type) = precision;
6995 fixup_signed_type (type);
6996
6997 if (precision <= MAX_INT_CACHED_PREC)
6998 nonstandard_boolean_type_cache[precision] = type;
6999
7000 return type;
7001 }
7002
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type; HIGHVAL may be omitted for an
     open-ended range.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits its representation (precision, mode, size
     and alignment) from the base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Both bounds are constant, so the type can be shared through the
     type hash table.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
7044
7045 /* Wrapper around build_range_type_1 with SHARED set to true. */
7046
7047 tree
7048 build_range_type (tree type, tree lowval, tree highval)
7049 {
7050 return build_range_type_1 (type, lowval, highval, true);
7051 }
7052
7053 /* Wrapper around build_range_type_1 with SHARED set to false. */
7054
7055 tree
7056 build_nonshared_range_type (tree type, tree lowval, tree highval)
7057 {
7058 return build_range_type_1 (type, lowval, highval, false);
7059 }
7060
7061 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7062 MAXVAL should be the maximum value in the domain
7063 (one less than the length of the array).
7064
7065 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7066 We don't enforce this limit, that is up to caller (e.g. language front end).
7067 The limit exists because the result is a signed type and we don't handle
7068 sizes that use more than one HOST_WIDE_INT. */
7069
7070 tree
7071 build_index_type (tree maxval)
7072 {
7073 return build_range_type (sizetype, size_zero_node, maxval);
7074 }
7075
/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  Prefer the language hook, as the
     generic TYPE_MIN/MAX_VALUE may not reflect language semantics.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  /* Report the bounds only where the caller asked for them.  */
  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
7115
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  /* Arrays of functions are invalid; recover by using int elements.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only compute TYPE_CANONICAL for a type we created ourselves (or one
     the hash table handed back still pointing to itself).  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array type from the canonical element and
	   index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
7164
7165 /* Wrapper around build_array_type_1 with SHARED set to true. */
7166
7167 tree
7168 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7169 {
7170 return
7171 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7172 }
7173
7174 /* Wrapper around build_array_type_1 with SHARED set to false. */
7175
7176 tree
7177 build_nonshared_array_type (tree elt_type, tree index_type)
7178 {
7179 return build_array_type_1 (elt_type, index_type, false, false, true);
7180 }
7181
7182 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7183 sizetype. */
7184
7185 tree
7186 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7187 {
7188 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7189 }
7190
7191 /* Recursively examines the array elements of TYPE, until a non-array
7192 element type is found. */
7193
7194 tree
7195 strip_array_types (tree type)
7196 {
7197 while (TREE_CODE (type) == ARRAY_TYPE)
7198 type = TREE_TYPE (type);
7199
7200 return type;
7201 }
7202
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the argument types.  Stop early once a
     structural type is found, since the result is then unused.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  /* Second pass: build a fresh list of the canonical argument types,
     needed only if something above was non-canonical.  */
  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      /* Re-terminate a fixed-arity list with the shared void sentinel.  */
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
7274
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  gcc_assert (arg_types != error_mark_node);

  /* Functions cannot return functions; recover with int.  */
  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  It is structural/non-canonical if the
     return type or any argument type is.  */
  any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
7323
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must be always be terminated be a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types from ARGP; tree_cons prepends, so ARGS
     ends up in reverse order here.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Varargs: leave the list unterminated (no trailing void).  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      /* NOTE(review): LAST here is the freshly-consed head before
	 reversal, so this assert appears to be a defensive check that
	 the caller did not somehow pass the shared void sentinel —
	 confirm intent before relying on it.  */
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No named arguments at all: the list is just (void).  */
    args = void_list_node;
  else
    {
      /* Fixed arity: reverse into declaration order and terminate the
	 list with the shared void sentinel.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
7356
7357 /* Build a function type. The RETURN_TYPE is the type returned by the
7358 function. If additional arguments are provided, they are
7359 additional argument types. The list of argument types must always
7360 be terminated by NULL_TREE. */
7361
7362 tree
7363 build_function_type_list (tree return_type, ...)
7364 {
7365 tree args;
7366 va_list p;
7367
7368 va_start (p, return_type);
7369 args = build_function_type_list_1 (false, return_type, p);
7370 va_end (p);
7371 return args;
7372 }
7373
7374 /* Build a variable argument function type. The RETURN_TYPE is the
7375 type returned by the function. If additional arguments are provided,
7376 they are additional argument types. The list of argument types must
7377 always be terminated by NULL_TREE. */
7378
7379 tree
7380 build_varargs_function_type_list (tree return_type, ...)
7381 {
7382 tree args;
7383 va_list p;
7384
7385 va_start (p, return_type);
7386 args = build_function_type_list_1 (true, return_type, p);
7387 va_end (p);
7388
7389 return args;
7390 }
7391
7392 /* Build a function type. RETURN_TYPE is the type returned by the
7393 function; VAARGS indicates whether the function takes varargs. The
7394 function takes N named arguments, the types of which are provided in
7395 ARG_TYPES. */
7396
7397 static tree
7398 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7399 tree *arg_types)
7400 {
7401 int i;
7402 tree t = vaargs ? NULL_TREE : void_list_node;
7403
7404 for (i = n - 1; i >= 0; i--)
7405 t = tree_cons (NULL_TREE, arg_types[i], t);
7406
7407 return build_function_type (return_type, t);
7408 }
7409
7410 /* Build a function type. RETURN_TYPE is the type returned by the
7411 function. The function takes N named arguments, the types of which
7412 are provided in ARG_TYPES. */
7413
7414 tree
7415 build_function_type_array (tree return_type, int n, tree *arg_types)
7416 {
7417 return build_function_type_array_1 (false, return_type, n, arg_types);
7418 }
7419
7420 /* Build a variable argument function type. RETURN_TYPE is the type
7421 returned by the function. The function takes N named arguments, the
7422 types of which are provided in ARG_TYPES. */
7423
7424 tree
7425 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7426 {
7427 return build_function_type_array_1 (true, return_type, n, arg_types);
7428 }
7429
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  It is structural/non-canonical if the
     base type, the return type or any explicit argument type is.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the hidden "this" argument we just consed on; the recursive
     call below re-adds its own canonical version.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
7483
7484 /* Construct, lay out and return the type of methods belonging to class
7485 BASETYPE and whose arguments and values are described by TYPE.
7486 If that type exists already, reuse it.
7487 TYPE must be a FUNCTION_TYPE node. */
7488
7489 tree
7490 build_method_type (tree basetype, tree type)
7491 {
7492 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7493
7494 return build_method_type_directly (basetype,
7495 TREE_TYPE (type),
7496 TYPE_ARG_TYPES (type));
7497 }
7498
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute TYPE_CANONICAL for a type the hash table handed back
     still pointing at itself.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
7535
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Complex components must be integral, floating-point or
     fixed-point scalars.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  /* Pick the conventional spelling for the standard integer
	     component types; other component types stay unnamed.  */
	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Propagate the component type's qualifiers onto the result.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
7610
7611 /* If TYPE is a real or complex floating-point type and the target
7612 does not directly support arithmetic on TYPE then return the wider
7613 type to be used for arithmetic on TYPE. Otherwise, return
7614 NULL_TREE. */
7615
tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : EXCESS_PRECISION_TYPE_STANDARD);

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* float16_type_node may not exist on this target; use VOIDmode then so
     the comparisons below can never match it.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	/* Promote every real mode narrower than the evaluation method's
	   target mode up to that mode.  */
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex types with a real (floating-point) component
	   carry excess precision.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	/* Mirror of the REAL_TYPE mapping above, promoting to the
	   corresponding complex type instead.  */
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
7714 \f
7715 /* Return OP, stripped of any conversions to wider types as much as is safe.
7716 Converting the value back to OP's type makes a value equivalent to OP.
7717
7718 If FOR_TYPE is nonzero, we return a value which, if converted to
7719 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7720
7721 OP must have integer, real or enumeral type. Pointers are not allowed!
7722
7723 There are some cases where the obvious value we could return
7724 would regenerate to OP if converted to OP's type,
7725 but would not extend like OP to wider types.
7726 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7727 For example, if OP is (unsigned short)(signed char)-1,
7728 we avoid returning (signed char)-1 if FOR_TYPE is int,
7729 even though extending that to an unsigned short would regenerate OP,
7730 since the result of extending (signed char)-1 to (int)
7731 is different from (int) OP. */
7732
tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  /* Walk down nested conversions; WIN tracks the innermost expression
     that is still safe to use in place of OP.  */
  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  /* Narrow to the smallest language type that still holds the
	     value, if the frontend provides one.  */
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
7811 \f
7812 /* Return OP or a simpler expression for a narrower value
7813 which can be sign-extended or zero-extended to give back OP.
7814 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7815 or 0 if the value should be sign-extended. */
7816
tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow the value operand at the end of the COMPOUND_EXPR chain,
	 and if anything changed, rebuild the whole chain around the
	 narrowed value so the side effects are preserved.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means a truncation.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  /* A non-bit-field COMPONENT_REF can be read in the narrower type
     matching the field's declared size.  */
  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
7924 \f
7925 /* Return true if integer constant C has a value that is permissible
7926 for TYPE, an integral type. */
7927
bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

 retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Set ok_for_xxx_bound to true when the corresponding bound is a known
     constant and C satisfies it, and to false when the bound is unknown;
     if C is known to violate a constant bound we return false at once.  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types,  */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
8021
8022 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8023 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8024 represented (assuming two's-complement arithmetic) within the bit
8025 precision of the type are returned instead. */
8026
8027 void
8028 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8029 {
8030 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8031 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8032 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8033 else
8034 {
8035 if (TYPE_UNSIGNED (type))
8036 mpz_set_ui (min, 0);
8037 else
8038 {
8039 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8040 wi::to_mpz (mn, min, SIGNED);
8041 }
8042 }
8043
8044 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8045 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8046 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8047 else
8048 {
8049 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8050 wi::to_mpz (mn, max, TYPE_SIGN (type));
8051 }
8052 }
8053
8054 /* Return true if VAR is an automatic variable. */
8055
8056 bool
8057 auto_var_p (const_tree var)
8058 {
8059 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8060 || TREE_CODE (var) == PARM_DECL)
8061 && ! TREE_STATIC (var))
8062 || TREE_CODE (var) == RESULT_DECL);
8063 }
8064
8065 /* Return true if VAR is an automatic variable defined in function FN. */
8066
8067 bool
8068 auto_var_in_fn_p (const_tree var, const_tree fn)
8069 {
8070 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8071 && (auto_var_p (var)
8072 || TREE_CODE (var) == LABEL_DECL));
8073 }
8074
8075 /* Subprogram of following function. Called by walk_tree.
8076
8077 Return *TP if it is an automatic variable or parameter of the
8078 function passed in as DATA. */
8079
8080 static tree
8081 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8082 {
8083 tree fn = (tree) data;
8084
8085 if (TYPE_P (*tp))
8086 *walk_subtrees = 0;
8087
8088 else if (DECL_P (*tp)
8089 && auto_var_in_fn_p (*tp, fn))
8090 return *tp;
8091
8092 return NULL_TREE;
8093 }
8094
8095 /* Returns true if T is, contains, or refers to a type with variable
8096 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8097 arguments, but not the return type. If FN is nonzero, only return
8098 true if a modifier of the type or position of FN is a variable or
8099 parameter inside FN.
8100
8101 This concept is more general than that of C99 'variably modified types':
8102 in C99, a struct type is never variably modified because a VLA may not
8103 appear as a structure member. However, in GNU C code like:
8104
8105 struct S { int i[f()]; };
8106
8107 is valid, and other languages may define similar constructs. */
8108
bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly;
	 TREE_VISITED guards against infinite recursion.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
8220
8221 /* Given a DECL or TYPE, return the scope in which it was declared, or
8222 NULL_TREE if there is no containing scope. */
8223
8224 tree
8225 get_containing_scope (const_tree t)
8226 {
8227 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8228 }
8229
8230 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8231
8232 const_tree
8233 get_ultimate_context (const_tree decl)
8234 {
8235 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8236 {
8237 if (TREE_CODE (decl) == BLOCK)
8238 decl = BLOCK_SUPERCONTEXT (decl);
8239 else
8240 decl = get_containing_scope (decl);
8241 }
8242 return decl;
8243 }
8244
8245 /* Return the innermost context enclosing DECL that is
8246 a FUNCTION_DECL, or zero if none. */
8247
8248 tree
8249 decl_function_context (const_tree decl)
8250 {
8251 tree context;
8252
8253 if (TREE_CODE (decl) == ERROR_MARK)
8254 return 0;
8255
8256 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8257 where we look up the function at runtime. Such functions always take
8258 a first argument of type 'pointer to real context'.
8259
8260 C++ should really be fixed to use DECL_CONTEXT for the real context,
8261 and use something else for the "virtual context". */
8262 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8263 context
8264 = TYPE_MAIN_VARIANT
8265 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8266 else
8267 context = DECL_CONTEXT (decl);
8268
8269 while (context && TREE_CODE (context) != FUNCTION_DECL)
8270 {
8271 if (TREE_CODE (context) == BLOCK)
8272 context = BLOCK_SUPERCONTEXT (context);
8273 else
8274 context = get_containing_scope (context);
8275 }
8276
8277 return context;
8278 }
8279
8280 /* Return the innermost context enclosing DECL that is
8281 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8282 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8283
8284 tree
8285 decl_type_context (const_tree decl)
8286 {
8287 tree context = DECL_CONTEXT (decl);
8288
8289 while (context)
8290 switch (TREE_CODE (context))
8291 {
8292 case NAMESPACE_DECL:
8293 case TRANSLATION_UNIT_DECL:
8294 return NULL_TREE;
8295
8296 case RECORD_TYPE:
8297 case UNION_TYPE:
8298 case QUAL_UNION_TYPE:
8299 return context;
8300
8301 case TYPE_DECL:
8302 case FUNCTION_DECL:
8303 context = DECL_CONTEXT (context);
8304 break;
8305
8306 case BLOCK:
8307 context = BLOCK_SUPERCONTEXT (context);
8308 break;
8309
8310 default:
8311 gcc_unreachable ();
8312 }
8313
8314 return NULL_TREE;
8315 }
8316
8317 /* CALL is a CALL_EXPR. Return the declaration for the function
8318 called, or NULL_TREE if the called function cannot be
8319 determined. */
8320
tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  /* Strip conversions; STRIP_NOPS modifies the local copy in place.  */
  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
8358
8359 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8360 return the associated function code, otherwise return CFN_LAST. */
8361
8362 combined_fn
8363 get_call_combined_fn (const_tree call)
8364 {
8365 /* It's invalid to call this function with anything but a CALL_EXPR. */
8366 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8367
8368 if (!CALL_EXPR_FN (call))
8369 return as_combined_fn (CALL_EXPR_IFN (call));
8370
8371 tree fndecl = get_callee_fndecl (call);
8372 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8373 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8374
8375 return CFN_LAST;
8376 }
8377
8378 /* Comparator of indices based on tree_node_counts. */
8379
8380 static int
8381 tree_nodes_cmp (const void *p1, const void *p2)
8382 {
8383 const unsigned *n1 = (const unsigned *)p1;
8384 const unsigned *n2 = (const unsigned *)p2;
8385
8386 return tree_node_counts[*n1] - tree_node_counts[*n2];
8387 }
8388
8389 /* Comparator of indices based on tree_code_counts. */
8390
8391 static int
8392 tree_codes_cmp (const void *p1, const void *p2)
8393 {
8394 const unsigned *n1 = (const unsigned *)p1;
8395 const unsigned *n2 = (const unsigned *)p2;
8396
8397 return tree_code_counts[*n1] - tree_code_counts[*n2];
8398 }
8399
8400 #define TREE_MEM_USAGE_SPACES 40
8401
8402 /* Print debugging information about tree nodes generated during the compile,
8403 and any language-specific information. */
8404
8405 void
8406 dump_tree_statistics (void)
8407 {
8408 if (GATHER_STATISTICS)
8409 {
8410 uint64_t total_nodes, total_bytes;
8411 fprintf (stderr, "\nKind Nodes Bytes\n");
8412 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8413 total_nodes = total_bytes = 0;
8414
8415 {
8416 auto_vec<unsigned> indices (all_kinds);
8417 for (unsigned i = 0; i < all_kinds; i++)
8418 indices.quick_push (i);
8419 indices.qsort (tree_nodes_cmp);
8420
8421 for (unsigned i = 0; i < (int) all_kinds; i++)
8422 {
8423 unsigned j = indices[i];
8424 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8425 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8426 SIZE_AMOUNT (tree_node_sizes[j]));
8427 total_nodes += tree_node_counts[j];
8428 total_bytes += tree_node_sizes[j];
8429 }
8430 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8431 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8432 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8433 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8434 }
8435
8436 {
8437 fprintf (stderr, "Code Nodes\n");
8438 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8439
8440 auto_vec<unsigned> indices (MAX_TREE_CODES);
8441 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8442 indices.quick_push (i);
8443 indices.qsort (tree_codes_cmp);
8444
8445 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8446 {
8447 unsigned j = indices[i];
8448 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8449 get_tree_code_name ((enum tree_code) j),
8450 SIZE_AMOUNT (tree_code_counts[j]));
8451 }
8452 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8453 fprintf (stderr, "\n");
8454 ssanames_print_statistics ();
8455 fprintf (stderr, "\n");
8456 phinodes_print_statistics ();
8457 fprintf (stderr, "\n");
8458 }
8459 }
8460 else
8461 fprintf (stderr, "(No per-node statistics)\n");
8462
8463 print_type_hash_statistics ();
8464 print_debug_expr_statistics ();
8465 print_value_expr_statistics ();
8466 lang_hooks.print_statistics ();
8467 }
8468 \f
8469 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8470
8471 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8472
unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* CRC-32 with polynomial 0x04c11db7, MSB first, one bit per step
     instead of a nibble at a time through a syndrome table.  Each step
     shifts the register left and folds in the polynomial when the
     outgoing checksum bit differs from the incoming data bit.  */
  const unsigned poly = 0x04c11db7;

  /* Left-justify the low BYTES bytes of VALUE in the 32-bit word.  */
  value <<= (32 - bytes * 8);

  for (unsigned bit = bytes * 8; bit--;)
    {
      unsigned feed = (chksum ^ value) & 0x80000000u;

      chksum <<= 1;
      value <<= 1;
      if (feed)
	chksum ^= poly;
    }

  return chksum;
}
8500
8501 /* Generate a crc32 of a string. */
8502
8503 unsigned
8504 crc32_string (unsigned chksum, const char *string)
8505 {
8506 do
8507 chksum = crc32_byte (chksum, *string);
8508 while (*string++);
8509 return chksum;
8510 }
8511
8512 /* P is a string that will be used in a symbol. Mask out any characters
8513 that are not valid in that context. */
8514
8515 void
8516 clean_symbol_name (char *p)
8517 {
8518 for (; *p; p++)
8519 if (! (ISALNUM (*p)
8520 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8521 || *p == '$'
8522 #endif
8523 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8524 || *p == '.'
8525 #endif
8526 ))
8527 *p = '_';
8528 }
8529
8530 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8531
8532 /* Create a unique anonymous identifier. The identifier is still a
8533 valid assembly label. */
8534
tree
make_anon_name ()
{
  /* Choose a leading separator the target's assembler accepts in
     labels: prefer '.', then '$', falling back to '_'.  */
  const char *fmt =
#if !defined (NO_DOT_IN_LABEL)
	"."
#elif !defined (NO_DOLLAR_IN_LABEL)
	"$"
#else
	"_"
#endif
	"_anon_%d";

  char buf[24];
  /* ANON_CNT is a saved-for-PCH global counter, so names remain unique
     across precompiled-header saves and restores.  */
  int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
  gcc_checking_assert (len < int (sizeof (buf)));

  tree id = get_identifier_with_length (buf, len);
  /* Mark the identifier so front ends can recognize it as anonymous.  */
  IDENTIFIER_ANON_P (id) = true;

  return id;
}
8557
8558 /* Generate a name for a special-purpose function.
8559 The generated name may need to be unique across the whole link.
8560 Changes to this function may also require corresponding changes to
8561 xstrdup_mask_random.
8562 TYPE is some string to identify the purpose of this function to the
8563 linker or collect2; it must start with an uppercase letter,
8564 one of:
8565 I - for constructors
8566 D - for destructors
8567 N - for C++ anonymous namespaces
8568 F - for DWARF unwind frame information. */
8569
tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;	/* cleaned-up base name used in the final symbol */
  char *q;		/* writable copy handed to clean_symbol_name */

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (startswith (type, "sub_")
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 covers "_%08X" (1 + 8 hex digits); 19 presumably covers '_'
	 plus HOST_WIDE_INT_PRINT_HEX output ("0x" + up to 16 digits)
	 -- TODO confirm against the host-wide-int format macros.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
8632 \f
8633 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8634
8635 /* Complain that the tree code of NODE does not match the expected 0
8636 terminated list of trailing codes. The trailing code list can be
8637 empty, for a more vague error message. FILE, LINE, and FUNCTION
8638 are of the caller. */
8639
void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: size the message buffer.  The 4 per
     entry covers a " or " separator; since the first entry takes no
     separator, the slack also covers the trailing NUL.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      /* Second pass: build "expected A or B or ..." in the buffer.  */
      char *tmp;
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* Empty code list: the caller wants only a vague complaint.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8678
8679 /* Complain that the tree code of NODE does match the expected 0
8680 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8681 the caller. */
8682
void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* Size the buffer: 4 per entry covers " or "; the first entry has no
     separator, which leaves room for the trailing NUL.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the "A or B or ..." list of disallowed codes.
     NOTE(review): unlike tree_check_failed, an empty code list leaves
     BUFFER uninitialized -- presumably callers always pass at least one
     code; confirm before relying on this path.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8715
8716 /* Similar to tree_check_failed, except that we check for a class of tree
8717 code, given in CL. */
8718
void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  /* Report both the class that was expected and the class/code the
     node actually has, then abort via internal_error.  */
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
8729
8730 /* Similar to tree_check_failed, except that instead of specifying a
8731 dozen codes, use the knowledge that they're all sequential. */
8732
void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* Size the buffer over the contiguous code range [C1, C2]; 4 per
     entry covers " or ", and the first entry's unused separator leaves
     room for the NUL.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Build "expected A or B or ..." for every code in the range.  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8763
8764
/* Similar to tree_check_failed, except that we check that a tree does
   not belong to the specified class, given in CL.  */
8767
void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  /* Report that NODE unexpectedly belongs to class CL, and abort.  */
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
8778
8779
8780 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8781
void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  /* Report the expected OMP clause kind against NODE's actual tree
     code, and abort.  */
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
		  "in %s, at %s:%d",
		  omp_clause_code_name[code],
		  get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8792
8793
8794 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8795
void
omp_clause_range_check_failed (const_tree node, const char *file, int line,
			       const char *function, enum omp_clause_code c1,
			       enum omp_clause_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* Size the buffer over the contiguous clause-code range [C1, C2];
     4 per entry covers " or ", with the first entry's unused separator
     leaving room for the NUL.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (omp_clause_code_name[c]);

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Build "expected A or B or ..." for every clause code in range.  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, omp_clause_code_name[c]);
      length += strlen (omp_clause_code_name[c]);
    }

  /* NOTE(review): indexing omp_clause_code_name by TREE_CODE (node)
     looks suspect -- the clause kind of an OMP_CLAUSE is normally
     OMP_CLAUSE_CODE (node), and TREE_CODE may index out of range for
     non-clause nodes.  Confirm intent before changing.  */
  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, omp_clause_code_name[TREE_CODE (node)],
		  function, trim_filename (file), line);
}
8826
8827
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names for the tree_node_structure_enum values, generated
   by expanding each DEFTREESTRUCT entry in treestruct.def to its NAME
   string.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8837
8838 /* Similar to tree_class_check_failed, except that we check for
8839 whether CODE contains the tree structure identified by EN. */
8840
void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  /* Report that NODE's code lacks the tree structure EN, and abort.  */
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
8852
8853
/* Similar to above, except that the check is for the bounds of the
   (dynamically sized) element array of a TREE_INT_CST.  */
8856
void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  /* IDX is 0-based internally; report it 1-based for the message.  */
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
8866
8867 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8868 (dynamically sized) vector. */
8869
void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  /* IDX is 0-based internally; report it 1-based for the message.  */
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
8878
8879 /* Similar to above, except that the check is for the bounds of the operand
8880 vector of an expression node EXP. */
8881
void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  /* IDX is 0-based internally; report it 1-based for the message.  */
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}
8892
8893 /* Similar to above, except that the check is for the number of
8894 operands of an OMP_CLAUSE node. */
8895
void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
				 int line, const char *function)
{
  /* IDX is 0-based internally; report it 1-based for the message.  */
  internal_error
    ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
8906 #endif /* ENABLE_TREE_CHECKING */
8907 \f
8908 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
8909 and mapped to the machine mode MODE. Initialize its fields and build
8910 the information necessary for debugging output. */
8911
static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Base the new vector on the inner type's main variant so the hash
     table below canonicalizes consistently.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Recurse with VOIDmode so the canonical vector type is mode-free
       and built over the canonical inner type.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share an existing identical node if one has already been built.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
8946
8947 static tree
8948 make_or_reuse_type (unsigned size, int unsignedp)
8949 {
8950 int i;
8951
8952 if (size == INT_TYPE_SIZE)
8953 return unsignedp ? unsigned_type_node : integer_type_node;
8954 if (size == CHAR_TYPE_SIZE)
8955 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
8956 if (size == SHORT_TYPE_SIZE)
8957 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
8958 if (size == LONG_TYPE_SIZE)
8959 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
8960 if (size == LONG_LONG_TYPE_SIZE)
8961 return (unsignedp ? long_long_unsigned_type_node
8962 : long_long_integer_type_node);
8963
8964 for (i = 0; i < NUM_INT_N_ENTS; i ++)
8965 if (size == int_n_data[i].bitsize
8966 && int_n_enabled_p[i])
8967 return (unsignedp ? int_n_trees[i].unsigned_type
8968 : int_n_trees[i].signed_type);
8969
8970 if (unsignedp)
8971 return make_unsigned_type (size);
8972 else
8973 return make_signed_type (size);
8974 }
8975
8976 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
8977
8978 static tree
8979 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
8980 {
8981 if (satp)
8982 {
8983 if (size == SHORT_FRACT_TYPE_SIZE)
8984 return unsignedp ? sat_unsigned_short_fract_type_node
8985 : sat_short_fract_type_node;
8986 if (size == FRACT_TYPE_SIZE)
8987 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
8988 if (size == LONG_FRACT_TYPE_SIZE)
8989 return unsignedp ? sat_unsigned_long_fract_type_node
8990 : sat_long_fract_type_node;
8991 if (size == LONG_LONG_FRACT_TYPE_SIZE)
8992 return unsignedp ? sat_unsigned_long_long_fract_type_node
8993 : sat_long_long_fract_type_node;
8994 }
8995 else
8996 {
8997 if (size == SHORT_FRACT_TYPE_SIZE)
8998 return unsignedp ? unsigned_short_fract_type_node
8999 : short_fract_type_node;
9000 if (size == FRACT_TYPE_SIZE)
9001 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9002 if (size == LONG_FRACT_TYPE_SIZE)
9003 return unsignedp ? unsigned_long_fract_type_node
9004 : long_fract_type_node;
9005 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9006 return unsignedp ? unsigned_long_long_fract_type_node
9007 : long_long_fract_type_node;
9008 }
9009
9010 return make_fract_type (size, unsignedp, satp);
9011 }
9012
9013 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9014
9015 static tree
9016 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9017 {
9018 if (satp)
9019 {
9020 if (size == SHORT_ACCUM_TYPE_SIZE)
9021 return unsignedp ? sat_unsigned_short_accum_type_node
9022 : sat_short_accum_type_node;
9023 if (size == ACCUM_TYPE_SIZE)
9024 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9025 if (size == LONG_ACCUM_TYPE_SIZE)
9026 return unsignedp ? sat_unsigned_long_accum_type_node
9027 : sat_long_accum_type_node;
9028 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9029 return unsignedp ? sat_unsigned_long_long_accum_type_node
9030 : sat_long_long_accum_type_node;
9031 }
9032 else
9033 {
9034 if (size == SHORT_ACCUM_TYPE_SIZE)
9035 return unsignedp ? unsigned_short_accum_type_node
9036 : short_accum_type_node;
9037 if (size == ACCUM_TYPE_SIZE)
9038 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9039 if (size == LONG_ACCUM_TYPE_SIZE)
9040 return unsignedp ? unsigned_long_accum_type_node
9041 : long_accum_type_node;
9042 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9043 return unsignedp ? unsigned_long_long_accum_type_node
9044 : long_long_accum_type_node;
9045 }
9046
9047 return make_accum_type (size, unsignedp, satp);
9048 }
9049
9050
9051 /* Create an atomic variant node for TYPE. This routine is called
9052 during initialization of data types to create the 5 basic atomic
9053 types. The generic build_variant_type function requires these to
9054 already be set up in order to function properly, so cannot be
9055 called from there. If ALIGN is non-zero, then ensure alignment is
9056 overridden to this value. */
9057
static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  /* ALIGN == 0 means keep the base type's alignment.  */
  if (align)
    SET_TYPE_ALIGN (t, align);

  return t;
}
9075
9076 /* Information about the _FloatN and _FloatNx types. This must be in
9077 the same order as the corresponding TI_* enum values. */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
{
  { 16, false },	/* _Float16 */
  { 32, false },	/* _Float32 */
  { 64, false },	/* _Float64 */
  { 128, false },	/* _Float128 */
  { 32, true },		/* _Float32x */
  { 64, true },		/* _Float64x */
  { 128, true },	/* _Float128x */
};
9088
9089
9090 /* Create nodes for all integer types (and error_mark_node) using the sizes
9091 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9092
void
build_common_tree_nodes (bool signed_char)
{
  int i;

  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build the __intN types; only register the enabled ones in
     integer_types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      /* size_t may also be spelled as an __intN type.  */
      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      /* ptrdiff_t may also be spelled as an __intN type.  */
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* Build the _FloatN/_FloatNx type nodes for every variant the target
     provides a mode for.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);
  if (targetm.scalar_mode_supported_p (TImode))
    uint128_type_node = make_or_reuse_type (128, 1);

  /* Decimal float types. */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  /* Complex counterparts of whichever _FloatN/_FloatNx types exist.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
      (GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
9429
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* "Noreturn" is encoded as TREE_THIS_VOLATILE on the FUNCTION_DECL.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* The remaining flags are represented as attributes rather than decl
     bits; tree_cons prepends, so later flags end up earlier in the list.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* "1 " is the fn-spec encoding for "returns its first argument".  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
		   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
9471
9472
9473 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9474
9475 static void
9476 local_define_builtin (const char *name, tree type, enum built_in_function code,
9477 const char *library_name, int ecf_flags)
9478 {
9479 tree decl;
9480
9481 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9482 library_name, NULL_TREE);
9483 set_call_expr_flags (decl, ecf_flags);
9484
9485 set_builtin_decl (code, decl, true);
9486 }
9487
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Control-flow builtins the middle-end may emit itself.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Block-copy builtins; only defined if the front end did not already
     declare them.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline and function-descriptor support for nested functions.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
			  BUILT_IN_CLEAR_CACHE,
			  "__clear_cache",
			  ECF_NOTHROW);

  /* Reuses FTYPE from __clear_cache above (void (*)(ptr, ptr)).  */
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Internal comparison variants used when only the equal/unequal answer
     is needed; all three share the memcmp-shaped FTYPE.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The external name depends on whether SJLJ or DWARF-style unwinding
     is in use for this configuration.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions hooks; note these get no ECF flags at all.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	/* mul/div take the real and imaginary parts of both operands
	   as four separate scalar arguments.  */
	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to build e.g. "__mulsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
9769
9770 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9771 better way.
9772
9773 If we requested a pointer to a vector, build up the pointers that
9774 we stripped off while looking for the inner type. Similarly for
9775 return values from functions.
9776
9777 The argument TYPE is the top of the chain, and BOTTOM is the
9778 new type which we will point to. */
9779
9780 tree
9781 reconstruct_complex_type (tree type, tree bottom)
9782 {
9783 tree inner, outer;
9784
9785 if (TREE_CODE (type) == POINTER_TYPE)
9786 {
9787 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9788 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9789 TYPE_REF_CAN_ALIAS_ALL (type));
9790 }
9791 else if (TREE_CODE (type) == REFERENCE_TYPE)
9792 {
9793 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9794 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9795 TYPE_REF_CAN_ALIAS_ALL (type));
9796 }
9797 else if (TREE_CODE (type) == ARRAY_TYPE)
9798 {
9799 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9800 outer = build_array_type (inner, TYPE_DOMAIN (type));
9801 }
9802 else if (TREE_CODE (type) == FUNCTION_TYPE)
9803 {
9804 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9805 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9806 }
9807 else if (TREE_CODE (type) == METHOD_TYPE)
9808 {
9809 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9810 /* The build_method_type_directly() routine prepends 'this' to argument list,
9811 so we must compensate by getting rid of it. */
9812 outer
9813 = build_method_type_directly
9814 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9815 inner,
9816 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9817 }
9818 else if (TREE_CODE (type) == OFFSET_TYPE)
9819 {
9820 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9821 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9822 }
9823 else
9824 return bottom;
9825
9826 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9827 TYPE_QUALS (type));
9828 }
9829
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    /* For a true vector mode the element count comes from the mode.  */
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      /* Other mode classes (including BLKmode, despite the header
	 comment mentioning it) are not accepted here.  */
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
9863
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  /* VOIDmode lets make_vector_type pick an appropriate mode itself.  */
  return make_vector_type (innertype, nunits, VOIDmode);
}
9872
9873 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9874
9875 tree
9876 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9877 {
9878 gcc_assert (mask_mode != BLKmode);
9879
9880 unsigned HOST_WIDE_INT esize;
9881 if (VECTOR_MODE_P (mask_mode))
9882 {
9883 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9884 esize = vector_element_size (vsize, nunits);
9885 }
9886 else
9887 esize = 1;
9888
9889 tree bool_type = build_nonstandard_boolean_type (esize);
9890
9891 return make_vector_type (bool_type, nunits, mask_mode);
9892 }
9893
9894 /* Build a vector type that holds one boolean result for each element of
9895 vector type VECTYPE. The public interface for this operation is
9896 truth_type_for. */
9897
9898 static tree
9899 build_truth_vector_type_for (tree vectype)
9900 {
9901 machine_mode vector_mode = TYPE_MODE (vectype);
9902 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
9903
9904 machine_mode mask_mode;
9905 if (VECTOR_MODE_P (vector_mode)
9906 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
9907 return build_truth_vector_type_for_mode (nunits, mask_mode);
9908
9909 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
9910 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
9911 tree bool_type = build_nonstandard_boolean_type (esize);
9912
9913 return make_vector_type (bool_type, nunits, VOIDmode);
9914 }
9915
/* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
   set.  */

tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  Splice CAND into the variant list directly
     behind T so the lookup above finds it next time.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
9941
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

static poly_wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.
     V2 is the element at position COUNT == 2 within the pattern, so
     the value at position COUNT is V2 plus (COUNT - 2) further steps
     of size DIFF.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
  return wi::to_poly_wide (v2) + (count - 2) * diff;
}
9969
/* Return the value of element I of VECTOR_CST T.  */

tree
vector_cst_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return VECTOR_CST_ENCODED_ELT (t, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    {
      /* Identify the pattern that contains element I and work out the index of
	 the last encoded element for that pattern.  */
      unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return VECTOR_CST_ENCODED_ELT (t, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements;
     a new INTEGER_CST node must be built for the extrapolated value.  */
  return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
			   vector_cst_int_elt (t, i));
}
9995
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a STRING_CST, set only by the MEM_REF case below.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* -0.0 in either part means the bit pattern is not all zeros.  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* Only a broadcast of a single zero element is all-zero.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* Recurse into each element; NONZERO propagates through so a
	   definitely-nonzero element marks the whole aggregate.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Accept a view of a STRING_CST through &str + constant offset,
	   then reuse the STRING_CST logic below.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  NOTE(review): the scan restarts at
	   index 0 even when OFF > 0 — any nonzero byte anywhere in the
	   string disqualifies it; confirm this matches callers'
	   expectations for offset views.  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
10123
/* Return true if EXPR is an initializer expression in which every element
   is a constant that is numerically equal to 0 or 1.  The elements do not
   need to be equal to each other.  */

bool
initializer_each_zero_or_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return integer_zerop (expr) || integer_onep (expr);

    case REAL_CST:
      return real_zerop (expr) || real_onep (expr);

    case VECTOR_CST:
      {
	/* Default to checking only the encoded elements; for a stepped
	   encoding every element must be checked, so is_constant
	   overwrites NELTS with the full element count when possible
	   (and we give up on variable-length stepped vectors).  */
	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
	if (VECTOR_CST_STEPPED_P (expr)
	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
	  return false;

	for (unsigned int i = 0; i < nelts; ++i)
	  {
	    tree elt = vector_cst_elt (expr, i);
	    if (!initializer_each_zero_or_onep (elt))
	      return false;
	  }

	return true;
      }

    default:
      return false;
    }
}
10162
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  /* A VECTOR_CST is uniform iff it is encoded as one duplicated pattern.  */
  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first one.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* Reject constructors that do not spell out every element.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
10211
10212 /* If the argument is INTEGER_CST, return it. If the argument is vector
10213 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10214 return NULL_TREE.
10215 Look through location wrappers. */
10216
10217 tree
10218 uniform_integer_cst_p (tree t)
10219 {
10220 STRIP_ANY_LOCATION_WRAPPER (t);
10221
10222 if (TREE_CODE (t) == INTEGER_CST)
10223 return t;
10224
10225 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10226 {
10227 t = uniform_vector_p (t);
10228 if (t && TREE_CODE (t) == INTEGER_CST)
10229 return t;
10230 }
10231
10232 return NULL_TREE;
10233 }
10234
/* If VECTOR_CST T has a single nonzero element, return the index of that
   element, otherwise return -1.  */

int
single_nonzero_element (const_tree t)
{
  /* NELTS is how many leading elements to scan; REPEAT_NELTS is the
     period after which the encoding repeats, so a nonzero element at or
     beyond it would recur and cannot be unique.  */
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    /* Variable-length and not a simple repeating encoding: give up.  */
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* Fail on a second nonzero element or one that repeats.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
10266
10267 /* Build an empty statement at location LOC. */
10268
10269 tree
10270 build_empty_stmt (location_t loc)
10271 {
10272 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10273 SET_EXPR_LOCATION (t, loc);
10274 return t;
10275 }
10276
10277
/* Build an OpenMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* Clauses are variable-sized: the struct already carries one operand
     slot, so presumably only LENGTH - 1 extra slots are needed — see
     struct tree_omp_clause.  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
10300
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_exp already has one operand slot, hence LEN - 1.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10327
10328 /* Helper function for build_call_* functions; build a CALL_EXPR with
10329 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10330 the argument slots. */
10331
10332 static tree
10333 build_call_1 (tree return_type, tree fn, int nargs)
10334 {
10335 tree t;
10336
10337 t = build_vl_exp (CALL_EXPR, nargs + 3);
10338 TREE_TYPE (t) = return_type;
10339 CALL_EXPR_FN (t) = fn;
10340 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10341
10342 return t;
10343 }
10344
10345 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10346 FN and a null static chain slot. NARGS is the number of call arguments
10347 which are specified as "..." arguments. */
10348
10349 tree
10350 build_call_nary (tree return_type, tree fn, int nargs, ...)
10351 {
10352 tree ret;
10353 va_list args;
10354 va_start (args, nargs);
10355 ret = build_call_valist (return_type, fn, nargs, args);
10356 va_end (args);
10357 return ret;
10358 }
10359
10360 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10361 FN and a null static chain slot. NARGS is the number of call arguments
10362 which are specified as a va_list ARGS. */
10363
10364 tree
10365 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10366 {
10367 tree t;
10368 int i;
10369
10370 t = build_call_1 (return_type, fn, nargs);
10371 for (i = 0; i < nargs; i++)
10372 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10373 process_call_operands (t);
10374 return t;
10375 }
10376
10377 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10378 FN and a null static chain slot. NARGS is the number of call arguments
10379 which are specified as a tree array ARGS. */
10380
10381 tree
10382 build_call_array_loc (location_t loc, tree return_type, tree fn,
10383 int nargs, const tree *args)
10384 {
10385 tree t;
10386 int i;
10387
10388 t = build_call_1 (return_type, fn, nargs);
10389 for (i = 0; i < nargs; i++)
10390 CALL_EXPR_ARG (t, i) = args[i];
10391 process_call_operands (t);
10392 SET_EXPR_LOCATION (t, loc);
10393 return t;
10394 }
10395
10396 /* Like build_call_array, but takes a vec. */
10397
10398 tree
10399 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10400 {
10401 tree ret, t;
10402 unsigned int ix;
10403
10404 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10405 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10406 CALL_EXPR_ARG (ret, ix) = t;
10407 process_call_operands (ret);
10408 return ret;
10409 }
10410
10411 /* Conveniently construct a function call expression. FNDECL names the
10412 function to be called and N arguments are passed in the array
10413 ARGARRAY. */
10414
10415 tree
10416 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10417 {
10418 tree fntype = TREE_TYPE (fndecl);
10419 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10420
10421 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10422 }
10423
10424 /* Conveniently construct a function call expression. FNDECL names the
10425 function to be called and the arguments are passed in the vector
10426 VEC. */
10427
10428 tree
10429 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10430 {
10431 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10432 vec_safe_address (vec));
10433 }
10434
10435
10436 /* Conveniently construct a function call expression. FNDECL names the
10437 function to be called, N is the number of arguments, and the "..."
10438 parameters are the argument expressions. */
10439
10440 tree
10441 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10442 {
10443 va_list ap;
10444 tree *argarray = XALLOCAVEC (tree, n);
10445 int i;
10446
10447 va_start (ap, n);
10448 for (i = 0; i < n; i++)
10449 argarray[i] = va_arg (ap, tree);
10450 va_end (ap);
10451 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10452 }
10453
10454 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10455 varargs macros aren't supported by all bootstrap compilers. */
10456
10457 tree
10458 build_call_expr (tree fndecl, int n, ...)
10459 {
10460 va_list ap;
10461 tree *argarray = XALLOCAVEC (tree, n);
10462 int i;
10463
10464 va_start (ap, n);
10465 for (i = 0; i < n; i++)
10466 argarray[i] = va_arg (ap, tree);
10467 va_end (ap);
10468 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10469 }
10470
10471 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10472 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10473 It will get gimplified later into an ordinary internal function. */
10474
10475 tree
10476 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10477 tree type, int n, const tree *args)
10478 {
10479 tree t = build_call_1 (type, NULL_TREE, n);
10480 for (int i = 0; i < n; ++i)
10481 CALL_EXPR_ARG (t, i) = args[i];
10482 SET_EXPR_LOCATION (t, loc);
10483 CALL_EXPR_IFN (t) = ifn;
10484 process_call_operands (t);
10485 return t;
10486 }
10487
10488 /* Build internal call expression. This is just like CALL_EXPR, except
10489 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10490 internal function. */
10491
10492 tree
10493 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10494 tree type, int n, ...)
10495 {
10496 va_list ap;
10497 tree *argarray = XALLOCAVEC (tree, n);
10498 int i;
10499
10500 va_start (ap, n);
10501 for (i = 0; i < n; i++)
10502 argarray[i] = va_arg (ap, tree);
10503 va_end (ap);
10504 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10505 }
10506
10507 /* Return a function call to FN, if the target is guaranteed to support it,
10508 or null otherwise.
10509
10510 N is the number of arguments, passed in the "...", and TYPE is the
10511 type of the return value. */
10512
10513 tree
10514 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10515 int n, ...)
10516 {
10517 va_list ap;
10518 tree *argarray = XALLOCAVEC (tree, n);
10519 int i;
10520
10521 va_start (ap, n);
10522 for (i = 0; i < n; i++)
10523 argarray[i] = va_arg (ap, tree);
10524 va_end (ap);
10525 if (internal_fn_p (fn))
10526 {
10527 internal_fn ifn = as_internal_fn (fn);
10528 if (direct_internal_fn_p (ifn))
10529 {
10530 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10531 if (!direct_internal_fn_supported_p (ifn, types,
10532 OPTIMIZE_FOR_BOTH))
10533 return NULL_TREE;
10534 }
10535 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10536 }
10537 else
10538 {
10539 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10540 if (!fndecl)
10541 return NULL_TREE;
10542 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10543 }
10544 }
10545
10546 /* Return a function call to the appropriate builtin alloca variant.
10547
10548 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10549 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10550 bound for SIZE in case it is not a fixed value. */
10551
10552 tree
10553 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10554 {
10555 if (max_size >= 0)
10556 {
10557 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10558 return
10559 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10560 }
10561 else if (align > 0)
10562 {
10563 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10564 return build_call_expr (t, 2, size, size_int (align));
10565 }
10566 else
10567 {
10568 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10569 return build_call_expr (t, 1, size);
10570 }
10571 }
10572
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   the STRING_CST value is the LEN bytes at STR (the representation
   of the string, which may be wide).  Otherwise it's all zeros.  */

tree
build_string_literal (unsigned len, const char *str /* = NULL */,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  /* The literal itself gets type "const ELTYPE[maxidx + 1]".  */
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &literal[0], typed as pointer to the const element type.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
10603
10604
10605
10606 /* Return true if T (assumed to be a DECL) must be assigned a memory
10607 location. */
10608
10609 bool
10610 needs_to_live_in_memory (const_tree t)
10611 {
10612 return (TREE_ADDRESSABLE (t)
10613 || is_global_var (t)
10614 || (TREE_CODE (t) == RESULT_DECL
10615 && !DECL_BY_REFERENCE (t)
10616 && aggregate_value_p (t, current_function_decl)));
10617 }
10618
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from the type's precision: fill every bit above
	 BITS-1 with a copy of the sign bit (set them for a negative
	 value, clear them otherwise).  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  /* The unsigned-to-signed conversion here relies on the usual
     two's-complement representation of HOST_WIDE_INT.  */
  return val;
}
10641
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned iff UNSIGNEDP is true, or itself
   if TYPE is already an integer type of signedness UNSIGNEDP.
   If TYPE is a floating-point type, return an integer type with the same
   bitsize and with the signedness given by UNSIGNEDP; this is useful
   when doing bit-level operations on a floating-point value.  */

tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  /* Fast path: already an integral type of the requested signedness.  */
  if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* For vectors, convert the element type and rebuild the vector
     around it, keeping the number of subparts.  */
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
	return NULL_TREE;
      if (inner == inner2)
	return type;
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  /* Likewise for complex types: convert the component type.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
	return NULL_TREE;
      if (inner == inner2)
	return type;
      return build_complex_type (inner2);
    }

  /* Determine the precision of the resulting integer type.  */
  unsigned int bits;
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    bits = TYPE_PRECISION (type);
  else if (TREE_CODE (type) == REAL_TYPE)
    /* For floats, use the full bitsize of the mode so the result can
       hold the float's bit pattern.  */
    bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
  else
    return NULL_TREE;

  return build_nonstandard_integer_type (bits, unsignedp);
}
10689
10690 /* If TYPE is an integral or pointer type, return an integer type with
10691 the same precision which is unsigned, or itself if TYPE is already an
10692 unsigned integer type. If TYPE is a floating-point type, return an
10693 unsigned integer type with the same bitsize as TYPE. */
10694
10695 tree
10696 unsigned_type_for (tree type)
10697 {
10698 return signed_or_unsigned_type_for (1, type);
10699 }
10700
10701 /* If TYPE is an integral or pointer type, return an integer type with
10702 the same precision which is signed, or itself if TYPE is already a
10703 signed integer type. If TYPE is a floating-point type, return a
10704 signed integer type with the same bitsize as TYPE. */
10705
10706 tree
10707 signed_type_for (tree type)
10708 {
10709 return signed_or_unsigned_type_for (0, type);
10710 }
10711
10712 /* If TYPE is a vector type, return a signed integer vector type with the
10713 same width and number of subparts. Otherwise return boolean_type_node. */
10714
10715 tree
10716 truth_type_for (tree type)
10717 {
10718 if (TREE_CODE (type) == VECTOR_TYPE)
10719 {
10720 if (VECTOR_BOOLEAN_TYPE_P (type))
10721 return type;
10722 return build_truth_vector_type_for (type);
10723 }
10724 else
10725 return boolean_type_node;
10726 }
10727
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination.  DET packs
     (oprec > iprec, OUTER unsigned, INNER unsigned) into bits 2, 1
     and 0 respectively.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* The bound is 2^PREC - 1: a mask of the low PREC bits, widened to
     OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
10780
/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      /* wi::mask with NEGATE set gives a value with all bits from
	 PREC-1 upward set, i.e. -2^(PREC-1) in OUTER's precision.  */
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
10810
10811 /* Return nonzero if two operands that are suitable for PHI nodes are
10812 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10813 SSA_NAME or invariant. Note that this is strictly an optimization.
10814 That is, callers of this function can directly call operand_equal_p
10815 and get the same result, only slower. */
10816
10817 int
10818 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10819 {
10820 if (arg0 == arg1)
10821 return 1;
10822 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10823 return 0;
10824 return operand_equal_p (arg0, arg1, 0);
10825 }
10826
10827 /* Returns number of zeros at the end of binary representation of X. */
10828
10829 tree
10830 num_ending_zeros (const_tree x)
10831 {
10832 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10833 }
10834
10835
/* Walk into NODE via walk_tree_1 and return from the enclosing function
   immediately if the walk produced a result.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)

/* This is a subroutine of walk_tree that walks fields of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
10924
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  LH, if non-NULL, is a
   language hook that gets a chance to walk (or veto walking) the node
   before the generic code does.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Walk NODE by looping back to the top of this function rather than
   recursing, so the last child of a node costs no extra stack.  Note a
   WALK_SUBTREE_TAIL leaves the enclosing switch via the goto, so cases
   ending in it deliberately have no break and do not fall through.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case VECTOR_CST:
      {
	unsigned len = vector_cst_encoded_nelts (*tp);
	if (len == 0)
	  break;
	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Dispatch on the clause kind; each group walks the operands the
	 clause actually has, then tail-calls onto the clause chain.  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_DEVICE_TYPE:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  /* Call the function for the decl so e.g. copy_tree_body_r can
	     replace it with the remapped one.  */
	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
#undef WALK_SUBTREE
11323
11324 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11325
11326 tree
11327 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11328 walk_tree_lh lh)
11329 {
11330 tree result;
11331
11332 hash_set<tree> pset;
11333 result = walk_tree_1 (tp, func, data, &pset, lh);
11334 return result;
11335 }
11336
11337
11338 tree
11339 tree_block (tree t)
11340 {
11341 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11342
11343 if (IS_EXPR_CODE_CLASS (c))
11344 return LOCATION_BLOCK (t->exp.locus);
11345 gcc_unreachable ();
11346 return NULL;
11347 }
11348
11349 void
11350 tree_set_block (tree t, tree b)
11351 {
11352 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11353
11354 if (IS_EXPR_CODE_CLASS (c))
11355 {
11356 t->exp.locus = set_block (t->exp.locus, b);
11357 }
11358 else
11359 gcc_unreachable ();
11360 }
11361
11362 /* Create a nameless artificial label and put it in the current
11363 function context. The label has a location of LOC. Returns the
11364 newly created label. */
11365
11366 tree
11367 create_artificial_label (location_t loc)
11368 {
11369 tree lab = build_decl (loc,
11370 LABEL_DECL, NULL_TREE, void_type_node);
11371
11372 DECL_ARTIFICIAL (lab) = 1;
11373 DECL_IGNORED_P (lab) = 1;
11374 DECL_CONTEXT (lab) = current_function_decl;
11375 return lab;
11376 }
11377
11378 /* Given a tree, try to return a useful variable name that we can use
11379 to prefix a temporary that is being assigned the value of the tree.
11380 I.E. given <temp> = &A, return A. */
11381
11382 const char *
11383 get_name (tree t)
11384 {
11385 tree stripped_decl;
11386
11387 stripped_decl = t;
11388 STRIP_NOPS (stripped_decl);
11389 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11390 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11391 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11392 {
11393 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11394 if (!name)
11395 return NULL;
11396 return IDENTIFIER_POINTER (name);
11397 }
11398 else
11399 {
11400 switch (TREE_CODE (stripped_decl))
11401 {
11402 case ADDR_EXPR:
11403 return get_name (TREE_OPERAND (stripped_decl, 0));
11404 default:
11405 return NULL;
11406 }
11407 }
11408 }
11409
11410 /* Return true if TYPE has a variable argument list. */
11411
11412 bool
11413 stdarg_p (const_tree fntype)
11414 {
11415 function_args_iterator args_iter;
11416 tree n = NULL_TREE, t;
11417
11418 if (!fntype)
11419 return false;
11420
11421 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11422 {
11423 n = t;
11424 }
11425
11426 return n != NULL_TREE && n != void_type_node;
11427 }
11428
11429 /* Return true if TYPE has a prototype. */
11430
11431 bool
11432 prototype_p (const_tree fntype)
11433 {
11434 tree t;
11435
11436 gcc_assert (fntype != NULL_TREE);
11437
11438 t = TYPE_ARG_TYPES (fntype);
11439 return (t != NULL_TREE);
11440 }
11441
11442 /* If BLOCK is inlined from an __attribute__((__artificial__))
11443 routine, return pointer to location from where it has been
11444 called. */
11445 location_t *
11446 block_nonartificial_location (tree block)
11447 {
11448 location_t *ret = NULL;
11449
11450 while (block && TREE_CODE (block) == BLOCK
11451 && BLOCK_ABSTRACT_ORIGIN (block))
11452 {
11453 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11454 if (TREE_CODE (ao) == FUNCTION_DECL)
11455 {
11456 /* If AO is an artificial inline, point RET to the
11457 call site locus at which it has been inlined and continue
11458 the loop, in case AO's caller is also an artificial
11459 inline. */
11460 if (DECL_DECLARED_INLINE_P (ao)
11461 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11462 ret = &BLOCK_SOURCE_LOCATION (block);
11463 else
11464 break;
11465 }
11466 else if (TREE_CODE (ao) != BLOCK)
11467 break;
11468
11469 block = BLOCK_SUPERCONTEXT (block);
11470 }
11471 return ret;
11472 }
11473
11474
11475 /* If EXP is inlined from an __attribute__((__artificial__))
11476 function, return the location of the original call expression. */
11477
11478 location_t
11479 tree_nonartificial_location (tree exp)
11480 {
11481 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11482
11483 if (loc)
11484 return *loc;
11485 else
11486 return EXPR_LOCATION (exp);
11487 }
11488
/* Return the location into which EXP has been inlined.  Analogous
   to tree_nonartificial_location() above but not limited to artificial
   functions declared inline.  If SYSTEM_HEADER is true, return
   the macro expansion point of the location if it's in a system header.  */
11493
11494 location_t
11495 tree_inlined_location (tree exp, bool system_header /* = true */)
11496 {
11497 location_t loc = UNKNOWN_LOCATION;
11498
11499 tree block = TREE_BLOCK (exp);
11500
11501 while (block && TREE_CODE (block) == BLOCK
11502 && BLOCK_ABSTRACT_ORIGIN (block))
11503 {
11504 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11505 if (TREE_CODE (ao) == FUNCTION_DECL)
11506 loc = BLOCK_SOURCE_LOCATION (block);
11507 else if (TREE_CODE (ao) != BLOCK)
11508 break;
11509
11510 block = BLOCK_SUPERCONTEXT (block);
11511 }
11512
11513 if (loc == UNKNOWN_LOCATION)
11514 {
11515 loc = EXPR_LOCATION (exp);
11516 if (system_header)
11517 /* Only consider macro expansion when the block traversal failed
11518 to find a location. Otherwise it's not relevant. */
11519 return expansion_point_location_if_in_system_header (loc);
11520 }
11521
11522 return loc;
11523 }
11524
11525 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11526 nodes. */
11527
11528 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11529
hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the embedded cl_optimization struct.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    /* Target options have their own target-supplied hash function.  */
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  Skip zero bytes so sparse settings hash cheaply.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
11559
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y, which is a
   node of the same kind.  */
11563
11564 bool
11565 cl_option_hasher::equal (tree x, tree y)
11566 {
11567 const_tree const xt = x;
11568 const_tree const yt = y;
11569
11570 if (TREE_CODE (xt) != TREE_CODE (yt))
11571 return 0;
11572
11573 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11574 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11575 TREE_OPTIMIZATION (yt));
11576 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11577 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11578 TREE_TARGET_OPTION (yt));
11579 else
11580 gcc_unreachable ();
11581 }
11582
11583 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11584
tree
build_optimization_node (struct gcc_options *opts,
			 struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Fill the pre-allocated scratch node with the current settings so
     it can serve as the hash-table lookup key.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  The scratch node becomes
	 the canonical node for these settings.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
11610
11611 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11612
tree
build_target_option_node (struct gcc_options *opts,
			  struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Fill the pre-allocated scratch node with the current settings so
     it can serve as the hash-table lookup key.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  The scratch node becomes
	 the canonical node for these settings.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
11638
11639 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11640 so that they aren't saved during PCH writing. */
11641
11642 void
11643 prepare_target_option_nodes_for_pch (void)
11644 {
11645 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11646 for (; iter != cl_option_hash_table->end (); ++iter)
11647 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11648 TREE_TARGET_GLOBALS (*iter) = NULL;
11649 }
11650
11651 /* Determine the "ultimate origin" of a block. */
11652
11653 tree
11654 block_ultimate_origin (const_tree block)
11655 {
11656 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11657
11658 if (origin == NULL_TREE)
11659 return NULL_TREE;
11660 else
11661 {
11662 gcc_checking_assert ((DECL_P (origin)
11663 && DECL_ORIGIN (origin) == origin)
11664 || BLOCK_ORIGIN (origin) == origin);
11665 return origin;
11666 }
11667 }
11668
11669 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11670 no instruction. */
11671
bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather then machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.
     Conversions between integral-like types (integers, pointers,
     offsets) are free exactly when the precisions match.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
11706
11707 /* Return true iff conversion in EXP generates no instruction. Mark
11708 it inline so that we fully inline into the stripping functions even
11709 though we have two uses of this function. */
11710
11711 static inline bool
11712 tree_nop_conversion (const_tree exp)
11713 {
11714 tree outer_type, inner_type;
11715
11716 if (location_wrapper_p (exp))
11717 return true;
11718 if (!CONVERT_EXPR_P (exp)
11719 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11720 return false;
11721
11722 outer_type = TREE_TYPE (exp);
11723 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11724 if (!inner_type || inner_type == error_mark_node)
11725 return false;
11726
11727 return tree_nop_conversion_p (outer_type, inner_type);
11728 }
11729
11730 /* Return true iff conversion in EXP generates no instruction. Don't
11731 consider conversions changing the signedness. */
11732
11733 static bool
11734 tree_sign_nop_conversion (const_tree exp)
11735 {
11736 tree outer_type, inner_type;
11737
11738 if (!tree_nop_conversion (exp))
11739 return false;
11740
11741 outer_type = TREE_TYPE (exp);
11742 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11743
11744 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11745 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11746 }
11747
11748 /* Strip conversions from EXP according to tree_nop_conversion and
11749 return the resulting expression. */
11750
11751 tree
11752 tree_strip_nop_conversions (tree exp)
11753 {
11754 while (tree_nop_conversion (exp))
11755 exp = TREE_OPERAND (exp, 0);
11756 return exp;
11757 }
11758
11759 /* Strip conversions from EXP according to tree_sign_nop_conversion
11760 and return the resulting expression. */
11761
11762 tree
11763 tree_strip_sign_nop_conversions (tree exp)
11764 {
11765 while (tree_sign_nop_conversion (exp))
11766 exp = TREE_OPERAND (exp, 0);
11767 return exp;
11768 }
11769
11770 /* Avoid any floating point extensions from EXP. */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double; pick the narrowest type that
	 represents the value exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  /* Only conversions can be float extensions.  */
  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  /* The operand must itself be a float for EXP to be an extension.  */
  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Do not look through conversions between binary and decimal
     floating point.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* A narrowing conversion is not an extension; keep it.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse to strip nested extensions.  */
  return strip_float_extensions (sub);
}
11815
11816 /* Strip out all handled components that produce invariant
11817 offsets. */
11818
const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* A non-constant index, or an explicit lower bound (operand 2)
	     or element size (operand 3), makes the offset variable.  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* A variable field offset (operand 2) is not invariant.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
11846
11847 static GTY(()) tree gcc_eh_personality_decl;
11848
11849 /* Return the GCC personality function decl. */
11850
11851 tree
11852 lhd_gcc_personality (void)
11853 {
11854 if (!gcc_eh_personality_decl)
11855 gcc_eh_personality_decl = build_personality_function ("gcc");
11856 return gcc_eh_personality_decl;
11857 }
11858
11859 /* TARGET is a call target of GIMPLE call statement
11860 (obtained by gimple_call_fn). Return true if it is
11861 OBJ_TYPE_REF representing an virtual call of C++ method.
11862 (As opposed to OBJ_TYPE_REF representing objc calls
11863 through a cast where middle-end devirtualization machinery
11864 can't apply.) FOR_DUMP_P is true when being called from
11865 the dump routines. */
11866
bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* TARGET is a pointer to the callee; look at the pointed-to type.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* A plain FUNCTION_TYPE here means an objc-style call through a
     cast, not a C++ virtual call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
11885
11886 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11887
static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  /* Depth-first search of BINFO's bases: return the first base whose
     BINFO_OFFSET matches POS and whose type is TYPE for ODR purposes,
     otherwise recurse into each base in turn.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL;
}
11902
11903 /* Try to find a base info of BINFO that would have its field decl at offset
11904 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11905 found, return, otherwise return NULL_TREE. */
11906
tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  /* Descend one artificial field per iteration until the expected type
     is reached or the walk fails.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial field (i.e. a base subobject) that
	 contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* Look for a direct base at that byte offset with the field's
	     type; failing that, search nested bases recursively.  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Continue the search inside the field just entered.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
11964
11965 /* Returns true if X is a typedef decl. */
11966
11967 bool
11968 is_typedef_decl (const_tree x)
11969 {
11970 return (x && TREE_CODE (x) == TYPE_DECL
11971 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11972 }
11973
11974 /* Returns true iff TYPE is a type variant created for a typedef. */
11975
bool
typedef_variant_p (const_tree type)
{
  /* TYPE_NAME holds the TYPE_DECL naming the type, if any; the variant
     came from a typedef iff that decl is a typedef decl.  */
  return is_typedef_decl (TYPE_NAME (type));
}
11981
11982 /* PR 84195: Replace control characters in "unescaped" with their
11983 escaped equivalents. Allow newlines if -fmessage-length has
11984 been set to a non-zero value. This is done here, rather than
11985 where the attribute is recorded as the message length can
11986 change between these two locations. */
11987
void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any string this object previously owned.  */
  if (m_owned)
    free (m_str);

  /* Initially alias the input; only if a control character is found do
     we allocate and take ownership of an escaped copy.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (!ISCNTRL (c))
	{
	  /* Ordinary character: copy it only once we have switched to
	     the escaped buffer.  */
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Newlines are left alone when the pretty-printer wraps lines
	 (-fmessage-length non-zero); all other control characters are
	 escaped.  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Each escape is at most two bytes,
		 so len * 2 + 1 always suffices.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default:   escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      /* Terminate the copy and take ownership of it.  */
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12055
12056 /* Warn about a use of an identifier which was marked deprecated. Returns
12057 whether a warning was given. */
12058
bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* If the caller did not supply the attribute list, find it on the
     decl or (for types) on the type's stub decl.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* Extract the optional message argument, escaping any control
     characters it contains (PR 84195).  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      /* Group the warning with its "declared here" note.  */
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* Try to find something to name the type by.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  /* Anonymous type: warn without naming it.  */
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
12139
12140 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12141 somewhere in it. */
12142
12143 bool
12144 contains_bitfld_component_ref_p (const_tree ref)
12145 {
12146 while (handled_component_p (ref))
12147 {
12148 if (TREE_CODE (ref) == COMPONENT_REF
12149 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12150 return true;
12151 ref = TREE_OPERAND (ref, 0);
12152 }
12153
12154 return false;
12155 }
12156
12157 /* Try to determine whether a TRY_CATCH expression can fall through.
12158 This is a subroutine of block_may_fallthru. */
12159
static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Inspect the first statement of the handler sequence to determine
     what kind of handler this is.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
12204
12205 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12206 need not be 100% accurate; simply be conservative and return true if we
12207 don't know. This is used only to avoid stupidly generating extra code.
12208 If we're wrong, we'll just delete the extra code later. */
12209
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block (NULL last statement) is treated like ERROR_MARK
     below: conservatively assume it can fall through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* A call on the RHS is what determines fallthrough; any other
	 assignment always falls through.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
12283
12284 /* True if we are using EH to handle cleanups. */
12285 static bool using_eh_for_cleanups_flag = false;
12286
12287 /* This routine is called from front ends to indicate eh should be used for
12288 cleanups. */
void
using_eh_for_cleanups (void)
{
  /* Latches to true for the rest of the compilation; never reset.  */
  using_eh_for_cleanups_flag = true;
}
12294
12295 /* Query whether EH is used for cleanups. */
bool
using_eh_for_cleanups_p (void)
{
  /* Read-only accessor for the flag set by using_eh_for_cleanups.  */
  return using_eh_for_cleanups_flag;
}
12301
12302 /* Wrapper for tree_code_name to ensure that tree code is valid */
12303 const char *
12304 get_tree_code_name (enum tree_code code)
12305 {
12306 const char *invalid = "<invalid tree code>";
12307
12308 /* The tree_code enum promotes to signed, but we could be getting
12309 invalid values, so force an unsigned comparison. */
12310 if (unsigned (code) >= MAX_TREE_CODES)
12311 {
12312 if ((unsigned)code == 0xa5a5)
12313 return "ggc_freed";
12314 return invalid;
12315 }
12316
12317 return tree_code_name[code];
12318 }
12319
12320 /* Drops the TREE_OVERFLOW flag from T. */
12321
tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      /* Drop the flag elementwise, recursing since elements may
	 themselves be constants with nested overflow.  */
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
12365
12366 /* Given a memory reference expression T, return its base address.
12367 The base address of a memory reference expression is the main
12368 object being referenced. For instance, the base address for
12369 'array[i].fld[j]' is 'array'. You can think of this as stripping
12370 away the offset part from a memory address.
12371
12372 This function calls handled_component_p to strip away all the inner
12373 parts of the memory reference until it reaches the base object. */
12374
12375 tree
12376 get_base_address (tree t)
12377 {
12378 while (handled_component_p (t))
12379 t = TREE_OPERAND (t, 0);
12380
12381 if ((TREE_CODE (t) == MEM_REF
12382 || TREE_CODE (t) == TARGET_MEM_REF)
12383 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12384 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12385
12386 /* ??? Either the alias oracle or all callers need to properly deal
12387 with WITH_SIZE_EXPRs before we can look through those. */
12388 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12389 return NULL_TREE;
12390
12391 return t;
12392 }
12393
12394 /* Return a tree of sizetype representing the size, in bytes, of the element
12395 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12396
tree
array_ref_element_size (tree exp)
{
  /* Operand 3, if present, is the element size measured in alignment
     units of the element type.  */
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
12421
12422 /* Return a tree representing the lower bound of the array mentioned in
12423 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12424
12425 tree
12426 array_ref_low_bound (tree exp)
12427 {
12428 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12429
12430 /* If a lower bound is specified in EXP, use it. */
12431 if (TREE_OPERAND (exp, 2))
12432 return TREE_OPERAND (exp, 2);
12433
12434 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12435 substituting for a PLACEHOLDER_EXPR as needed. */
12436 if (domain_type && TYPE_MIN_VALUE (domain_type))
12437 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12438
12439 /* Otherwise, return a zero of the appropriate type. */
12440 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12441 return (idxtype == error_mark_node
12442 ? integer_zero_node : build_int_cst (idxtype, 0));
12443 }
12444
12445 /* Return a tree representing the upper bound of the array mentioned in
12446 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12447
12448 tree
12449 array_ref_up_bound (tree exp)
12450 {
12451 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12452
12453 /* If there is a domain type and it has an upper bound, use it, substituting
12454 for a PLACEHOLDER_EXPR as needed. */
12455 if (domain_type && TYPE_MAX_VALUE (domain_type))
12456 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12457
12458 /* Otherwise fail. */
12459 return NULL_TREE;
12460 }
12461
12462 /* Returns true if REF is an array reference, component reference,
12463 or memory reference to an array at the end of a structure.
12464 If this is the case, the array may be allocated larger
12465 than its upper bound implies. */
12466
bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  /* Step 1: classify REF and record the type of the array being
     referenced in ATYPE; any other tree shape cannot be an
     at-struct-end array access.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      /* For a MEM_REF only a record whose last field is an array
	 qualifies; the array type comes from that trailing field.  */
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      /* A declared variable whose trailing member has a fixed
		 size cannot be extended past that size.  */
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Step 2: walk the remaining reference chain outward and verify the
     array really sits at the end of every enclosing aggregate.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip non-FIELD_DECL chain entries (e.g. other decls on
		 the member list) when looking for a following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
         a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
         gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* Look through MEM_REF (&decl, off) to the underlying decl.  */
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  if (DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
         padding.  */
      poly_int64 offset;
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
12588
12589 /* Return a tree representing the offset, in bytes, of the field referenced
12590 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12591
12592 tree
12593 component_ref_field_offset (tree exp)
12594 {
12595 tree aligned_offset = TREE_OPERAND (exp, 2);
12596 tree field = TREE_OPERAND (exp, 1);
12597 location_t loc = EXPR_LOCATION (exp);
12598
12599 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12600 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12601 value. */
12602 if (aligned_offset)
12603 {
12604 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12605 sizetype from another type of the same width and signedness. */
12606 if (TREE_TYPE (aligned_offset) != sizetype)
12607 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12608 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12609 size_int (DECL_OFFSET_ALIGN (field)
12610 / BITS_PER_UNIT));
12611 }
12612
12613 /* Otherwise, take the offset from that of the field. Substitute
12614 any PLACEHOLDER_EXPR that we have. */
12615 else
12616 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12617 }
12618
12619 /* Given the initializer INIT, return the initializer for the field
12620 DECL if it exists, otherwise null. Used to obtain the initializer
12621 for a flexible array member and determine its size. */
12622
12623 static tree
12624 get_initializer_for (tree init, tree decl)
12625 {
12626 STRIP_NOPS (init);
12627
12628 tree fld, fld_init;
12629 unsigned HOST_WIDE_INT i;
12630 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12631 {
12632 if (decl == fld)
12633 return fld_init;
12634
12635 if (TREE_CODE (fld) == CONSTRUCTOR)
12636 {
12637 fld_init = get_initializer_for (fld_init, decl);
12638 if (fld_init)
12639 return fld_init;
12640 }
12641 }
12642
12643 return NULL_TREE;
12644 }
12645
12646 /* Determines the size of the member referenced by the COMPONENT_REF
12647 REF, using its initializer expression if necessary in order to
12648 determine the size of an initialized flexible array member.
12649 If non-null, set *ARK when REF refers to an interior zero-length
12650 array or a trailing one-element array.
12651 Returns the size as sizetype (which might be zero for an object
12652 with an uninitialized flexible array member) or null if the size
12653 cannot be determined. */
12654
tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* Let SAM point at a local buffer when the caller doesn't care,
     so the rest of the function can set it unconditionally.  */
  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      /* Classify the special array member: trailing zero-length,
	 interior zero-length (size becomes unknown), or below,
	 trailing one-element.  */
      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      /* Only interior zero-length arrays reached through a chain of
	 COMPONENT_REFs are handled without a variable base.  */
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      /* The initializer-derived size already accounts for the
		 member's position; reset the offset.  */
	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      /* NOTE(review): the inner `memsize ?' test is redundant here --
	 this branch is only entered when MEMSIZE is non-null.  */
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
12841
12842 /* Return the machine mode of T. For vectors, returns the mode of the
12843 inner type. The main use case is to feed the result to HONOR_NANS,
12844 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12845
12846 machine_mode
12847 element_mode (const_tree t)
12848 {
12849 if (!TYPE_P (t))
12850 t = TREE_TYPE (t);
12851 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12852 t = TREE_TYPE (t);
12853 return TYPE_MODE (t);
12854 }
12855
12856 /* Vector types need to re-check the target flags each time we report
12857 the machine mode. We need to do this because attribute target can
12858 change the result of vector_mode_supported_p and have_regs_of_mode
12859 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12860 change on a per-function basis. */
12861 /* ??? Possibly a better solution is to run through all the types
12862 referenced by a function and re-compute the TYPE_MODE once, rather
12863 than make the TYPE_MODE macro call a function. */
12864
12865 machine_mode
12866 vector_type_mode (const_tree t)
12867 {
12868 machine_mode mode;
12869
12870 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12871
12872 mode = t->type_common.mode;
12873 if (VECTOR_MODE_P (mode)
12874 && (!targetm.vector_mode_supported_p (mode)
12875 || !have_regs_of_mode[mode]))
12876 {
12877 scalar_int_mode innermode;
12878
12879 /* For integers, try mapping it to a same-sized scalar mode. */
12880 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12881 {
12882 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12883 * GET_MODE_BITSIZE (innermode));
12884 scalar_int_mode mode;
12885 if (int_mode_for_size (size, 0).exists (&mode)
12886 && have_regs_of_mode[mode])
12887 return mode;
12888 }
12889
12890 return BLKmode;
12891 }
12892
12893 return mode;
12894 }
12895
12896 /* Return the size in bits of each element of vector type TYPE. */
12897
12898 unsigned int
12899 vector_element_bits (const_tree type)
12900 {
12901 gcc_checking_assert (VECTOR_TYPE_P (type));
12902 if (VECTOR_BOOLEAN_TYPE_P (type))
12903 return TYPE_PRECISION (TREE_TYPE (type));
12904 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
12905 }
12906
12907 /* Calculate the size in bits of each element of vector type TYPE
12908 and return the result as a tree of type bitsizetype. */
12909
12910 tree
12911 vector_element_bits_tree (const_tree type)
12912 {
12913 gcc_checking_assert (VECTOR_TYPE_P (type));
12914 if (VECTOR_BOOLEAN_TYPE_P (type))
12915 return bitsize_int (vector_element_bits (type));
12916 return TYPE_SIZE (TREE_TYPE (type));
12917 }
12918
12919 /* Verify that basic properties of T match TV and thus T can be a variant of
12920 TV. TV should be the more specified variant (i.e. the main variant). */
12921
static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types form different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  On a mismatch
     it reports an error, dumps the variant, and returns false from
     the enclosing function.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
#if 0
  if (TYPE_ARTIFICIAL (tv))
    verify_variant_match (TYPE_ARTIFICIAL);
#endif
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check trigger during libstdc++ build.  */
#if 0
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
    verify_variant_match (TYPE_FINAL_P);
#endif

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* PLACEHOLDER_EXPR sizes cannot be compared by pointer identity;
	 skip the check for either side containing one.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from diferent translation units
     that may differ BY TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
#if 0
  if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
    verify_variant_match (TYPE_CONTEXT);
#endif
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
       || TREE_CODE (t) == INTEGER_TYPE
       || TREE_CODE (t) == BOOLEAN_TYPE
       || TREE_CODE (t) == REAL_TYPE
       || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first an dummy BINFO and then sometimes replace it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type. Verify that they looks same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      /* The loop exits early on the first mismatching field pair, or
	 with F1/F2 both NULL when the lists matched to the end.  */
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
13159
13160
13161 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13162 the middle-end types_compatible_p function. It needs to avoid
13163 claiming types are different for types that should be treated
13164 the same with respect to TBAA. Canonical types are also used
13165 for IL consistency checks via the useless_type_conversion_p
13166 predicate which does not handle all type kinds itself but falls
13167 back to pointer-comparison of TYPE_CANONICAL for aggregates
13168 for example. */
13169
13170 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13171 type calculation because we need to allow inter-operability between signed
13172 and unsigned variants. */
13173
13174 bool
13175 type_with_interoperable_signedness (const_tree type)
13176 {
13177 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13178 signed char and unsigned char. Similarly fortran FE builds
13179 C_SIZE_T as signed type, while C defines it unsigned. */
13180
13181 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13182 == INTEGER_TYPE
13183 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13184 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13185 }
13186
13187 /* Return true iff T1 and T2 are structurally identical for what
13188 TBAA is concerned.
13189 This function is used both by lto.c canonical type merging and by the
13190 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13191 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13192 only for LTO because only in these cases TYPE_CANONICAL equivalence
13193 correspond to one defined by gimple_canonical_types_compatible_p. */
13194
13195 bool
13196 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13197 bool trust_type_canonical)
13198 {
13199 /* Type variants should be same as the main variant. When not doing sanity
13200 checking to verify this fact, go to main variants and save some work. */
13201 if (trust_type_canonical)
13202 {
13203 t1 = TYPE_MAIN_VARIANT (t1);
13204 t2 = TYPE_MAIN_VARIANT (t2);
13205 }
13206
13207 /* Check first for the obvious case of pointer identity. */
13208 if (t1 == t2)
13209 return true;
13210
13211 /* Check that we have two types to compare. */
13212 if (t1 == NULL_TREE || t2 == NULL_TREE)
13213 return false;
13214
13215 /* We consider complete types always compatible with incomplete type.
13216 This does not make sense for canonical type calculation and thus we
13217 need to ensure that we are never called on it.
13218
13219 FIXME: For more correctness the function probably should have three modes
13220 1) mode assuming that types are complete mathcing their structure
13221 2) mode allowing incomplete types but producing equivalence classes
13222 and thus ignoring all info from complete types
13223 3) mode allowing incomplete types to match complete but checking
13224 compatibility between complete types.
13225
13226 1 and 2 can be used for canonical type calculation. 3 is the real
13227 definition of type compatibility that can be used i.e. for warnings during
13228 declaration merging. */
13229
13230 gcc_assert (!trust_type_canonical
13231 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13232
13233 /* If the types have been previously registered and found equal
13234 they still are. */
13235
13236 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13237 && trust_type_canonical)
13238 {
13239 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13240 they are always NULL, but they are set to non-NULL for types
13241 constructed by build_pointer_type and variants. In this case the
13242 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13243 all pointers are considered equal. Be sure to not return false
13244 negatives. */
13245 gcc_checking_assert (canonical_type_used_p (t1)
13246 && canonical_type_used_p (t2));
13247 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13248 }
13249
13250 /* For types where we do ODR based TBAA the canonical type is always
13251 set correctly, so we know that types are different if their
13252 canonical types does not match. */
13253 if (trust_type_canonical
13254 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13255 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13256 return false;
13257
13258 /* Can't be the same type if the types don't have the same code. */
13259 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13260 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13261 return false;
13262
13263 /* Qualifiers do not matter for canonical type comparison purposes. */
13264
13265 /* Void types and nullptr types are always the same. */
13266 if (TREE_CODE (t1) == VOID_TYPE
13267 || TREE_CODE (t1) == NULLPTR_TYPE)
13268 return true;
13269
13270 /* Can't be the same type if they have different mode. */
13271 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13272 return false;
13273
13274 /* Non-aggregate types can be handled cheaply. */
13275 if (INTEGRAL_TYPE_P (t1)
13276 || SCALAR_FLOAT_TYPE_P (t1)
13277 || FIXED_POINT_TYPE_P (t1)
13278 || TREE_CODE (t1) == VECTOR_TYPE
13279 || TREE_CODE (t1) == COMPLEX_TYPE
13280 || TREE_CODE (t1) == OFFSET_TYPE
13281 || POINTER_TYPE_P (t1))
13282 {
13283 /* Can't be the same type if they have different recision. */
13284 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13285 return false;
13286
13287 /* In some cases the signed and unsigned types are required to be
13288 inter-operable. */
13289 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13290 && !type_with_interoperable_signedness (t1))
13291 return false;
13292
13293 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13294 interoperable with "signed char". Unless all frontends are revisited
13295 to agree on these types, we must ignore the flag completely. */
13296
13297 /* Fortran standard define C_PTR type that is compatible with every
13298 C pointer. For this reason we need to glob all pointers into one.
13299 Still pointers in different address spaces are not compatible. */
13300 if (POINTER_TYPE_P (t1))
13301 {
13302 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13303 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13304 return false;
13305 }
13306
13307 /* Tail-recurse to components. */
13308 if (TREE_CODE (t1) == VECTOR_TYPE
13309 || TREE_CODE (t1) == COMPLEX_TYPE)
13310 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13311 TREE_TYPE (t2),
13312 trust_type_canonical);
13313
13314 return true;
13315 }
13316
13317 /* Do type-specific comparisons. */
13318 switch (TREE_CODE (t1))
13319 {
13320 case ARRAY_TYPE:
13321 /* Array types are the same if the element types are the same and
13322 the number of elements are the same. */
13323 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13324 trust_type_canonical)
13325 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13326 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13327 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13328 return false;
13329 else
13330 {
13331 tree i1 = TYPE_DOMAIN (t1);
13332 tree i2 = TYPE_DOMAIN (t2);
13333
13334 /* For an incomplete external array, the type domain can be
13335 NULL_TREE. Check this condition also. */
13336 if (i1 == NULL_TREE && i2 == NULL_TREE)
13337 return true;
13338 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13339 return false;
13340 else
13341 {
13342 tree min1 = TYPE_MIN_VALUE (i1);
13343 tree min2 = TYPE_MIN_VALUE (i2);
13344 tree max1 = TYPE_MAX_VALUE (i1);
13345 tree max2 = TYPE_MAX_VALUE (i2);
13346
13347 /* The minimum/maximum values have to be the same. */
13348 if ((min1 == min2
13349 || (min1 && min2
13350 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13351 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13352 || operand_equal_p (min1, min2, 0))))
13353 && (max1 == max2
13354 || (max1 && max2
13355 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13356 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13357 || operand_equal_p (max1, max2, 0)))))
13358 return true;
13359 else
13360 return false;
13361 }
13362 }
13363
13364 case METHOD_TYPE:
13365 case FUNCTION_TYPE:
13366 /* Function types are the same if the return type and arguments types
13367 are the same. */
13368 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13369 trust_type_canonical))
13370 return false;
13371
13372 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13373 return true;
13374 else
13375 {
13376 tree parms1, parms2;
13377
13378 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13379 parms1 && parms2;
13380 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13381 {
13382 if (!gimple_canonical_types_compatible_p
13383 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13384 trust_type_canonical))
13385 return false;
13386 }
13387
13388 if (parms1 || parms2)
13389 return false;
13390
13391 return true;
13392 }
13393
13394 case RECORD_TYPE:
13395 case UNION_TYPE:
13396 case QUAL_UNION_TYPE:
13397 {
13398 tree f1, f2;
13399
13400 /* Don't try to compare variants of an incomplete type, before
13401 TYPE_FIELDS has been copied around. */
13402 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13403 return true;
13404
13405
13406 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13407 return false;
13408
13409 /* For aggregate types, all the fields must be the same. */
13410 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13411 f1 || f2;
13412 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13413 {
13414 /* Skip non-fields and zero-sized fields. */
13415 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13416 || (DECL_SIZE (f1)
13417 && integer_zerop (DECL_SIZE (f1)))))
13418 f1 = TREE_CHAIN (f1);
13419 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13420 || (DECL_SIZE (f2)
13421 && integer_zerop (DECL_SIZE (f2)))))
13422 f2 = TREE_CHAIN (f2);
13423 if (!f1 || !f2)
13424 break;
13425 /* The fields must have the same name, offset and type. */
13426 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13427 || !gimple_compare_field_offset (f1, f2)
13428 || !gimple_canonical_types_compatible_p
13429 (TREE_TYPE (f1), TREE_TYPE (f2),
13430 trust_type_canonical))
13431 return false;
13432 }
13433
13434 /* If one aggregate has more fields than the other, they
13435 are not the same. */
13436 if (f1 || f2)
13437 return false;
13438
13439 return true;
13440 }
13441
13442 default:
13443 /* Consider all types with language specific trees in them mutually
13444 compatible. This is executed only from verify_type and false
13445 positives can be tolerated. */
13446 gcc_assert (!in_lto_p);
13447 return true;
13448 }
13449 }
13450
13451 /* Verify type T. */
13452
13453 void
13454 verify_type (const_tree t)
13455 {
13456 bool error_found = false;
13457 tree mv = TYPE_MAIN_VARIANT (t);
13458 if (!mv)
13459 {
13460 error ("main variant is not defined");
13461 error_found = true;
13462 }
13463 else if (mv != TYPE_MAIN_VARIANT (mv))
13464 {
13465 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13466 debug_tree (mv);
13467 error_found = true;
13468 }
13469 else if (t != mv && !verify_type_variant (t, mv))
13470 error_found = true;
13471
13472 tree ct = TYPE_CANONICAL (t);
13473 if (!ct)
13474 ;
13475 else if (TYPE_CANONICAL (t) != ct)
13476 {
13477 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13478 debug_tree (ct);
13479 error_found = true;
13480 }
13481 /* Method and function types cannot be used to address memory and thus
13482 TYPE_CANONICAL really matters only for determining useless conversions.
13483
13484 FIXME: C++ FE produce declarations of builtin functions that are not
13485 compatible with main variants. */
13486 else if (TREE_CODE (t) == FUNCTION_TYPE)
13487 ;
13488 else if (t != ct
13489 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13490 with variably sized arrays because their sizes possibly
13491 gimplified to different variables. */
13492 && !variably_modified_type_p (ct, NULL)
13493 && !gimple_canonical_types_compatible_p (t, ct, false)
13494 && COMPLETE_TYPE_P (t))
13495 {
13496 error ("%<TYPE_CANONICAL%> is not compatible");
13497 debug_tree (ct);
13498 error_found = true;
13499 }
13500
13501 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13502 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13503 {
13504 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13505 debug_tree (ct);
13506 error_found = true;
13507 }
13508 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13509 {
13510 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13511 debug_tree (ct);
13512 debug_tree (TYPE_MAIN_VARIANT (ct));
13513 error_found = true;
13514 }
13515
13516
13517 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13518 if (RECORD_OR_UNION_TYPE_P (t))
13519 {
13520 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13521 and danagle the pointer from time to time. */
13522 if (TYPE_VFIELD (t)
13523 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13524 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13525 {
13526 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13527 debug_tree (TYPE_VFIELD (t));
13528 error_found = true;
13529 }
13530 }
13531 else if (TREE_CODE (t) == POINTER_TYPE)
13532 {
13533 if (TYPE_NEXT_PTR_TO (t)
13534 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13535 {
13536 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13537 debug_tree (TYPE_NEXT_PTR_TO (t));
13538 error_found = true;
13539 }
13540 }
13541 else if (TREE_CODE (t) == REFERENCE_TYPE)
13542 {
13543 if (TYPE_NEXT_REF_TO (t)
13544 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13545 {
13546 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13547 debug_tree (TYPE_NEXT_REF_TO (t));
13548 error_found = true;
13549 }
13550 }
13551 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13552 || TREE_CODE (t) == FIXED_POINT_TYPE)
13553 {
13554 /* FIXME: The following check should pass:
13555 useless_type_conversion_p (const_cast <tree> (t),
13556 TREE_TYPE (TYPE_MIN_VALUE (t))
13557 but does not for C sizetypes in LTO. */
13558 }
13559
13560 /* Check various uses of TYPE_MAXVAL_RAW. */
13561 if (RECORD_OR_UNION_TYPE_P (t))
13562 {
13563 if (!TYPE_BINFO (t))
13564 ;
13565 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13566 {
13567 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13568 debug_tree (TYPE_BINFO (t));
13569 error_found = true;
13570 }
13571 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13572 {
13573 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13574 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13575 error_found = true;
13576 }
13577 }
13578 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13579 {
13580 if (TYPE_METHOD_BASETYPE (t)
13581 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13582 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13583 {
13584 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13585 debug_tree (TYPE_METHOD_BASETYPE (t));
13586 error_found = true;
13587 }
13588 }
13589 else if (TREE_CODE (t) == OFFSET_TYPE)
13590 {
13591 if (TYPE_OFFSET_BASETYPE (t)
13592 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13593 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13594 {
13595 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13596 debug_tree (TYPE_OFFSET_BASETYPE (t));
13597 error_found = true;
13598 }
13599 }
13600 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13601 || TREE_CODE (t) == FIXED_POINT_TYPE)
13602 {
13603 /* FIXME: The following check should pass:
13604 useless_type_conversion_p (const_cast <tree> (t),
13605 TREE_TYPE (TYPE_MAX_VALUE (t))
13606 but does not for C sizetypes in LTO. */
13607 }
13608 else if (TREE_CODE (t) == ARRAY_TYPE)
13609 {
13610 if (TYPE_ARRAY_MAX_SIZE (t)
13611 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13612 {
13613 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13614 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13615 error_found = true;
13616 }
13617 }
13618 else if (TYPE_MAX_VALUE_RAW (t))
13619 {
13620 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13621 debug_tree (TYPE_MAX_VALUE_RAW (t));
13622 error_found = true;
13623 }
13624
13625 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13626 {
13627 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13628 debug_tree (TYPE_LANG_SLOT_1 (t));
13629 error_found = true;
13630 }
13631
13632 /* Check various uses of TYPE_VALUES_RAW. */
13633 if (TREE_CODE (t) == ENUMERAL_TYPE)
13634 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13635 {
13636 tree value = TREE_VALUE (l);
13637 tree name = TREE_PURPOSE (l);
13638
13639 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13640 CONST_DECL of ENUMERAL TYPE. */
13641 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13642 {
13643 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13644 debug_tree (value);
13645 debug_tree (name);
13646 error_found = true;
13647 }
13648 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13649 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13650 {
13651 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13652 "to the enum");
13653 debug_tree (value);
13654 debug_tree (name);
13655 error_found = true;
13656 }
13657 if (TREE_CODE (name) != IDENTIFIER_NODE)
13658 {
13659 error ("enum value name is not %<IDENTIFIER_NODE%>");
13660 debug_tree (value);
13661 debug_tree (name);
13662 error_found = true;
13663 }
13664 }
13665 else if (TREE_CODE (t) == ARRAY_TYPE)
13666 {
13667 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13668 {
13669 error ("array %<TYPE_DOMAIN%> is not integer type");
13670 debug_tree (TYPE_DOMAIN (t));
13671 error_found = true;
13672 }
13673 }
13674 else if (RECORD_OR_UNION_TYPE_P (t))
13675 {
13676 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13677 {
13678 error ("%<TYPE_FIELDS%> defined in incomplete type");
13679 error_found = true;
13680 }
13681 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13682 {
13683 /* TODO: verify properties of decls. */
13684 if (TREE_CODE (fld) == FIELD_DECL)
13685 ;
13686 else if (TREE_CODE (fld) == TYPE_DECL)
13687 ;
13688 else if (TREE_CODE (fld) == CONST_DECL)
13689 ;
13690 else if (VAR_P (fld))
13691 ;
13692 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13693 ;
13694 else if (TREE_CODE (fld) == USING_DECL)
13695 ;
13696 else if (TREE_CODE (fld) == FUNCTION_DECL)
13697 ;
13698 else
13699 {
13700 error ("wrong tree in %<TYPE_FIELDS%> list");
13701 debug_tree (fld);
13702 error_found = true;
13703 }
13704 }
13705 }
13706 else if (TREE_CODE (t) == INTEGER_TYPE
13707 || TREE_CODE (t) == BOOLEAN_TYPE
13708 || TREE_CODE (t) == OFFSET_TYPE
13709 || TREE_CODE (t) == REFERENCE_TYPE
13710 || TREE_CODE (t) == NULLPTR_TYPE
13711 || TREE_CODE (t) == POINTER_TYPE)
13712 {
13713 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13714 {
13715 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13716 "is %p",
13717 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13718 error_found = true;
13719 }
13720 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13721 {
13722 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13723 debug_tree (TYPE_CACHED_VALUES (t));
13724 error_found = true;
13725 }
13726 /* Verify just enough of cache to ensure that no one copied it to new type.
13727 All copying should go by copy_node that should clear it. */
13728 else if (TYPE_CACHED_VALUES_P (t))
13729 {
13730 int i;
13731 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13732 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13733 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13734 {
13735 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13736 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13737 error_found = true;
13738 break;
13739 }
13740 }
13741 }
13742 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13743 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13744 {
13745 /* C++ FE uses TREE_PURPOSE to store initial values. */
13746 if (TREE_PURPOSE (l) && in_lto_p)
13747 {
13748 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13749 debug_tree (l);
13750 error_found = true;
13751 }
13752 if (!TYPE_P (TREE_VALUE (l)))
13753 {
13754 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13755 debug_tree (l);
13756 error_found = true;
13757 }
13758 }
13759 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13760 {
13761 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13762 debug_tree (TYPE_VALUES_RAW (t));
13763 error_found = true;
13764 }
13765 if (TREE_CODE (t) != INTEGER_TYPE
13766 && TREE_CODE (t) != BOOLEAN_TYPE
13767 && TREE_CODE (t) != OFFSET_TYPE
13768 && TREE_CODE (t) != REFERENCE_TYPE
13769 && TREE_CODE (t) != NULLPTR_TYPE
13770 && TREE_CODE (t) != POINTER_TYPE
13771 && TYPE_CACHED_VALUES_P (t))
13772 {
13773 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13774 error_found = true;
13775 }
13776
13777 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13778 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13779 of a type. */
13780 if (TREE_CODE (t) == METHOD_TYPE
13781 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13782 {
13783 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13784 error_found = true;
13785 }
13786
13787 if (error_found)
13788 {
13789 debug_tree (const_cast <tree> (t));
13790 internal_error ("%qs failed", __func__);
13791 }
13792 }
13793
13794
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.

   Note that "positive" here means the sign bit is known clear, so a
   result of 1 includes zero (an INTEGER_CST of 0 returns 1).  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* Bound on the number of conversions we are willing to look through,
     to keep the walk cheap and guarantee termination.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* Sign-extend the constant to PREC and inspect its sign bit.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through non-widening integral conversions in the tree form.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* For SSA names, consult recorded value-range information, again
     looking through non-widening conversions in the defining
     statements until a usable VR_RANGE is found.  */
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* As above: a zero extension from a narrower type is
		 known non-negative.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      /* Signed: the whole range is non-negative iff its minimum is,
	 and negative iff its maximum is.  */
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
13873
13874
13875
13876
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.

   ARG must be a PARM_DECL of pointer (or OFFSET_TYPE) type belonging to
   cfun's decl; reads cfun, so only valid inside a function context.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  /* A declaration may carry several "nonnull" attributes; ATTRS is
     re-aimed at the next one by lookup_attribute inside the loop, and
     the loop increment then steps past it.  */
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
13939
13940 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13941 information. */
13942
13943 location_t
13944 set_block (location_t loc, tree block)
13945 {
13946 location_t pure_loc = get_pure_location (loc);
13947 source_range src_range = get_range_from_loc (line_table, loc);
13948 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13949 }
13950
13951 location_t
13952 set_source_range (tree expr, location_t start, location_t finish)
13953 {
13954 source_range src_range;
13955 src_range.m_start = start;
13956 src_range.m_finish = finish;
13957 return set_source_range (expr, src_range);
13958 }
13959
13960 location_t
13961 set_source_range (tree expr, source_range src_range)
13962 {
13963 if (!EXPR_P (expr))
13964 return UNKNOWN_LOCATION;
13965
13966 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13967 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13968 pure_loc,
13969 src_range,
13970 NULL);
13971 SET_EXPR_LOCATION (expr, adhoc);
13972 return adhoc;
13973 }
13974
13975 /* Return EXPR, potentially wrapped with a node expression LOC,
13976 if !CAN_HAVE_LOCATION_P (expr).
13977
13978 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
13979 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
13980
13981 Wrapper nodes can be identified using location_wrapper_p. */
13982
13983 tree
13984 maybe_wrap_with_location (tree expr, location_t loc)
13985 {
13986 if (expr == NULL)
13987 return NULL;
13988 if (loc == UNKNOWN_LOCATION)
13989 return expr;
13990 if (CAN_HAVE_LOCATION_P (expr))
13991 return expr;
13992 /* We should only be adding wrappers for constants and for decls,
13993 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
13994 gcc_assert (CONSTANT_CLASS_P (expr)
13995 || DECL_P (expr)
13996 || EXCEPTIONAL_CLASS_P (expr));
13997
13998 /* For now, don't add wrappers to exceptional tree nodes, to minimize
13999 any impact of the wrapper nodes. */
14000 if (EXCEPTIONAL_CLASS_P (expr))
14001 return expr;
14002
14003 /* Compiler-generated temporary variables don't need a wrapper. */
14004 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14005 return expr;
14006
14007 /* If any auto_suppress_location_wrappers are active, don't create
14008 wrappers. */
14009 if (suppress_location_wrappers > 0)
14010 return expr;
14011
14012 tree_code code
14013 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14014 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14015 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14016 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14017 /* Mark this node as being a wrapper. */
14018 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14019 return wrapper;
14020 }
14021
/* While positive, maybe_wrap_with_location (above) returns EXPR without
   creating a location wrapper node.  */
int suppress_location_wrappers;
14023
14024 /* Return the name of combined function FN, for debugging purposes. */
14025
14026 const char *
14027 combined_fn_name (combined_fn fn)
14028 {
14029 if (builtin_fn_p (fn))
14030 {
14031 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14032 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14033 }
14034 else
14035 return internal_fn_name (as_internal_fn (fn));
14036 }
14037
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's argument are nonnull.  The caller
   must free the bitmap.

   Bit numbers are zero-based argument positions; an empty (but
   non-null) bitmap means every argument is nonnull.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      /* Re-aim ATTRS at the next "nonnull" attribute in the chain; the
	 loop increment then advances past it.  */
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  Attribute indices are 1-based, bitmap
	 positions 0-based, hence the -1.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
14094
14095 /* Returns true if TYPE is a type where it and all of its subobjects
14096 (recursively) are of structure, union, or array type. */
14097
14098 bool
14099 is_empty_type (const_tree type)
14100 {
14101 if (RECORD_OR_UNION_TYPE_P (type))
14102 {
14103 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14104 if (TREE_CODE (field) == FIELD_DECL
14105 && !DECL_PADDING_P (field)
14106 && !is_empty_type (TREE_TYPE (field)))
14107 return false;
14108 return true;
14109 }
14110 else if (TREE_CODE (type) == ARRAY_TYPE)
14111 return (integer_minus_onep (array_type_nelts (type))
14112 || TYPE_DOMAIN (type) == NULL_TREE
14113 || is_empty_type (TREE_TYPE (type)));
14114 return false;
14115 }
14116
14117 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14118 that shouldn't be passed via stack. */
14119
14120 bool
14121 default_is_empty_record (const_tree type)
14122 {
14123 if (!abi_version_at_least (12))
14124 return false;
14125
14126 if (type == error_mark_node)
14127 return false;
14128
14129 if (TREE_ADDRESSABLE (type))
14130 return false;
14131
14132 return is_empty_type (TYPE_MAIN_VARIANT (type));
14133 }
14134
14135 /* Determine whether TYPE is a structure with a flexible array member,
14136 or a union containing such a structure (possibly recursively). */
14137
14138 bool
14139 flexible_array_type_p (const_tree type)
14140 {
14141 tree x, last;
14142 switch (TREE_CODE (type))
14143 {
14144 case RECORD_TYPE:
14145 last = NULL_TREE;
14146 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14147 if (TREE_CODE (x) == FIELD_DECL)
14148 last = x;
14149 if (last == NULL_TREE)
14150 return false;
14151 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14152 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14153 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14154 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14155 return true;
14156 return false;
14157 case UNION_TYPE:
14158 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14159 {
14160 if (TREE_CODE (x) == FIELD_DECL
14161 && flexible_array_type_p (TREE_TYPE (x)))
14162 return true;
14163 }
14164 return false;
14165 default:
14166 return false;
14167 }
14168 }
14169
14170 /* Like int_size_in_bytes, but handle empty records specially. */
14171
14172 HOST_WIDE_INT
14173 arg_int_size_in_bytes (const_tree type)
14174 {
14175 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14176 }
14177
14178 /* Like size_in_bytes, but handle empty records specially. */
14179
14180 tree
14181 arg_size_in_bytes (const_tree type)
14182 {
14183 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14184 }
14185
/* Return true if an expression with CODE has to have the same result type as
   its first operand.  */

bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    /* Unary operations.  */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:

    /* Binary arithmetic.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:

    /* Shifts and rotates: the second operand may be of another type.  */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      return false;
    }
}
14229
14230 /* Return a typenode for the "standard" C type with a given name. */
14231 tree
14232 get_typenode_from_name (const char *name)
14233 {
14234 if (name == NULL || *name == '\0')
14235 return NULL_TREE;
14236
14237 if (strcmp (name, "char") == 0)
14238 return char_type_node;
14239 if (strcmp (name, "unsigned char") == 0)
14240 return unsigned_char_type_node;
14241 if (strcmp (name, "signed char") == 0)
14242 return signed_char_type_node;
14243
14244 if (strcmp (name, "short int") == 0)
14245 return short_integer_type_node;
14246 if (strcmp (name, "short unsigned int") == 0)
14247 return short_unsigned_type_node;
14248
14249 if (strcmp (name, "int") == 0)
14250 return integer_type_node;
14251 if (strcmp (name, "unsigned int") == 0)
14252 return unsigned_type_node;
14253
14254 if (strcmp (name, "long int") == 0)
14255 return long_integer_type_node;
14256 if (strcmp (name, "long unsigned int") == 0)
14257 return long_unsigned_type_node;
14258
14259 if (strcmp (name, "long long int") == 0)
14260 return long_long_integer_type_node;
14261 if (strcmp (name, "long long unsigned int") == 0)
14262 return long_long_unsigned_type_node;
14263
14264 gcc_unreachable ();
14265 }
14266
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Each entry gives the placeholder pointer type node, the plain pointer
   type it is based on, and the struct tag of the pointed-to type.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
14280
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  /* Use the maximum of ptrdiff_t so object sizes stay representable as
     a pointer difference.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
14289
14290 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14291 parameter default to false and that weeds out error_mark_node. */
14292
14293 bool
14294 verify_type_context (location_t loc, type_context_kind context,
14295 const_tree type, bool silent_p)
14296 {
14297 if (type == error_mark_node)
14298 return true;
14299
14300 gcc_assert (TYPE_P (type));
14301 return (!targetm.verify_type_context
14302 || targetm.verify_type_context (loc, context, type, silent_p));
14303 }
14304
14305 /* Return that NEW_ASM and DELETE_ASM name a valid pair of new and
14306 delete operators. */
14307
14308 bool
14309 valid_new_delete_pair_p (tree new_asm, tree delete_asm)
14310 {
14311 const char *new_name = IDENTIFIER_POINTER (new_asm);
14312 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14313 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14314 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14315
14316 if (new_len < 5 || delete_len < 6)
14317 return false;
14318 if (new_name[0] == '_')
14319 ++new_name, --new_len;
14320 if (new_name[0] == '_')
14321 ++new_name, --new_len;
14322 if (delete_name[0] == '_')
14323 ++delete_name, --delete_len;
14324 if (delete_name[0] == '_')
14325 ++delete_name, --delete_len;
14326 if (new_len < 4 || delete_len < 5)
14327 return false;
14328 /* *_len is now just the length after initial underscores. */
14329 if (new_name[0] != 'Z' || new_name[1] != 'n')
14330 return false;
14331 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14332 return false;
14333 /* _Znw must match _Zdl, _Zna must match _Zda. */
14334 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14335 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14336 return false;
14337 /* 'j', 'm' and 'y' correspond to size_t. */
14338 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14339 return false;
14340 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14341 return false;
14342 if (new_len == 4
14343 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14344 {
14345 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14346 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14347 if (delete_len == 5)
14348 return true;
14349 if (delete_len == 6 && delete_name[5] == new_name[3])
14350 return true;
14351 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14352 return true;
14353 }
14354 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14355 || (new_len == 33
14356 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14357 {
14358 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14359 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or or
14360 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14361 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14362 return true;
14363 if (delete_len == 21
14364 && delete_name[5] == new_name[3]
14365 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14366 return true;
14367 if (delete_len == 34
14368 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14369 return true;
14370 }
14371 return false;
14372 }
14373
14374 #if CHECKING_P
14375
14376 namespace selftest {
14377
14378 /* Selftests for tree. */
14379
14380 /* Verify that integer constants are sane. */
14381
14382 static void
14383 test_integer_constants ()
14384 {
14385 ASSERT_TRUE (integer_type_node != NULL);
14386 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14387
14388 tree type = integer_type_node;
14389
14390 tree zero = build_zero_cst (type);
14391 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14392 ASSERT_EQ (type, TREE_TYPE (zero));
14393
14394 tree one = build_int_cst (type, 1);
14395 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14396 ASSERT_EQ (type, TREE_TYPE (zero));
14397 }
14398
14399 /* Verify identifiers. */
14400
14401 static void
14402 test_identifiers ()
14403 {
14404 tree identifier = get_identifier ("foo");
14405 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14406 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14407 }
14408
14409 /* Verify LABEL_DECL. */
14410
14411 static void
14412 test_labels ()
14413 {
14414 tree identifier = get_identifier ("err");
14415 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14416 identifier, void_type_node);
14417 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14418 ASSERT_FALSE (FORCED_LABEL (label_decl));
14419 }
14420
14421 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14422 are given by VALS. */
14423
14424 static tree
14425 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14426 {
14427 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14428 tree_vector_builder builder (type, vals.length (), 1);
14429 builder.splice (vals);
14430 return builder.build ();
14431 }
14432
14433 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14434
14435 static void
14436 check_vector_cst (vec<tree> expected, tree actual)
14437 {
14438 ASSERT_KNOWN_EQ (expected.length (),
14439 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14440 for (unsigned int i = 0; i < expected.length (); ++i)
14441 ASSERT_EQ (wi::to_wide (expected[i]),
14442 wi::to_wide (vector_cst_elt (actual, i)));
14443 }
14444
14445 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14446 and that its elements match EXPECTED. */
14447
14448 static void
14449 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14450 unsigned int npatterns)
14451 {
14452 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14453 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14454 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14455 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14456 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14457 check_vector_cst (expected, actual);
14458 }
14459
14460 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14461 and NPATTERNS background elements, and that its elements match
14462 EXPECTED. */
14463
14464 static void
14465 check_vector_cst_fill (vec<tree> expected, tree actual,
14466 unsigned int npatterns)
14467 {
14468 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14469 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14470 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14471 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14472 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14473 check_vector_cst (expected, actual);
14474 }
14475
14476 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14477 and that its elements match EXPECTED. */
14478
14479 static void
14480 check_vector_cst_stepped (vec<tree> expected, tree actual,
14481 unsigned int npatterns)
14482 {
14483 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14484 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14485 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14486 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14487 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14488 check_vector_cst (expected, actual);
14489 }
14490
/* Test the creation of VECTOR_CSTs.  Each sub-case mutates ELEMENTS
   in place and rebuilds the vector, so the order of the sub-cases
   matters: later cases inherit values set by earlier ones.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  /* 16-bit unsigned elements, 8 lanes.  */
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
14574
14575 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14576 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14577 modifying its argument in-place. */
14578
14579 static void
14580 check_strip_nops (tree node, tree expected)
14581 {
14582 STRIP_NOPS (node);
14583 ASSERT_EQ (expected, node);
14584 }
14585
/* Verify location wrappers: wrapping constants, strings and decls in
   NON_LVALUE_EXPR/VIEW_CONVERT_EXPR nodes that carry a location, and
   the cases where no wrapper should be created.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* NULL_TREE passes through unchanged.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  /* STRING_CSTs are wrapped in VIEW_CONVERT_EXPR rather than
     NON_LVALUE_EXPR, since they are lvalues.  */
  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
14648
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.
     Naming scheme: i_* are INTEGER_CSTs, f_* are REAL_CSTs,
     c_i_*/c_f_* are COMPLEX_CSTs, and wr_* are location wrappers
     around the corresponding constant.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants: real part varies, imaginary part is zero.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  /* Note: unlike integer_minus_onep, complex -1 is not all-ones.  */
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  /* c_i_1 fails because its imaginary part is 0, not 1.  */
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  /* Complex values have no ordering, so never "nonnegative".  */
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
14965
/* Check that string escaping works correctly, both with unlimited
   message length and with -fmessage-length in effect.  Temporarily
   modifies the global diagnostic printer's line cutoff and restores
   it before returning.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  /* With no length limit, all control characters are escaped.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15008
/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  /* Each test is independent; the order below simply goes from the
     most basic tree constructs to the more elaborate ones.  */
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15022
15023 } // namespace selftest
15024
15025 #endif /* CHECKING_P */
15026
15027 #include "gt-tree.h"