]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree.c
c621f870880cb19748b1f04bab016a1f097e5987
[thirdparty/gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72
/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

/* Table mapping each tree code to its tree_code_class, built by
   expanding the TYPE field of every entry in all-tree.def.  */
const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
84
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

/* Built by expanding the LENGTH field of every entry in all-tree.def.  */
const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
98
/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

/* Built by expanding the NAME field of every entry in all-tree.def.  */
static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
110
/* Each tree code class has an associated string representation.
   These must correspond, one-to-one and in order, to the
   tree_code_class enumerators.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
128
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

/* Counts per tree code, and counts/total sizes per tree_node_kind.  */
static uint64_t tree_code_counts[MAX_TREE_CODES];
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];
137
/* Human-readable name for each tree_node_kind, used when reporting the
   statistics gathered above.
   Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
157
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  Starts at 1 -- presumably so that
   0 can serve as a "no uid" sentinel; TODO confirm.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
165
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Memoized hash code for TYPE.  */
  tree type;		/* The cached type itself.  */
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Hasher for the type cache.  At GC time an entry is kept only while
   its cached type is still marked live.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  /* Reuse the hash value memoized in the entry.  */
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
197
/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

/* Hasher used by int_cst_hash_table below.  */
struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
208
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  /* Lookups compare a cached POLY_INT_CST against a (type, value) pair.  */
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
219
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

/* Hasher shared by OPTIMIZATION_NODE and TARGET_OPTION_NODE caching.  */
struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
235
/* General tree->tree mapping structure for use in hash tables.  */


/* Cache mapping a decl to its debug expression.  NOTE(review): appears
   to back DECL_DEBUG_EXPR lookups -- confirm against its users.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

/* Cache mapping a decl to its value expression.  NOTE(review): appears
   to back DECL_VALUE_EXPR lookups -- confirm against its users.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
244
/* Hasher for tree_vec_map entries, keyed on the DECL_UID of the 'from'
   decl.  At GC time an entry survives only while that decl is marked.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
264
/* Forward declarations for helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

/* Commonly used trees and the standard C integer types, indexed by
   the TI_* and itk_* enumerators respectively -- TODO confirm against
   tree.h.  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

/* Per-entry enablement flag and tree nodes for the __intN-style types;
   NUM_INT_N_ENTS entries -- presumably target-dependent, verify.  */
bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[CODE][TS] is set when tree code CODE contains
   tree structure TS; filled in by initialize_tree_contains_struct
   below.  */
bool tree_contains_struct[MAX_TREE_CODES][64];
277
/* Number of operands for each OpenMP clause.  Indexed by
   omp_clause_code; keep the order in sync with omp_clause_code_name
   below.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  2, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
366
/* Printable name of each OpenMP clause code, parallel to
   omp_clause_num_ops above.  Note that "to" appears twice: once for
   OMP_CLAUSE_TO_DECLARE and once for the OMP_CLAUSE_TO motion clause.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "affinity",
  "aligned",
  "allocate",
  "depend",
  "nontemporal",
  "uniform",
  "to",
  "link",
  "detach",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "from",
  "to",
  "map",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "if_present",
  "finalize",
  "nohost",
};
454
455
/* Return the tree node structure used by tree code CODE.

   Declarations and types are dispatched on their tree code class;
   constants and exceptional codes are matched individually below.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      /* Decl codes without a dedicated structure fall back to
	 TS_DECL_NON_COMMON.  */
      switch (code)
	{
	case CONST_DECL:	return TS_CONST_DECL;
	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
	case FIELD_DECL:	return TS_FIELD_DECL;
	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
	case LABEL_DECL:	return TS_LABEL_DECL;
	case PARM_DECL:		return TS_PARM_DECL;
	case RESULT_DECL:	return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL:		return TS_TYPE_DECL;
	case VAR_DECL:		return TS_VAR_DECL;
	default:		return TS_DECL_NON_COMMON;
	}

    case tcc_type: return TS_TYPE_NON_COMMON;

    /* All expression-like classes share the TS_EXP layout.  */
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp: return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST:		return TS_COMPLEX;
    case FIXED_CST:		return TS_FIXED_CST;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case STRING_CST:		return TS_STRING;
    case VECTOR_CST:		return TS_VECTOR;
    case VOID_CST:		return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case SSA_NAME:		return TS_SSA_NAME;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    case TREE_BINFO:		return TS_BINFO;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
524
525
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each
	 MARK_TS_* call marks the immediate base of TS; the macros mark
	 transitively, so the whole inheritance chain ends up recorded
	 (the asserts at the bottom rely on this).  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
672
673
/* Init tree.c: create the various node caches and fill in
   tree_contains_struct.  Must run before any tree nodes are built.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* The temporary node used with int_cst_hash_table (see the comment at
     int_cst_node's declaration).  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes for building the current optimization/target-option
     nodes.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
704
705 \f
706 /* The name of the object as the assembler will see it (but before any
707 translations made by ASM_OUTPUT_LABELREF). Often this is the same
708 as DECL_NAME. It is an IDENTIFIER_NODE. */
709 tree
710 decl_assembler_name (tree decl)
711 {
712 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
713 lang_hooks.set_decl_assembler_name (decl);
714 return DECL_ASSEMBLER_NAME_RAW (decl);
715 }
716
717 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
718 (either of which may be NULL). Inform the FE, if this changes the
719 name. */
720
721 void
722 overwrite_decl_assembler_name (tree decl, tree name)
723 {
724 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
725 lang_hooks.overwrite_decl_assembler_name (decl, name);
726 }
727
/* Return true if DECL may need an assembler name to be set.  Covers
   TYPE_DECLs (for ODR type merging), VAR_DECLs and FUNCTION_DECLs;
   everything else never needs one.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p return true on those types during
     LTO, and by comparing the mangled name we can say what types are intended
     to be equivalent across compilation units.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration types have linkage that allows us
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling does make differences
     between char/signed char/unsigned char etc.  Storing names for these makes
     e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the main-variant, non-artificial name of a type with
	 linkage (or an integer type) qualifies.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
806
807 /* If T needs an assembler name, have one created for it. */
808
809 void
810 assign_assembler_name_if_needed (tree t)
811 {
812 if (need_assembler_name_p (t))
813 {
814 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
815 diagnostics that use input_location to show locus
816 information. The problem here is that, at this point,
817 input_location is generally anchored to the end of the file
818 (since the parser is long gone), so we don't have a good
819 position to pin it to.
820
821 To alleviate this problem, this uses the location of T's
822 declaration. Examples of this are
823 testsuite/g++.dg/template/cond2.C and
824 testsuite/g++.dg/template/pr35240.C. */
825 location_t saved_location = input_location;
826 input_location = DECL_SOURCE_LOCATION (t);
827
828 decl_assembler_name (t);
829
830 input_location = saved_location;
831 }
832 }
833
834 /* When the target supports COMDAT groups, this indicates which group the
835 DECL is associated with. This can be either an IDENTIFIER_NODE or a
836 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
837 tree
838 decl_comdat_group (const_tree node)
839 {
840 struct symtab_node *snode = symtab_node::get (node);
841 if (!snode)
842 return NULL;
843 return snode->get_comdat_group ();
844 }
845
846 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
847 tree
848 decl_comdat_group_id (const_tree node)
849 {
850 struct symtab_node *snode = symtab_node::get (node);
851 if (!snode)
852 return NULL;
853 return snode->get_comdat_group_id ();
854 }
855
856 /* When the target supports named section, return its name as IDENTIFIER_NODE
857 or NULL if it is in no section. */
858 const char *
859 decl_section_name (const_tree node)
860 {
861 struct symtab_node *snode = symtab_node::get (node);
862 if (!snode)
863 return NULL;
864 return snode->get_section ();
865 }
866
867 /* Set section name of NODE to VALUE (that is expected to be
868 identifier node) */
869 void
870 set_decl_section_name (tree node, const char *value)
871 {
872 struct symtab_node *snode;
873
874 if (value == NULL)
875 {
876 snode = symtab_node::get (node);
877 if (!snode)
878 return;
879 }
880 else if (VAR_P (node))
881 snode = varpool_node::get_create (node);
882 else
883 snode = cgraph_node::get_create (node);
884 snode->set_section (value);
885 }
886
887 /* Set section name of NODE to match the section name of OTHER.
888
889 set_decl_section_name (decl, other) is equivalent to
890 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
891 efficient. */
892 void
893 set_decl_section_name (tree decl, const_tree other)
894 {
895 struct symtab_node *other_node = symtab_node::get (other);
896 if (other_node)
897 {
898 struct symtab_node *decl_node;
899 if (VAR_P (decl))
900 decl_node = varpool_node::get_create (decl);
901 else
902 decl_node = cgraph_node::get_create (decl);
903 decl_node->set_section (*other_node);
904 }
905 else
906 {
907 struct symtab_node *decl_node = symtab_node::get (decl);
908 if (!decl_node)
909 return;
910 decl_node->set_section (NULL);
911 }
912 }
913
914 /* Return TLS model of a variable NODE. */
915 enum tls_model
916 decl_tls_model (const_tree node)
917 {
918 struct varpool_node *snode = varpool_node::get (node);
919 if (!snode)
920 return TLS_MODEL_NONE;
921 return snode->tls_model;
922 }
923
924 /* Set TLS model of variable NODE to MODEL. */
925 void
926 set_decl_tls_model (tree node, enum tls_model model)
927 {
928 struct varpool_node *vnode;
929
930 if (model == TLS_MODEL_NONE)
931 {
932 vnode = varpool_node::get (node);
933 if (!vnode)
934 return;
935 }
936 else
937 vnode = varpool_node::get_create (node);
938 vnode->tls_model = model;
939 }
940
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR
   (those are handled by tree_size, which needs the node itself).  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  /* Codes past NUM_TREE_CODES belong to a frontend; ask it.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case OPAQUE_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  /* Codes past NUM_TREE_CODES belong to a frontend; ask it.  */
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add space for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	/* Variable-sized: must go through tree_size instead.  */
	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
1052
1053 /* Compute the number of bytes occupied by NODE. This routine only
1054 looks at TREE_CODE, except for those nodes that have variable sizes. */
1055 size_t
1056 tree_size (const_tree node)
1057 {
1058 const enum tree_code code = TREE_CODE (node);
1059 switch (code)
1060 {
1061 case INTEGER_CST:
1062 return (sizeof (struct tree_int_cst)
1063 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1064
1065 case TREE_BINFO:
1066 return (offsetof (struct tree_binfo, base_binfos)
1067 + vec<tree, va_gc>
1068 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1069
1070 case TREE_VEC:
1071 return (sizeof (struct tree_vec)
1072 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1073
1074 case VECTOR_CST:
1075 return (sizeof (struct tree_vector)
1076 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1077
1078 case STRING_CST:
1079 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1080
1081 case OMP_CLAUSE:
1082 return (sizeof (struct tree_omp_clause)
1083 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1084 * sizeof (tree));
1085
1086 default:
1087 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1088 return (sizeof (struct tree_exp)
1089 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1090 else
1091 return tree_code_size (code);
1092 }
1093 }
1094
1095 /* Return tree node kind based on tree CODE. */
1096
1097 static tree_node_kind
1098 get_stats_node_kind (enum tree_code code)
1099 {
1100 enum tree_code_class type = TREE_CODE_CLASS (code);
1101
1102 switch (type)
1103 {
1104 case tcc_declaration: /* A decl node */
1105 return d_kind;
1106 case tcc_type: /* a type node */
1107 return t_kind;
1108 case tcc_statement: /* an expression with side effects */
1109 return s_kind;
1110 case tcc_reference: /* a reference */
1111 return r_kind;
1112 case tcc_expression: /* an expression */
1113 case tcc_comparison: /* a comparison expression */
1114 case tcc_unary: /* a unary arithmetic expression */
1115 case tcc_binary: /* a binary arithmetic expression */
1116 return e_kind;
1117 case tcc_constant: /* a constant */
1118 return c_kind;
1119 case tcc_exceptional: /* something random, like an identifier. */
1120 switch (code)
1121 {
1122 case IDENTIFIER_NODE:
1123 return id_kind;
1124 case TREE_VEC:
1125 return vec_kind;
1126 case TREE_BINFO:
1127 return binfo_kind;
1128 case SSA_NAME:
1129 return ssa_name_kind;
1130 case BLOCK:
1131 return b_kind;
1132 case CONSTRUCTOR:
1133 return constr_kind;
1134 case OMP_CLAUSE:
1135 return omp_clause_kind;
1136 default:
1137 return x_kind;
1138 }
1139 break;
1140 case tcc_vl_exp:
1141 return e_kind;
1142 default:
1143 gcc_unreachable ();
1144 }
1145 }
1146
1147 /* Record interesting allocation statistics for a tree node with CODE
1148 and LENGTH. */
1149
1150 static void
1151 record_node_allocation_statistics (enum tree_code code, size_t length)
1152 {
1153 if (!GATHER_STATISTICS)
1154 return;
1155
1156 tree_node_kind kind = get_stats_node_kind (code);
1157
1158 tree_code_counts[(int) code]++;
1159 tree_node_counts[(int) kind]++;
1160 tree_node_sizes[(int) kind] += length;
1161 }
1162
1163 /* Allocate and return a new UID from the DECL_UID namespace. */
1164
1165 int
1166 allocate_decl_uid (void)
1167 {
1168 return next_decl_uid++;
1169 }
1170
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GC allocation is zero-initialized; only the non-zero defaults for
     each class need to be filled in below.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements have side effects by default; debug begin markers
	 are the one exception.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* Debug decls draw UIDs from their own (decrementing) namespace
	 so they do not perturb ordinary DECL_UIDs.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      /* Option nodes own an out-of-line option structure, allocated
	 (cleared) here.  */
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1286
/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      /* Undo the bookkeeping done by record_node_allocation_statistics
	 when the node was created.  */
      enum tree_node_kind kind = get_stats_node_kind (code);

      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  /* Release auxiliary storage owned by the node before freeing the
     node itself.  */
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  else if (code == OPTIMIZATION_NODE)
    cl_optimization_option_free (TREE_OPTIMIZATION (node));
  else if (code == TARGET_OPTION_NODE)
    cl_target_option_free (TREE_TARGET_OPTION (node));
  ggc_free (node);
}
1317 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  /* Start from a bitwise copy, then clear the fields that must not be
     shared between the original and the copy.  */
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Decl copies get a fresh UID; debug decls draw from their own
	 (negative-going) UID namespace.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not associated with the original's function
	     body or symbol table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the embedded option structure so the copy owns it.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      /* Likewise for the optimization options.  */
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1406
1407 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1408 For example, this can copy a list made of TREE_LIST nodes. */
1409
1410 tree
1411 copy_list (tree list)
1412 {
1413 tree head;
1414 tree prev, next;
1415
1416 if (list == 0)
1417 return 0;
1418
1419 head = prev = copy_node (list);
1420 next = TREE_CHAIN (list);
1421 while (next)
1422 {
1423 TREE_CHAIN (prev) = copy_node (next);
1424 prev = TREE_CHAIN (prev);
1425 next = TREE_CHAIN (next);
1426 }
1427 return head;
1428 }
1429
1430 \f
1431 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1432 INTEGER_CST with value CST and type TYPE. */
1433
1434 static unsigned int
1435 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1436 {
1437 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1438 /* We need extra HWIs if CST is an unsigned integer with its
1439 upper bit set. */
1440 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1441 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1442 return cst.get_len ();
1443 }
1444
/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Zero extension is needed: the topmost extension HWI keeps only
	 the bits below the precision, and any HWIs between LEN and it
	 are all-ones.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* For an unsigned type, mask off any bits above the precision in
	 the topmost element.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining (or all) canonical elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1476
1477 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1478
1479 static tree
1480 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1481 CXX_MEM_STAT_INFO)
1482 {
1483 size_t length = sizeof (struct tree_poly_int_cst);
1484 record_node_allocation_statistics (POLY_INT_CST, length);
1485
1486 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1487
1488 TREE_SET_CODE (t, POLY_INT_CST);
1489 TREE_CONSTANT (t) = 1;
1490 TREE_TYPE (t) = type;
1491 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1492 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1493 return t;
1494 }
1495
1496 /* Create a constant tree that contains CST sign-extended to TYPE. */
1497
1498 tree
1499 build_int_cst (tree type, poly_int64 cst)
1500 {
1501 /* Support legacy code. */
1502 if (!type)
1503 type = integer_type_node;
1504
1505 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1506 }
1507
1508 /* Create a constant tree that contains CST zero-extended to TYPE. */
1509
1510 tree
1511 build_int_cstu (tree type, poly_uint64 cst)
1512 {
1513 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1514 }
1515
1516 /* Create a constant tree that contains CST sign-extended to TYPE. */
1517
1518 tree
1519 build_int_cst_type (tree type, poly_int64 cst)
1520 {
1521 gcc_assert (type);
1522 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1523 }
1524
1525 /* Constructs tree in type TYPE from with value given by CST. Signedness
1526 of CST is assumed to be the same as the signedness of TYPE. */
1527
1528 tree
1529 double_int_to_tree (tree type, double_int cst)
1530 {
1531 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1532 }
1533
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend the value to the type's precision before
	     building the (unshared) constant.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Mark each coefficient overflowed as well as the whole
		 POLY_INT_CST.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1585
1586 /* These are the hash table functions for the hash table of INTEGER_CST
1587 nodes of a sizetype. */
1588
1589 /* Return the hash code X, an INTEGER_CST. */
1590
1591 hashval_t
1592 int_cst_hasher::hash (tree x)
1593 {
1594 const_tree const t = x;
1595 hashval_t code = TYPE_UID (TREE_TYPE (t));
1596 int i;
1597
1598 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1599 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1600
1601 return code;
1602 }
1603
1604 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1605 is the same as that given by *Y, which is the same. */
1606
1607 bool
1608 int_cst_hasher::equal (tree x, tree y)
1609 {
1610 const_tree const xt = x;
1611 const_tree const yt = y;
1612
1613 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1614 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1615 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1616 return false;
1617
1618 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1619 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1620 return false;
1621
1622 return true;
1623 }
1624
1625 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1626 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1627 number of slots that can be cached for the type. */
1628
1629 static inline tree
1630 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1631 int slot, int max_slots)
1632 {
1633 gcc_checking_assert (slot >= 0);
1634 /* Initialize cache. */
1635 if (!TYPE_CACHED_VALUES_P (type))
1636 {
1637 TYPE_CACHED_VALUES_P (type) = 1;
1638 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1639 }
1640 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1641 if (!t)
1642 {
1643 /* Create a new shared int. */
1644 t = build_new_int_cst (type, cst);
1645 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1646 }
1647 return t;
1648 }
1649
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;	    /* Slot in the per-type vector cache, -1 if none.  */
  int limit = 0;    /* Number of slots that vector cache needs.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether this value lands in the per-type vector cache;
	 the slot assignments must match cache_integer_cst.  */
      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal node already existed; discard the fresh one.  */
	ggc_free (nt);
    }

  return t;
}
1808
1809 hashval_t
1810 poly_int_cst_hasher::hash (tree t)
1811 {
1812 inchash::hash hstate;
1813
1814 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1815 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1816 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1817
1818 return hstate.end ();
1819 }
1820
1821 bool
1822 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1823 {
1824 if (TREE_TYPE (x) != y.first)
1825 return false;
1826 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1827 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1828 return false;
1829 return true;
1830 }
1831
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  /* Hash-cons the node: hash over the type and every coefficient.  */
  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  if (*slot == NULL_TREE)
    {
      /* Not seen before: build shared INTEGER_CSTs for the coefficients
	 and a fresh POLY_INT_CST around them.  */
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1858
1859 /* Create a constant tree with value VALUE in type TYPE. */
1860
1861 tree
1862 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1863 {
1864 if (value.is_constant ())
1865 return wide_int_to_tree_1 (type, value.coeffs[0]);
1866 return build_poly_int_cst (type, value);
1867 }
1868
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;	    /* Slot in the per-type vector cache, -1 if none.  */
  int limit = 0;    /* Number of slots that vector cache needs.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* Slot 0 holds the null pointer, slot 2 the value one; slot 1
	   (the type's maximum, cached by wide_int_to_tree_1) is not
	   populated here.  */
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  /* A pre-existing entry is only legitimate when the caller
	     said duplicates were possible.  */
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
1998
1999
2000 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2001 and the rest are zeros. */
2002
2003 tree
2004 build_low_bits_mask (tree type, unsigned bits)
2005 {
2006 gcc_assert (bits <= TYPE_PRECISION (type));
2007
2008 return wide_int_to_tree (type, wi::mask (bits, false,
2009 TYPE_PRECISION (type)));
2010 }
2011
2012 /* Checks that X is integer constant that can be expressed in (unsigned)
2013 HOST_WIDE_INT without loss of precision. */
2014
2015 bool
2016 cst_and_fits_in_hwi (const_tree x)
2017 {
2018 return (TREE_CODE (x) == INTEGER_CST
2019 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2020 }
2021
2022 /* Build a newly constructed VECTOR_CST with the given values of
2023 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2024
2025 tree
2026 make_vector (unsigned log2_npatterns,
2027 unsigned int nelts_per_pattern MEM_STAT_DECL)
2028 {
2029 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2030 tree t;
2031 unsigned npatterns = 1 << log2_npatterns;
2032 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2033 unsigned length = (sizeof (struct tree_vector)
2034 + (encoded_nelts - 1) * sizeof (tree));
2035
2036 record_node_allocation_statistics (VECTOR_CST, length);
2037
2038 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2039
2040 TREE_SET_CODE (t, VECTOR_CST);
2041 TREE_CONSTANT (t) = 1;
2042 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2043 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2044
2045 return t;
2046 }
2047
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
  /* An empty constructor stands for an all-zeros vector.  */
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* Flatten a nested vector constant element by element.
	     If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Pad any trailing elements not covered by the constructor with
     zeros.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
2080
/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A constant element yields a VECTOR_CST with a single
	 one-element pattern.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    /* A variable element count needs a VEC_DUPLICATE_EXPR.  */
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Otherwise build a CONSTRUCTOR repeating SC for each element.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
2116
2117 /* If TYPE is not a vector type, just return SC, otherwise return
2118 build_vector_from_val (TYPE, SC). */
2119
2120 tree
2121 build_uniform_cst (tree type, tree sc)
2122 {
2123 if (!VECTOR_TYPE_P (type))
2124 return sc;
2125
2126 return build_vector_from_val (type, sc);
2127 }
2128
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  /* A zero step means every element equals BASE: a uniform vector.  */
  if (integer_zerop (step))
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      /* Encode the series as one pattern of three elements
         { BASE, BASE + STEP, BASE + 2*STEP }; the VECTOR_CST encoding
         extrapolates the remaining elements from the last two.  */
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  /* Non-constant operands: keep the series symbolic.  */
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
2152
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  /* If the element type is not already a suitable unsigned integer,
     switch to an unsigned integer of the same bit width, keeping the
     unit count (and hence the overall vector size) unchanged.  */
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Three encoded elements of a single pattern fully describe the
     linear series, even for variable-length vectors.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
2175
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* Encode COUNT patterns of two elements each; pushing COUNT * 2
     explicit elements covers the a/b boundary wherever it falls.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
2192
2193 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2194 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2195
2196 void
2197 recompute_constructor_flags (tree c)
2198 {
2199 unsigned int i;
2200 tree val;
2201 bool constant_p = true;
2202 bool side_effects_p = false;
2203 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2204
2205 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2206 {
2207 /* Mostly ctors will have elts that don't have side-effects, so
2208 the usual case is to scan all the elements. Hence a single
2209 loop for both const and side effects, rather than one loop
2210 each (with early outs). */
2211 if (!TREE_CONSTANT (val))
2212 constant_p = false;
2213 if (TREE_SIDE_EFFECTS (val))
2214 side_effects_p = true;
2215 }
2216
2217 TREE_SIDE_EFFECTS (c) = side_effects_p;
2218 TREE_CONSTANT (c) = constant_p;
2219 }
2220
/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C.  Aborts via internal_error if a flag claims a property
   that some element contradicts; used as a checking aid.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* A constant CONSTRUCTOR may only contain constant elements.  */
      if (constant_p && !TREE_CONSTANT (val))
	internal_error ("non-constant element in constant CONSTRUCTOR");
      /* A side-effect-free CONSTRUCTOR may not hide effectful elements.  */
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}
2241
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  Takes ownership of VALS (it is
   stored directly, not copied).  TREE_CONSTANT and TREE_SIDE_EFFECTS
   are derived from the elements.  */
tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
{
  tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  /* Derive the constant/side-effects flags from the elements.  */
  recompute_constructor_flags (c);

  return c;
}
2256
2257 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2258 INDEX and VALUE. */
2259 tree
2260 build_constructor_single (tree type, tree index, tree value)
2261 {
2262 vec<constructor_elt, va_gc> *v;
2263 constructor_elt elt = {index, value};
2264
2265 vec_alloc (v, 1);
2266 v->quick_push (elt);
2267
2268 return build_constructor (type, v);
2269 }
2270
2271
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  Each TREE_LIST node contributes
   its TREE_PURPOSE as the index and its TREE_VALUE as the value.
   A null VALS yields an empty CONSTRUCTOR.  */
tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      /* Size the vector up front to avoid reallocation while copying.  */
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}
2289
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
   fields in the constructor remain null.  */

tree
build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
{
  vec<constructor_elt, va_gc> *v = NULL;

  /* CONSTRUCTOR_APPEND_ELT grows V on demand, so no pre-allocation.  */
  for (tree t : vals)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);

  return build_constructor (type, v);
}
2304
/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  The variadic arguments
   are consumed as (tree index, tree value) pairs, NELTS of them.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      /* Arguments come in index/value order; evaluation order of the
	 two va_arg calls is fixed by the separate statements.  */
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}
2325
2326 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2327
2328 tree
2329 build_clobber (tree type)
2330 {
2331 tree clobber = build_constructor (type, NULL);
2332 TREE_THIS_VOLATILE (clobber) = true;
2333 return clobber;
2334 }
2335
/* Return a new FIXED_CST node whose type is TYPE and value is F.
   The fixed value is copied into GC-managed storage so the node owns
   an independent copy.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  /* Allocate the payload in GC memory; the by-value parameter F lives
     only on the stack.  */
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}
2352
/* Return a new REAL_CST node whose type is TYPE and value is D.
   TREE_OVERFLOW is always cleared here (see the ??? note below).  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  /* Currently always zero; kept so the TREE_OVERFLOW store below stays
     in place if overflow detection is ever reinstated.  */
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  /* Copy the value into GC-managed storage owned by the node.  */
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}
2374
2375 /* Like build_real, but first truncate D to the type. */
2376
2377 tree
2378 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2379 {
2380 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2381 }
2382
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value of the INTEGER_CST node I.
   TYPE may be null, in which case VOIDmode is used for the conversion.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  /* The sign for the conversion comes from I's own type, not TYPE.  */
  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}
2399
2400 /* Given a tree representing an integer constant I, return a tree
2401 representing the same value as a floating-point constant of type TYPE. */
2402
2403 tree
2404 build_real_from_int_cst (tree type, const_tree i)
2405 {
2406 tree v;
2407 int overflow = TREE_OVERFLOW (i);
2408
2409 v = build_real (type, real_value_from_int_cst (type, i));
2410
2411 TREE_OVERFLOW (v) |= overflow;
2412 return v;
2413 }
2414
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value I which has sign SGN.  */

tree
build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, TYPE_MODE (type), i, sgn);
  return build_real (type, d);
}
2430
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  /* +1 leaves room for the extra terminating NUL stored below.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  /* Raw allocation: STRING_CST is variable-sized, so make_node cannot
     be used.  Only the tree_typed header is zeroed; the payload is
     filled in explicitly below.  */
  tree s = (tree) ggc_internal_alloc (size);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  /* Always NUL-terminate, even when LEN already includes a NUL.  */
  s->string.str[len] = '\0';

  return s;
}
2458
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  gcc_assert (CONSTANT_CLASS_P (real));
  gcc_assert (CONSTANT_CLASS_P (imag));

  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  /* Fall back to a complex type built from REAL's scalar type.  */
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  /* Overflow in either part taints the whole constant.  */
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}
2478
2479 /* Build a complex (inf +- 0i), such as for the result of cproj.
2480 TYPE is the complex tree type of the result. If NEG is true, the
2481 imaginary zero is negative. */
2482
2483 tree
2484 build_complex_inf (tree type, bool neg)
2485 {
2486 REAL_VALUE_TYPE rinf, rzero = dconst0;
2487
2488 real_inf (&rinf);
2489 rzero.sign = neg;
2490 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2491 build_real (TREE_TYPE (type), rzero));
2492 }
2493
2494 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2495 element is set to 1. In particular, this is 1 + i for complex types. */
2496
2497 tree
2498 build_each_one_cst (tree type)
2499 {
2500 if (TREE_CODE (type) == COMPLEX_TYPE)
2501 {
2502 tree scalar = build_one_cst (TREE_TYPE (type));
2503 return build_complex (type, scalar, scalar);
2504 }
2505 else
2506 return build_one_cst (type);
2507 }
2508
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Recurse on the element type and splat it.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The multiplicative identity of the complex numbers is 1 + 0i
	 (contrast build_each_one_cst, which returns 1 + 1i).  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2546
2547 /* Return an integer of type TYPE containing all 1's in as much precision as
2548 it contains, or a complex or vector whose subparts are such integers. */
2549
2550 tree
2551 build_all_ones_cst (tree type)
2552 {
2553 if (TREE_CODE (type) == COMPLEX_TYPE)
2554 {
2555 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2556 return build_complex (type, scalar, scalar);
2557 }
2558 else
2559 return build_minus_one_cst (type);
2560 }
2561
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Recurse on the element type and splat it.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* -1 + 0i: negate only the real part.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2601
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Recurse on the element type and splat it.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Non-aggregate scalars (e.g. other language-specific types) can
	 be converted from integer zero; aggregates become an empty
	 CONSTRUCTOR, which folding treats as all-zeros.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2642
2643
/* Build a BINFO with LEN language slots.  */
/* NOTE(review): the parameter is named BASE_BINFOS — it sizes the
   embedded base-binfo vector, not "language slots"; the original
   comment looks stale.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* TREE_BINFO is variable-sized: a fixed header followed by an
     embedded vec with room for BASE_BINFOS elements.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Zero only the fixed part; the embedded vec is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2665
2666 /* Create a CASE_LABEL_EXPR tree node and return it. */
2667
2668 tree
2669 build_case_label (tree low_value, tree high_value, tree label_decl)
2670 {
2671 tree t = make_node (CASE_LABEL_EXPR);
2672
2673 TREE_TYPE (t) = void_type_node;
2674 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2675
2676 CASE_LOW (t) = low_value;
2677 CASE_HIGH (t) = high_value;
2678 CASE_LABEL (t) = label_decl;
2679 CASE_CHAIN (t) = NULL_TREE;
2680
2681 return t;
2682 }
2683
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_int_cst already embeds one HOST_WIDE_INT, hence
     EXT_LEN - 1 extra elements.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2715
/* Build a newly constructed TREE_VEC node of length LEN.  */

tree
make_tree_vec (int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_vec embeds one tree slot, hence LEN - 1 extras.  */
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  /* Cleared allocation: all slots start as NULL_TREE.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}
2733
/* Grow a TREE_VEC node to new length LEN.  Returns the (possibly moved)
   node; callers must use the returned pointer, as ggc_realloc may
   relocate the object.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  /* Only growing is supported.  */
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Record only the delta so the statistics stay balanced.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2755 \f
2756 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2757 fixed, and scalar, complex or vector. */
2758
2759 bool
2760 zerop (const_tree expr)
2761 {
2762 return (integer_zerop (expr)
2763 || real_zerop (expr)
2764 || fixed_zerop (expr));
2765 }
2766
/* Return 1 if EXPR is the integer constant zero or a complex constant
   of zero, or a location wrapper for such a constant.  */

bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* A zero vector must be encoded as a single duplicated pattern;
	 checking the one encoded element then covers every lane.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2790
/* Return 1 if EXPR is the integer constant one or the corresponding
   complex constant, or a location wrapper for such a constant.  */

bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      /* Compare in widest-int space so the value is checked independent
	 of the constant's precision.  */
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      /* 1 + 0i.  */
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* Uniform vector of ones: one duplicated pattern element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2814
2815 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2816 return 1 if every piece is the integer constant one.
2817 Also return 1 for location wrappers for such a constant. */
2818
2819 bool
2820 integer_each_onep (const_tree expr)
2821 {
2822 STRIP_ANY_LOCATION_WRAPPER (expr);
2823
2824 if (TREE_CODE (expr) == COMPLEX_CST)
2825 return (integer_onep (TREE_REALPART (expr))
2826 && integer_onep (TREE_IMAGPART (expr)));
2827 else
2828 return integer_onep (expr);
2829 }
2830
/* Return 1 if EXPR is an integer containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers,
   or a location wrapper for such a constant.  */

bool
integer_all_onesp (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return true;

  else if (TREE_CODE (expr) == VECTOR_CST)
    /* Uniform vector: the one encoded element decides for all lanes.  */
    return (VECTOR_CST_NPATTERNS (expr) == 1
	    && VECTOR_CST_DUPLICATE_P (expr)
	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));

  else if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  /* All-ones means the value equals the unsigned maximum for the
     type's precision, regardless of the type's own signedness.  */
  return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
	  == wi::to_wide (expr));
}
2856
2857 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2858 for such a constant. */
2859
2860 bool
2861 integer_minus_onep (const_tree expr)
2862 {
2863 STRIP_ANY_LOCATION_WRAPPER (expr);
2864
2865 if (TREE_CODE (expr) == COMPLEX_CST)
2866 return (integer_all_onesp (TREE_REALPART (expr))
2867 && integer_zerop (TREE_IMAGPART (expr)));
2868 else
2869 return integer_all_onesp (expr);
2870 }
2871
/* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
   one bit on), or a location wrapper for such a constant.  */

bool
integer_pow2p (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  /* Complex: real part a power of two, imaginary part zero.  */
  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_pow2p (TREE_REALPART (expr))
      && integer_zerop (TREE_IMAGPART (expr)))
    return true;

  if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  /* Exactly one bit set <=> power of two.  */
  return wi::popcount (wi::to_wide (expr)) == 1;
}
2890
2891 /* Return 1 if EXPR is an integer constant other than zero or a
2892 complex constant other than zero, or a location wrapper for such a
2893 constant. */
2894
2895 bool
2896 integer_nonzerop (const_tree expr)
2897 {
2898 STRIP_ANY_LOCATION_WRAPPER (expr);
2899
2900 return ((TREE_CODE (expr) == INTEGER_CST
2901 && wi::to_wide (expr) != 0)
2902 || (TREE_CODE (expr) == COMPLEX_CST
2903 && (integer_nonzerop (TREE_REALPART (expr))
2904 || integer_nonzerop (TREE_IMAGPART (expr)))));
2905 }
2906
2907 /* Return 1 if EXPR is the integer constant one. For vector,
2908 return 1 if every piece is the integer constant minus one
2909 (representing the value TRUE).
2910 Also return 1 for location wrappers for such a constant. */
2911
2912 bool
2913 integer_truep (const_tree expr)
2914 {
2915 STRIP_ANY_LOCATION_WRAPPER (expr);
2916
2917 if (TREE_CODE (expr) == VECTOR_CST)
2918 return integer_all_onesp (expr);
2919 return integer_onep (expr);
2920 }
2921
2922 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2923 for such a constant. */
2924
2925 bool
2926 fixed_zerop (const_tree expr)
2927 {
2928 STRIP_ANY_LOCATION_WRAPPER (expr);
2929
2930 return (TREE_CODE (expr) == FIXED_CST
2931 && TREE_FIXED_CST (expr).data.is_zero ());
2932 }
2933
/* Return the power of two represented by a tree node known to be a
   power of two.  (Returns a negative value if EXPR is not in fact an
   exact power of two -- see wi::exact_log2.)  */

int
tree_log2 (const_tree expr)
{
  /* For complex constants, the real part carries the value.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::exact_log2 (wi::to_wide (expr));
}
2945
/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  /* NOTE(review): the COMPLEX_CST branch calls tree_log2, not
     tree_floor_log2 -- for exact powers of two the two agree; for other
     values this returns exact_log2's failure value rather than a floor.
     Presumably intentional for the inputs that reach here; confirm.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (wi::to_wide (expr));
}
2957
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of it's type.  Conservative:
   returns a lower bound (0 when nothing is known).  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have meaningful trailing zeros.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use range/bit information recorded on the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve only the common low zero bits of both operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so the better of the two bounds holds.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product add up (capped at the precision).  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A known in-range shift count adds that many zeros.  */
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* Right shift consumes trailing zeros.  */
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the operand was known to be zero (ctz == its full precision),
	 the converted value is zero too.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must guarantee the bound.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* The value of a compound expression is its second operand.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
3068
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* real_equal treats +0.0 and -0.0 as equal, so both match.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
3100
/* Return 1 if EXPR is the real constant one in real or complex form.
   Trailing zeroes matter for decimal float constants, so don't return
   1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      /* 1.0 + 0.0i.  */
      return real_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* Uniform vector: the single encoded element decides.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
3127
/* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
   matter for decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      /* -1.0 + 0.0i.  */
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* Uniform vector: the single encoded element decides.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
3153
3154 /* Nonzero if EXP is a constant or a cast of a constant. */
3155
3156 bool
3157 really_constant_p (const_tree exp)
3158 {
3159 /* This is not quite the same as STRIP_NOPS. It does more. */
3160 while (CONVERT_EXPR_P (exp)
3161 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3162 exp = TREE_OPERAND (exp, 0);
3163 return TREE_CONSTANT (exp);
3164 }
3165
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Verify all coefficients fit before mutating *VALUE, so a false
	 return leaves *VALUE untouched.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
3196
3197 poly_int64
3198 tree_to_poly_int64 (const_tree t)
3199 {
3200 gcc_assert (tree_fits_poly_int64_p (t));
3201 if (POLY_INT_CST_P (t))
3202 return poly_int_cst_value (t).force_shwi ();
3203 return TREE_INT_CST_LOW (t);
3204 }
3205
3206 poly_uint64
3207 tree_to_poly_uint64 (const_tree t)
3208 {
3209 gcc_assert (tree_fits_poly_uint64_p (t));
3210 if (POLY_INT_CST_P (t))
3211 return poly_int_cst_value (t).force_uhwi ();
3212 return TREE_INT_CST_LOW (t);
3213 }
3214 \f
3215 /* Return first list element whose TREE_VALUE is ELEM.
3216 Return 0 if ELEM is not in LIST. */
3217
3218 tree
3219 value_member (tree elem, tree list)
3220 {
3221 while (list)
3222 {
3223 if (elem == TREE_VALUE (list))
3224 return list;
3225 list = TREE_CHAIN (list);
3226 }
3227 return NULL_TREE;
3228 }
3229
3230 /* Return first list element whose TREE_PURPOSE is ELEM.
3231 Return 0 if ELEM is not in LIST. */
3232
3233 tree
3234 purpose_member (const_tree elem, tree list)
3235 {
3236 while (list)
3237 {
3238 if (elem == TREE_PURPOSE (list))
3239 return list;
3240 list = TREE_CHAIN (list);
3241 }
3242 return NULL_TREE;
3243 }
3244
3245 /* Return true if ELEM is in V. */
3246
3247 bool
3248 vec_member (const_tree elem, vec<tree, va_gc> *v)
3249 {
3250 unsigned ix;
3251 tree t;
3252 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3253 if (elem == t)
3254 return true;
3255 return false;
3256 }
3257
3258 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3259 NULL_TREE. */
3260
3261 tree
3262 chain_index (int idx, tree chain)
3263 {
3264 for (; chain && idx > 0; --idx)
3265 chain = TREE_CHAIN (chain);
3266 return chain;
3267 }
3268
3269 /* Return nonzero if ELEM is part of the chain CHAIN. */
3270
3271 bool
3272 chain_member (const_tree elem, const_tree chain)
3273 {
3274 while (chain)
3275 {
3276 if (elem == chain)
3277 return true;
3278 chain = DECL_CHAIN (chain);
3279 }
3280
3281 return false;
3282 }
3283
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half the speed of P; if the chain is circular, P
     must eventually catch up to Q (classic tortoise-and-hare), which
     the assert below turns into an ICE instead of an endless loop.  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      /* Step Q only on every other iteration of P.  */
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3310
3311 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3312 UNION_TYPE TYPE, or NULL_TREE if none. */
3313
3314 tree
3315 first_field (const_tree type)
3316 {
3317 tree t = TYPE_FIELDS (type);
3318 while (t && TREE_CODE (t) != FIELD_DECL)
3319 t = TREE_CHAIN (t);
3320 return t;
3321 }
3322
3323 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3324 UNION_TYPE TYPE, or NULL_TREE if none. */
3325
3326 tree
3327 last_field (const_tree type)
3328 {
3329 tree last = NULL_TREE;
3330
3331 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3332 {
3333 if (TREE_CODE (fld) != FIELD_DECL)
3334 continue;
3335
3336 last = fld;
3337 }
3338
3339 return last;
3340 }
3341
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  /* Appending to/of an empty chain is the other chain unchanged.  */
  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Walk to the final node of OP1 and splice OP2 onto it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the splice did not create a cycle, i.e. that OP2 did not
       already contain the tail node of OP1.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3370
3371 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3372
3373 tree
3374 tree_last (tree chain)
3375 {
3376 tree next;
3377 if (chain)
3378 while ((next = TREE_CHAIN (chain)))
3379 chain = next;
3380 return chain;
3381 }
3382
3383 /* Reverse the order of elements in the chain T,
3384 and return the new head of the chain (old last element). */
3385
3386 tree
3387 nreverse (tree t)
3388 {
3389 tree prev = 0, decl, next;
3390 for (decl = t; decl; decl = next)
3391 {
3392 /* We shouldn't be using this function to reverse BLOCK chains; we
3393 have blocks_nreverse for that. */
3394 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3395 next = TREE_CHAIN (decl);
3396 TREE_CHAIN (decl) = prev;
3397 prev = decl;
3398 }
3399 return prev;
3400 }
3401 \f
3402 /* Return a newly created TREE_LIST node whose
3403 purpose and value fields are PARM and VALUE. */
3404
3405 tree
3406 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3407 {
3408 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3409 TREE_PURPOSE (t) = parm;
3410 TREE_VALUE (t) = value;
3411 return t;
3412 }
3413
3414 /* Build a chain of TREE_LIST nodes from a vector. */
3415
3416 tree
3417 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3418 {
3419 tree ret = NULL_TREE;
3420 tree *pp = &ret;
3421 unsigned int i;
3422 tree t;
3423 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3424 {
3425 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3426 pp = &TREE_CHAIN (*pp);
3427 }
3428 return ret;
3429 }
3430
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; the three payload slots are
     all written explicitly below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
3451
/* Return the values of the elements of a CONSTRUCTOR as a vector of
   trees.  The indices/purposes of the constructor elements are
   discarded; only the values are kept, in element order.  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  /* Reserve the exact element count up front so quick_push below never
     needs to grow the vector.  */
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
3468 \f
3469 /* Return the size nominally occupied by an object of type TYPE
3470 when it resides in memory. The value is measured in units of bytes,
3471 and its data type is that normally used for type sizes
3472 (which is the first type created by make_signed_type or
3473 make_unsigned_type). */
3474
3475 tree
3476 size_in_bytes_loc (location_t loc, const_tree type)
3477 {
3478 tree t;
3479
3480 if (type == error_mark_node)
3481 return integer_zero_node;
3482
3483 type = TYPE_MAIN_VARIANT (type);
3484 t = TYPE_SIZE_UNIT (type);
3485
3486 if (t == 0)
3487 {
3488 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3489 return size_zero_node;
3490 }
3491
3492 return t;
3493 }
3494
3495 /* Return the size of TYPE (in bytes) as a wide integer
3496 or return -1 if the size can vary or is larger than an integer. */
3497
3498 HOST_WIDE_INT
3499 int_size_in_bytes (const_tree type)
3500 {
3501 tree t;
3502
3503 if (type == error_mark_node)
3504 return 0;
3505
3506 type = TYPE_MAIN_VARIANT (type);
3507 t = TYPE_SIZE_UNIT (type);
3508
3509 if (t && tree_fits_uhwi_p (t))
3510 return TREE_INT_CST_LOW (t);
3511 else
3512 return -1;
3513 }
3514
3515 /* Return the maximum size of TYPE (in bytes) as a wide integer
3516 or return -1 if the size can vary or is larger than an integer. */
3517
3518 HOST_WIDE_INT
3519 max_int_size_in_bytes (const_tree type)
3520 {
3521 HOST_WIDE_INT size = -1;
3522 tree size_tree;
3523
3524 /* If this is an array type, check for a possible MAX_SIZE attached. */
3525
3526 if (TREE_CODE (type) == ARRAY_TYPE)
3527 {
3528 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3529
3530 if (size_tree && tree_fits_uhwi_p (size_tree))
3531 size = tree_to_uhwi (size_tree);
3532 }
3533
3534 /* If we still haven't been able to get a size, see if the language
3535 can compute a maximum size. */
3536
3537 if (size == -1)
3538 {
3539 size_tree = lang_hooks.types.max_size (type);
3540
3541 if (size_tree && tree_fits_uhwi_p (size_tree))
3542 size = tree_to_uhwi (size_tree);
3543 }
3544
3545 return size;
3546 }
3547 \f
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  Combines the byte-aligned
   DECL_FIELD_OFFSET with the bit-level DECL_FIELD_BIT_OFFSET.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
3557 \f
/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  Combines the byte-aligned
   DECL_FIELD_OFFSET with the bit-level DECL_FIELD_BIT_OFFSET.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
3567
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3577 \f
/* Return, as a tree node, the number of elements for TYPE (which is an
   ARRAY_TYPE) minus one.  This counts only elements of the top array.
   Returns error_mark_node for arrays with unspecified or unknown
   bounds.  */

tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    {
      /* zero sized arrays are represented from C FE as complete types with
	 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
	 them as min 0, max -1.  Normalize the C form to the C++ one.  */
      if (COMPLETE_TYPE_P (type)
	  && integer_zerop (TYPE_SIZE (type))
	  && integer_zerop (min))
	return build_int_cst (TREE_TYPE (min), -1);

      return error_mark_node;
    }

  /* Count = max - min, so nelts - 1 is simply max when min is zero.  */
  return (integer_zerop (min)
	  ? max
	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
3613 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-local and dllimport variables do not have a fixed static
	 address even when TREE_STATIC.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise the reference is static iff its base object is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      /* *p is static when p itself is a constant address.  */
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Static only for a constant index into a fixed-size array of a
	 static base; a variable index or element size needs runtime
	 arithmetic.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3679
3680 \f
3681
3682
/* Return whether OP is a DECL whose address is function-invariant.  */

bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      /* These all live at a fixed address for the duration of the
	 current function.  */
      return true;

    case VAR_DECL:
      /* Unlike staticp, thread-local variables qualify: their address
	 is fixed within any single function invocation.  Locals of the
	 current function qualify too.  */
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || DECL_THREAD_LOCAL_P (op)
	  || DECL_CONTEXT (op) == current_function_decl
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    default:
      break;
    }

  return false;
}
3719
/* Return whether OP is a DECL whose address is interprocedural-invariant,
   i.e. the same regardless of which function it is evaluated in.  */

bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      /* dllimport'd variables are excluded because their address is
	 resolved through an import slot.  Thread-locals qualify here,
	 unlike in staticp.  */
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
	   && !DECL_DLLIMPORT_P (op))
	  || DECL_THREAD_LOCAL_P (op))
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
	return true;
      break;

    default:
      break;
    }

  return false;
}
3753
3754
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and side-effect-free read-only expressions are trivially
     invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      /* A SAVE_EXPR is evaluated once and reused, hence invariant.  */
      return true;

    case ADDR_EXPR:
      /* &base.comp[idx]... is invariant if every index in the component
	 path is invariant and the ultimate base has an invariant
	 address.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* Operands 2 and 3 are non-default lower bound and element
		 size; bail out if present since they may vary.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2 is a non-default field offset; bail out if
		 present.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3805
3806 /* Return true if T is function-invariant. */
3807
3808 bool
3809 tree_invariant_p (tree t)
3810 {
3811 tree inner = skip_simple_arithmetic (t);
3812 return tree_invariant_p_1 (inner);
3813 }
3814
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  /* Invariant expressions need no wrapping; reevaluation is free.  */
  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3874
/* Look inside EXPR into any simple arithmetic operations.  Return the
   outermost non-arithmetic or non-invariant node.  */

tree
skip_simple_arithmetic (tree expr)
{
  /* We don't care about whether this can be used as an lvalue in this
     context.  */
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
     a constant, it will be more efficient to not make another SAVE_EXPR since
     it will allow better simplification and GCSE will be able to merge the
     computations if they actually occur.  */
  while (true)
    {
      if (UNARY_CLASS_P (expr))
	expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
	{
	  /* For a binary node, descend into the non-invariant operand
	     (at most one can be non-invariant for us to continue).  */
	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
	    expr = TREE_OPERAND (expr, 0);
	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
	    expr = TREE_OPERAND (expr, 1);
	  else
	    break;
	}
      else
	break;
    }

  return expr;
}
3909
3910 /* Look inside EXPR into simple arithmetic operations involving constants.
3911 Return the outermost non-arithmetic or non-constant node. */
3912
3913 tree
3914 skip_simple_constant_arithmetic (tree expr)
3915 {
3916 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3917 expr = TREE_OPERAND (expr, 0);
3918
3919 while (true)
3920 {
3921 if (UNARY_CLASS_P (expr))
3922 expr = TREE_OPERAND (expr, 0);
3923 else if (BINARY_CLASS_P (expr))
3924 {
3925 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3926 expr = TREE_OPERAND (expr, 0);
3927 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3928 expr = TREE_OPERAND (expr, 1);
3929 else
3930 break;
3931 }
3932 else
3933 break;
3934 }
3935
3936 return expr;
3937 }
3938
3939 /* Return which tree structure is used by T. */
3940
3941 enum tree_node_structure_enum
3942 tree_node_structure (const_tree t)
3943 {
3944 const enum tree_code code = TREE_CODE (t);
3945 return tree_node_structure_for_code (code);
3946 }
3947
/* Set various status flags when building a CALL_EXPR object T.
   Computes TREE_SIDE_EFFECTS and TREE_READONLY from the callee's
   ECF_* flags and from the call's operands.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Note: I is reused below as the operand index.  Operand 0 of a
     CALL_EXPR is skipped; the remaining operands can still contribute
     side effects or defeat read-only-ness.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3977 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will be assumed
	 here to be valid.  Only the base object (operand 0) matters.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR. */
	  return 0;

	default:
	  break;
	}

      /* Generic unary/binary case: check each operand.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* A call contains a placeholder iff any argument does.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
4064
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* Nothing beyond size/component can hold a placeholder here.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (QUAL_UNION only), and
	   type for placeholders.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
4133
/* Wrapper around above function used to cache its result.
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL encodes: 0 = not yet computed,
   1 = computed false (or computation in progress), 2 = computed true.  */

bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
4158 \f
/* Push tree EXP onto vector QUEUE if it is not already present.
   Presence is decided by structural equality (simple_cst_equal),
   not pointer identity.  */

static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  /* ITER is null when the loop above ran off the end of the vector,
     i.e. no duplicate was found.  */
  if (!iter)
    queue->safe_push (exp);
}
4174
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Strip the chain of references down to the ultimate base.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* Record the whole COMPONENT_REF when its base is a placeholder;
	 otherwise keep searching inside the base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Variable-length expressions: operand 0 is the length, so the
	   real operands start at 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4248
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.

   Returns EXP itself when nothing was substituted, so callers can
   test for change with pointer comparison.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
   }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Generic fixed-arity case: substitute in each operand and
	   rebuild (folding) only if something changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Copy-on-write: only copy the node the first time an operand
	     actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original expression on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4444
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.
   Return EXP itself when nothing needed to be substituted, so unchanged
   subtrees are shared rather than copied.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: walk the chain of references looking for an element
         whose type's main variant is exactly the type the placeholder
         needs.  COMPOUND_EXPRs and COND_EXPRs are stepped through via
         operand 1, other expression-like nodes via operand 0.  */
      for (elt = obj; elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                   || TREE_CODE (elt) == COND_EXPR)
                  ? TREE_OPERAND (elt, 1)
                  : (REFERENCE_CLASS_P (elt)
                     || UNARY_CLASS_P (elt)
                     || BINARY_CLASS_P (elt)
                     || VL_EXP_CLASS_P (elt)
                     || EXPRESSION_CLASS_P (elt))
                  ? TREE_OPERAND (elt, 0) : 0))
        if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
          return elt;

      /* Second pass: accept an element that is a pointer to the needed
         type, and build a dereference of it.  */
      for (elt = obj; elt != 0;
           elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                   || TREE_CODE (elt) == COND_EXPR)
                  ? TREE_OPERAND (elt, 1)
                  : (REFERENCE_CLASS_P (elt)
                     || UNARY_CLASS_P (elt)
                     || BINARY_CLASS_P (elt)
                     || VL_EXP_CLASS_P (elt)
                     || EXPRESSION_CLASS_P (elt))
                  ? TREE_OPERAND (elt, 0) : 0))
        if (POINTER_TYPE_P (TREE_TYPE (elt))
            && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                == need_type))
          return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
         survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
        return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
        return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
        /* Recurse into every operand; if none changed, return EXP itself
           so the unchanged subtree stays shared.  */
        switch (TREE_CODE_LENGTH (code))
          {
          case 0:
            return exp;

          case 1:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            if (op0 == TREE_OPERAND (exp, 0))
              return exp;

            new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
            break;

          case 2:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

            if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
              return exp;

            new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
            break;

          case 3:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
            op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

            if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
                && op2 == TREE_OPERAND (exp, 2))
              return exp;

            new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
            break;

          case 4:
            op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
            op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
            op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
            op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

            if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
                && op2 == TREE_OPERAND (exp, 2)
                && op3 == TREE_OPERAND (exp, 3))
              return exp;

            new_tree
              = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
            break;

          default:
            gcc_unreachable ();
          }
        break;

      case tcc_vl_exp:
        {
          int i;

          new_tree = NULL_TREE;

          /* Operand 0 of a vl_exp holds its operand count, so the
             substitution walk starts at operand 1.  */
          for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
            {
              tree op = TREE_OPERAND (exp, i);
              tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
              if (new_op != op)
                {
                  /* Copy EXP lazily, only once a substitution happens.  */
                  if (!new_tree)
                    new_tree = copy_node (exp);
                  TREE_OPERAND (new_tree, i) = new_op;
                }
            }

          if (new_tree)
            {
              new_tree = fold (new_tree);
              if (TREE_CODE (new_tree) == CALL_EXPR)
                process_call_operands (new_tree);
            }
          else
            return exp;
        }
        break;

      default:
        gcc_unreachable ();
      }

  /* Carry the relevant flags of the original over to the rebuilt node.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4615 \f
4616
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.

   E is the subtree to stabilize; the return value is either E itself,
   a SAVE_EXPR wrapping E, or a rebuilt node with stabilized operands.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
         have side-effects.  */
      if (code == STATEMENT_LIST)
        return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
        return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
         here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
         especially the division by powers of 2 that is often
         found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
          || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
          || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
          || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
        return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
                         stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt leaves type and flags unset; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4702
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The pointer operand is a full expression, so use
         stabilize_reference_1 on it.  */
      result = build_nt (INDIRECT_REF,
                         stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      /* Only the object is stabilized; the field (operand 1) is passed
         through unchanged.  */
      result = build_nt (COMPONENT_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* Stabilize the base as a reference and the index as a general
         expression.  */
      result = build_nt (ARRAY_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
         it wouldn't be ignored.  This matters when dealing with
         volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt leaves type and flags unset; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4785 \f
4786 /* Low-level constructors for expressions. */
4787
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  T must be an ADDR_EXPR; this
   is asserted below.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Clear TC if NODE is non-constant and set SE if it has side effects;
   a null NODE is ignored.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
         array reference (probably made temporarily by the G++ front end),
         so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
           || TREE_CODE (node) == ARRAY_RANGE_REF)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
        {
          /* Index, and the optional lower bound and element size.  */
          UPDATE_FLAGS (TREE_OPERAND (node, 1));
          if (TREE_OPERAND (node, 2))
            UPDATE_FLAGS (TREE_OPERAND (node, 2));
          if (TREE_OPERAND (node, 3))
            UPDATE_FLAGS (TREE_OPERAND (node, 3));
        }
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
         FIELD_DECL, apparently.  The G++ front end can put something else
         there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
               && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
        {
          if (TREE_OPERAND (node, 2))
            UPDATE_FLAGS (TREE_OPERAND (node, 2));
        }
    }

  /* Let the front end map language-specific references to a decl.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4864
4865 /* Build an expression of code CODE, data type TYPE, and operands as
4866 specified. Expressions and reference nodes can be created this way.
4867 Constants, decls, types and misc nodes cannot be.
4868
4869 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4870 enough for all extant tree codes. */
4871
4872 tree
4873 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4874 {
4875 tree t;
4876
4877 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4878
4879 t = make_node (code PASS_MEM_STAT);
4880 TREE_TYPE (t) = tt;
4881
4882 return t;
4883 }
4884
/* Build a one-operand expression node of code CODE, type TYPE and
   operand NODE.  Side-effect, read-only, constant and volatile flags
   are derived from CODE and NODE.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part is cleared; type, location and the operand
     are filled in explicitly below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      /* Inherit side-effect and read-only status from the operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      /* Statements have side effects, except debug begin markers.  */
      if (code != DEBUG_BEGIN_STMT)
        TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
         operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
         its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
        recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
          && node && !TYPE_P (node)
          && TREE_CONSTANT (node))
        TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
          && node && TREE_THIS_VOLATILE (node))
        TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4948
/* Helper for build2 through build5: store arg##N as operand N of T, and
   fold its flags into the local SIDE_EFFECTS, READ_ONLY and CONSTANT
   accumulators.  Arguments that are types are stored but do not affect
   the flags.  */
#define PROCESS_ARG(N)			\
  do {					\
    TREE_OPERAND (t, N) = arg##N;	\
    if (arg##N &&!TYPE_P (arg##N))	\
      {					\
	if (TREE_SIDE_EFFECTS (arg##N))	\
	  side_effects = 1;		\
	if (!TREE_READONLY (arg##N)	\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);	\
	if (!TREE_CONSTANT (arg##N))	\
	  (void) (constant = 0);	\
      }					\
  } while (0)
4963
/* Build a two-operand expression node; see the comment above build0 for
   the general contract.  Performs sanity checks on pointer arithmetic
   codes and computes the constant/read-only/side-effect flags from the
   operands.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Plain PLUS/MINUS/MULT on pointer-typed results is only allowed when
     folding constants; real pointer arithmetic must use
     POINTER_PLUS_EXPR (checked below).  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
         we need to be able to build explicit extensions or truncations
         of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
                && TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
                && ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &OBJ inherits its flags from OBJ itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
        {
          tree o = TREE_OPERAND (arg0, 0);
          TREE_READONLY (t) = TREE_READONLY (o);
          TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
        }
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
        = (TREE_CODE_CLASS (code) == tcc_reference
           && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
5042
5043
/* Build a three-operand expression node; see the comment above build0
   for the general contract.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
        tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR propagates the accumulated read-only flag here.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5084
/* Build a four-operand expression node; see the comment above build0
   for the general contract.  CONSTANT and READ_ONLY are only written
   (never read) via PROCESS_ARG here.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
        tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5111
/* Build a five-operand expression node; see the comment above build0
   for the general contract.  CONSTANT and READ_ONLY are only written
   (never read) via PROCESS_ARG here.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
        tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* A TARGET_MEM_REF of &OBJ inherits its flags from OBJ itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
        {
          tree o = TREE_OPERAND (arg0, 0);
          TREE_READONLY (t) = TREE_READONLY (o);
          TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
        }
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
         && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
5149
/* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
   on the pointer PTR.  LOC is the source location to give the result.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
          || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
        {
          /* Fold the inner MEM_REF's offset into ours and use its base
             pointer directly.  */
          offset += mem_ref_offset (ptr).force_shwi ();
          ptr = TREE_OPERAND (ptr, 0);
        }
      else
        ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* The accumulated offset is encoded in the second MEM_REF operand,
     whose type carries the original pointer type.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
                ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
5181
5182 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5183
5184 poly_offset_int
5185 mem_ref_offset (const_tree t)
5186 {
5187 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5188 SIGNED);
5189 }
5190
5191 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5192 offsetted by OFFSET units. */
5193
5194 tree
5195 build_invariant_address (tree type, tree base, poly_int64 offset)
5196 {
5197 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5198 build_fold_addr_expr (base),
5199 build_int_cst (ptr_type_node, offset));
5200 tree addr = build1 (ADDR_EXPR, type, ref);
5201 recompute_tree_invariant_for_addr_expr (addr);
5202 return addr;
5203 }
5204
5205 /* Similar except don't specify the TREE_TYPE
5206 and leave the TREE_SIDE_EFFECTS as 0.
5207 It is permissible for arguments to be null,
5208 or even garbage if their values do not matter. */
5209
5210 tree
5211 build_nt (enum tree_code code, ...)
5212 {
5213 tree t;
5214 int length;
5215 int i;
5216 va_list p;
5217
5218 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5219
5220 va_start (p, code);
5221
5222 t = make_node (code);
5223 length = TREE_CODE_LENGTH (code);
5224
5225 for (i = 0; i < length; i++)
5226 TREE_OPERAND (t, i) = va_arg (p, tree);
5227
5228 va_end (p);
5229 return t;
5230 }
5231
/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec.  FN is the callee; ARGS (possibly null) supplies the
   argument operands.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  /* The three extra operands hold the operand count, the callee and
     the static chain; the arguments follow.  */
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
5248 \f
/* Create a DECL_... node of code CODE, name NAME (if non-null)
   and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl (location_t loc, enum tree_code code, tree name,
            tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

  /*  if (type == error_mark_node)
    type = integer_type_node; */
/* That is not done, deliberately, so that having error_mark_node
   as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  /* Only variable-like decls get their storage layout computed here;
     other decl kinds are laid out by their creators.  */
  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}
5280
5281 /* Builds and returns function declaration with NAME and TYPE. */
5282
5283 tree
5284 build_fn_decl (const char *name, tree type)
5285 {
5286 tree id = get_identifier (name);
5287 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5288
5289 DECL_EXTERNAL (decl) = 1;
5290 TREE_PUBLIC (decl) = 1;
5291 DECL_ARTIFICIAL (decl) = 1;
5292 TREE_NOTHROW (decl) = 1;
5293
5294 return decl;
5295 }
5296
/* Global list of every TRANSLATION_UNIT_DECL built so far; appended to
   by build_translation_unit_decl below.  */
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
                        name, NULL_TREE);
  /* Record which front end produced this unit.  */
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);
  return tu;
}
5311
5312 \f
5313 /* BLOCK nodes are used to represent the structure of binding contours
5314 and declarations, once those contours have been exited and their contents
5315 compiled. This information is used for outputting debugging info. */
5316
5317 tree
5318 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5319 {
5320 tree block = make_node (BLOCK);
5321
5322 BLOCK_VARS (block) = vars;
5323 BLOCK_SUBBLOCKS (block) = subblocks;
5324 BLOCK_SUPERCONTEXT (block) = supercontext;
5325 BLOCK_CHAIN (block) = chain;
5326 return block;
5327 }
5328
5329 \f
5330 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5331
5332 LOC is the location to use in tree T. */
5333
5334 void
5335 protected_set_expr_location (tree t, location_t loc)
5336 {
5337 if (CAN_HAVE_LOCATION_P (t))
5338 SET_EXPR_LOCATION (t, loc);
5339 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5340 {
5341 t = expr_single (t);
5342 if (t && CAN_HAVE_LOCATION_P (t))
5343 SET_EXPR_LOCATION (t, loc);
5344 }
5345 }
5346
5347 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5348 UNKNOWN_LOCATION. */
5349
5350 void
5351 protected_set_expr_location_if_unset (tree t, location_t loc)
5352 {
5353 t = expr_single (t);
5354 if (t && !EXPR_HAS_LOCATION (t))
5355 protected_set_expr_location (t, loc);
5356 }
5357 \f
/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  The address-space bits are decoded
   out of the same mask.  */

static void
set_type_quals (tree type, int type_quals)
{
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}
5370
5371 /* Returns true iff CAND and BASE have equivalent language-specific
5372 qualifiers. */
5373
5374 bool
5375 check_lang_type (const_tree cand, const_tree base)
5376 {
5377 if (lang_hooks.types.type_hash_eq == NULL)
5378 return true;
5379 /* type_hash_eq currently only applies to these types. */
5380 if (TREE_CODE (cand) != FUNCTION_TYPE
5381 && TREE_CODE (cand) != METHOD_TYPE)
5382 return true;
5383 return lang_hooks.types.type_hash_eq (cand, base);
5384 }
5385
5386 /* This function checks to see if TYPE matches the size one of the built-in
5387 atomic types, and returns that core atomic type. */
5388
5389 static tree
5390 find_atomic_core_type (const_tree type)
5391 {
5392 tree base_atomic_type;
5393
5394 /* Only handle complete types. */
5395 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5396 return NULL_TREE;
5397
5398 switch (tree_to_uhwi (TYPE_SIZE (type)))
5399 {
5400 case 8:
5401 base_atomic_type = atomicQI_type_node;
5402 break;
5403
5404 case 16:
5405 base_atomic_type = atomicHI_type_node;
5406 break;
5407
5408 case 32:
5409 base_atomic_type = atomicSI_type_node;
5410 break;
5411
5412 case 64:
5413 base_atomic_type = atomicDI_type_node;
5414 break;
5415
5416 case 128:
5417 base_atomic_type = atomicTI_type_node;
5418 break;
5419
5420 default:
5421 base_atomic_type = NULL_TREE;
5422 }
5423
5424 return base_atomic_type;
5425 }
5426
/* Returns true iff unqualified CAND and BASE are equivalent.  Compares
   name, context, attributes and alignment, with a relaxation for atomic
   types whose alignment was bumped to that of the core atomic type.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
                                TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must to do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
        return true;
    }
  return false;
}
5453
5454 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5455
5456 bool
5457 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5458 {
5459 return (TYPE_QUALS (cand) == type_quals
5460 && check_base_type (cand, base)
5461 && check_lang_type (cand, base));
5462 }
5463
5464 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5465
5466 static bool
5467 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5468 {
5469 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5470 && TYPE_NAME (cand) == TYPE_NAME (base)
5471 /* Apparently this is needed for Objective-C. */
5472 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5473 /* Check alignment. */
5474 && TYPE_ALIGN (cand) == align
5475 /* Check this is a user-aligned type as build_aligned_type
5476 would create. */
5477 && TYPE_USER_ALIGN (cand)
5478 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5479 TYPE_ATTRIBUTES (base))
5480 && check_lang_type (cand, base));
5481 }
5482
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  /* Fast path: TYPE itself already carries the wanted qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	/* Unlink T from its current place (TP points at the link)...  */
	*tp = TYPE_NEXT_VARIANT (t);
	/* ...and splice it in directly after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
5515
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  /* Recurse: the canonical form of the qualified type is the
	     canonical form of the equally-qualified canonical TYPE.  */
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
5564
5565 /* Create a variant of type T with alignment ALIGN. */
5566
5567 tree
5568 build_aligned_type (tree type, unsigned int align)
5569 {
5570 tree t;
5571
5572 if (TYPE_PACKED (type)
5573 || TYPE_ALIGN (type) == align)
5574 return type;
5575
5576 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5577 if (check_aligned_type (t, type, align))
5578 return t;
5579
5580 t = build_variant_type_copy (type);
5581 SET_TYPE_ALIGN (t, align);
5582 TYPE_USER_ALIGN (t) = 1;
5583
5584 return t;
5585 }
5586
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself. */

tree
build_distinct_type_copy (tree type MEM_STAT_DECL)
{
  tree t = copy_node (type PASS_MEM_STAT);

  /* The copy must not share the pointer/reference-to caches of the
     original; those pointer types refer to TYPE, not T.  */
  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks. */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant. */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
5617
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks). */

tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined. */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE, directly after
     the main variant, and point it back at the main variant (undoing
     the self-main-variant setup done by build_distinct_type_copy).  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
5644 \f
5645 /* Return true if the from tree in both tree maps are equal. */
5646
5647 int
5648 tree_map_base_eq (const void *va, const void *vb)
5649 {
5650 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5651 *const b = (const struct tree_map_base *) vb;
5652 return (a->from == b->from);
5653 }
5654
5655 /* Hash a from tree in a tree_base_map. */
5656
5657 unsigned int
5658 tree_map_base_hash (const void *item)
5659 {
5660 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5661 }
5662
5663 /* Return true if this tree map structure is marked for garbage collection
5664 purposes. We simply return true if the from tree is marked, so that this
5665 structure goes away when the from tree goes away. */
5666
5667 int
5668 tree_map_base_marked_p (const void *p)
5669 {
5670 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5671 }
5672
5673 /* Hash a from tree in a tree_map. */
5674
5675 unsigned int
5676 tree_map_hash (const void *item)
5677 {
5678 return (((const struct tree_map *) item)->hash);
5679 }
5680
5681 /* Hash a from tree in a tree_decl_map. */
5682
5683 unsigned int
5684 tree_decl_map_hash (const void *item)
5685 {
5686 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5687 }
5688
5689 /* Return the initialization priority for DECL. */
5690
5691 priority_type
5692 decl_init_priority_lookup (tree decl)
5693 {
5694 symtab_node *snode = symtab_node::get (decl);
5695
5696 if (!snode)
5697 return DEFAULT_INIT_PRIORITY;
5698 return
5699 snode->get_init_priority ();
5700 }
5701
5702 /* Return the finalization priority for DECL. */
5703
5704 priority_type
5705 decl_fini_priority_lookup (tree decl)
5706 {
5707 cgraph_node *node = cgraph_node::get (decl);
5708
5709 if (!node)
5710 return DEFAULT_INIT_PRIORITY;
5711 return
5712 node->get_fini_priority ();
5713 }
5714
5715 /* Set the initialization priority for DECL to PRIORITY. */
5716
5717 void
5718 decl_init_priority_insert (tree decl, priority_type priority)
5719 {
5720 struct symtab_node *snode;
5721
5722 if (priority == DEFAULT_INIT_PRIORITY)
5723 {
5724 snode = symtab_node::get (decl);
5725 if (!snode)
5726 return;
5727 }
5728 else if (VAR_P (decl))
5729 snode = varpool_node::get_create (decl);
5730 else
5731 snode = cgraph_node::get_create (decl);
5732 snode->set_init_priority (priority);
5733 }
5734
5735 /* Set the finalization priority for DECL to PRIORITY. */
5736
5737 void
5738 decl_fini_priority_insert (tree decl, priority_type priority)
5739 {
5740 struct cgraph_node *node;
5741
5742 if (priority == DEFAULT_INIT_PRIORITY)
5743 {
5744 node = cgraph_node::get (decl);
5745 if (!node)
5746 return;
5747 }
5748 else
5749 node = cgraph_node::get_create (decl);
5750 node->set_fini_priority (priority);
5751 }
5752
5753 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5754
5755 static void
5756 print_debug_expr_statistics (void)
5757 {
5758 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5759 (long) debug_expr_for_decl->size (),
5760 (long) debug_expr_for_decl->elements (),
5761 debug_expr_for_decl->collisions ());
5762 }
5763
5764 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5765
5766 static void
5767 print_value_expr_statistics (void)
5768 {
5769 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5770 (long) value_expr_for_decl->size (),
5771 (long) value_expr_for_decl->elements (),
5772 value_expr_for_decl->collisions ());
5773 }
5774
5775 /* Lookup a debug expression for FROM, and return it if we find one. */
5776
5777 tree
5778 decl_debug_expr_lookup (tree from)
5779 {
5780 struct tree_decl_map *h, in;
5781 in.base.from = from;
5782
5783 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5784 if (h)
5785 return h->to;
5786 return NULL_TREE;
5787 }
5788
5789 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5790
5791 void
5792 decl_debug_expr_insert (tree from, tree to)
5793 {
5794 struct tree_decl_map *h;
5795
5796 h = ggc_alloc<tree_decl_map> ();
5797 h->base.from = from;
5798 h->to = to;
5799 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5800 }
5801
5802 /* Lookup a value expression for FROM, and return it if we find one. */
5803
5804 tree
5805 decl_value_expr_lookup (tree from)
5806 {
5807 struct tree_decl_map *h, in;
5808 in.base.from = from;
5809
5810 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5811 if (h)
5812 return h->to;
5813 return NULL_TREE;
5814 }
5815
5816 /* Insert a mapping FROM->TO in the value expression hashtable. */
5817
5818 void
5819 decl_value_expr_insert (tree from, tree to)
5820 {
5821 struct tree_decl_map *h;
5822
5823 h = ggc_alloc<tree_decl_map> ();
5824 h->base.from = from;
5825 h->to = to;
5826 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5827 }
5828
5829 /* Lookup a vector of debug arguments for FROM, and return it if we
5830 find one. */
5831
5832 vec<tree, va_gc> **
5833 decl_debug_args_lookup (tree from)
5834 {
5835 struct tree_vec_map *h, in;
5836
5837 if (!DECL_HAS_DEBUG_ARGS_P (from))
5838 return NULL;
5839 gcc_checking_assert (debug_args_for_decl != NULL);
5840 in.base.from = from;
5841 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5842 if (h)
5843 return &h->to;
5844 return NULL;
5845 }
5846
5847 /* Insert a mapping FROM->empty vector of debug arguments in the value
5848 expression hashtable. */
5849
5850 vec<tree, va_gc> **
5851 decl_debug_args_insert (tree from)
5852 {
5853 struct tree_vec_map *h;
5854 tree_vec_map **loc;
5855
5856 if (DECL_HAS_DEBUG_ARGS_P (from))
5857 return decl_debug_args_lookup (from);
5858 if (debug_args_for_decl == NULL)
5859 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5860 h = ggc_alloc<tree_vec_map> ();
5861 h->base.from = from;
5862 h->to = NULL;
5863 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5864 *loc = h;
5865 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5866 return &h->to;
5867 }
5868
5869 /* Hashing of types so that we don't make duplicates.
5870 The entry point is `type_hash_canon'. */
5871
/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  The fields hashed here must
   stay in sync with the fields compared in type_cache_hasher::equal,
   or equal types would land in different buckets.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  /* Mix in the element/result/pointee type when there is one.  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Hash each argument type, skipping erroneous entries.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE only participates for non-aggregate
	   element types; see the matching test in
	   type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bounds: the max value if present, else the min.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
5944
5945 /* These are the Hashtable callback functions. */
5946
/* Returns true iff the types are equivalent for the purpose of the
   type canonicalization hash table (type_hash_canon).  Must accept
   exactly the pairs that type_hash_canon_hash hashes identically.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      /* COMPLEX_TYPEs are fully determined by their component type,
	 so their names are not compared.  */
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* Nothing beyond the common fields distinguishes these codes.  */
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* Value lists may be shared or structurally equal TREE_LISTs.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds may be shared nodes or numerically equal constants.  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Reached only from the METHOD_TYPE/FUNCTION_TYPE `break's above:
     give the frontend a final veto.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
6068
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* A matching type already exists: free the freshly-built TYPE
	 and return the existing canonical node.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* If TYPE was the most recently created type, recycle its UID.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No match: record TYPE as the canonical object for this hash.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
6140
6141 static void
6142 print_type_hash_statistics (void)
6143 {
6144 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6145 (long) type_hash_table->size (),
6146 (long) type_hash_table->elements (),
6147 type_hash_table->collisions ());
6148 }
6149
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

bool
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    /* Distinct TREE_PURPOSE nodes are still acceptable when they
	       are structurally identical constants of the same type
	       (simple_cst_equal returns exactly 1 on certain equality).  */
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return false;

  /* Equal only if both lists ended simultaneously (both are NULL).  */
  return t1 == t2;
}
6170
6171 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6172 given by TYPE. If the argument list accepts variable arguments,
6173 then this function counts only the ordinary arguments. */
6174
6175 int
6176 type_num_arguments (const_tree fntype)
6177 {
6178 int i = 0;
6179
6180 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6181 /* If the function does not take a variable number of arguments,
6182 the last element in the list will have type `void'. */
6183 if (VOID_TYPE_P (TREE_VALUE (t)))
6184 break;
6185 else
6186 ++i;
6187
6188 return i;
6189 }
6190
/* Return the type of the function TYPE's argument ARGNO if known.
   For vararg function's where ARGNO refers to one of the variadic
   arguments return null.  Otherwise, return a void_type_node for
   out-of-bounds ARGNO.  ARGNO is 1-based.  */

tree
type_argument_type (const_tree fntype, unsigned argno)
{
  /* Treat zero the same as an out-of-bounds argument number.  */
  if (!argno)
    return void_type_node;

  function_args_iterator iter;

  tree argtype;
  unsigned i = 1;
  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
    {
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (!argtype)
	break;

      /* Once the terminating void is reached, every larger ARGNO is
	 out of bounds, so returning it doubles as the OOB result.  */
      if (i == argno || VOID_TYPE_P (argtype))
	return argtype;

      ++i;
    }

  return NULL_TREE;
}
6224
6225 /* Nonzero if integer constants T1 and T2
6226 represent the same constant value. */
6227
6228 int
6229 tree_int_cst_equal (const_tree t1, const_tree t2)
6230 {
6231 if (t1 == t2)
6232 return 1;
6233
6234 if (t1 == 0 || t2 == 0)
6235 return 0;
6236
6237 STRIP_ANY_LOCATION_WRAPPER (t1);
6238 STRIP_ANY_LOCATION_WRAPPER (t2);
6239
6240 if (TREE_CODE (t1) == INTEGER_CST
6241 && TREE_CODE (t2) == INTEGER_CST
6242 && wi::to_widest (t1) == wi::to_widest (t2))
6243 return 1;
6244
6245 return 0;
6246 }
6247
6248 /* Return true if T is an INTEGER_CST whose numerical value (extended
6249 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6250
6251 bool
6252 tree_fits_shwi_p (const_tree t)
6253 {
6254 return (t != NULL_TREE
6255 && TREE_CODE (t) == INTEGER_CST
6256 && wi::fits_shwi_p (wi::to_widest (t)));
6257 }
6258
6259 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6260 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6261
6262 bool
6263 tree_fits_poly_int64_p (const_tree t)
6264 {
6265 if (t == NULL_TREE)
6266 return false;
6267 if (POLY_INT_CST_P (t))
6268 {
6269 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6270 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6271 return false;
6272 return true;
6273 }
6274 return (TREE_CODE (t) == INTEGER_CST
6275 && wi::fits_shwi_p (wi::to_widest (t)));
6276 }
6277
6278 /* Return true if T is an INTEGER_CST whose numerical value (extended
6279 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6280
6281 bool
6282 tree_fits_uhwi_p (const_tree t)
6283 {
6284 return (t != NULL_TREE
6285 && TREE_CODE (t) == INTEGER_CST
6286 && wi::fits_uhwi_p (wi::to_widest (t)));
6287 }
6288
6289 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6290 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6291
6292 bool
6293 tree_fits_poly_uint64_p (const_tree t)
6294 {
6295 if (t == NULL_TREE)
6296 return false;
6297 if (POLY_INT_CST_P (t))
6298 {
6299 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6300 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6301 return false;
6302 return true;
6303 }
6304 return (TREE_CODE (t) == INTEGER_CST
6305 && wi::fits_uhwi_p (wi::to_widest (t)));
6306 }
6307
6308 /* T is an INTEGER_CST whose numerical value (extended according to
6309 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6310 HOST_WIDE_INT. */
6311
6312 HOST_WIDE_INT
6313 tree_to_shwi (const_tree t)
6314 {
6315 gcc_assert (tree_fits_shwi_p (t));
6316 return TREE_INT_CST_LOW (t);
6317 }
6318
6319 /* T is an INTEGER_CST whose numerical value (extended according to
6320 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6321 HOST_WIDE_INT. */
6322
6323 unsigned HOST_WIDE_INT
6324 tree_to_uhwi (const_tree t)
6325 {
6326 gcc_assert (tree_fits_uhwi_p (t));
6327 return TREE_INT_CST_LOW (t);
6328 }
6329
6330 /* Return the most significant (sign) bit of T. */
6331
6332 int
6333 tree_int_cst_sign_bit (const_tree t)
6334 {
6335 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6336
6337 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6338 }
6339
6340 /* Return an indication of the sign of the integer constant T.
6341 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6342 Note that -1 will never be returned if T's type is unsigned. */
6343
6344 int
6345 tree_int_cst_sgn (const_tree t)
6346 {
6347 if (wi::to_wide (t) == 0)
6348 return 0;
6349 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6350 return 1;
6351 else if (wi::neg_p (wi::to_wide (t)))
6352 return -1;
6353 else
6354 return 1;
6355 }
6356
6357 /* Return the minimum number of bits needed to represent VALUE in a
6358 signed or unsigned type, UNSIGNEDP says which. */
6359
6360 unsigned int
6361 tree_int_cst_min_precision (tree value, signop sgn)
6362 {
6363 /* If the value is negative, compute its negative minus 1. The latter
6364 adjustment is because the absolute value of the largest negative value
6365 is one larger than the largest positive value. This is equivalent to
6366 a bit-wise negation, so use that operation instead. */
6367
6368 if (tree_int_cst_sgn (value) < 0)
6369 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6370
6371 /* Return the number of bits needed, taking into account the fact
6372 that we need one more bit for a signed than unsigned type.
6373 If value is 0 or -1, the minimum precision is 1 no matter
6374 whether unsignedp is true or false. */
6375
6376 if (integer_zerop (value))
6377 return 1;
6378 else
6379 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
6380 }
6381
6382 /* Return truthvalue of whether T1 is the same tree structure as T2.
6383 Return 1 if they are the same.
6384 Return 0 if they are understandably different.
6385 Return -1 if either contains tree structure not understood by
6386 this function. */
6387
6388 int
6389 simple_cst_equal (const_tree t1, const_tree t2)
6390 {
6391 enum tree_code code1, code2;
6392 int cmp;
6393 int i;
6394
6395 if (t1 == t2)
6396 return 1;
6397 if (t1 == 0 || t2 == 0)
6398 return 0;
6399
6400 /* For location wrappers to be the same, they must be at the same
6401 source location (and wrap the same thing). */
6402 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6403 {
6404 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6405 return 0;
6406 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6407 }
6408
6409 code1 = TREE_CODE (t1);
6410 code2 = TREE_CODE (t2);
6411
6412 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6413 {
6414 if (CONVERT_EXPR_CODE_P (code2)
6415 || code2 == NON_LVALUE_EXPR)
6416 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6417 else
6418 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6419 }
6420
6421 else if (CONVERT_EXPR_CODE_P (code2)
6422 || code2 == NON_LVALUE_EXPR)
6423 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6424
6425 if (code1 != code2)
6426 return 0;
6427
6428 switch (code1)
6429 {
6430 case INTEGER_CST:
6431 return wi::to_widest (t1) == wi::to_widest (t2);
6432
6433 case REAL_CST:
6434 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6435
6436 case FIXED_CST:
6437 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6438
6439 case STRING_CST:
6440 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6441 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6442 TREE_STRING_LENGTH (t1)));
6443
6444 case CONSTRUCTOR:
6445 {
6446 unsigned HOST_WIDE_INT idx;
6447 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6448 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6449
6450 if (vec_safe_length (v1) != vec_safe_length (v2))
6451 return false;
6452
6453 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6454 /* ??? Should we handle also fields here? */
6455 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6456 return false;
6457 return true;
6458 }
6459
6460 case SAVE_EXPR:
6461 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6462
6463 case CALL_EXPR:
6464 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6465 if (cmp <= 0)
6466 return cmp;
6467 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6468 return 0;
6469 {
6470 const_tree arg1, arg2;
6471 const_call_expr_arg_iterator iter1, iter2;
6472 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6473 arg2 = first_const_call_expr_arg (t2, &iter2);
6474 arg1 && arg2;
6475 arg1 = next_const_call_expr_arg (&iter1),
6476 arg2 = next_const_call_expr_arg (&iter2))
6477 {
6478 cmp = simple_cst_equal (arg1, arg2);
6479 if (cmp <= 0)
6480 return cmp;
6481 }
6482 return arg1 == arg2;
6483 }
6484
6485 case TARGET_EXPR:
6486 /* Special case: if either target is an unallocated VAR_DECL,
6487 it means that it's going to be unified with whatever the
6488 TARGET_EXPR is really supposed to initialize, so treat it
6489 as being equivalent to anything. */
6490 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6491 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6492 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6493 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6494 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6495 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6496 cmp = 1;
6497 else
6498 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6499
6500 if (cmp <= 0)
6501 return cmp;
6502
6503 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6504
6505 case WITH_CLEANUP_EXPR:
6506 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6507 if (cmp <= 0)
6508 return cmp;
6509
6510 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6511
6512 case COMPONENT_REF:
6513 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6514 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6515
6516 return 0;
6517
6518 case VAR_DECL:
6519 case PARM_DECL:
6520 case CONST_DECL:
6521 case FUNCTION_DECL:
6522 return 0;
6523
6524 default:
6525 if (POLY_INT_CST_P (t1))
6526 /* A false return means maybe_ne rather than known_ne. */
6527 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6528 TYPE_SIGN (TREE_TYPE (t1))),
6529 poly_widest_int::from (poly_int_cst_value (t2),
6530 TYPE_SIGN (TREE_TYPE (t2))));
6531 break;
6532 }
6533
6534 /* This general rule works for most tree codes. All exceptions should be
6535 handled above. If this is a language-specific tree code, we can't
6536 trust what might be in the operand, so say we don't know
6537 the situation. */
6538 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6539 return -1;
6540
6541 switch (TREE_CODE_CLASS (code1))
6542 {
6543 case tcc_unary:
6544 case tcc_binary:
6545 case tcc_comparison:
6546 case tcc_expression:
6547 case tcc_reference:
6548 case tcc_statement:
6549 cmp = 1;
6550 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6551 {
6552 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6553 if (cmp <= 0)
6554 return cmp;
6555 }
6556
6557 return cmp;
6558
6559 default:
6560 return -1;
6561 }
6562 }
6563
6564 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6565 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6566 than U, respectively. */
6567
6568 int
6569 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6570 {
6571 if (tree_int_cst_sgn (t) < 0)
6572 return -1;
6573 else if (!tree_fits_uhwi_p (t))
6574 return 1;
6575 else if (TREE_INT_CST_LOW (t) == u)
6576 return 0;
6577 else if (TREE_INT_CST_LOW (t) < u)
6578 return -1;
6579 else
6580 return 1;
6581 }
6582
6583 /* Return true if SIZE represents a constant size that is in bounds of
6584 what the middle-end and the backend accepts (covering not more than
6585 half of the address-space).
6586 When PERR is non-null, set *PERR on failure to the description of
6587 why SIZE is not valid. */
6588
6589 bool
6590 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6591 {
6592 if (POLY_INT_CST_P (size))
6593 {
6594 if (TREE_OVERFLOW (size))
6595 return false;
6596 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6597 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6598 return false;
6599 return true;
6600 }
6601
6602 cst_size_error error;
6603 if (!perr)
6604 perr = &error;
6605
6606 if (TREE_CODE (size) != INTEGER_CST)
6607 {
6608 *perr = cst_size_not_constant;
6609 return false;
6610 }
6611
6612 if (TREE_OVERFLOW_P (size))
6613 {
6614 *perr = cst_size_overflow;
6615 return false;
6616 }
6617
6618 if (tree_int_cst_sgn (size) < 0)
6619 {
6620 *perr = cst_size_negative;
6621 return false;
6622 }
6623 if (!tree_fits_uhwi_p (size)
6624 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6625 < wi::to_widest (size) * 2))
6626 {
6627 *perr = cst_size_too_big;
6628 return false;
6629 }
6630
6631 return true;
6632 }
6633
6634 /* Return the precision of the type, or for a complex or vector type the
6635 precision of the type of its elements. */
6636
6637 unsigned int
6638 element_precision (const_tree type)
6639 {
6640 if (!TYPE_P (type))
6641 type = TREE_TYPE (type);
6642 enum tree_code code = TREE_CODE (type);
6643 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6644 type = TREE_TYPE (type);
6645
6646 return TYPE_PRECISION (type);
6647 }
6648
6649 /* Return true if CODE represents an associative tree code. Otherwise
6650 return false. */
6651 bool
6652 associative_tree_code (enum tree_code code)
6653 {
6654 switch (code)
6655 {
6656 case BIT_IOR_EXPR:
6657 case BIT_AND_EXPR:
6658 case BIT_XOR_EXPR:
6659 case PLUS_EXPR:
6660 case MULT_EXPR:
6661 case MIN_EXPR:
6662 case MAX_EXPR:
6663 return true;
6664
6665 default:
6666 break;
6667 }
6668 return false;
6669 }
6670
6671 /* Return true if CODE represents a commutative tree code. Otherwise
6672 return false. */
6673 bool
6674 commutative_tree_code (enum tree_code code)
6675 {
6676 switch (code)
6677 {
6678 case PLUS_EXPR:
6679 case MULT_EXPR:
6680 case MULT_HIGHPART_EXPR:
6681 case MIN_EXPR:
6682 case MAX_EXPR:
6683 case BIT_IOR_EXPR:
6684 case BIT_XOR_EXPR:
6685 case BIT_AND_EXPR:
6686 case NE_EXPR:
6687 case EQ_EXPR:
6688 case UNORDERED_EXPR:
6689 case ORDERED_EXPR:
6690 case UNEQ_EXPR:
6691 case LTGT_EXPR:
6692 case TRUTH_AND_EXPR:
6693 case TRUTH_XOR_EXPR:
6694 case TRUTH_OR_EXPR:
6695 case WIDEN_MULT_EXPR:
6696 case VEC_WIDEN_MULT_HI_EXPR:
6697 case VEC_WIDEN_MULT_LO_EXPR:
6698 case VEC_WIDEN_MULT_EVEN_EXPR:
6699 case VEC_WIDEN_MULT_ODD_EXPR:
6700 return true;
6701
6702 default:
6703 break;
6704 }
6705 return false;
6706 }
6707
6708 /* Return true if CODE represents a ternary tree code for which the
6709 first two operands are commutative. Otherwise return false. */
6710 bool
6711 commutative_ternary_tree_code (enum tree_code code)
6712 {
6713 switch (code)
6714 {
6715 case WIDEN_MULT_PLUS_EXPR:
6716 case WIDEN_MULT_MINUS_EXPR:
6717 case DOT_PROD_EXPR:
6718 return true;
6719
6720 default:
6721 break;
6722 }
6723 return false;
6724 }
6725
6726 /* Returns true if CODE can overflow. */
6727
6728 bool
6729 operation_can_overflow (enum tree_code code)
6730 {
6731 switch (code)
6732 {
6733 case PLUS_EXPR:
6734 case MINUS_EXPR:
6735 case MULT_EXPR:
6736 case LSHIFT_EXPR:
6737 /* Can overflow in various ways. */
6738 return true;
6739 case TRUNC_DIV_EXPR:
6740 case EXACT_DIV_EXPR:
6741 case FLOOR_DIV_EXPR:
6742 case CEIL_DIV_EXPR:
6743 /* For INT_MIN / -1. */
6744 return true;
6745 case NEGATE_EXPR:
6746 case ABS_EXPR:
6747 /* For -INT_MIN. */
6748 return true;
6749 default:
6750 /* These operators cannot overflow. */
6751 return false;
6752 }
6753 }
6754
6755 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6756 ftrapv doesn't generate trapping insns for CODE. */
6757
6758 bool
6759 operation_no_trapping_overflow (tree type, enum tree_code code)
6760 {
6761 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6762
6763 /* We don't generate instructions that trap on overflow for complex or vector
6764 types. */
6765 if (!INTEGRAL_TYPE_P (type))
6766 return true;
6767
6768 if (!TYPE_OVERFLOW_TRAPS (type))
6769 return true;
6770
6771 switch (code)
6772 {
6773 case PLUS_EXPR:
6774 case MINUS_EXPR:
6775 case MULT_EXPR:
6776 case NEGATE_EXPR:
6777 case ABS_EXPR:
6778 /* These operators can overflow, and -ftrapv generates trapping code for
6779 these. */
6780 return false;
6781 case TRUNC_DIV_EXPR:
6782 case EXACT_DIV_EXPR:
6783 case FLOOR_DIV_EXPR:
6784 case CEIL_DIV_EXPR:
6785 case LSHIFT_EXPR:
6786 /* These operators can overflow, but -ftrapv does not generate trapping
6787 code for these. */
6788 return true;
6789 default:
6790 /* These operators cannot overflow. */
6791 return true;
6792 }
6793 }
6794
6795 /* Constructors for pointer, array and function types.
6796 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6797 constructed by language-dependent code, not here.) */
6798
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If MODE is VOIDmode, a pointer mode for the address
   space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
   indicate this type can reference all of memory.  If such a type has
   already been constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request: the may_alias attribute may
     force CAN_ALIAS_ALL below, but only the caller's value decides
     whether a distinct canonical type must be built.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* Pick the pointer mode appropriate for TO_TYPE's address space.  */
  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  The list is chained through
     TYPE_NEXT_PTR_TO.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type at the head of TO_TYPE's pointer-type list so the
     lookup loop above will find it next time.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  /* Note the canonical pointer is built with can_alias_all == false even
     when COULD_ALIAS is set; presumably alias-all-ness does not
     participate in canonical type identity -- confirm against the
     type-canonicalization rules before changing.  */
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
6866
6867 /* By default build pointers in ptr_mode. */
6868
6869 tree
6870 build_pointer_type (tree to_type)
6871 {
6872 return build_pointer_type_for_mode (to_type, VOIDmode, false);
6873 }
6874
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request: the may_alias attribute may
     force CAN_ALIAS_ALL below, but only the caller's value decides
     whether a distinct canonical type must be built.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* Pick the pointer mode appropriate for TO_TYPE's address space.  */
  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for references to TO_TYPE and it's
     the proper mode, use it.  The list is chained through
     TYPE_NEXT_REF_TO.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type at the head of TO_TYPE's reference-type list so
     the lookup loop above will find it next time.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  /* As in build_pointer_type_for_mode, the canonical reference type is
     built with can_alias_all == false.  */
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
6936
6937
6938 /* Build the node for the type of references-to-TO_TYPE by default
6939 in ptr_mode. */
6940
6941 tree
6942 build_reference_type (tree to_type)
6943 {
6944 return build_reference_type_for_mode (to_type, VOIDmode, false);
6945 }
6946
6947 #define MAX_INT_CACHED_PREC \
6948 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6949 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6950
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache holds signed types at indices [0, MAX_INT_CACHED_PREC]
     and unsigned ones at [MAX_INT_CACHED_PREC + 1, 2*MAX_INT_CACHED_PREC
     + 1]; turn UNSIGNEDP into the index offset that selects the
     unsigned half.  It stays nonzero exactly when an unsigned type was
     requested, so the fixup test below is unaffected.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set the bounds, size and mode appropriate for the signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Hash-cons the type so that equal requests share a single node;
     hashing TYPE_MAX_VALUE distinguishes precision and signedness.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
6986
6987 #define MAX_BOOL_CACHED_PREC \
6988 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6989 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
6990
6991 /* Builds a boolean type of precision PRECISION.
6992 Used for boolean vectors to choose proper vector element size. */
6993 tree
6994 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
6995 {
6996 tree type;
6997
6998 if (precision <= MAX_BOOL_CACHED_PREC)
6999 {
7000 type = nonstandard_boolean_type_cache[precision];
7001 if (type)
7002 return type;
7003 }
7004
7005 type = make_node (BOOLEAN_TYPE);
7006 TYPE_PRECISION (type) = precision;
7007 fixup_signed_type (type);
7008
7009 if (precision <= MAX_INT_CACHED_PREC)
7010 nonstandard_boolean_type_cache[precision] = type;
7011
7012 return type;
7013 }
7014
7015 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7016 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7017 is true, reuse such a type that has already been constructed. */
7018
7019 static tree
7020 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7021 {
7022 tree itype = make_node (INTEGER_TYPE);
7023
7024 TREE_TYPE (itype) = type;
7025
7026 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7027 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7028
7029 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7030 SET_TYPE_MODE (itype, TYPE_MODE (type));
7031 TYPE_SIZE (itype) = TYPE_SIZE (type);
7032 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7033 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7034 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7035 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7036
7037 if (!shared)
7038 return itype;
7039
7040 if ((TYPE_MIN_VALUE (itype)
7041 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7042 || (TYPE_MAX_VALUE (itype)
7043 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7044 {
7045 /* Since we cannot reliably merge this type, we need to compare it using
7046 structural equality checks. */
7047 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7048 return itype;
7049 }
7050
7051 hashval_t hash = type_hash_canon_hash (itype);
7052 itype = type_hash_canon (hash, itype);
7053
7054 return itype;
7055 }
7056
7057 /* Wrapper around build_range_type_1 with SHARED set to true. */
7058
7059 tree
7060 build_range_type (tree type, tree lowval, tree highval)
7061 {
7062 return build_range_type_1 (type, lowval, highval, true);
7063 }
7064
7065 /* Wrapper around build_range_type_1 with SHARED set to false. */
7066
7067 tree
7068 build_nonshared_range_type (tree type, tree lowval, tree highval)
7069 {
7070 return build_range_type_1 (type, lowval, highval, false);
7071 }
7072
7073 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7074 MAXVAL should be the maximum value in the domain
7075 (one less than the length of the array).
7076
7077 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7078 We don't enforce this limit, that is up to caller (e.g. language front end).
7079 The limit exists because the result is a signed type and we don't handle
7080 sizes that use more than one HOST_WIDE_INT. */
7081
7082 tree
7083 build_index_type (tree maxval)
7084 {
7085 return build_range_type (sizetype, size_zero_node, maxval);
7086 }
7087
7088 /* Return true if the debug information for TYPE, a subtype, should be emitted
7089 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7090 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7091 debug info and doesn't reflect the source code. */
7092
7093 bool
7094 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7095 {
7096 tree base_type = TREE_TYPE (type), low, high;
7097
7098 /* Subrange types have a base type which is an integral type. */
7099 if (!INTEGRAL_TYPE_P (base_type))
7100 return false;
7101
7102 /* Get the real bounds of the subtype. */
7103 if (lang_hooks.types.get_subrange_bounds)
7104 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7105 else
7106 {
7107 low = TYPE_MIN_VALUE (type);
7108 high = TYPE_MAX_VALUE (type);
7109 }
7110
7111 /* If the type and its base type have the same representation and the same
7112 name, then the type is not a subrange but a copy of the base type. */
7113 if ((TREE_CODE (base_type) == INTEGER_TYPE
7114 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7115 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7116 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7117 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7118 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7119 return false;
7120
7121 if (lowval)
7122 *lowval = low;
7123 if (highval)
7124 *highval = high;
7125 return true;
7126 }
7127
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  /* Arrays of functions are invalid; diagnose and recover with an
     arbitrary valid element type.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* Hash-cons the type; T may come back as an existing equivalent node.  */
  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only compute TYPE_CANONICAL when T is its own canonical type (i.e. a
     freshly created node rather than a cache hit with canonical already
     set).  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      /* If element or index type is non-canonical, the canonical array
	 type is the array of their canonical types.  */
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
7176
7177 /* Wrapper around build_array_type_1 with SHARED set to true. */
7178
7179 tree
7180 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7181 {
7182 return
7183 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7184 }
7185
7186 /* Wrapper around build_array_type_1 with SHARED set to false. */
7187
7188 tree
7189 build_nonshared_array_type (tree elt_type, tree index_type)
7190 {
7191 return build_array_type_1 (elt_type, index_type, false, false, true);
7192 }
7193
7194 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7195 sizetype. */
7196
7197 tree
7198 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7199 {
7200 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7201 }
7202
7203 /* Recursively examines the array elements of TYPE, until a non-array
7204 element type is found. */
7205
7206 tree
7207 strip_array_types (tree type)
7208 {
7209 while (TREE_CODE (type) == ARRAY_TYPE)
7210 type = TREE_TYPE (type);
7211
7212 return type;
7213 }
7214
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First scan: classify the list.  Stop early once a structural
     argument is seen, since then the canonical list is unneeded.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      /* Prepend each canonical type, then reverse; the shared
	 void_list_node terminator is re-appended rather than copied.  */
      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
7286
7287 /* Construct, lay out and return
7288 the type of functions returning type VALUE_TYPE
7289 given arguments of types ARG_TYPES.
7290 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7291 are data type nodes for the arguments of the function.
7292 If such a type has already been constructed, reuse it. */
7293
7294 tree
7295 build_function_type (tree value_type, tree arg_types)
7296 {
7297 tree t;
7298 inchash::hash hstate;
7299 bool any_structural_p, any_noncanonical_p;
7300 tree canon_argtypes;
7301
7302 gcc_assert (arg_types != error_mark_node);
7303
7304 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7305 {
7306 error ("function return type cannot be function");
7307 value_type = integer_type_node;
7308 }
7309
7310 /* Make a node of the sort we want. */
7311 t = make_node (FUNCTION_TYPE);
7312 TREE_TYPE (t) = value_type;
7313 TYPE_ARG_TYPES (t) = arg_types;
7314
7315 /* If we already have such a type, use the old one. */
7316 hashval_t hash = type_hash_canon_hash (t);
7317 t = type_hash_canon (hash, t);
7318
7319 /* Set up the canonical type. */
7320 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7321 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7322 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7323 &any_structural_p,
7324 &any_noncanonical_p);
7325 if (any_structural_p)
7326 SET_TYPE_STRUCTURAL_EQUALITY (t);
7327 else if (any_noncanonical_p)
7328 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7329 canon_argtypes);
7330
7331 if (!COMPLETE_TYPE_P (t))
7332 layout_type (t);
7333 return t;
7334 }
7335
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types into a list; tree_cons prepends, so ARGS
     ends up in reverse order here.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Varargs: leave the chain unterminated (no void_list_node).
	 LAST is the head of the reversed list, i.e. the node that
	 becomes the tail after nreverse.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      /* Presumably this guards against a caller terminating a varargs
	 list with void -- confirm against the callers' conventions.  */
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No arguments at all: the list is just the void terminator.  */
    args = void_list_node;
  else
    {
      /* Fixed arity: reverse into declaration order and terminate with
	 the shared void_list_node.  LAST is the tail after reversal.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
7368
7369 /* Build a function type. The RETURN_TYPE is the type returned by the
7370 function. If additional arguments are provided, they are
7371 additional argument types. The list of argument types must always
7372 be terminated by NULL_TREE. */
7373
7374 tree
7375 build_function_type_list (tree return_type, ...)
7376 {
7377 tree args;
7378 va_list p;
7379
7380 va_start (p, return_type);
7381 args = build_function_type_list_1 (false, return_type, p);
7382 va_end (p);
7383 return args;
7384 }
7385
7386 /* Build a variable argument function type. The RETURN_TYPE is the
7387 type returned by the function. If additional arguments are provided,
7388 they are additional argument types. The list of argument types must
7389 always be terminated by NULL_TREE. */
7390
7391 tree
7392 build_varargs_function_type_list (tree return_type, ...)
7393 {
7394 tree args;
7395 va_list p;
7396
7397 va_start (p, return_type);
7398 args = build_function_type_list_1 (true, return_type, p);
7399 va_end (p);
7400
7401 return args;
7402 }
7403
7404 /* Build a function type. RETURN_TYPE is the type returned by the
7405 function; VAARGS indicates whether the function takes varargs. The
7406 function takes N named arguments, the types of which are provided in
7407 ARG_TYPES. */
7408
7409 static tree
7410 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7411 tree *arg_types)
7412 {
7413 int i;
7414 tree t = vaargs ? NULL_TREE : void_list_node;
7415
7416 for (i = n - 1; i >= 0; i--)
7417 t = tree_cons (NULL_TREE, arg_types[i], t);
7418
7419 return build_function_type (return_type, t);
7420 }
7421
7422 /* Build a function type. RETURN_TYPE is the type returned by the
7423 function. The function takes N named arguments, the types of which
7424 are provided in ARG_TYPES. */
7425
7426 tree
7427 build_function_type_array (tree return_type, int n, tree *arg_types)
7428 {
7429 return build_function_type_array_1 (false, return_type, n, arg_types);
7430 }
7431
7432 /* Build a variable argument function type. RETURN_TYPE is the type
7433 returned by the function. The function takes N named arguments, the
7434 types of which are provided in ARG_TYPES. */
7435
7436 tree
7437 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7438 {
7439 return build_function_type_array_1 (true, return_type, n, arg_types);
7440 }
7441
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type: structural equality or non-canonicality
     of the base type, return type or any argument type propagates to
     the method type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the hidden "this" argument (the list head) -- it is derived
     from BASETYPE, which is accounted for above.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
7495
7496 /* Construct, lay out and return the type of methods belonging to class
7497 BASETYPE and whose arguments and values are described by TYPE.
7498 If that type exists already, reuse it.
7499 TYPE must be a FUNCTION_TYPE node. */
7500
7501 tree
7502 build_method_type (tree basetype, tree type)
7503 {
7504 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7505
7506 return build_method_type_directly (basetype,
7507 TREE_TYPE (type),
7508 TYPE_ARG_TYPES (type));
7509 }
7510
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute TYPE_CANONICAL when T is its own canonical type (i.e. a
     freshly created node rather than a cache hit with canonical already
     set).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      /* The canonical offset type is built from the canonical form of
	 the base type's main variant and of TYPE.  */
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
7547
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Complex components must be integral, floating-point or
     fixed-point scalars.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  The complex type is built over
     the component's main variant; qualifiers are re-applied at the
     end.  */
  tree probe = make_node (COMPLEX_TYPE);

  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.
	 Only the standard integer component types get a name here.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Re-apply the component type's qualifiers to the complex type.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
7622
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : EXCESS_PRECISION_TYPE_STANDARD);

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* float16_type_node is absent on targets without _Float16 support;
     use VOIDmode then so no real mode can match it.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex types with a floating-point component are subject
	   to excess precision; promote based on the component's mode.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
7726 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive when this conversion widened its operand, negative when
	 it truncated it.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  /* Ask the language for a type of exactly PREC bits; narrow only
	     when such a type exists and really is narrower.  */
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
7823 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow the value operand of a (possibly nested) COMPOUND_EXPR and,
	 if it changed, rebuild the COMPOUND_EXPR chain around the narrowed
	 value, preserving each level's location.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive when this conversion widened its operand.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
7936 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types,  */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      /* Loop back and test against the base type's bounds instead.  */
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
8033
/* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
   bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
   represented (assuming two's-complement arithmetic) within the bit
   precision of the type are returned instead.  */

void
get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
{
  /* Lower bound: use the declared TYPE_MIN_VALUE when it is an integer
     constant ...  */
  if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
      && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
  else
    {
      /* ... otherwise fall back to the smallest representable value:
	 zero for unsigned types, the most negative two's-complement
	 value of the type's precision for signed ones.  */
      if (TYPE_UNSIGNED (type))
	mpz_set_ui (min, 0);
      else
	{
	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
	  wi::to_mpz (mn, min, SIGNED);
	}
    }

  /* Upper bound: likewise, declared TYPE_MAX_VALUE or the largest value
     representable in the type's precision.  */
  if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
      && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
  else
    {
      wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
      wi::to_mpz (mn, max, TYPE_SIGN (type));
    }
}
8065
8066 /* Return true if VAR is an automatic variable. */
8067
8068 bool
8069 auto_var_p (const_tree var)
8070 {
8071 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8072 || TREE_CODE (var) == PARM_DECL)
8073 && ! TREE_STATIC (var))
8074 || TREE_CODE (var) == RESULT_DECL);
8075 }
8076
8077 /* Return true if VAR is an automatic variable defined in function FN. */
8078
8079 bool
8080 auto_var_in_fn_p (const_tree var, const_tree fn)
8081 {
8082 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8083 && (auto_var_p (var)
8084 || TREE_CODE (var) == LABEL_DECL));
8085 }
8086
8087 /* Subprogram of following function. Called by walk_tree.
8088
8089 Return *TP if it is an automatic variable or parameter of the
8090 function passed in as DATA. */
8091
8092 static tree
8093 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8094 {
8095 tree fn = (tree) data;
8096
8097 if (TYPE_P (*tp))
8098 *walk_subtrees = 0;
8099
8100 else if (DECL_P (*tp)
8101 && auto_var_in_fn_p (*tp, fn))
8102 return *tp;
8103
8104 return NULL_TREE;
8105 }
8106
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types refering to themselves indirectly.
	 Use TREE_VISITED as a recursion guard; it is cleared again on
	 every exit path so the bit is never left set.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
8232
8233 /* Given a DECL or TYPE, return the scope in which it was declared, or
8234 NULL_TREE if there is no containing scope. */
8235
8236 tree
8237 get_containing_scope (const_tree t)
8238 {
8239 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8240 }
8241
8242 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8243
8244 const_tree
8245 get_ultimate_context (const_tree decl)
8246 {
8247 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8248 {
8249 if (TREE_CODE (decl) == BLOCK)
8250 decl = BLOCK_SUPERCONTEXT (decl);
8251 else
8252 decl = get_containing_scope (decl);
8253 }
8254 return decl;
8255 }
8256
8257 /* Return the innermost context enclosing DECL that is
8258 a FUNCTION_DECL, or zero if none. */
8259
8260 tree
8261 decl_function_context (const_tree decl)
8262 {
8263 tree context;
8264
8265 if (TREE_CODE (decl) == ERROR_MARK)
8266 return 0;
8267
8268 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8269 where we look up the function at runtime. Such functions always take
8270 a first argument of type 'pointer to real context'.
8271
8272 C++ should really be fixed to use DECL_CONTEXT for the real context,
8273 and use something else for the "virtual context". */
8274 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8275 context
8276 = TYPE_MAIN_VARIANT
8277 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8278 else
8279 context = DECL_CONTEXT (decl);
8280
8281 while (context && TREE_CODE (context) != FUNCTION_DECL)
8282 {
8283 if (TREE_CODE (context) == BLOCK)
8284 context = BLOCK_SUPERCONTEXT (context);
8285 else
8286 context = get_containing_scope (context);
8287 }
8288
8289 return context;
8290 }
8291
/* Return the innermost context enclosing DECL that is
   a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
   TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */

tree
decl_type_context (const_tree decl)
{
  tree context = DECL_CONTEXT (decl);

  while (context)
    switch (TREE_CODE (context))
      {
      case NAMESPACE_DECL:
      case TRANSLATION_UNIT_DECL:
	/* Reached a scope that cannot itself be nested in a class;
	   there is no enclosing aggregate type.  */
	return NULL_TREE;

      case RECORD_TYPE:
      case UNION_TYPE:
      case QUAL_UNION_TYPE:
	return context;

      case TYPE_DECL:
      case FUNCTION_DECL:
	/* Transparent scopes: keep walking outward.  */
	context = DECL_CONTEXT (context);
	break;

      case BLOCK:
	context = BLOCK_SUPERCONTEXT (context);
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
8328
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  /* Strip any conversion wrappers around the address in place.  */
  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
8370
8371 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8372 return the associated function code, otherwise return CFN_LAST. */
8373
8374 combined_fn
8375 get_call_combined_fn (const_tree call)
8376 {
8377 /* It's invalid to call this function with anything but a CALL_EXPR. */
8378 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8379
8380 if (!CALL_EXPR_FN (call))
8381 return as_combined_fn (CALL_EXPR_IFN (call));
8382
8383 tree fndecl = get_callee_fndecl (call);
8384 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8385 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8386
8387 return CFN_LAST;
8388 }
8389
8390 /* Comparator of indices based on tree_node_counts. */
8391
8392 static int
8393 tree_nodes_cmp (const void *p1, const void *p2)
8394 {
8395 const unsigned *n1 = (const unsigned *)p1;
8396 const unsigned *n2 = (const unsigned *)p2;
8397
8398 return tree_node_counts[*n1] - tree_node_counts[*n2];
8399 }
8400
8401 /* Comparator of indices based on tree_code_counts. */
8402
8403 static int
8404 tree_codes_cmp (const void *p1, const void *p2)
8405 {
8406 const unsigned *n1 = (const unsigned *)p1;
8407 const unsigned *n2 = (const unsigned *)p2;
8408
8409 return tree_code_counts[*n1] - tree_code_counts[*n2];
8410 }
8411
8412 #define TREE_MEM_USAGE_SPACES 40
8413
8414 /* Print debugging information about tree nodes generated during the compile,
8415 and any language-specific information. */
8416
8417 void
8418 dump_tree_statistics (void)
8419 {
8420 if (GATHER_STATISTICS)
8421 {
8422 uint64_t total_nodes, total_bytes;
8423 fprintf (stderr, "\nKind Nodes Bytes\n");
8424 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8425 total_nodes = total_bytes = 0;
8426
8427 {
8428 auto_vec<unsigned> indices (all_kinds);
8429 for (unsigned i = 0; i < all_kinds; i++)
8430 indices.quick_push (i);
8431 indices.qsort (tree_nodes_cmp);
8432
8433 for (unsigned i = 0; i < (int) all_kinds; i++)
8434 {
8435 unsigned j = indices[i];
8436 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8437 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8438 SIZE_AMOUNT (tree_node_sizes[j]));
8439 total_nodes += tree_node_counts[j];
8440 total_bytes += tree_node_sizes[j];
8441 }
8442 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8443 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8444 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8445 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8446 }
8447
8448 {
8449 fprintf (stderr, "Code Nodes\n");
8450 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8451
8452 auto_vec<unsigned> indices (MAX_TREE_CODES);
8453 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8454 indices.quick_push (i);
8455 indices.qsort (tree_codes_cmp);
8456
8457 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8458 {
8459 unsigned j = indices[i];
8460 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8461 get_tree_code_name ((enum tree_code) j),
8462 SIZE_AMOUNT (tree_code_counts[j]));
8463 }
8464 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8465 fprintf (stderr, "\n");
8466 ssanames_print_statistics ();
8467 fprintf (stderr, "\n");
8468 phinodes_print_statistics ();
8469 fprintf (stderr, "\n");
8470 }
8471 }
8472 else
8473 fprintf (stderr, "(No per-node statistics)\n");
8474
8475 print_type_hash_statistics ();
8476 print_debug_expr_statistics ();
8477 print_value_expr_statistics ();
8478 lang_hooks.print_statistics ();
8479 }
8480 \f
8481 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8482
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* Table-driven CRC-32 (polynomial 0x04c11db7, MSB first), folding in
     one nibble per step.  This relies on the raw feedback's top 4 bits
     being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes within the 32-bit word.  */
  value <<= (32 - bytes * 8);

  /* Two nibbles per byte, high nibble first.  */
  unsigned nibbles = bytes * 2;
  while (nibbles-- > 0)
    {
      unsigned slot = ((value ^ chksum) >> 28) & 0xf;
      chksum = (chksum << 4) ^ syndromes[slot];
      value <<= 4;
    }

  return chksum;
}
8512
8513 /* Generate a crc32 of a string. */
8514
8515 unsigned
8516 crc32_string (unsigned chksum, const char *string)
8517 {
8518 do
8519 chksum = crc32_byte (chksum, *string);
8520 while (*string++);
8521 return chksum;
8522 }
8523
8524 /* P is a string that will be used in a symbol. Mask out any characters
8525 that are not valid in that context. */
8526
8527 void
8528 clean_symbol_name (char *p)
8529 {
8530 for (; *p; p++)
8531 if (! (ISALNUM (*p)
8532 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8533 || *p == '$'
8534 #endif
8535 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8536 || *p == '.'
8537 #endif
8538 ))
8539 *p = '_';
8540 }
8541
8542 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8543
8544 /* Create a unique anonymous identifier. The identifier is still a
8545 valid assembly label. */
8546
8547 tree
8548 make_anon_name ()
8549 {
8550 const char *fmt =
8551 #if !defined (NO_DOT_IN_LABEL)
8552 "."
8553 #elif !defined (NO_DOLLAR_IN_LABEL)
8554 "$"
8555 #else
8556 "_"
8557 #endif
8558 "_anon_%d";
8559
8560 char buf[24];
8561 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8562 gcc_checking_assert (len < int (sizeof (buf)));
8563
8564 tree id = get_identifier_with_length (buf, len);
8565 IDENTIFIER_ANON_P (id) = true;
8566
8567 return id;
8568 }
8569
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes. 
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (startswith (type, "sub_")
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      /* Buffer layout is "<file>_%08X_<wide-int hex>": 9 bytes for
	 "_%08X_", up to 19 for the host-wide-int hex seed, plus the
	 file name and its NUL.  */
      len = strlen (file);
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Q aliases P here; mask out characters not valid in symbols.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
8644 \f
8645 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8646
8647 /* Complain that the tree code of NODE does not match the expected 0
8648 terminated list of trailing codes. The trailing code list can be
8649 empty, for a more vague error message. FILE, LINE, and FUNCTION
8650 are of the caller. */
8651
void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the trailing codes: measure the room the message
     needs.  Each code contributes its name plus up to 4 separator
     bytes (" or "); the 4 counted for the first code instead cover
     the trailing NUL, since "expected " is added separately below.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build "expected A or B or ..." in a stack
	 buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No expected codes were supplied: emit a vaguer message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
8690
8691 /* Complain that the tree code of NODE does match the expected 0
8692 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8693 the caller. */
8694
8695 void
8696 tree_not_check_failed (const_tree node, const char *file,
8697 int line, const char *function, ...)
8698 {
8699 va_list args;
8700 char *buffer;
8701 unsigned length = 0;
8702 enum tree_code code;
8703
8704 va_start (args, function);
8705 while ((code = (enum tree_code) va_arg (args, int)))
8706 length += 4 + strlen (get_tree_code_name (code));
8707 va_end (args);
8708 va_start (args, function);
8709 buffer = (char *) alloca (length);
8710 length = 0;
8711 while ((code = (enum tree_code) va_arg (args, int)))
8712 {
8713 if (length)
8714 {
8715 strcpy (buffer + length, " or ");
8716 length += 4;
8717 }
8718 strcpy (buffer + length, get_tree_code_name (code));
8719 length += strlen (get_tree_code_name (code));
8720 }
8721 va_end (args);
8722
8723 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8724 buffer, get_tree_code_name (TREE_CODE (node)),
8725 function, trim_filename (file), line);
8726 }
8727
8728 /* Similar to tree_check_failed, except that we check for a class of tree
8729 code, given in CL. */
8730
8731 void
8732 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8733 const char *file, int line, const char *function)
8734 {
8735 internal_error
8736 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8737 TREE_CODE_CLASS_STRING (cl),
8738 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8739 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8740 }
8741
8742 /* Similar to tree_check_failed, except that instead of specifying a
8743 dozen codes, use the knowledge that they're all sequential. */
8744
8745 void
8746 tree_range_check_failed (const_tree node, const char *file, int line,
8747 const char *function, enum tree_code c1,
8748 enum tree_code c2)
8749 {
8750 char *buffer;
8751 unsigned length = 0;
8752 unsigned int c;
8753
8754 for (c = c1; c <= c2; ++c)
8755 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8756
8757 length += strlen ("expected ");
8758 buffer = (char *) alloca (length);
8759 length = 0;
8760
8761 for (c = c1; c <= c2; ++c)
8762 {
8763 const char *prefix = length ? " or " : "expected ";
8764
8765 strcpy (buffer + length, prefix);
8766 length += strlen (prefix);
8767 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8768 length += strlen (get_tree_code_name ((enum tree_code) c));
8769 }
8770
8771 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8772 buffer, get_tree_code_name (TREE_CODE (node)),
8773 function, trim_filename (file), line);
8774 }
8775
8776
8777 /* Similar to tree_check_failed, except that we check that a tree does
8778 not have the specified code, given in CL. */
8779
8780 void
8781 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8782 const char *file, int line, const char *function)
8783 {
8784 internal_error
8785 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8786 TREE_CODE_CLASS_STRING (cl),
8787 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8788 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8789 }
8790
8791
8792 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8793
8794 void
8795 omp_clause_check_failed (const_tree node, const char *file, int line,
8796 const char *function, enum omp_clause_code code)
8797 {
8798 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8799 "in %s, at %s:%d",
8800 omp_clause_code_name[code],
8801 get_tree_code_name (TREE_CODE (node)),
8802 function, trim_filename (file), line);
8803 }
8804
8805
8806 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8807
8808 void
8809 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8810 const char *function, enum omp_clause_code c1,
8811 enum omp_clause_code c2)
8812 {
8813 char *buffer;
8814 unsigned length = 0;
8815 unsigned int c;
8816
8817 for (c = c1; c <= c2; ++c)
8818 length += 4 + strlen (omp_clause_code_name[c]);
8819
8820 length += strlen ("expected ");
8821 buffer = (char *) alloca (length);
8822 length = 0;
8823
8824 for (c = c1; c <= c2; ++c)
8825 {
8826 const char *prefix = length ? " or " : "expected ";
8827
8828 strcpy (buffer + length, prefix);
8829 length += strlen (prefix);
8830 strcpy (buffer + length, omp_clause_code_name[c]);
8831 length += strlen (omp_clause_code_name[c]);
8832 }
8833
8834 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8835 buffer, omp_clause_code_name[TREE_CODE (node)],
8836 function, trim_filename (file), line);
8837 }
8838
8839
/* Map tree_node_structure_enum values to human-readable names by
   re-expanding treestruct.def with DEFTREESTRUCT selecting the name
   string for each entry.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Fetch the printable name for tree-structure enumerator EN.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8849
8850 /* Similar to tree_class_check_failed, except that we check for
8851 whether CODE contains the tree structure identified by EN. */
8852
8853 void
8854 tree_contains_struct_check_failed (const_tree node,
8855 const enum tree_node_structure_enum en,
8856 const char *file, int line,
8857 const char *function)
8858 {
8859 internal_error
8860 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8861 TS_ENUM_NAME (en),
8862 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8863 }
8864
8865
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) element vector.  */
8868
8869 void
8870 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
8871 const char *function)
8872 {
8873 internal_error
8874 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8875 "at %s:%d",
8876 idx + 1, len, function, trim_filename (file), line);
8877 }
8878
8879 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8880 (dynamically sized) vector. */
8881
8882 void
8883 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
8884 const char *function)
8885 {
8886 internal_error
8887 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8888 idx + 1, len, function, trim_filename (file), line);
8889 }
8890
8891 /* Similar to above, except that the check is for the bounds of the operand
8892 vector of an expression node EXP. */
8893
8894 void
8895 tree_operand_check_failed (int idx, const_tree exp, const char *file,
8896 int line, const char *function)
8897 {
8898 enum tree_code code = TREE_CODE (exp);
8899 internal_error
8900 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8901 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
8902 function, trim_filename (file), line);
8903 }
8904
8905 /* Similar to above, except that the check is for the number of
8906 operands of an OMP_CLAUSE node. */
8907
8908 void
8909 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
8910 int line, const char *function)
8911 {
8912 internal_error
8913 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8914 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
8915 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
8916 trim_filename (file), line);
8917 }
8918 #endif /* ENABLE_TREE_CHECKING */
8919 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Build on the inner type's main variant so hashing below can
     canonicalize the result.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Recurse with VOIDmode so the canonical type is independent of
       the particular machine mode requested here.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Intern the node: reuse a previously built identical type when one
     exists.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
8958
8959 static tree
8960 make_or_reuse_type (unsigned size, int unsignedp)
8961 {
8962 int i;
8963
8964 if (size == INT_TYPE_SIZE)
8965 return unsignedp ? unsigned_type_node : integer_type_node;
8966 if (size == CHAR_TYPE_SIZE)
8967 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
8968 if (size == SHORT_TYPE_SIZE)
8969 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
8970 if (size == LONG_TYPE_SIZE)
8971 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
8972 if (size == LONG_LONG_TYPE_SIZE)
8973 return (unsignedp ? long_long_unsigned_type_node
8974 : long_long_integer_type_node);
8975
8976 for (i = 0; i < NUM_INT_N_ENTS; i ++)
8977 if (size == int_n_data[i].bitsize
8978 && int_n_enabled_p[i])
8979 return (unsignedp ? int_n_trees[i].unsigned_type
8980 : int_n_trees[i].signed_type);
8981
8982 if (unsignedp)
8983 return make_unsigned_type (size);
8984 else
8985 return make_signed_type (size);
8986 }
8987
8988 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
8989
8990 static tree
8991 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
8992 {
8993 if (satp)
8994 {
8995 if (size == SHORT_FRACT_TYPE_SIZE)
8996 return unsignedp ? sat_unsigned_short_fract_type_node
8997 : sat_short_fract_type_node;
8998 if (size == FRACT_TYPE_SIZE)
8999 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9000 if (size == LONG_FRACT_TYPE_SIZE)
9001 return unsignedp ? sat_unsigned_long_fract_type_node
9002 : sat_long_fract_type_node;
9003 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9004 return unsignedp ? sat_unsigned_long_long_fract_type_node
9005 : sat_long_long_fract_type_node;
9006 }
9007 else
9008 {
9009 if (size == SHORT_FRACT_TYPE_SIZE)
9010 return unsignedp ? unsigned_short_fract_type_node
9011 : short_fract_type_node;
9012 if (size == FRACT_TYPE_SIZE)
9013 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9014 if (size == LONG_FRACT_TYPE_SIZE)
9015 return unsignedp ? unsigned_long_fract_type_node
9016 : long_fract_type_node;
9017 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9018 return unsignedp ? unsigned_long_long_fract_type_node
9019 : long_long_fract_type_node;
9020 }
9021
9022 return make_fract_type (size, unsignedp, satp);
9023 }
9024
9025 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9026
9027 static tree
9028 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9029 {
9030 if (satp)
9031 {
9032 if (size == SHORT_ACCUM_TYPE_SIZE)
9033 return unsignedp ? sat_unsigned_short_accum_type_node
9034 : sat_short_accum_type_node;
9035 if (size == ACCUM_TYPE_SIZE)
9036 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9037 if (size == LONG_ACCUM_TYPE_SIZE)
9038 return unsignedp ? sat_unsigned_long_accum_type_node
9039 : sat_long_accum_type_node;
9040 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9041 return unsignedp ? sat_unsigned_long_long_accum_type_node
9042 : sat_long_long_accum_type_node;
9043 }
9044 else
9045 {
9046 if (size == SHORT_ACCUM_TYPE_SIZE)
9047 return unsignedp ? unsigned_short_accum_type_node
9048 : short_accum_type_node;
9049 if (size == ACCUM_TYPE_SIZE)
9050 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9051 if (size == LONG_ACCUM_TYPE_SIZE)
9052 return unsignedp ? unsigned_long_accum_type_node
9053 : long_accum_type_node;
9054 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9055 return unsignedp ? unsigned_long_long_accum_type_node
9056 : long_long_accum_type_node;
9057 }
9058
9059 return make_accum_type (size, unsignedp, satp);
9060 }
9061
9062
9063 /* Create an atomic variant node for TYPE. This routine is called
9064 during initialization of data types to create the 5 basic atomic
9065 types. The generic build_variant_type function requires these to
9066 already be set up in order to function properly, so cannot be
9067 called from there. If ALIGN is non-zero, then ensure alignment is
9068 overridden to this value. */
9069
9070 static tree
9071 build_atomic_base (tree type, unsigned int align)
9072 {
9073 tree t;
9074
9075 /* Make sure its not already registered. */
9076 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9077 return t;
9078
9079 t = build_variant_type_copy (type);
9080 set_type_quals (t, TYPE_QUAL_ATOMIC);
9081
9082 if (align)
9083 SET_TYPE_ALIGN (t, align);
9084
9085 return t;
9086 }
9087
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
{
  { 16, false },	/* _Float16 */
  { 32, false },	/* _Float32 */
  { 64, false },	/* _Float64 */
  { 128, false },	/* _Float128 */
  { 32, true },		/* _Float32x */
  { 64, true },		/* _Float64x */
  { 128, true },	/* _Float128x */
};
9100
9101
/* Create nodes for all integer types (and error_mark_node) using the sizes
   of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */

void
build_common_tree_nodes (bool signed_char)
{
  int i;

  /* error_mark_node is its own type, so type checks on it terminate.  */
  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  /* The standard C integer types.  */
  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Target-specific __intN types, registered whether enabled or not;
     only enabled ones are entered in the integer_types table.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  SIZE_TYPE is the textual
     spelling provided by the target.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      /* Fall back to an __intN type spelled "__int<N> unsigned" or
	 "__int<N>__ unsigned".  */
      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t, analogously.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  /* The standard floating-point types.  */
  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* _FloatN and _FloatNx types the target supports.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);
  if (targetm.scalar_mode_supported_p (TImode))
    uint128_type_node = make_or_reuse_type (128, 1);

  /* Decimal float types.  */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  /* Complex variants of the types built above.  */
  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

/* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
      (GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
9441
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  /* Map each ECF_* bit onto the corresponding decl bit or attribute.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* On a FUNCTION_DECL, TREE_THIS_VOLATILE encodes "noreturn".  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* Leaf, cold and "fn spec" are represented as attributes; each new
     attribute is consed onto the front of DECL_ATTRIBUTES.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (2, "1 ")),
		   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
9483
9484
9485 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9486
9487 static void
9488 local_define_builtin (const char *name, tree type, enum built_in_function code,
9489 const char *library_name, int ecf_flags)
9490 {
9491 tree decl;
9492
9493 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9494 library_name, NULL_TREE);
9495 set_call_expr_flags (decl, ecf_flags);
9496
9497 set_builtin_decl (code, decl, true);
9498 }
9499
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.

   Each builtin is defined only if the front end did not already declare
   it explicitly (builtin_decl_explicit_p).  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Control-flow terminators: unreachable and abort never return.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Memory block primitives, all with the C library prototypes.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  /* Alignment-carrying alloca variants share the alloca ECF flags.  */
  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline / function-descriptor support for nested functions.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
			  BUILT_IN_CLEAR_CACHE,
			  "__clear_cache",
			  ECF_NOTHROW);

  /* Reuses the two-pointer FTYPE built just above.  */
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  /* SJLJ exception-handling helpers.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Equality-only comparison builtins; all three share one FTYPE.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
     alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library name depends on whether SJLJ or DWARF-style unwinding
     is in use for this configuration.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions hooks; no ECF flags, they may do anything.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to form e.g. "__mulsc3"/"__divsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
9781
9782 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9783 better way.
9784
9785 If we requested a pointer to a vector, build up the pointers that
9786 we stripped off while looking for the inner type. Similarly for
9787 return values from functions.
9788
9789 The argument TYPE is the top of the chain, and BOTTOM is the
9790 new type which we will point to. */
9791
9792 tree
9793 reconstruct_complex_type (tree type, tree bottom)
9794 {
9795 tree inner, outer;
9796
9797 if (TREE_CODE (type) == POINTER_TYPE)
9798 {
9799 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9800 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9801 TYPE_REF_CAN_ALIAS_ALL (type));
9802 }
9803 else if (TREE_CODE (type) == REFERENCE_TYPE)
9804 {
9805 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9806 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9807 TYPE_REF_CAN_ALIAS_ALL (type));
9808 }
9809 else if (TREE_CODE (type) == ARRAY_TYPE)
9810 {
9811 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9812 outer = build_array_type (inner, TYPE_DOMAIN (type));
9813 }
9814 else if (TREE_CODE (type) == FUNCTION_TYPE)
9815 {
9816 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9817 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9818 }
9819 else if (TREE_CODE (type) == METHOD_TYPE)
9820 {
9821 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9822 /* The build_method_type_directly() routine prepends 'this' to argument list,
9823 so we must compensate by getting rid of it. */
9824 outer
9825 = build_method_type_directly
9826 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9827 inner,
9828 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9829 }
9830 else if (TREE_CODE (type) == OFFSET_TYPE)
9831 {
9832 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9833 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9834 }
9835 else
9836 return bottom;
9837
9838 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9839 TYPE_QUALS (type));
9840 }
9841
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      /* True vector modes carry their element count directly.  */
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* An integer mode can represent a whole vector; derive the element
	 count from the total size.  Check that there are no leftover
	 bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
9875
9876 /* Similarly, but takes the inner type and number of units, which must be
9877 a power of two. */
9878
9879 tree
9880 build_vector_type (tree innertype, poly_int64 nunits)
9881 {
9882 return make_vector_type (innertype, nunits, VOIDmode);
9883 }
9884
9885 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9886
9887 tree
9888 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9889 {
9890 gcc_assert (mask_mode != BLKmode);
9891
9892 unsigned HOST_WIDE_INT esize;
9893 if (VECTOR_MODE_P (mask_mode))
9894 {
9895 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9896 esize = vector_element_size (vsize, nunits);
9897 }
9898 else
9899 esize = 1;
9900
9901 tree bool_type = build_nonstandard_boolean_type (esize);
9902
9903 return make_vector_type (bool_type, nunits, mask_mode);
9904 }
9905
/* Build a vector type that holds one boolean result for each element of
   vector type VECTYPE.  The public interface for this operation is
   truth_type_for.  */

static tree
build_truth_vector_type_for (tree vectype)
{
  machine_mode vector_mode = TYPE_MODE (vectype);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);

  /* Prefer the target's dedicated mask mode for this vector mode,
     if it provides one.  */
  machine_mode mask_mode;
  if (VECTOR_MODE_P (vector_mode)
      && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
    return build_truth_vector_type_for_mode (nunits, mask_mode);

  /* Otherwise size each boolean element to match the corresponding
     element of VECTYPE and let the mode be chosen automatically.  */
  poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
  unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
  tree bool_type = build_nonstandard_boolean_type (esize);

  return make_vector_type (bool_type, nunits, VOIDmode);
}
9927
/* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
   set.  */

tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  /* Splice the new variant into the variant chain directly after T, and
     share T's main variant.  */
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
9953
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

static poly_wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  COUNT is how many
     occurrences of the pattern precede and include element I.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements,
     extrapolating the series linearly by their difference.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
  return wi::to_poly_wide (v2) + (count - 2) * diff;
}
9981
/* Return the value of element I of VECTOR_CST T.  */

tree
vector_cst_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return VECTOR_CST_ENCODED_ELT (t, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    {
      /* Identify the pattern that contains element I and work out the index of
	 the last encoded element for that pattern.  */
      unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return VECTOR_CST_ENCODED_ELT (t, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements and
     materialize it as a tree of the element type.  */
  return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
			   vector_cst_int_elt (t, i));
}
10007
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a STRING_CST, set by the MEM_REF case below.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* For a floating-point complex constant, both parts must be +0.0;
	 -0.0 has a nonzero bit pattern.  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* Only a single-pattern duplicate of a zero element is known zero.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber carries no usable value; give the conservative
	   "don't know" answer.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* Recurse into every explicit element, letting any of them
	   set *NONZERO.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Look through a MEM_REF of the form &STRING_CST + CST-OFFSET
	   and fall through to the string case with OFF set.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	/* An offset past the end of the string is not a known zero.  */
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
10135
/* Return true if EXPR is an initializer expression in which every element
   is a constant that is numerically equal to 0 or 1.  The elements do not
   need to be equal to each other.  */

bool
initializer_each_zero_or_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return integer_zerop (expr) || integer_onep (expr);

    case REAL_CST:
      return real_zerop (expr) || real_onep (expr);

    case VECTOR_CST:
      {
	/* Normally only the encoded elements need checking, but for a
	   stepped encoding later elements are extrapolated and can leave
	   the {0,1} set, so require a constant element count and check
	   every element in that case.  */
	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
	if (VECTOR_CST_STEPPED_P (expr)
	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
	  return false;

	for (unsigned int i = 0; i < nelts; ++i)
	  {
	    tree elt = vector_cst_elt (expr, i);
	    if (!initializer_each_zero_or_onep (elt))
	      return false;
	  }

	return true;
      }

    default:
      return false;
    }
}
10174
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* Uniform iff the encoding is a single duplicated pattern.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first one.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
        {
          if (i == 0)
            {
              first = t;
              continue;
            }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
        }
      /* If the CONSTRUCTOR has fewer elements than the vector type, the
	 trailing elements are not explicit, so uniformity is unproven.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
10223
10224 /* If the argument is INTEGER_CST, return it. If the argument is vector
10225 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10226 return NULL_TREE.
10227 Look through location wrappers. */
10228
10229 tree
10230 uniform_integer_cst_p (tree t)
10231 {
10232 STRIP_ANY_LOCATION_WRAPPER (t);
10233
10234 if (TREE_CODE (t) == INTEGER_CST)
10235 return t;
10236
10237 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10238 {
10239 t = uniform_vector_p (t);
10240 if (t && TREE_CODE (t) == INTEGER_CST)
10241 return t;
10242 }
10243
10244 return NULL_TREE;
10245 }
10246
/* If VECTOR_CST T has a single nonzero element, return the index of that
   element, otherwise return -1.  */

int
single_nonzero_element (const_tree t)
{
  /* NELTS is how many elements we scan; REPEAT_NELTS is the index bound
     beyond which a nonzero element would repeat and thus disqualify T.  */
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      /* Variable-length vector: with two elements per pattern the encoded
	 elements cover all distinct values; any nonzero outside the first
	 NPATTERNS elements repeats indefinitely.  */
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* A second nonzero element, or one in the repeated region,
	     means T does not have exactly one nonzero element.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
10278
10279 /* Build an empty statement at location LOC. */
10280
10281 tree
10282 build_empty_stmt (location_t loc)
10283 {
10284 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10285 SET_EXPR_LOCATION (t, loc);
10286 return t;
10287 }
10288
10289
10290 /* Build an OpenMP clause with code CODE. LOC is the location of the
10291 clause. */
10292
10293 tree
10294 build_omp_clause (location_t loc, enum omp_clause_code code)
10295 {
10296 tree t;
10297 int size, length;
10298
10299 length = omp_clause_num_ops[code];
10300 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10301
10302 record_node_allocation_statistics (OMP_CLAUSE, size);
10303
10304 t = (tree) ggc_internal_alloc (size);
10305 memset (t, 0, size);
10306 TREE_SET_CODE (t, OMP_CLAUSE);
10307 OMP_CLAUSE_SET_CODE (t, code);
10308 OMP_CLAUSE_LOCATION (t) = loc;
10309
10310 return t;
10311 }
10312
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* tree_exp embeds one operand slot, so allocate LEN - 1 extra.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10339
10340 /* Helper function for build_call_* functions; build a CALL_EXPR with
10341 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10342 the argument slots. */
10343
10344 static tree
10345 build_call_1 (tree return_type, tree fn, int nargs)
10346 {
10347 tree t;
10348
10349 t = build_vl_exp (CALL_EXPR, nargs + 3);
10350 TREE_TYPE (t) = return_type;
10351 CALL_EXPR_FN (t) = fn;
10352 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10353
10354 return t;
10355 }
10356
10357 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10358 FN and a null static chain slot. NARGS is the number of call arguments
10359 which are specified as "..." arguments. */
10360
10361 tree
10362 build_call_nary (tree return_type, tree fn, int nargs, ...)
10363 {
10364 tree ret;
10365 va_list args;
10366 va_start (args, nargs);
10367 ret = build_call_valist (return_type, fn, nargs, args);
10368 va_end (args);
10369 return ret;
10370 }
10371
10372 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10373 FN and a null static chain slot. NARGS is the number of call arguments
10374 which are specified as a va_list ARGS. */
10375
10376 tree
10377 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10378 {
10379 tree t;
10380 int i;
10381
10382 t = build_call_1 (return_type, fn, nargs);
10383 for (i = 0; i < nargs; i++)
10384 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10385 process_call_operands (t);
10386 return t;
10387 }
10388
10389 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10390 FN and a null static chain slot. NARGS is the number of call arguments
10391 which are specified as a tree array ARGS. */
10392
10393 tree
10394 build_call_array_loc (location_t loc, tree return_type, tree fn,
10395 int nargs, const tree *args)
10396 {
10397 tree t;
10398 int i;
10399
10400 t = build_call_1 (return_type, fn, nargs);
10401 for (i = 0; i < nargs; i++)
10402 CALL_EXPR_ARG (t, i) = args[i];
10403 process_call_operands (t);
10404 SET_EXPR_LOCATION (t, loc);
10405 return t;
10406 }
10407
10408 /* Like build_call_array, but takes a vec. */
10409
10410 tree
10411 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10412 {
10413 tree ret, t;
10414 unsigned int ix;
10415
10416 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10417 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10418 CALL_EXPR_ARG (ret, ix) = t;
10419 process_call_operands (ret);
10420 return ret;
10421 }
10422
10423 /* Conveniently construct a function call expression. FNDECL names the
10424 function to be called and N arguments are passed in the array
10425 ARGARRAY. */
10426
10427 tree
10428 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10429 {
10430 tree fntype = TREE_TYPE (fndecl);
10431 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10432
10433 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10434 }
10435
10436 /* Conveniently construct a function call expression. FNDECL names the
10437 function to be called and the arguments are passed in the vector
10438 VEC. */
10439
10440 tree
10441 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10442 {
10443 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10444 vec_safe_address (vec));
10445 }
10446
10447
10448 /* Conveniently construct a function call expression. FNDECL names the
10449 function to be called, N is the number of arguments, and the "..."
10450 parameters are the argument expressions. */
10451
10452 tree
10453 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10454 {
10455 va_list ap;
10456 tree *argarray = XALLOCAVEC (tree, n);
10457 int i;
10458
10459 va_start (ap, n);
10460 for (i = 0; i < n; i++)
10461 argarray[i] = va_arg (ap, tree);
10462 va_end (ap);
10463 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10464 }
10465
10466 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10467 varargs macros aren't supported by all bootstrap compilers. */
10468
10469 tree
10470 build_call_expr (tree fndecl, int n, ...)
10471 {
10472 va_list ap;
10473 tree *argarray = XALLOCAVEC (tree, n);
10474 int i;
10475
10476 va_start (ap, n);
10477 for (i = 0; i < n; i++)
10478 argarray[i] = va_arg (ap, tree);
10479 va_end (ap);
10480 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10481 }
10482
10483 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10484 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10485 It will get gimplified later into an ordinary internal function. */
10486
10487 tree
10488 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10489 tree type, int n, const tree *args)
10490 {
10491 tree t = build_call_1 (type, NULL_TREE, n);
10492 for (int i = 0; i < n; ++i)
10493 CALL_EXPR_ARG (t, i) = args[i];
10494 SET_EXPR_LOCATION (t, loc);
10495 CALL_EXPR_IFN (t) = ifn;
10496 process_call_operands (t);
10497 return t;
10498 }
10499
10500 /* Build internal call expression. This is just like CALL_EXPR, except
10501 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10502 internal function. */
10503
10504 tree
10505 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10506 tree type, int n, ...)
10507 {
10508 va_list ap;
10509 tree *argarray = XALLOCAVEC (tree, n);
10510 int i;
10511
10512 va_start (ap, n);
10513 for (i = 0; i < n; i++)
10514 argarray[i] = va_arg (ap, tree);
10515 va_end (ap);
10516 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10517 }
10518
/* Return a function call to FN, if the target is guaranteed to support it,
   or null otherwise.

   N is the number of arguments, passed in the "...", and TYPE is the
   type of the return value.  */

tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
			   int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  /* Collect the "..." arguments into ARGARRAY.  */
  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      /* For directly-mapped internal functions, ask the target whether
	 the operation is actually supported; give up if not.  */
      if (direct_internal_fn_p (ifn))
	{
	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
	  if (!direct_internal_fn_supported_p (ifn, types,
					       OPTIMIZE_FOR_BOTH))
	    return NULL_TREE;
	}
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      /* Ordinary built-in: fail if no implicit declaration exists.  */
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
	return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
10557
10558 /* Return a function call to the appropriate builtin alloca variant.
10559
10560 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10561 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10562 bound for SIZE in case it is not a fixed value. */
10563
10564 tree
10565 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10566 {
10567 if (max_size >= 0)
10568 {
10569 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10570 return
10571 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10572 }
10573 else if (align > 0)
10574 {
10575 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10576 return build_call_expr (t, 2, size, size_int (align));
10577 }
10578 else
10579 {
10580 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10581 return build_call_expr (t, 1, size);
10582 }
10583 }
10584
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   the STRING_CST value is the LEN bytes at STR (the representation
   of the string, which may be wide).  Otherwise it's all zeros.  */

tree
build_string_literal (unsigned len, const char *str /* = NULL */,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  /* The literal gets a const-qualified variant of ELTYPE, so its type
     is "const ELTYPE[maxidx + 1]".  */
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &literal[0] rather than the STRING_CST itself.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
10615
10616
10617
10618 /* Return true if T (assumed to be a DECL) must be assigned a memory
10619 location. */
10620
10621 bool
10622 needs_to_live_in_memory (const_tree t)
10623 {
10624 return (TREE_ADDRESSABLE (t)
10625 || is_global_var (t)
10626 || (TREE_CODE (t) == RESULT_DECL
10627 && !DECL_BY_REFERENCE (t)
10628 && aggregate_value_p (t, current_function_decl)));
10629 }
10630
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Inspect the sign bit of the BITS-precision value.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	/* Set all bits above the sign bit.  The double shift avoids a
	   shift count equal to the width of the type.  */
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	/* Clear all bits above the sign bit.  */
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
10653
10654 /* If TYPE is an integral or pointer type, return an integer type with
10655 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10656 if TYPE is already an integer type of signedness UNSIGNEDP.
10657 If TYPE is a floating-point type, return an integer type with the same
10658 bitsize and with the signedness given by UNSIGNEDP; this is useful
10659 when doing bit-level operations on a floating-point value. */
10660
10661 tree
10662 signed_or_unsigned_type_for (int unsignedp, tree type)
10663 {
10664 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10665 return type;
10666
10667 if (TREE_CODE (type) == VECTOR_TYPE)
10668 {
10669 tree inner = TREE_TYPE (type);
10670 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10671 if (!inner2)
10672 return NULL_TREE;
10673 if (inner == inner2)
10674 return type;
10675 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10676 }
10677
10678 if (TREE_CODE (type) == COMPLEX_TYPE)
10679 {
10680 tree inner = TREE_TYPE (type);
10681 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10682 if (!inner2)
10683 return NULL_TREE;
10684 if (inner == inner2)
10685 return type;
10686 return build_complex_type (inner2);
10687 }
10688
10689 unsigned int bits;
10690 if (INTEGRAL_TYPE_P (type)
10691 || POINTER_TYPE_P (type)
10692 || TREE_CODE (type) == OFFSET_TYPE)
10693 bits = TYPE_PRECISION (type);
10694 else if (TREE_CODE (type) == REAL_TYPE)
10695 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10696 else
10697 return NULL_TREE;
10698
10699 return build_nonstandard_integer_type (bits, unsignedp);
10700 }
10701
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  If TYPE is a floating-point type, return an
   unsigned integer type with the same bitsize as TYPE.  Returns
   NULL_TREE for types that cannot be handled (see
   signed_or_unsigned_type_for).  */

tree
unsigned_type_for (tree type)
{
  return signed_or_unsigned_type_for (1, type);
}
10712
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  If TYPE is a floating-point type, return a
   signed integer type with the same bitsize as TYPE.  Returns
   NULL_TREE for types that cannot be handled (see
   signed_or_unsigned_type_for).  */

tree
signed_type_for (tree type)
{
  return signed_or_unsigned_type_for (0, type);
}
10723
10724 /* If TYPE is a vector type, return a signed integer vector type with the
10725 same width and number of subparts. Otherwise return boolean_type_node. */
10726
10727 tree
10728 truth_type_for (tree type)
10729 {
10730 if (TREE_CODE (type) == VECTOR_TYPE)
10731 {
10732 if (VECTOR_BOOLEAN_TYPE_P (type))
10733 return type;
10734 return build_truth_vector_type_for (type);
10735 }
10736 else
10737 return boolean_type_node;
10738 }
10739
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination: bit 2 is set iff
     OUTER is wider than INNER, bit 1 iff OUTER is unsigned, bit 0 iff
     INNER is unsigned.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use: the bound returned is 2^PREC - 1.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* Build the mask of PREC low bits in OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
10792
10793 /* Returns the smallest value obtainable by casting something in INNER type to
10794 OUTER type. */
10795
10796 tree
10797 lower_bound_in_type (tree outer, tree inner)
10798 {
10799 unsigned oprec = TYPE_PRECISION (outer);
10800 unsigned iprec = TYPE_PRECISION (inner);
10801
10802 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10803 and obtain 0. */
10804 if (TYPE_UNSIGNED (outer)
10805 /* If we are widening something of an unsigned type, OUTER type
10806 contains all values of INNER type. In particular, both INNER
10807 and OUTER types have zero in common. */
10808 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10809 return build_int_cst (outer, 0);
10810 else
10811 {
10812 /* If we are widening a signed type to another signed type, we
10813 want to obtain -2^^(iprec-1). If we are keeping the
10814 precision or narrowing to a signed type, we want to obtain
10815 -2^(oprec-1). */
10816 unsigned prec = oprec > iprec ? iprec : oprec;
10817 return wide_int_to_tree (outer,
10818 wi::mask (prec - 1, true,
10819 TYPE_PRECISION (outer)));
10820 }
10821 }
10822
/* Return nonzero if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

int
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  /* Identical nodes are trivially equal.  */
  if (arg0 == arg1)
    return 1;
  /* Distinct SSA_NAMEs are never known to be equal here; bail out
     before the (more expensive) structural comparison.  */
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return 0;
  return operand_equal_p (arg0, arg1, 0);
}
10838
/* Returns number of zeros at the end of binary representation of X.
   The count is returned as an INTEGER_CST of X's own type.  */

tree
num_ending_zeros (const_tree x)
{
  return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
}
10846
10847
/* Recurse into NODE as part of the current tree walk, forwarding the
   walker callback FUNC, its DATA, the visited-set PSET and the
   language hook LH from the enclosing function's scope.  A non-NULL
   result aborts the walk by returning it from the enclosing function.
   Used by walk_tree_1 and walk_type_fields below.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
10856
/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this nodes's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  /* A WALK_SUBTREE above returns directly on a hit, so reaching here
     means nothing was found.  */
  return NULL_TREE;
}
10936
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  LH, if non-NULL, is a
   language hook that may handle (parts of) the walk itself.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Like WALK_SUBTREE, but implemented as a jump back to the top of this
   function so the last child of a node is walked without growing the
   stack.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language hook a chance to handle this node itself.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case VECTOR_CST:
      {
	unsigned len = vector_cst_encoded_nelts (*tp);
	if (len == 0)
	  break;
	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Clauses are grouped by their operand count; each group walks
	 its operands and then tail-walks the clause chain.  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_AFFINITY:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_DEVICE_TYPE:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_NOHOST:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  /* Call the function for the decl so e.g. copy_tree_body_r can
	     replace it with the remapped one.  */
	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
11336 #undef WALK_SUBTREE
11337
11338 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11339
11340 tree
11341 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11342 walk_tree_lh lh)
11343 {
11344 tree result;
11345
11346 hash_set<tree> pset;
11347 result = walk_tree_1 (tp, func, data, &pset, lh);
11348 return result;
11349 }
11350
11351
11352 tree
11353 tree_block (tree t)
11354 {
11355 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11356
11357 if (IS_EXPR_CODE_CLASS (c))
11358 return LOCATION_BLOCK (t->exp.locus);
11359 gcc_unreachable ();
11360 return NULL;
11361 }
11362
11363 void
11364 tree_set_block (tree t, tree b)
11365 {
11366 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11367
11368 if (IS_EXPR_CODE_CLASS (c))
11369 {
11370 t->exp.locus = set_block (t->exp.locus, b);
11371 }
11372 else
11373 gcc_unreachable ();
11374 }
11375
11376 /* Create a nameless artificial label and put it in the current
11377 function context. The label has a location of LOC. Returns the
11378 newly created label. */
11379
11380 tree
11381 create_artificial_label (location_t loc)
11382 {
11383 tree lab = build_decl (loc,
11384 LABEL_DECL, NULL_TREE, void_type_node);
11385
11386 DECL_ARTIFICIAL (lab) = 1;
11387 DECL_IGNORED_P (lab) = 1;
11388 DECL_CONTEXT (lab) = current_function_decl;
11389 return lab;
11390 }
11391
11392 /* Given a tree, try to return a useful variable name that we can use
11393 to prefix a temporary that is being assigned the value of the tree.
11394 I.E. given <temp> = &A, return A. */
11395
11396 const char *
11397 get_name (tree t)
11398 {
11399 tree stripped_decl;
11400
11401 stripped_decl = t;
11402 STRIP_NOPS (stripped_decl);
11403 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11404 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11405 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11406 {
11407 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11408 if (!name)
11409 return NULL;
11410 return IDENTIFIER_POINTER (name);
11411 }
11412 else
11413 {
11414 switch (TREE_CODE (stripped_decl))
11415 {
11416 case ADDR_EXPR:
11417 return get_name (TREE_OPERAND (stripped_decl, 0));
11418 default:
11419 return NULL;
11420 }
11421 }
11422 }
11423
11424 /* Return true if TYPE has a variable argument list. */
11425
11426 bool
11427 stdarg_p (const_tree fntype)
11428 {
11429 function_args_iterator args_iter;
11430 tree n = NULL_TREE, t;
11431
11432 if (!fntype)
11433 return false;
11434
11435 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11436 {
11437 n = t;
11438 }
11439
11440 return n != NULL_TREE && n != void_type_node;
11441 }
11442
/* Return true if FNTYPE has a prototype.  */
11444
11445 bool
11446 prototype_p (const_tree fntype)
11447 {
11448 tree t;
11449
11450 gcc_assert (fntype != NULL_TREE);
11451
11452 t = TYPE_ARG_TYPES (fntype);
11453 return (t != NULL_TREE);
11454 }
11455
11456 /* If BLOCK is inlined from an __attribute__((__artificial__))
11457 routine, return pointer to location from where it has been
11458 called. */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward as long as each BLOCK level was inlined from
     somewhere, i.e. has an abstract origin.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    /* Inlined from a non-artificial function: stop here.  */
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  /* NULL when BLOCK was not inlined from an artificial inline.  */
  return ret;
}
11487
11488
11489 /* If EXP is inlined from an __attribute__((__artificial__))
11490 function, return the location of the original call expression. */
11491
11492 location_t
11493 tree_nonartificial_location (tree exp)
11494 {
11495 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11496
11497 if (loc)
11498 return *loc;
11499 else
11500 return EXPR_LOCATION (exp);
11501 }
11502
11503 /* Return the location into which EXP has been inlined. Analogous
11504 to tree_nonartificial_location() above but not limited to artificial
11505 functions declared inline. If SYSTEM_HEADER is true, return
11506 the macro expansion point of the location if it's in a system header */
11507
location_t
tree_inlined_location (tree exp, bool system_header /* = true */)
{
  location_t loc = UNKNOWN_LOCATION;

  tree block = TREE_BLOCK (exp);

  /* Walk outward through the blocks EXP was inlined through,
     recording the call-site location at each function level; the
     last assignment (the outermost level walked) wins.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	loc = BLOCK_SOURCE_LOCATION (block);
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }

  if (loc == UNKNOWN_LOCATION)
    {
      /* EXP was not inlined; use its own location.  */
      loc = EXPR_LOCATION (exp);
      if (system_header)
	/* Only consider macro expansion when the block traversal failed
	   to find a location.  Otherwise it's not relevant.  */
	return expansion_point_location_if_in_system_header (loc);
    }

  return loc;
}
11538
11539 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11540 nodes. */
11541
11542 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11543
hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the saved cl_optimization struct.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    /* Target option nodes have a dedicated hash routine.  */
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  Skipping zero bytes keeps the hash cheap.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
11573
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that represented by *Y,
   a node of the same kind.  */
11577
11578 bool
11579 cl_option_hasher::equal (tree x, tree y)
11580 {
11581 const_tree const xt = x;
11582 const_tree const yt = y;
11583
11584 if (TREE_CODE (xt) != TREE_CODE (yt))
11585 return 0;
11586
11587 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11588 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11589 TREE_OPTIMIZATION (yt));
11590 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11591 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11592 TREE_TARGET_OPTION (yt));
11593 else
11594 gcc_unreachable ();
11595 }
11596
11597 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11598
tree
build_optimization_node (struct gcc_options *opts,
			 struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save the current options into the pre-allocated scratch node,
     then use it as the lookup key.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  /* T is the canonical (shared) node for this option set.  */
  return t;
}
11624
11625 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11626
tree
build_target_option_node (struct gcc_options *opts,
			  struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save the current target options into the pre-allocated scratch
     node, then use it as the lookup key.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  /* T is the canonical (shared) node for this target option set.  */
  return t;
}
11652
11653 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11654 so that they aren't saved during PCH writing. */
11655
11656 void
11657 prepare_target_option_nodes_for_pch (void)
11658 {
11659 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11660 for (; iter != cl_option_hash_table->end (); ++iter)
11661 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11662 TREE_TARGET_GLOBALS (*iter) = NULL;
11663 }
11664
11665 /* Determine the "ultimate origin" of a block. */
11666
11667 tree
11668 block_ultimate_origin (const_tree block)
11669 {
11670 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11671
11672 if (origin == NULL_TREE)
11673 return NULL_TREE;
11674 else
11675 {
11676 gcc_checking_assert ((DECL_P (origin)
11677 && DECL_ORIGIN (origin) == origin)
11678 || BLOCK_ORIGIN (origin) == origin);
11679 return origin;
11680 }
11681 }
11682
11683 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11684 no instruction. */
11685
bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
11720
11721 /* Return true iff conversion in EXP generates no instruction. Mark
11722 it inline so that we fully inline into the stripping functions even
11723 though we have two uses of this function. */
11724
11725 static inline bool
11726 tree_nop_conversion (const_tree exp)
11727 {
11728 tree outer_type, inner_type;
11729
11730 if (location_wrapper_p (exp))
11731 return true;
11732 if (!CONVERT_EXPR_P (exp)
11733 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11734 return false;
11735
11736 outer_type = TREE_TYPE (exp);
11737 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11738 if (!inner_type || inner_type == error_mark_node)
11739 return false;
11740
11741 return tree_nop_conversion_p (outer_type, inner_type);
11742 }
11743
11744 /* Return true iff conversion in EXP generates no instruction. Don't
11745 consider conversions changing the signedness. */
11746
11747 static bool
11748 tree_sign_nop_conversion (const_tree exp)
11749 {
11750 tree outer_type, inner_type;
11751
11752 if (!tree_nop_conversion (exp))
11753 return false;
11754
11755 outer_type = TREE_TYPE (exp);
11756 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11757
11758 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11759 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11760 }
11761
11762 /* Strip conversions from EXP according to tree_nop_conversion and
11763 return the resulting expression. */
11764
11765 tree
11766 tree_strip_nop_conversions (tree exp)
11767 {
11768 while (tree_nop_conversion (exp))
11769 exp = TREE_OPERAND (exp, 0);
11770 return exp;
11771 }
11772
11773 /* Strip conversions from EXP according to tree_sign_nop_conversion
11774 and return the resulting expression. */
11775
11776 tree
11777 tree_strip_sign_nop_conversions (tree exp)
11778 {
11779 while (tree_sign_nop_conversion (exp))
11780 exp = TREE_OPERAND (exp, 0);
11781 return exp;
11782 }
11783
11784 /* Avoid any floating point extensions from EXP. */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float, then double; narrow only if the value survives the
	 truncation exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  /* Only strip conversions between floating point types ...  */
  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* ... of the same (binary vs. decimal) family ...  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* ... and only if the conversion does not narrow.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse in case of nested extensions.  */
  return strip_float_extensions (sub);
}
11829
11830 /* Strip out all handled components that produce invariant
11831 offsets. */
11832
const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* The index must be constant, and the non-default lower
	     bound (operand 2) and element size (operand 3) must be
	     absent, or the offset is not invariant.  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* A non-default field offset (operand 2) is not invariant.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  /* OP is now the base object with all invariant components stripped.  */
  return op;
}
11860
11861 static GTY(()) tree gcc_eh_personality_decl;
11862
11863 /* Return the GCC personality function decl. */
11864
11865 tree
11866 lhd_gcc_personality (void)
11867 {
11868 if (!gcc_eh_personality_decl)
11869 gcc_eh_personality_decl = build_personality_function ("gcc");
11870 return gcc_eh_personality_decl;
11871 }
11872
11873 /* TARGET is a call target of GIMPLE call statement
11874 (obtained by gimple_call_fn). Return true if it is
11875 OBJ_TYPE_REF representing an virtual call of C++ method.
11876 (As opposed to OBJ_TYPE_REF representing objc calls
11877 through a cast where middle-end devirtualization machinery
11878 can't apply.) FOR_DUMP_P is true when being called from
11879 the dump routines. */
11880
bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* TARGET's type is a pointer to the called function or method type.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  if (TREE_CODE (t) == FUNCTION_TYPE)
    /* A plain FUNCTION_TYPE here means an objc-style OBJ_TYPE_REF,
       not a C++ virtual method call (see the function comment).  */
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
11899
11900 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11901
static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  /* Check each immediate base for a match at POS; otherwise recurse
     into that base's own bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL;
}
11916
11917 /* Try to find a base info of BINFO that would have its field decl at offset
11918 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11919 found, return, otherwise return NULL_TREE. */
11920
tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  /* Each iteration descends one level into the type, adjusting
     OFFSET to be relative to the field entered.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial FIELD_DECL of TYPE whose bit range
	 contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* Look among direct bases first ...  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    /* ... then fall back to a recursive search.  */
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field and continue with the rest of OFFSET.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
11978
11979 /* Returns true if X is a typedef decl. */
11980
11981 bool
11982 is_typedef_decl (const_tree x)
11983 {
11984 return (x && TREE_CODE (x) == TYPE_DECL
11985 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11986 }
11987
11988 /* Returns true iff TYPE is a type variant created for a typedef. */
11989
11990 bool
11991 typedef_variant_p (const_tree type)
11992 {
11993 return is_typedef_decl (TYPE_NAME (type));
11994 }
11995
11996 /* PR 84195: Replace control characters in "unescaped" with their
11997 escaped equivalents. Allow newlines if -fmessage-length has
11998 been set to a non-zero value. This is done here, rather than
11999 where the attribute is recorded as the message length can
12000 change between these two locations. */
12001
void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any string this object currently owns.  */
  if (m_owned)
    free (m_str);

  /* By default, borrow UNESCAPED without copying.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (!ISCNTRL (c))
	{
	  /* Ordinary character: copy it only once a replacement
	     buffer exists.  */
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Newlines pass through verbatim when the printer wraps lines
	 (i.e. -fmessage-length is non-zero).  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Worst case each input byte becomes
		 a two-byte escape, hence len * 2 + 1.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default: escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      /* Terminate and take ownership of the replacement string.  */
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12069
12070 /* Warn about a use of an identifier which was marked deprecated. Returns
12071 whether a warning was given. */
12072
bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* When the caller did not pass the attribute list, dig it out of
     NODE itself.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* Extract the attribute's optional message, escaping any control
     characters it contains (see escaped_string::escape).  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* Find a name to refer to the type by in the diagnostic.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
12153
12154 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12155 somewhere in it. */
12156
12157 bool
12158 contains_bitfld_component_ref_p (const_tree ref)
12159 {
12160 while (handled_component_p (ref))
12161 {
12162 if (TREE_CODE (ref) == COMPONENT_REF
12163 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12164 return true;
12165 ref = TREE_OPERAND (ref, 0);
12166 }
12167
12168 return false;
12169 }
12170
12171 /* Try to determine whether a TRY_CATCH expression can fall through.
12172 This is a subroutine of block_may_fallthru. */
12173
static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Inspect the first statement of the handler (operand 1) to see
     what kind of handler sequence this is.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
12218
12219 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12220 need not be 100% accurate; simply be conservative and return true if we
12221 don't know. This is used only to avoid stupidly generating extra code.
12222 If we're wrong, we'll just delete the extra code later. */
12223
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* Decide based on the last statement of the block; ERROR_MARK is
     used as the "no statement" case and conservatively may fall
     through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment from a call inherits the call's behavior.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
12297
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* Called from front ends to record that EH should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
12308
12309 /* Query whether EH is used for cleanups. */
12310 bool
12311 using_eh_for_cleanups_p (void)
12312 {
12313 return using_eh_for_cleanups_flag;
12314 }
12315
12316 /* Wrapper for tree_code_name to ensure that tree code is valid */
12317 const char *
12318 get_tree_code_name (enum tree_code code)
12319 {
12320 const char *invalid = "<invalid tree code>";
12321
12322 /* The tree_code enum promotes to signed, but we could be getting
12323 invalid values, so force an unsigned comparison. */
12324 if (unsigned (code) >= MAX_TREE_CODES)
12325 {
12326 if ((unsigned)code == 0xa5a5)
12327 return "ggc_freed";
12328 return invalid;
12329 }
12330
12331 return tree_code_name[code];
12332 }
12333
12334 /* Drops the TREE_OVERFLOW flag from T. */
12335
tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  /* Recurse into each overflowed element.  */
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
12379
12380 /* Given a memory reference expression T, return its base address.
12381 The base address of a memory reference expression is the main
12382 object being referenced. For instance, the base address for
12383 'array[i].fld[j]' is 'array'. You can think of this as stripping
12384 away the offset part from a memory address.
12385
12386 This function calls handled_component_p to strip away all the inner
12387 parts of the memory reference until it reaches the base object. */
12388
12389 tree
12390 get_base_address (tree t)
12391 {
12392 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12393 t = TREE_OPERAND (t, 0);
12394 while (handled_component_p (t))
12395 t = TREE_OPERAND (t, 0);
12396
12397 if ((TREE_CODE (t) == MEM_REF
12398 || TREE_CODE (t) == TARGET_MEM_REF)
12399 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12400 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12401
12402 return t;
12403 }
12404
12405 /* Return a tree of sizetype representing the size, in bytes, of the element
12406 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12407
tree
array_ref_element_size (tree exp)
{
  /* Operand 3, when present, is the element size in alignment units.  */
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
12432
12433 /* Return a tree representing the lower bound of the array mentioned in
12434 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12435
12436 tree
12437 array_ref_low_bound (tree exp)
12438 {
12439 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12440
12441 /* If a lower bound is specified in EXP, use it. */
12442 if (TREE_OPERAND (exp, 2))
12443 return TREE_OPERAND (exp, 2);
12444
12445 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12446 substituting for a PLACEHOLDER_EXPR as needed. */
12447 if (domain_type && TYPE_MIN_VALUE (domain_type))
12448 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12449
12450 /* Otherwise, return a zero of the appropriate type. */
12451 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12452 return (idxtype == error_mark_node
12453 ? integer_zero_node : build_int_cst (idxtype, 0));
12454 }
12455
12456 /* Return a tree representing the upper bound of the array mentioned in
12457 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12458
12459 tree
12460 array_ref_up_bound (tree exp)
12461 {
12462 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12463
12464 /* If there is a domain type and it has an upper bound, use it, substituting
12465 for a PLACEHOLDER_EXPR as needed. */
12466 if (domain_type && TYPE_MAX_VALUE (domain_type))
12467 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12468
12469 /* Otherwise fail. */
12470 return NULL_TREE;
12471 }
12472
/* Returns true if REF is an array reference, component reference,
   or memory reference to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  /* Determine the type of the candidate trailing array (ATYPE) and peel
     REF down to the object containing it.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  /* Only the last field of the record can be a trailing array.  */
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      /* A declared variable whose last field has a known size
		 cannot extend beyond that size.  */
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  /* String literals have a fixed extent.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip over non-FIELD_DECL entries on the chain when looking
		 for a following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
         a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
         gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  ref = get_base_address (ref);
  if (ref
      && DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
         padding.  */
      poly_int64 offset;
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
12597
12598 /* Return a tree representing the offset, in bytes, of the field referenced
12599 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12600
12601 tree
12602 component_ref_field_offset (tree exp)
12603 {
12604 tree aligned_offset = TREE_OPERAND (exp, 2);
12605 tree field = TREE_OPERAND (exp, 1);
12606 location_t loc = EXPR_LOCATION (exp);
12607
12608 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12609 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12610 value. */
12611 if (aligned_offset)
12612 {
12613 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12614 sizetype from another type of the same width and signedness. */
12615 if (TREE_TYPE (aligned_offset) != sizetype)
12616 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12617 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12618 size_int (DECL_OFFSET_ALIGN (field)
12619 / BITS_PER_UNIT));
12620 }
12621
12622 /* Otherwise, take the offset from that of the field. Substitute
12623 any PLACEHOLDER_EXPR that we have. */
12624 else
12625 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12626 }
12627
12628 /* Given the initializer INIT, return the initializer for the field
12629 DECL if it exists, otherwise null. Used to obtain the initializer
12630 for a flexible array member and determine its size. */
12631
12632 static tree
12633 get_initializer_for (tree init, tree decl)
12634 {
12635 STRIP_NOPS (init);
12636
12637 tree fld, fld_init;
12638 unsigned HOST_WIDE_INT i;
12639 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12640 {
12641 if (decl == fld)
12642 return fld_init;
12643
12644 if (TREE_CODE (fld) == CONSTRUCTOR)
12645 {
12646 fld_init = get_initializer_for (fld_init, decl);
12647 if (fld_init)
12648 return fld_init;
12649 }
12650 }
12651
12652 return NULL_TREE;
12653 }
12654
/* Determines the size of the member referenced by the COMPONENT_REF
   REF, using its initializer expression if necessary in order to
   determine the size of an initialized flexible array member.
   If non-null, set *SAM when REF refers to an interior zero-length
   array or a trailing one-element array.
   Returns the size as sizetype (which might be zero for an object
   with an uninitialized flexible array member) or null if the size
   cannot be determined.  */

tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* Allow a null SAM by pointing it at a local buffer.  */
  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      /* Classify the special array member.  */
      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      /* For a non-zero-length trailing array, use the domain to decide
	 whether it is a one-element array treated as a flexarray.  */
      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      /* Strip the chain of COMPONENT_REFs to find the outermost object
	 and take the member's byte position as the offset.  */
      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
12850
12851 /* Return the machine mode of T. For vectors, returns the mode of the
12852 inner type. The main use case is to feed the result to HONOR_NANS,
12853 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12854
12855 machine_mode
12856 element_mode (const_tree t)
12857 {
12858 if (!TYPE_P (t))
12859 t = TREE_TYPE (t);
12860 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12861 t = TREE_TYPE (t);
12862 return TYPE_MODE (t);
12863 }
12864
12865 /* Vector types need to re-check the target flags each time we report
12866 the machine mode. We need to do this because attribute target can
12867 change the result of vector_mode_supported_p and have_regs_of_mode
12868 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12869 change on a per-function basis. */
12870 /* ??? Possibly a better solution is to run through all the types
12871 referenced by a function and re-compute the TYPE_MODE once, rather
12872 than make the TYPE_MODE macro call a function. */
12873
12874 machine_mode
12875 vector_type_mode (const_tree t)
12876 {
12877 machine_mode mode;
12878
12879 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12880
12881 mode = t->type_common.mode;
12882 if (VECTOR_MODE_P (mode)
12883 && (!targetm.vector_mode_supported_p (mode)
12884 || !have_regs_of_mode[mode]))
12885 {
12886 scalar_int_mode innermode;
12887
12888 /* For integers, try mapping it to a same-sized scalar mode. */
12889 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12890 {
12891 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12892 * GET_MODE_BITSIZE (innermode));
12893 scalar_int_mode mode;
12894 if (int_mode_for_size (size, 0).exists (&mode)
12895 && have_regs_of_mode[mode])
12896 return mode;
12897 }
12898
12899 return BLKmode;
12900 }
12901
12902 return mode;
12903 }
12904
12905 /* Return the size in bits of each element of vector type TYPE. */
12906
12907 unsigned int
12908 vector_element_bits (const_tree type)
12909 {
12910 gcc_checking_assert (VECTOR_TYPE_P (type));
12911 if (VECTOR_BOOLEAN_TYPE_P (type))
12912 return TYPE_PRECISION (TREE_TYPE (type));
12913 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
12914 }
12915
12916 /* Calculate the size in bits of each element of vector type TYPE
12917 and return the result as a tree of type bitsizetype. */
12918
12919 tree
12920 vector_element_bits_tree (const_tree type)
12921 {
12922 gcc_checking_assert (VECTOR_TYPE_P (type));
12923 if (VECTOR_BOOLEAN_TYPE_P (type))
12924 return bitsize_int (vector_element_bits (type));
12925 return TYPE_SIZE (TREE_TYPE (type));
12926 }
12927
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  On mismatch it
     reports the differing accessor and makes the function return false.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
#if 0
  if (TYPE_ARTIFICIAL (tv))
    verify_variant_match (TYPE_ARTIFICIAL);
#endif
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
#if 0
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
    verify_variant_match (TYPE_FINAL_P);
#endif

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* PLACEHOLDER_EXPR sizes cannot be compared by pointer identity.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ BY TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
#if 0
  if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
    verify_variant_match (TYPE_CONTEXT);
#endif
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replace it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		!= TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
13168
13169
13170 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13171 the middle-end types_compatible_p function. It needs to avoid
13172 claiming types are different for types that should be treated
13173 the same with respect to TBAA. Canonical types are also used
13174 for IL consistency checks via the useless_type_conversion_p
13175 predicate which does not handle all type kinds itself but falls
13176 back to pointer-comparison of TYPE_CANONICAL for aggregates
13177 for example. */
13178
13179 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13180 type calculation because we need to allow inter-operability between signed
13181 and unsigned variants. */
13182
13183 bool
13184 type_with_interoperable_signedness (const_tree type)
13185 {
13186 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13187 signed char and unsigned char. Similarly fortran FE builds
13188 C_SIZE_T as signed type, while C defines it unsigned. */
13189
13190 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13191 == INTEGER_TYPE
13192 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13193 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13194 }
13195
13196 /* Return true iff T1 and T2 are structurally identical for what
13197 TBAA is concerned.
13198 This function is used both by lto.c canonical type merging and by the
13199 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13200 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13201 only for LTO because only in these cases TYPE_CANONICAL equivalence
13202 correspond to one defined by gimple_canonical_types_compatible_p. */
13203
13204 bool
13205 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13206 bool trust_type_canonical)
13207 {
13208 /* Type variants should be same as the main variant. When not doing sanity
13209 checking to verify this fact, go to main variants and save some work. */
13210 if (trust_type_canonical)
13211 {
13212 t1 = TYPE_MAIN_VARIANT (t1);
13213 t2 = TYPE_MAIN_VARIANT (t2);
13214 }
13215
13216 /* Check first for the obvious case of pointer identity. */
13217 if (t1 == t2)
13218 return true;
13219
13220 /* Check that we have two types to compare. */
13221 if (t1 == NULL_TREE || t2 == NULL_TREE)
13222 return false;
13223
13224 /* We consider complete types always compatible with incomplete type.
13225 This does not make sense for canonical type calculation and thus we
13226 need to ensure that we are never called on it.
13227
13228 FIXME: For more correctness the function probably should have three modes
13229 1) mode assuming that types are complete mathcing their structure
13230 2) mode allowing incomplete types but producing equivalence classes
13231 and thus ignoring all info from complete types
13232 3) mode allowing incomplete types to match complete but checking
13233 compatibility between complete types.
13234
13235 1 and 2 can be used for canonical type calculation. 3 is the real
13236 definition of type compatibility that can be used i.e. for warnings during
13237 declaration merging. */
13238
13239 gcc_assert (!trust_type_canonical
13240 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13241
13242 /* If the types have been previously registered and found equal
13243 they still are. */
13244
13245 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13246 && trust_type_canonical)
13247 {
13248 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13249 they are always NULL, but they are set to non-NULL for types
13250 constructed by build_pointer_type and variants. In this case the
13251 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13252 all pointers are considered equal. Be sure to not return false
13253 negatives. */
13254 gcc_checking_assert (canonical_type_used_p (t1)
13255 && canonical_type_used_p (t2));
13256 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13257 }
13258
13259 /* For types where we do ODR based TBAA the canonical type is always
13260 set correctly, so we know that types are different if their
13261 canonical types does not match. */
13262 if (trust_type_canonical
13263 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13264 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13265 return false;
13266
13267 /* Can't be the same type if the types don't have the same code. */
13268 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13269 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13270 return false;
13271
13272 /* Qualifiers do not matter for canonical type comparison purposes. */
13273
13274 /* Void types and nullptr types are always the same. */
13275 if (TREE_CODE (t1) == VOID_TYPE
13276 || TREE_CODE (t1) == NULLPTR_TYPE)
13277 return true;
13278
13279 /* Can't be the same type if they have different mode. */
13280 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13281 return false;
13282
13283 /* Non-aggregate types can be handled cheaply. */
13284 if (INTEGRAL_TYPE_P (t1)
13285 || SCALAR_FLOAT_TYPE_P (t1)
13286 || FIXED_POINT_TYPE_P (t1)
13287 || TREE_CODE (t1) == VECTOR_TYPE
13288 || TREE_CODE (t1) == COMPLEX_TYPE
13289 || TREE_CODE (t1) == OFFSET_TYPE
13290 || POINTER_TYPE_P (t1))
13291 {
13292 /* Can't be the same type if they have different recision. */
13293 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13294 return false;
13295
13296 /* In some cases the signed and unsigned types are required to be
13297 inter-operable. */
13298 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13299 && !type_with_interoperable_signedness (t1))
13300 return false;
13301
13302 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13303 interoperable with "signed char". Unless all frontends are revisited
13304 to agree on these types, we must ignore the flag completely. */
13305
13306 /* Fortran standard define C_PTR type that is compatible with every
13307 C pointer. For this reason we need to glob all pointers into one.
13308 Still pointers in different address spaces are not compatible. */
13309 if (POINTER_TYPE_P (t1))
13310 {
13311 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13312 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13313 return false;
13314 }
13315
13316 /* Tail-recurse to components. */
13317 if (TREE_CODE (t1) == VECTOR_TYPE
13318 || TREE_CODE (t1) == COMPLEX_TYPE)
13319 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13320 TREE_TYPE (t2),
13321 trust_type_canonical);
13322
13323 return true;
13324 }
13325
13326 /* Do type-specific comparisons. */
13327 switch (TREE_CODE (t1))
13328 {
13329 case ARRAY_TYPE:
13330 /* Array types are the same if the element types are the same and
13331 the number of elements are the same. */
13332 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13333 trust_type_canonical)
13334 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13335 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13336 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13337 return false;
13338 else
13339 {
13340 tree i1 = TYPE_DOMAIN (t1);
13341 tree i2 = TYPE_DOMAIN (t2);
13342
13343 /* For an incomplete external array, the type domain can be
13344 NULL_TREE. Check this condition also. */
13345 if (i1 == NULL_TREE && i2 == NULL_TREE)
13346 return true;
13347 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13348 return false;
13349 else
13350 {
13351 tree min1 = TYPE_MIN_VALUE (i1);
13352 tree min2 = TYPE_MIN_VALUE (i2);
13353 tree max1 = TYPE_MAX_VALUE (i1);
13354 tree max2 = TYPE_MAX_VALUE (i2);
13355
13356 /* The minimum/maximum values have to be the same. */
13357 if ((min1 == min2
13358 || (min1 && min2
13359 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13360 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13361 || operand_equal_p (min1, min2, 0))))
13362 && (max1 == max2
13363 || (max1 && max2
13364 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13365 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13366 || operand_equal_p (max1, max2, 0)))))
13367 return true;
13368 else
13369 return false;
13370 }
13371 }
13372
13373 case METHOD_TYPE:
13374 case FUNCTION_TYPE:
13375 /* Function types are the same if the return type and arguments types
13376 are the same. */
13377 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13378 trust_type_canonical))
13379 return false;
13380
13381 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13382 return true;
13383 else
13384 {
13385 tree parms1, parms2;
13386
13387 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13388 parms1 && parms2;
13389 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13390 {
13391 if (!gimple_canonical_types_compatible_p
13392 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13393 trust_type_canonical))
13394 return false;
13395 }
13396
13397 if (parms1 || parms2)
13398 return false;
13399
13400 return true;
13401 }
13402
13403 case RECORD_TYPE:
13404 case UNION_TYPE:
13405 case QUAL_UNION_TYPE:
13406 {
13407 tree f1, f2;
13408
13409 /* Don't try to compare variants of an incomplete type, before
13410 TYPE_FIELDS has been copied around. */
13411 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13412 return true;
13413
13414
13415 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13416 return false;
13417
13418 /* For aggregate types, all the fields must be the same. */
13419 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13420 f1 || f2;
13421 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13422 {
13423 /* Skip non-fields and zero-sized fields. */
13424 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13425 || (DECL_SIZE (f1)
13426 && integer_zerop (DECL_SIZE (f1)))))
13427 f1 = TREE_CHAIN (f1);
13428 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13429 || (DECL_SIZE (f2)
13430 && integer_zerop (DECL_SIZE (f2)))))
13431 f2 = TREE_CHAIN (f2);
13432 if (!f1 || !f2)
13433 break;
13434 /* The fields must have the same name, offset and type. */
13435 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13436 || !gimple_compare_field_offset (f1, f2)
13437 || !gimple_canonical_types_compatible_p
13438 (TREE_TYPE (f1), TREE_TYPE (f2),
13439 trust_type_canonical))
13440 return false;
13441 }
13442
13443 /* If one aggregate has more fields than the other, they
13444 are not the same. */
13445 if (f1 || f2)
13446 return false;
13447
13448 return true;
13449 }
13450
13451 default:
13452 /* Consider all types with language specific trees in them mutually
13453 compatible. This is executed only from verify_type and false
13454 positives can be tolerated. */
13455 gcc_assert (!in_lto_p);
13456 return true;
13457 }
13458 }
13459
13460 /* Verify type T. */
13461
13462 void
13463 verify_type (const_tree t)
13464 {
13465 bool error_found = false;
13466 tree mv = TYPE_MAIN_VARIANT (t);
13467 if (!mv)
13468 {
13469 error ("main variant is not defined");
13470 error_found = true;
13471 }
13472 else if (mv != TYPE_MAIN_VARIANT (mv))
13473 {
13474 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13475 debug_tree (mv);
13476 error_found = true;
13477 }
13478 else if (t != mv && !verify_type_variant (t, mv))
13479 error_found = true;
13480
13481 tree ct = TYPE_CANONICAL (t);
13482 if (!ct)
13483 ;
13484 else if (TYPE_CANONICAL (t) != ct)
13485 {
13486 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13487 debug_tree (ct);
13488 error_found = true;
13489 }
13490 /* Method and function types cannot be used to address memory and thus
13491 TYPE_CANONICAL really matters only for determining useless conversions.
13492
13493 FIXME: C++ FE produce declarations of builtin functions that are not
13494 compatible with main variants. */
13495 else if (TREE_CODE (t) == FUNCTION_TYPE)
13496 ;
13497 else if (t != ct
13498 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13499 with variably sized arrays because their sizes possibly
13500 gimplified to different variables. */
13501 && !variably_modified_type_p (ct, NULL)
13502 && !gimple_canonical_types_compatible_p (t, ct, false)
13503 && COMPLETE_TYPE_P (t))
13504 {
13505 error ("%<TYPE_CANONICAL%> is not compatible");
13506 debug_tree (ct);
13507 error_found = true;
13508 }
13509
13510 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13511 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13512 {
13513 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13514 debug_tree (ct);
13515 error_found = true;
13516 }
13517 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13518 {
13519 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13520 debug_tree (ct);
13521 debug_tree (TYPE_MAIN_VARIANT (ct));
13522 error_found = true;
13523 }
13524
13525
13526 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13527 if (RECORD_OR_UNION_TYPE_P (t))
13528 {
13529 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13530 and danagle the pointer from time to time. */
13531 if (TYPE_VFIELD (t)
13532 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13533 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13534 {
13535 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13536 debug_tree (TYPE_VFIELD (t));
13537 error_found = true;
13538 }
13539 }
13540 else if (TREE_CODE (t) == POINTER_TYPE)
13541 {
13542 if (TYPE_NEXT_PTR_TO (t)
13543 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13544 {
13545 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13546 debug_tree (TYPE_NEXT_PTR_TO (t));
13547 error_found = true;
13548 }
13549 }
13550 else if (TREE_CODE (t) == REFERENCE_TYPE)
13551 {
13552 if (TYPE_NEXT_REF_TO (t)
13553 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13554 {
13555 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13556 debug_tree (TYPE_NEXT_REF_TO (t));
13557 error_found = true;
13558 }
13559 }
13560 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13561 || TREE_CODE (t) == FIXED_POINT_TYPE)
13562 {
13563 /* FIXME: The following check should pass:
13564 useless_type_conversion_p (const_cast <tree> (t),
13565 TREE_TYPE (TYPE_MIN_VALUE (t))
13566 but does not for C sizetypes in LTO. */
13567 }
13568
13569 /* Check various uses of TYPE_MAXVAL_RAW. */
13570 if (RECORD_OR_UNION_TYPE_P (t))
13571 {
13572 if (!TYPE_BINFO (t))
13573 ;
13574 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13575 {
13576 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13577 debug_tree (TYPE_BINFO (t));
13578 error_found = true;
13579 }
13580 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13581 {
13582 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13583 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13584 error_found = true;
13585 }
13586 }
13587 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13588 {
13589 if (TYPE_METHOD_BASETYPE (t)
13590 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13591 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13592 {
13593 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13594 debug_tree (TYPE_METHOD_BASETYPE (t));
13595 error_found = true;
13596 }
13597 }
13598 else if (TREE_CODE (t) == OFFSET_TYPE)
13599 {
13600 if (TYPE_OFFSET_BASETYPE (t)
13601 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13602 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13603 {
13604 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13605 debug_tree (TYPE_OFFSET_BASETYPE (t));
13606 error_found = true;
13607 }
13608 }
13609 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13610 || TREE_CODE (t) == FIXED_POINT_TYPE)
13611 {
13612 /* FIXME: The following check should pass:
13613 useless_type_conversion_p (const_cast <tree> (t),
13614 TREE_TYPE (TYPE_MAX_VALUE (t))
13615 but does not for C sizetypes in LTO. */
13616 }
13617 else if (TREE_CODE (t) == ARRAY_TYPE)
13618 {
13619 if (TYPE_ARRAY_MAX_SIZE (t)
13620 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13621 {
13622 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13623 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13624 error_found = true;
13625 }
13626 }
13627 else if (TYPE_MAX_VALUE_RAW (t))
13628 {
13629 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13630 debug_tree (TYPE_MAX_VALUE_RAW (t));
13631 error_found = true;
13632 }
13633
13634 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13635 {
13636 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13637 debug_tree (TYPE_LANG_SLOT_1 (t));
13638 error_found = true;
13639 }
13640
13641 /* Check various uses of TYPE_VALUES_RAW. */
13642 if (TREE_CODE (t) == ENUMERAL_TYPE)
13643 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13644 {
13645 tree value = TREE_VALUE (l);
13646 tree name = TREE_PURPOSE (l);
13647
13648 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13649 CONST_DECL of ENUMERAL TYPE. */
13650 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13651 {
13652 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13653 debug_tree (value);
13654 debug_tree (name);
13655 error_found = true;
13656 }
13657 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13658 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13659 {
13660 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13661 "to the enum");
13662 debug_tree (value);
13663 debug_tree (name);
13664 error_found = true;
13665 }
13666 if (TREE_CODE (name) != IDENTIFIER_NODE)
13667 {
13668 error ("enum value name is not %<IDENTIFIER_NODE%>");
13669 debug_tree (value);
13670 debug_tree (name);
13671 error_found = true;
13672 }
13673 }
13674 else if (TREE_CODE (t) == ARRAY_TYPE)
13675 {
13676 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13677 {
13678 error ("array %<TYPE_DOMAIN%> is not integer type");
13679 debug_tree (TYPE_DOMAIN (t));
13680 error_found = true;
13681 }
13682 }
13683 else if (RECORD_OR_UNION_TYPE_P (t))
13684 {
13685 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13686 {
13687 error ("%<TYPE_FIELDS%> defined in incomplete type");
13688 error_found = true;
13689 }
13690 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13691 {
13692 /* TODO: verify properties of decls. */
13693 if (TREE_CODE (fld) == FIELD_DECL)
13694 ;
13695 else if (TREE_CODE (fld) == TYPE_DECL)
13696 ;
13697 else if (TREE_CODE (fld) == CONST_DECL)
13698 ;
13699 else if (VAR_P (fld))
13700 ;
13701 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13702 ;
13703 else if (TREE_CODE (fld) == USING_DECL)
13704 ;
13705 else if (TREE_CODE (fld) == FUNCTION_DECL)
13706 ;
13707 else
13708 {
13709 error ("wrong tree in %<TYPE_FIELDS%> list");
13710 debug_tree (fld);
13711 error_found = true;
13712 }
13713 }
13714 }
13715 else if (TREE_CODE (t) == INTEGER_TYPE
13716 || TREE_CODE (t) == BOOLEAN_TYPE
13717 || TREE_CODE (t) == OFFSET_TYPE
13718 || TREE_CODE (t) == REFERENCE_TYPE
13719 || TREE_CODE (t) == NULLPTR_TYPE
13720 || TREE_CODE (t) == POINTER_TYPE)
13721 {
13722 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13723 {
13724 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13725 "is %p",
13726 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13727 error_found = true;
13728 }
13729 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13730 {
13731 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13732 debug_tree (TYPE_CACHED_VALUES (t));
13733 error_found = true;
13734 }
13735 /* Verify just enough of cache to ensure that no one copied it to new type.
13736 All copying should go by copy_node that should clear it. */
13737 else if (TYPE_CACHED_VALUES_P (t))
13738 {
13739 int i;
13740 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13741 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13742 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13743 {
13744 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13745 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13746 error_found = true;
13747 break;
13748 }
13749 }
13750 }
13751 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13752 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13753 {
13754 /* C++ FE uses TREE_PURPOSE to store initial values. */
13755 if (TREE_PURPOSE (l) && in_lto_p)
13756 {
13757 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13758 debug_tree (l);
13759 error_found = true;
13760 }
13761 if (!TYPE_P (TREE_VALUE (l)))
13762 {
13763 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13764 debug_tree (l);
13765 error_found = true;
13766 }
13767 }
13768 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13769 {
13770 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13771 debug_tree (TYPE_VALUES_RAW (t));
13772 error_found = true;
13773 }
13774 if (TREE_CODE (t) != INTEGER_TYPE
13775 && TREE_CODE (t) != BOOLEAN_TYPE
13776 && TREE_CODE (t) != OFFSET_TYPE
13777 && TREE_CODE (t) != REFERENCE_TYPE
13778 && TREE_CODE (t) != NULLPTR_TYPE
13779 && TREE_CODE (t) != POINTER_TYPE
13780 && TYPE_CACHED_VALUES_P (t))
13781 {
13782 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13783 error_found = true;
13784 }
13785
13786 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13787 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13788 of a type. */
13789 if (TREE_CODE (t) == METHOD_TYPE
13790 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13791 {
13792 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13793 error_found = true;
13794 }
13795
13796 if (error_found)
13797 {
13798 debug_tree (const_cast <tree> (t));
13799 internal_error ("%qs failed", __func__);
13800 }
13801 }
13802
13803
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  /* PREC is the precision of the value we are currently reasoning about;
     it is narrowed as we look through conversions below.  */
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* CNT bounds both look-through loops so pathological conversion chains
     terminate.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* For a constant, sign-extend to PREC and test the sign directly.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through GENERIC conversions from an integral type that is no
     wider than PREC.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* Query global value-range information; while no usable range is known,
     keep looking through non-widening conversion statements defining ARG.  */
  value_range r;
  while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* Same reasoning as above: a zero extension from a narrower
		 type yields a positive value.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      /* Signed: positive iff the lower bound is non-negative; negative iff
	 the upper bound is negative (in PREC-bit signed interpretation).  */
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
13882
13883
13884
13885
13886 /* Return true if ARG is marked with the nonnull attribute in the
13887 current function signature. */
13888
13889 bool
13890 nonnull_arg_p (const_tree arg)
13891 {
13892 tree t, attrs, fntype;
13893 unsigned HOST_WIDE_INT arg_num;
13894
13895 gcc_assert (TREE_CODE (arg) == PARM_DECL
13896 && (POINTER_TYPE_P (TREE_TYPE (arg))
13897 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13898
13899 /* The static chain decl is always non null. */
13900 if (arg == cfun->static_chain_decl)
13901 return true;
13902
13903 /* THIS argument of method is always non-NULL. */
13904 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13905 && arg == DECL_ARGUMENTS (cfun->decl)
13906 && flag_delete_null_pointer_checks)
13907 return true;
13908
13909 /* Values passed by reference are always non-NULL. */
13910 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13911 && flag_delete_null_pointer_checks)
13912 return true;
13913
13914 fntype = TREE_TYPE (cfun->decl);
13915 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13916 {
13917 attrs = lookup_attribute ("nonnull", attrs);
13918
13919 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13920 if (attrs == NULL_TREE)
13921 return false;
13922
13923 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13924 if (TREE_VALUE (attrs) == NULL_TREE)
13925 return true;
13926
13927 /* Get the position number for ARG in the function signature. */
13928 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13929 t;
13930 t = DECL_CHAIN (t), arg_num++)
13931 {
13932 if (t == arg)
13933 break;
13934 }
13935
13936 gcc_assert (t == arg);
13937
13938 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13939 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13940 {
13941 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13942 return true;
13943 }
13944 }
13945
13946 return false;
13947 }
13948
13949 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13950 information. */
13951
13952 location_t
13953 set_block (location_t loc, tree block)
13954 {
13955 location_t pure_loc = get_pure_location (loc);
13956 source_range src_range = get_range_from_loc (line_table, loc);
13957 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13958 }
13959
13960 location_t
13961 set_source_range (tree expr, location_t start, location_t finish)
13962 {
13963 source_range src_range;
13964 src_range.m_start = start;
13965 src_range.m_finish = finish;
13966 return set_source_range (expr, src_range);
13967 }
13968
13969 location_t
13970 set_source_range (tree expr, source_range src_range)
13971 {
13972 if (!EXPR_P (expr))
13973 return UNKNOWN_LOCATION;
13974
13975 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13976 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13977 pure_loc,
13978 src_range,
13979 NULL);
13980 SET_EXPR_LOCATION (expr, adhoc);
13981 return adhoc;
13982 }
13983
13984 /* Return EXPR, potentially wrapped with a node expression LOC,
13985 if !CAN_HAVE_LOCATION_P (expr).
13986
13987 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
13988 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
13989
13990 Wrapper nodes can be identified using location_wrapper_p. */
13991
13992 tree
13993 maybe_wrap_with_location (tree expr, location_t loc)
13994 {
13995 if (expr == NULL)
13996 return NULL;
13997 if (loc == UNKNOWN_LOCATION)
13998 return expr;
13999 if (CAN_HAVE_LOCATION_P (expr))
14000 return expr;
14001 /* We should only be adding wrappers for constants and for decls,
14002 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14003 gcc_assert (CONSTANT_CLASS_P (expr)
14004 || DECL_P (expr)
14005 || EXCEPTIONAL_CLASS_P (expr));
14006
14007 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14008 any impact of the wrapper nodes. */
14009 if (EXCEPTIONAL_CLASS_P (expr))
14010 return expr;
14011
14012 /* Compiler-generated temporary variables don't need a wrapper. */
14013 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14014 return expr;
14015
14016 /* If any auto_suppress_location_wrappers are active, don't create
14017 wrappers. */
14018 if (suppress_location_wrappers > 0)
14019 return expr;
14020
14021 tree_code code
14022 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14023 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14024 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14025 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14026 /* Mark this node as being a wrapper. */
14027 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14028 return wrapper;
14029 }
14030
/* Counter controlling location-wrapper creation: while greater than zero,
   maybe_wrap_with_location returns EXPR without wrapping it.  Presumably
   maintained by auto_suppress_location_wrappers declared in tree.h --
   TODO confirm.  */
int suppress_location_wrappers;
14032
14033 /* Return the name of combined function FN, for debugging purposes. */
14034
14035 const char *
14036 combined_fn_name (combined_fn fn)
14037 {
14038 if (builtin_fn_p (fn))
14039 {
14040 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14041 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14042 }
14043 else
14044 return internal_fn_name (as_internal_fn (fn));
14045 }
14046
14047 /* Return a bitmap with a bit set corresponding to each argument in
14048 a function call type FNTYPE declared with attribute nonnull,
14049 or null if none of the function's argument are nonnull. The caller
14050 must free the bitmap. */
14051
14052 bitmap
14053 get_nonnull_args (const_tree fntype)
14054 {
14055 if (fntype == NULL_TREE)
14056 return NULL;
14057
14058 bitmap argmap = NULL;
14059 if (TREE_CODE (fntype) == METHOD_TYPE)
14060 {
14061 /* The this pointer in C++ non-static member functions is
14062 implicitly nonnull whether or not it's declared as such. */
14063 argmap = BITMAP_ALLOC (NULL);
14064 bitmap_set_bit (argmap, 0);
14065 }
14066
14067 tree attrs = TYPE_ATTRIBUTES (fntype);
14068 if (!attrs)
14069 return argmap;
14070
14071 /* A function declaration can specify multiple attribute nonnull,
14072 each with zero or more arguments. The loop below creates a bitmap
14073 representing a union of all the arguments. An empty (but non-null)
14074 bitmap means that all arguments have been declaraed nonnull. */
14075 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14076 {
14077 attrs = lookup_attribute ("nonnull", attrs);
14078 if (!attrs)
14079 break;
14080
14081 if (!argmap)
14082 argmap = BITMAP_ALLOC (NULL);
14083
14084 if (!TREE_VALUE (attrs))
14085 {
14086 /* Clear the bitmap in case a previous attribute nonnull
14087 set it and this one overrides it for all arguments. */
14088 bitmap_clear (argmap);
14089 return argmap;
14090 }
14091
14092 /* Iterate over the indices of the format arguments declared nonnull
14093 and set a bit for each. */
14094 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14095 {
14096 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14097 bitmap_set_bit (argmap, val);
14098 }
14099 }
14100
14101 return argmap;
14102 }
14103
14104 /* Returns true if TYPE is a type where it and all of its subobjects
14105 (recursively) are of structure, union, or array type. */
14106
14107 bool
14108 is_empty_type (const_tree type)
14109 {
14110 if (RECORD_OR_UNION_TYPE_P (type))
14111 {
14112 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14113 if (TREE_CODE (field) == FIELD_DECL
14114 && !DECL_PADDING_P (field)
14115 && !is_empty_type (TREE_TYPE (field)))
14116 return false;
14117 return true;
14118 }
14119 else if (TREE_CODE (type) == ARRAY_TYPE)
14120 return (integer_minus_onep (array_type_nelts (type))
14121 || TYPE_DOMAIN (type) == NULL_TREE
14122 || is_empty_type (TREE_TYPE (type)));
14123 return false;
14124 }
14125
14126 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14127 that shouldn't be passed via stack. */
14128
14129 bool
14130 default_is_empty_record (const_tree type)
14131 {
14132 if (!abi_version_at_least (12))
14133 return false;
14134
14135 if (type == error_mark_node)
14136 return false;
14137
14138 if (TREE_ADDRESSABLE (type))
14139 return false;
14140
14141 return is_empty_type (TYPE_MAIN_VARIANT (type));
14142 }
14143
14144 /* Determine whether TYPE is a structure with a flexible array member,
14145 or a union containing such a structure (possibly recursively). */
14146
14147 bool
14148 flexible_array_type_p (const_tree type)
14149 {
14150 tree x, last;
14151 switch (TREE_CODE (type))
14152 {
14153 case RECORD_TYPE:
14154 last = NULL_TREE;
14155 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14156 if (TREE_CODE (x) == FIELD_DECL)
14157 last = x;
14158 if (last == NULL_TREE)
14159 return false;
14160 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14161 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14162 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14163 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14164 return true;
14165 return false;
14166 case UNION_TYPE:
14167 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14168 {
14169 if (TREE_CODE (x) == FIELD_DECL
14170 && flexible_array_type_p (TREE_TYPE (x)))
14171 return true;
14172 }
14173 return false;
14174 default:
14175 return false;
14176 }
14177 }
14178
14179 /* Like int_size_in_bytes, but handle empty records specially. */
14180
14181 HOST_WIDE_INT
14182 arg_int_size_in_bytes (const_tree type)
14183 {
14184 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14185 }
14186
14187 /* Like size_in_bytes, but handle empty records specially. */
14188
14189 tree
14190 arg_size_in_bytes (const_tree type)
14191 {
14192 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14193 }
14194
14195 /* Return true if an expression with CODE has to have the same result type as
14196 its first operand. */
14197
14198 bool
14199 expr_type_first_operand_type_p (tree_code code)
14200 {
14201 switch (code)
14202 {
14203 case NEGATE_EXPR:
14204 case ABS_EXPR:
14205 case BIT_NOT_EXPR:
14206 case PAREN_EXPR:
14207 case CONJ_EXPR:
14208
14209 case PLUS_EXPR:
14210 case MINUS_EXPR:
14211 case MULT_EXPR:
14212 case TRUNC_DIV_EXPR:
14213 case CEIL_DIV_EXPR:
14214 case FLOOR_DIV_EXPR:
14215 case ROUND_DIV_EXPR:
14216 case TRUNC_MOD_EXPR:
14217 case CEIL_MOD_EXPR:
14218 case FLOOR_MOD_EXPR:
14219 case ROUND_MOD_EXPR:
14220 case RDIV_EXPR:
14221 case EXACT_DIV_EXPR:
14222 case MIN_EXPR:
14223 case MAX_EXPR:
14224 case BIT_IOR_EXPR:
14225 case BIT_XOR_EXPR:
14226 case BIT_AND_EXPR:
14227
14228 case LSHIFT_EXPR:
14229 case RSHIFT_EXPR:
14230 case LROTATE_EXPR:
14231 case RROTATE_EXPR:
14232 return true;
14233
14234 default:
14235 return false;
14236 }
14237 }
14238
14239 /* Return a typenode for the "standard" C type with a given name. */
14240 tree
14241 get_typenode_from_name (const char *name)
14242 {
14243 if (name == NULL || *name == '\0')
14244 return NULL_TREE;
14245
14246 if (strcmp (name, "char") == 0)
14247 return char_type_node;
14248 if (strcmp (name, "unsigned char") == 0)
14249 return unsigned_char_type_node;
14250 if (strcmp (name, "signed char") == 0)
14251 return signed_char_type_node;
14252
14253 if (strcmp (name, "short int") == 0)
14254 return short_integer_type_node;
14255 if (strcmp (name, "short unsigned int") == 0)
14256 return short_unsigned_type_node;
14257
14258 if (strcmp (name, "int") == 0)
14259 return integer_type_node;
14260 if (strcmp (name, "unsigned int") == 0)
14261 return unsigned_type_node;
14262
14263 if (strcmp (name, "long int") == 0)
14264 return long_integer_type_node;
14265 if (strcmp (name, "long unsigned int") == 0)
14266 return long_unsigned_type_node;
14267
14268 if (strcmp (name, "long long int") == 0)
14269 return long_long_integer_type_node;
14270 if (strcmp (name, "long long unsigned int") == 0)
14271 return long_long_unsigned_type_node;
14272
14273 gcc_unreachable ();
14274 }
14275
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Each entry pairs a dedicated pointer-type node with a plain (const) void
   pointer fallback and the tag name of the pointed-to struct -- presumably
   matching the field layout of builtin_structptr_type in tree.h; confirm
   there.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
14289
14290 /* Return the maximum object size. */
14291
14292 tree
14293 max_object_size (void)
14294 {
14295 /* To do: Make this a configurable parameter. */
14296 return TYPE_MAX_VALUE (ptrdiff_type_node);
14297 }
14298
14299 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14300 parameter default to false and that weeds out error_mark_node. */
14301
14302 bool
14303 verify_type_context (location_t loc, type_context_kind context,
14304 const_tree type, bool silent_p)
14305 {
14306 if (type == error_mark_node)
14307 return true;
14308
14309 gcc_assert (TYPE_P (type));
14310 return (!targetm.verify_type_context
14311 || targetm.verify_type_context (loc, context, type, silent_p));
14312 }
14313
/* Return that NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators, judged by their Itanium-ABI mangled names.  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm)
{
  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* Shortest candidates are "_ZnwX" (5) and "_ZdlPv" (6).  */
  if (new_len < 5 || delete_len < 6)
    return false;
  /* Skip up to two leading underscores on each name -- presumably
     accounting for targets that prepend a user-label prefix; confirm.  */
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;
  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;
  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  /* The delete operator's first parameter must be "Pv" (void *).  */
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }
  return false;
}
14382
14383 #if CHECKING_P
14384
14385 namespace selftest {
14386
14387 /* Selftests for tree. */
14388
14389 /* Verify that integer constants are sane. */
14390
14391 static void
14392 test_integer_constants ()
14393 {
14394 ASSERT_TRUE (integer_type_node != NULL);
14395 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14396
14397 tree type = integer_type_node;
14398
14399 tree zero = build_zero_cst (type);
14400 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14401 ASSERT_EQ (type, TREE_TYPE (zero));
14402
14403 tree one = build_int_cst (type, 1);
14404 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14405 ASSERT_EQ (type, TREE_TYPE (zero));
14406 }
14407
14408 /* Verify identifiers. */
14409
14410 static void
14411 test_identifiers ()
14412 {
14413 tree identifier = get_identifier ("foo");
14414 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14415 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14416 }
14417
14418 /* Verify LABEL_DECL. */
14419
14420 static void
14421 test_labels ()
14422 {
14423 tree identifier = get_identifier ("err");
14424 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14425 identifier, void_type_node);
14426 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14427 ASSERT_FALSE (FORCED_LABEL (label_decl));
14428 }
14429
14430 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14431 are given by VALS. */
14432
14433 static tree
14434 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
14435 {
14436 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14437 tree_vector_builder builder (type, vals.length (), 1);
14438 builder.splice (vals);
14439 return builder.build ();
14440 }
14441
14442 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14443
14444 static void
14445 check_vector_cst (const vec<tree> &expected, tree actual)
14446 {
14447 ASSERT_KNOWN_EQ (expected.length (),
14448 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14449 for (unsigned int i = 0; i < expected.length (); ++i)
14450 ASSERT_EQ (wi::to_wide (expected[i]),
14451 wi::to_wide (vector_cst_elt (actual, i)));
14452 }
14453
14454 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14455 and that its elements match EXPECTED. */
14456
14457 static void
14458 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
14459 unsigned int npatterns)
14460 {
14461 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14462 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14463 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14464 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14465 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14466 check_vector_cst (expected, actual);
14467 }
14468
14469 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14470 and NPATTERNS background elements, and that its elements match
14471 EXPECTED. */
14472
14473 static void
14474 check_vector_cst_fill (const vec<tree> &expected, tree actual,
14475 unsigned int npatterns)
14476 {
14477 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14478 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14479 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14480 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14481 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14482 check_vector_cst (expected, actual);
14483 }
14484
14485 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14486 and that its elements match EXPECTED. */
14487
14488 static void
14489 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
14490 unsigned int npatterns)
14491 {
14492 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14493 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14494 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14495 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14496 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14497 check_vector_cst (expected, actual);
14498 }
14499
/* Test the creation of VECTOR_CSTs, exercising the compressed
   encoding: duplicated, "fill" (foreground + background) and stepped
   patterns, with varying numbers of interleaved patterns.  Each case
   mutates the same 8-lane ELEMENTS fixture in place, so the cases
   build on one another and must stay in this order.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  /* 16-bit unsigned lanes, so wrap-around arithmetic below is easy to
     express.  */
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
14583
14584 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14585 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14586 modifying its argument in-place. */
14587
14588 static void
14589 check_strip_nops (tree node, tree expected)
14590 {
14591 STRIP_NOPS (node);
14592 ASSERT_EQ (expected, node);
14593 }
14594
/* Verify location wrappers: nodes that carry a source location on
   behalf of a wrapped constant or decl, and the predicates and
   strippers that operate on them.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* Wrapping nothing yields nothing.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  Length 4 covers "foo" plus its
     terminating NUL.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  /* STRING_CSTs are wrapped in a VIEW_CONVERT_EXPR rather than the
     NON_LVALUE_EXPR used for decls below.  */
  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
14657
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.
     Naming scheme: i_* are INTEGER_CSTs, f_* are REAL_CSTs, c_i_* and
     c_f_* are COMPLEX_CSTs with integer/real parts, and each wr_*
     node is a location wrapper around the corresponding constant
     (the complex constants get no wrappers here).  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants: real part varies, imaginary part is zero.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  /* Note: unlike integer_minus_onep below, a complex -1 does not
     count as all-ones.  */
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  /* Note: c_i_1 has a zero imaginary part, so "each one" fails even
     though integer_onep above accepts it.  */
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
14974
/* Check that string escaping works correctly.  escaped_string's
   behavior depends on the global -fmessage-length setting (via the
   diagnostic pretty-printer's line cutoff), so this test temporarily
   overrides it and restores it at the end.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  /* With no line cutoff, control characters become escape
     sequences.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15017
/* Run all of the selftests within this file.  Note that later tests
   reuse helpers defined above (e.g. test_location_wrappers calls
   check_strip_nops), but each test builds its own fixtures.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15031
15032 } // namespace selftest
15033
15034 #endif /* CHECKING_P */
15035
15036 #include "gt-tree.h"