]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree.c
Put the CL into the right dir.
[thirdparty/gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24    tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28    call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
70
71 /* Tree code classes. */
72
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
75
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
78 };
79
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
82
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
86
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
89
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
92 };
93
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
96
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
101
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
104 };
105
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
108
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
111
112 const char *const tree_code_class_strings[] =
113 {
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
125 };
126
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
129
130 /* Statistics-gathering stuff. */
131
132 static uint64_t tree_code_counts[MAX_TREE_CODES];
133 uint64_t tree_node_counts[(int) all_kinds];
134 uint64_t tree_node_sizes[(int) all_kinds];
135
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
154 };
155
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
163
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
166
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
170 };
171
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
174
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
176 {
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
179
180 static int
181 keep_cache_entry (type_hash *&t)
182 {
183 return ggc_marked_p (t->type);
184 }
185 };
186
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
193
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
195
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
198
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
200 {
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
203 };
204
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
206
207 /* Class and variable for making sure that there is a single POLY_INT_CST
208 for a given value. */
209 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
210 {
211 typedef std::pair<tree, const poly_wide_int *> compare_type;
212 static hashval_t hash (tree t);
213 static bool equal (tree x, const compare_type &y);
214 };
215
216 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
217
218 /* Hash table for optimization flags and target option flags. Use the same
219 hash table for both sets of options. Nodes for building the current
220 optimization and target option nodes. The assumption is most of the time
221 the options created will already be in the hash table, so we avoid
222 allocating and freeing up a node repeatably. */
223 static GTY (()) tree cl_optimization_node;
224 static GTY (()) tree cl_target_option_node;
225
226 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
227 {
228 static hashval_t hash (tree t);
229 static bool equal (tree x, tree y);
230 };
231
232 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
233
234 /* General tree->tree mapping structure for use in hash tables. */
235
236
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
239
240 static GTY ((cache))
241 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
242
243 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
244 {
245 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
246
247 static bool
248 equal (tree_vec_map *a, tree_vec_map *b)
249 {
250 return a->base.from == b->base.from;
251 }
252
253 static int
254 keep_cache_entry (tree_vec_map *&m)
255 {
256 return ggc_marked_p (m->base.from);
257 }
258 };
259
260 static GTY ((cache))
261 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
262
263 static void set_type_quals (tree, int);
264 static void print_type_hash_statistics (void);
265 static void print_debug_expr_statistics (void);
266 static void print_value_expr_statistics (void);
267
268 static tree build_array_type_1 (tree, tree, bool, bool);
269
270 tree global_trees[TI_MAX];
271 tree integer_types[itk_none];
272
273 bool int_n_enabled_p[NUM_INT_N_ENTS];
274 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
275
276 bool tree_contains_struct[MAX_TREE_CODES][64];
277
278 /* Number of operands for each OpenMP clause. */
279 unsigned const char omp_clause_num_ops[] =
280 {
281 0, /* OMP_CLAUSE_ERROR */
282 1, /* OMP_CLAUSE_PRIVATE */
283 1, /* OMP_CLAUSE_SHARED */
284 1, /* OMP_CLAUSE_FIRSTPRIVATE */
285 2, /* OMP_CLAUSE_LASTPRIVATE */
286 5, /* OMP_CLAUSE_REDUCTION */
287 5, /* OMP_CLAUSE_TASK_REDUCTION */
288 5, /* OMP_CLAUSE_IN_REDUCTION */
289 1, /* OMP_CLAUSE_COPYIN */
290 1, /* OMP_CLAUSE_COPYPRIVATE */
291 3, /* OMP_CLAUSE_LINEAR */
292 2, /* OMP_CLAUSE_ALIGNED */
293 1, /* OMP_CLAUSE_DEPEND */
294 1, /* OMP_CLAUSE_NONTEMPORAL */
295 1, /* OMP_CLAUSE_UNIFORM */
296 1, /* OMP_CLAUSE_TO_DECLARE */
297 1, /* OMP_CLAUSE_LINK */
298 2, /* OMP_CLAUSE_FROM */
299 2, /* OMP_CLAUSE_TO */
300 2, /* OMP_CLAUSE_MAP */
301 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
302 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
303 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
304 1, /* OMP_CLAUSE_INCLUSIVE */
305 1, /* OMP_CLAUSE_EXCLUSIVE */
306 2, /* OMP_CLAUSE__CACHE_ */
307 2, /* OMP_CLAUSE_GANG */
308 1, /* OMP_CLAUSE_ASYNC */
309 1, /* OMP_CLAUSE_WAIT */
310 0, /* OMP_CLAUSE_AUTO */
311 0, /* OMP_CLAUSE_SEQ */
312 1, /* OMP_CLAUSE__LOOPTEMP_ */
313 1, /* OMP_CLAUSE__REDUCTEMP_ */
314 1, /* OMP_CLAUSE__CONDTEMP_ */
315 1, /* OMP_CLAUSE__SCANTEMP_ */
316 1, /* OMP_CLAUSE_IF */
317 1, /* OMP_CLAUSE_NUM_THREADS */
318 1, /* OMP_CLAUSE_SCHEDULE */
319 0, /* OMP_CLAUSE_NOWAIT */
320 1, /* OMP_CLAUSE_ORDERED */
321 0, /* OMP_CLAUSE_DEFAULT */
322 3, /* OMP_CLAUSE_COLLAPSE */
323 0, /* OMP_CLAUSE_UNTIED */
324 1, /* OMP_CLAUSE_FINAL */
325 0, /* OMP_CLAUSE_MERGEABLE */
326 1, /* OMP_CLAUSE_DEVICE */
327 1, /* OMP_CLAUSE_DIST_SCHEDULE */
328 0, /* OMP_CLAUSE_INBRANCH */
329 0, /* OMP_CLAUSE_NOTINBRANCH */
330 1, /* OMP_CLAUSE_NUM_TEAMS */
331 1, /* OMP_CLAUSE_THREAD_LIMIT */
332 0, /* OMP_CLAUSE_PROC_BIND */
333 1, /* OMP_CLAUSE_SAFELEN */
334 1, /* OMP_CLAUSE_SIMDLEN */
335 0, /* OMP_CLAUSE_DEVICE_TYPE */
336 0, /* OMP_CLAUSE_FOR */
337 0, /* OMP_CLAUSE_PARALLEL */
338 0, /* OMP_CLAUSE_SECTIONS */
339 0, /* OMP_CLAUSE_TASKGROUP */
340 1, /* OMP_CLAUSE_PRIORITY */
341 1, /* OMP_CLAUSE_GRAINSIZE */
342 1, /* OMP_CLAUSE_NUM_TASKS */
343 0, /* OMP_CLAUSE_NOGROUP */
344 0, /* OMP_CLAUSE_THREADS */
345 0, /* OMP_CLAUSE_SIMD */
346 1, /* OMP_CLAUSE_HINT */
347 0, /* OMP_CLAUSE_DEFAULTMAP */
348 0, /* OMP_CLAUSE_ORDER */
349 0, /* OMP_CLAUSE_BIND */
350 1, /* OMP_CLAUSE__SIMDUID_ */
351 0, /* OMP_CLAUSE__SIMT_ */
352 0, /* OMP_CLAUSE_INDEPENDENT */
353 1, /* OMP_CLAUSE_WORKER */
354 1, /* OMP_CLAUSE_VECTOR */
355 1, /* OMP_CLAUSE_NUM_GANGS */
356 1, /* OMP_CLAUSE_NUM_WORKERS */
357 1, /* OMP_CLAUSE_VECTOR_LENGTH */
358 3, /* OMP_CLAUSE_TILE */
359 2, /* OMP_CLAUSE__GRIDDIM_ */
360 0, /* OMP_CLAUSE_IF_PRESENT */
361 0, /* OMP_CLAUSE_FINALIZE */
362 };
363
364 const char * const omp_clause_code_name[] =
365 {
366 "error_clause",
367 "private",
368 "shared",
369 "firstprivate",
370 "lastprivate",
371 "reduction",
372 "task_reduction",
373 "in_reduction",
374 "copyin",
375 "copyprivate",
376 "linear",
377 "aligned",
378 "depend",
379 "nontemporal",
380 "uniform",
381 "to",
382 "link",
383 "from",
384 "to",
385 "map",
386 "use_device_ptr",
387 "use_device_addr",
388 "is_device_ptr",
389 "inclusive",
390 "exclusive",
391 "_cache_",
392 "gang",
393 "async",
394 "wait",
395 "auto",
396 "seq",
397 "_looptemp_",
398 "_reductemp_",
399 "_condtemp_",
400 "_scantemp_",
401 "if",
402 "num_threads",
403 "schedule",
404 "nowait",
405 "ordered",
406 "default",
407 "collapse",
408 "untied",
409 "final",
410 "mergeable",
411 "device",
412 "dist_schedule",
413 "inbranch",
414 "notinbranch",
415 "num_teams",
416 "thread_limit",
417 "proc_bind",
418 "safelen",
419 "simdlen",
420 "device_type",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "priority",
426 "grainsize",
427 "num_tasks",
428 "nogroup",
429 "threads",
430 "simd",
431 "hint",
432 "defaultmap",
433 "order",
434 "bind",
435 "_simduid_",
436 "_simt_",
437 "independent",
438 "worker",
439 "vector",
440 "num_gangs",
441 "num_workers",
442 "vector_length",
443 "tile",
444 "_griddim_",
445 "if_present",
446 "finalize",
447 };
448
449
450 /* Return the tree node structure used by tree code CODE. */
451
452 static inline enum tree_node_structure_enum
453 tree_node_structure_for_code (enum tree_code code)
454 {
455 switch (TREE_CODE_CLASS (code))
456 {
457 case tcc_declaration:
458 {
459 switch (code)
460 {
461 case FIELD_DECL:
462 return TS_FIELD_DECL;
463 case PARM_DECL:
464 return TS_PARM_DECL;
465 case VAR_DECL:
466 return TS_VAR_DECL;
467 case LABEL_DECL:
468 return TS_LABEL_DECL;
469 case RESULT_DECL:
470 return TS_RESULT_DECL;
471 case DEBUG_EXPR_DECL:
472 return TS_DECL_WRTL;
473 case CONST_DECL:
474 return TS_CONST_DECL;
475 case TYPE_DECL:
476 return TS_TYPE_DECL;
477 case FUNCTION_DECL:
478 return TS_FUNCTION_DECL;
479 case TRANSLATION_UNIT_DECL:
480 return TS_TRANSLATION_UNIT_DECL;
481 default:
482 return TS_DECL_NON_COMMON;
483 }
484 }
485 case tcc_type:
486 return TS_TYPE_NON_COMMON;
487 case tcc_reference:
488 case tcc_comparison:
489 case tcc_unary:
490 case tcc_binary:
491 case tcc_expression:
492 case tcc_statement:
493 case tcc_vl_exp:
494 return TS_EXP;
495 default: /* tcc_constant and tcc_exceptional */
496 break;
497 }
498 switch (code)
499 {
500 /* tcc_constant cases. */
501 case VOID_CST: return TS_TYPED;
502 case INTEGER_CST: return TS_INT_CST;
503 case POLY_INT_CST: return TS_POLY_INT_CST;
504 case REAL_CST: return TS_REAL_CST;
505 case FIXED_CST: return TS_FIXED_CST;
506 case COMPLEX_CST: return TS_COMPLEX;
507 case VECTOR_CST: return TS_VECTOR;
508 case STRING_CST: return TS_STRING;
509 /* tcc_exceptional cases. */
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case TREE_LIST: return TS_LIST;
513 case TREE_VEC: return TS_VEC;
514 case SSA_NAME: return TS_SSA_NAME;
515 case PLACEHOLDER_EXPR: return TS_COMMON;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case BLOCK: return TS_BLOCK;
518 case CONSTRUCTOR: return TS_CONSTRUCTOR;
519 case TREE_BINFO: return TS_BINFO;
520 case OMP_CLAUSE: return TS_OMP_CLAUSE;
521 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
522 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
523
524 default:
525 gcc_unreachable ();
526 }
527 }
528
529
530 /* Initialize tree_contains_struct to describe the hierarchy of tree
531 nodes. */
532
533 static void
534 initialize_tree_contains_struct (void)
535 {
536 unsigned i;
537
538 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
539 {
540 enum tree_code code;
541 enum tree_node_structure_enum ts_code;
542
543 code = (enum tree_code) i;
544 ts_code = tree_node_structure_for_code (code);
545
546 /* Mark the TS structure itself. */
547 tree_contains_struct[code][ts_code] = 1;
548
549 /* Mark all the structures that TS is derived from. */
550 switch (ts_code)
551 {
552 case TS_TYPED:
553 case TS_BLOCK:
554 case TS_OPTIMIZATION:
555 case TS_TARGET_OPTION:
556 MARK_TS_BASE (code);
557 break;
558
559 case TS_COMMON:
560 case TS_INT_CST:
561 case TS_POLY_INT_CST:
562 case TS_REAL_CST:
563 case TS_FIXED_CST:
564 case TS_VECTOR:
565 case TS_STRING:
566 case TS_COMPLEX:
567 case TS_SSA_NAME:
568 case TS_CONSTRUCTOR:
569 case TS_EXP:
570 case TS_STATEMENT_LIST:
571 MARK_TS_TYPED (code);
572 break;
573
574 case TS_IDENTIFIER:
575 case TS_DECL_MINIMAL:
576 case TS_TYPE_COMMON:
577 case TS_LIST:
578 case TS_VEC:
579 case TS_BINFO:
580 case TS_OMP_CLAUSE:
581 MARK_TS_COMMON (code);
582 break;
583
584 case TS_TYPE_WITH_LANG_SPECIFIC:
585 MARK_TS_TYPE_COMMON (code);
586 break;
587
588 case TS_TYPE_NON_COMMON:
589 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
590 break;
591
592 case TS_DECL_COMMON:
593 MARK_TS_DECL_MINIMAL (code);
594 break;
595
596 case TS_DECL_WRTL:
597 case TS_CONST_DECL:
598 MARK_TS_DECL_COMMON (code);
599 break;
600
601 case TS_DECL_NON_COMMON:
602 MARK_TS_DECL_WITH_VIS (code);
603 break;
604
605 case TS_DECL_WITH_VIS:
606 case TS_PARM_DECL:
607 case TS_LABEL_DECL:
608 case TS_RESULT_DECL:
609 MARK_TS_DECL_WRTL (code);
610 break;
611
612 case TS_FIELD_DECL:
613 MARK_TS_DECL_COMMON (code);
614 break;
615
616 case TS_VAR_DECL:
617 MARK_TS_DECL_WITH_VIS (code);
618 break;
619
620 case TS_TYPE_DECL:
621 case TS_FUNCTION_DECL:
622 MARK_TS_DECL_NON_COMMON (code);
623 break;
624
625 case TS_TRANSLATION_UNIT_DECL:
626 MARK_TS_DECL_COMMON (code);
627 break;
628
629 default:
630 gcc_unreachable ();
631 }
632 }
633
634 /* Basic consistency checks for attributes used in fold. */
635 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
637 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
645 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
646 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
651 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
659 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
660 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
662 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
663 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
664 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
665 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
666 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
667 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
668 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
669 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
670 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
671 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
673 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
675 }
676
677
678 /* Init tree.c. */
679
680 void
681 init_ttree (void)
682 {
683 /* Initialize the hash table of types. */
684 type_hash_table
685 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
686
687 debug_expr_for_decl
688 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
689
690 value_expr_for_decl
691 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
692
693 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
694
695 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
696
697 int_cst_node = make_int_cst (1, 1);
698
699 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
700
701 cl_optimization_node = make_node (OPTIMIZATION_NODE);
702 cl_target_option_node = make_node (TARGET_OPTION_NODE);
703
704 /* Initialize the tree_contains_struct array. */
705 initialize_tree_contains_struct ();
706 lang_hooks.init_ts ();
707 }
708
709 \f
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
713 tree
714 decl_assembler_name (tree decl)
715 {
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
717 lang_hooks.set_decl_assembler_name (decl);
718 return DECL_ASSEMBLER_NAME_RAW (decl);
719 }
720
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
723 name. */
724
725 void
726 overwrite_decl_assembler_name (tree decl, tree name)
727 {
728 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
729 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 }
731
732 /* When the target supports COMDAT groups, this indicates which group the
733 DECL is associated with. This can be either an IDENTIFIER_NODE or a
734 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
735 tree
736 decl_comdat_group (const_tree node)
737 {
738 struct symtab_node *snode = symtab_node::get (node);
739 if (!snode)
740 return NULL;
741 return snode->get_comdat_group ();
742 }
743
744 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
745 tree
746 decl_comdat_group_id (const_tree node)
747 {
748 struct symtab_node *snode = symtab_node::get (node);
749 if (!snode)
750 return NULL;
751 return snode->get_comdat_group_id ();
752 }
753
754 /* When the target supports named section, return its name as IDENTIFIER_NODE
755 or NULL if it is in no section. */
756 const char *
757 decl_section_name (const_tree node)
758 {
759 struct symtab_node *snode = symtab_node::get (node);
760 if (!snode)
761 return NULL;
762 return snode->get_section ();
763 }
764
765 /* Set section name of NODE to VALUE (that is expected to be
766 identifier node) */
767 void
768 set_decl_section_name (tree node, const char *value)
769 {
770 struct symtab_node *snode;
771
772 if (value == NULL)
773 {
774 snode = symtab_node::get (node);
775 if (!snode)
776 return;
777 }
778 else if (VAR_P (node))
779 snode = varpool_node::get_create (node);
780 else
781 snode = cgraph_node::get_create (node);
782 snode->set_section (value);
783 }
784
785 /* Return TLS model of a variable NODE. */
786 enum tls_model
787 decl_tls_model (const_tree node)
788 {
789 struct varpool_node *snode = varpool_node::get (node);
790 if (!snode)
791 return TLS_MODEL_NONE;
792 return snode->tls_model;
793 }
794
795 /* Set TLS model of variable NODE to MODEL. */
796 void
797 set_decl_tls_model (tree node, enum tls_model model)
798 {
799 struct varpool_node *vnode;
800
801 if (model == TLS_MODEL_NONE)
802 {
803 vnode = varpool_node::get (node);
804 if (!vnode)
805 return;
806 }
807 else
808 vnode = varpool_node::get_create (node);
809 vnode->tls_model = model;
810 }
811
812 /* Compute the number of bytes occupied by a tree with code CODE.
813 This function cannot be used for nodes that have variable sizes,
814 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
815 size_t
816 tree_code_size (enum tree_code code)
817 {
818 switch (TREE_CODE_CLASS (code))
819 {
820 case tcc_declaration: /* A decl node */
821 switch (code)
822 {
823 case FIELD_DECL: return sizeof (tree_field_decl);
824 case PARM_DECL: return sizeof (tree_parm_decl);
825 case VAR_DECL: return sizeof (tree_var_decl);
826 case LABEL_DECL: return sizeof (tree_label_decl);
827 case RESULT_DECL: return sizeof (tree_result_decl);
828 case CONST_DECL: return sizeof (tree_const_decl);
829 case TYPE_DECL: return sizeof (tree_type_decl);
830 case FUNCTION_DECL: return sizeof (tree_function_decl);
831 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
832 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
833 case NAMESPACE_DECL:
834 case IMPORTED_DECL:
835 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
836 default:
837 gcc_checking_assert (code >= NUM_TREE_CODES);
838 return lang_hooks.tree_size (code);
839 }
840
841 case tcc_type: /* a type node */
842 switch (code)
843 {
844 case OFFSET_TYPE:
845 case ENUMERAL_TYPE:
846 case BOOLEAN_TYPE:
847 case INTEGER_TYPE:
848 case REAL_TYPE:
849 case POINTER_TYPE:
850 case REFERENCE_TYPE:
851 case NULLPTR_TYPE:
852 case FIXED_POINT_TYPE:
853 case COMPLEX_TYPE:
854 case VECTOR_TYPE:
855 case ARRAY_TYPE:
856 case RECORD_TYPE:
857 case UNION_TYPE:
858 case QUAL_UNION_TYPE:
859 case VOID_TYPE:
860 case FUNCTION_TYPE:
861 case METHOD_TYPE:
862 case LANG_TYPE: return sizeof (tree_type_non_common);
863 default:
864 gcc_checking_assert (code >= NUM_TREE_CODES);
865 return lang_hooks.tree_size (code);
866 }
867
868 case tcc_reference: /* a reference */
869 case tcc_expression: /* an expression */
870 case tcc_statement: /* an expression with side effects */
871 case tcc_comparison: /* a comparison expression */
872 case tcc_unary: /* a unary arithmetic expression */
873 case tcc_binary: /* a binary arithmetic expression */
874 return (sizeof (struct tree_exp)
875 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
876
877 case tcc_constant: /* a constant */
878 switch (code)
879 {
880 case VOID_CST: return sizeof (tree_typed);
881 case INTEGER_CST: gcc_unreachable ();
882 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
883 case REAL_CST: return sizeof (tree_real_cst);
884 case FIXED_CST: return sizeof (tree_fixed_cst);
885 case COMPLEX_CST: return sizeof (tree_complex);
886 case VECTOR_CST: gcc_unreachable ();
887 case STRING_CST: gcc_unreachable ();
888 default:
889 gcc_checking_assert (code >= NUM_TREE_CODES);
890 return lang_hooks.tree_size (code);
891 }
892
893 case tcc_exceptional: /* something random, like an identifier. */
894 switch (code)
895 {
896 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
897 case TREE_LIST: return sizeof (tree_list);
898
899 case ERROR_MARK:
900 case PLACEHOLDER_EXPR: return sizeof (tree_common);
901
902 case TREE_VEC: gcc_unreachable ();
903 case OMP_CLAUSE: gcc_unreachable ();
904
905 case SSA_NAME: return sizeof (tree_ssa_name);
906
907 case STATEMENT_LIST: return sizeof (tree_statement_list);
908 case BLOCK: return sizeof (struct tree_block);
909 case CONSTRUCTOR: return sizeof (tree_constructor);
910 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
911 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
912
913 default:
914 gcc_checking_assert (code >= NUM_TREE_CODES);
915 return lang_hooks.tree_size (code);
916 }
917
918 default:
919 gcc_unreachable ();
920 }
921 }
922
923 /* Compute the number of bytes occupied by NODE. This routine only
924 looks at TREE_CODE, except for those nodes that have variable sizes. */
925 size_t
926 tree_size (const_tree node)
927 {
928 const enum tree_code code = TREE_CODE (node);
929 switch (code)
930 {
931 case INTEGER_CST:
932 return (sizeof (struct tree_int_cst)
933 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
934
935 case TREE_BINFO:
936 return (offsetof (struct tree_binfo, base_binfos)
937 + vec<tree, va_gc>
938 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
939
940 case TREE_VEC:
941 return (sizeof (struct tree_vec)
942 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
943
944 case VECTOR_CST:
945 return (sizeof (struct tree_vector)
946 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
947
948 case STRING_CST:
949 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
950
951 case OMP_CLAUSE:
952 return (sizeof (struct tree_omp_clause)
953 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
954 * sizeof (tree));
955
956 default:
957 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
958 return (sizeof (struct tree_exp)
959 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
960 else
961 return tree_code_size (code);
962 }
963 }
964
965 /* Return tree node kind based on tree CODE. */
966
967 static tree_node_kind
968 get_stats_node_kind (enum tree_code code)
969 {
970 enum tree_code_class type = TREE_CODE_CLASS (code);
971
972 switch (type)
973 {
974 case tcc_declaration: /* A decl node */
975 return d_kind;
976 case tcc_type: /* a type node */
977 return t_kind;
978 case tcc_statement: /* an expression with side effects */
979 return s_kind;
980 case tcc_reference: /* a reference */
981 return r_kind;
982 case tcc_expression: /* an expression */
983 case tcc_comparison: /* a comparison expression */
984 case tcc_unary: /* a unary arithmetic expression */
985 case tcc_binary: /* a binary arithmetic expression */
986 return e_kind;
987 case tcc_constant: /* a constant */
988 return c_kind;
989 case tcc_exceptional: /* something random, like an identifier. */
990 switch (code)
991 {
992 case IDENTIFIER_NODE:
993 return id_kind;
994 case TREE_VEC:
995 return vec_kind;
996 case TREE_BINFO:
997 return binfo_kind;
998 case SSA_NAME:
999 return ssa_name_kind;
1000 case BLOCK:
1001 return b_kind;
1002 case CONSTRUCTOR:
1003 return constr_kind;
1004 case OMP_CLAUSE:
1005 return omp_clause_kind;
1006 default:
1007 return x_kind;
1008 }
1009 break;
1010 case tcc_vl_exp:
1011 return e_kind;
1012 default:
1013 gcc_unreachable ();
1014 }
1015 }
1016
1017 /* Record interesting allocation statistics for a tree node with CODE
1018 and LENGTH. */
1019
1020 static void
1021 record_node_allocation_statistics (enum tree_code code, size_t length)
1022 {
1023 if (!GATHER_STATISTICS)
1024 return;
1025
1026 tree_node_kind kind = get_stats_node_kind (code);
1027
1028 tree_code_counts[(int) code]++;
1029 tree_node_counts[(int) kind]++;
1030 tree_node_sizes[(int) kind] += length;
1031 }
1032
1033 /* Allocate and return a new UID from the DECL_UID namespace. */
1034
1035 int
1036 allocate_decl_uid (void)
1037 {
1038 return next_decl_uid++;
1039 }
1040
1041 /* Return a newly allocated node of code CODE. For decl and type
1042 nodes, some other fields are initialized. The rest of the node is
1043 initialized to zero. This function cannot be used for TREE_VEC,
1044 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1045 tree_code_size.
1046
1047 Achoo! I got a code in the node. */
1048
tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* The allocation is cleared, so the switch below only needs to set
     fields whose required default is non-zero.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* DEBUG_EXPR_DECLs draw negative UIDs from their own counter so
	 they never collide with UIDs of ordinary decls.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1156
1157 /* Free tree node. */
1158
1159 void
1160 free_node (tree node)
1161 {
1162 enum tree_code code = TREE_CODE (node);
1163 if (GATHER_STATISTICS)
1164 {
1165 enum tree_node_kind kind = get_stats_node_kind (code);
1166
1167 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1168 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1169 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1170
1171 tree_code_counts[(int) TREE_CODE (node)]--;
1172 tree_node_counts[(int) kind]--;
1173 tree_node_sizes[(int) kind] -= tree_size (node);
1174 }
1175 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1176 vec_free (CONSTRUCTOR_ELTS (node));
1177 else if (code == BLOCK)
1178 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1179 else if (code == TREE_BINFO)
1180 vec_free (BINFO_BASE_ACCESSES (node));
1181 ggc_free (node);
1182 }
1183 \f
1184 /* Return a new node with the same contents as NODE except that its
1185 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1186
tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  /* Start from a bitwise copy, then clear or refresh the fields that
     must not be shared between the original and the copy.  */
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* A copied decl gets a fresh UID (negative, from its own counter,
	 for DEBUG_EXPR_DECLs).  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* DECL_VALUE_EXPR lives in a side table keyed by the decl, so
	     it must be re-registered for the copy.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy does not own the original's function body or
	     symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the out-of-line option structure.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      /* Likewise for the optimization settings.  */
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1272
1273 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1274 For example, this can copy a list made of TREE_LIST nodes. */
1275
1276 tree
1277 copy_list (tree list)
1278 {
1279 tree head;
1280 tree prev, next;
1281
1282 if (list == 0)
1283 return 0;
1284
1285 head = prev = copy_node (list);
1286 next = TREE_CHAIN (list);
1287 while (next)
1288 {
1289 TREE_CHAIN (prev) = copy_node (next);
1290 prev = TREE_CHAIN (prev);
1291 next = TREE_CHAIN (next);
1292 }
1293 return head;
1294 }
1295
1296 \f
1297 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1298 INTEGER_CST with value CST and type TYPE. */
1299
1300 static unsigned int
1301 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1302 {
1303 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1304 /* We need extra HWIs if CST is an unsigned integer with its
1305 upper bit set. */
1306 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1307 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1308 return cst.get_len ();
1309 }
1310
1311 /* Return a new INTEGER_CST with value CST and type TYPE. */
1312
static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned type whose value has the top bit set: materialize the
	 zero-extension.  The new top element keeps only the bits inside
	 the precision; any elements between LEN and it are all-ones.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Mask off the bits above the precision in the top element, so the
	 stored representation is zero-extended; the loop below then only
	 copies the lower elements.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1342
1343 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1344
1345 static tree
1346 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1347 CXX_MEM_STAT_INFO)
1348 {
1349 size_t length = sizeof (struct tree_poly_int_cst);
1350 record_node_allocation_statistics (POLY_INT_CST, length);
1351
1352 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1353
1354 TREE_SET_CODE (t, POLY_INT_CST);
1355 TREE_CONSTANT (t) = 1;
1356 TREE_TYPE (t) = type;
1357 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1358 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1359 return t;
1360 }
1361
1362 /* Create a constant tree that contains CST sign-extended to TYPE. */
1363
1364 tree
1365 build_int_cst (tree type, poly_int64 cst)
1366 {
1367 /* Support legacy code. */
1368 if (!type)
1369 type = integer_type_node;
1370
1371 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1372 }
1373
1374 /* Create a constant tree that contains CST zero-extended to TYPE. */
1375
1376 tree
1377 build_int_cstu (tree type, poly_uint64 cst)
1378 {
1379 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1380 }
1381
1382 /* Create a constant tree that contains CST sign-extended to TYPE. */
1383
1384 tree
1385 build_int_cst_type (tree type, poly_int64 cst)
1386 {
1387 gcc_assert (type);
1388 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1389 }
1390
1391 /* Constructs tree in type TYPE from with value given by CST. Signedness
1392 of CST is assumed to be the same as the signedness of TYPE. */
1393
1394 tree
1395 double_int_to_tree (tree type, double_int cst)
1396 {
1397 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1398 }
1399
1400 /* We force the wide_int CST to the range of the type TYPE by sign or
1401 zero extending it. OVERFLOWABLE indicates if we are interested in
1402 overflow of the value, when >0 we are only interested in signed
1403 overflow, for <0 we are interested in any overflow. OVERFLOWED
1404 indicates whether overflow has already occurred. CONST_OVERFLOWED
1405 indicates whether constant overflow has already occurred. We force
1406 T's value to be within range of T's type (by setting to 0 or 1 all
1407 the bits outside the type's range). We set TREE_OVERFLOWED if,
1408 OVERFLOWED is nonzero,
1409 or OVERFLOWABLE is >0 and signed overflow occurs
1410 or OVERFLOWABLE is <0 and any overflow occurs
1411 We return a new tree node for the extended wide_int. The node
1412 is shared if no overflow flags are set. */
1413
1414
tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to TYPE's precision before building the
	     unshared node.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Non-constant poly_int: mark overflow on every
		 coefficient as well as on the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1451
1452 /* These are the hash table functions for the hash table of INTEGER_CST
1453 nodes of a sizetype. */
1454
1455 /* Return the hash code X, an INTEGER_CST. */
1456
1457 hashval_t
1458 int_cst_hasher::hash (tree x)
1459 {
1460 const_tree const t = x;
1461 hashval_t code = TYPE_UID (TREE_TYPE (t));
1462 int i;
1463
1464 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1465 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1466
1467 return code;
1468 }
1469
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as the value represented by *Y, another INTEGER_CST.  */
1472
1473 bool
1474 int_cst_hasher::equal (tree x, tree y)
1475 {
1476 const_tree const xt = x;
1477 const_tree const yt = y;
1478
1479 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1480 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1481 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1482 return false;
1483
1484 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1485 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1486 return false;
1487
1488 return true;
1489 }
1490
1491 /* Create an INT_CST node of TYPE and value CST.
1492 The returned node is always shared. For small integers we use a
1493 per-type vector cache, for larger ones we use a single hash table.
1494 The value is extended from its precision according to the sign of
1495 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1496 the upper bits and ensures that hashing and value equality based
1497 upon the underlying HOST_WIDE_INTs works without masking. */
1498
static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  /* IX, when it ends up non-negative, is the index into the per-type
     vector of small cached constants; LIMIT is that vector's size.  */
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); slot 0 holds -1, slot K holds K-1.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal constant was already interned; discard NT.  */
	ggc_free (nt);
    }

  return t;
}
1645
1646 hashval_t
1647 poly_int_cst_hasher::hash (tree t)
1648 {
1649 inchash::hash hstate;
1650
1651 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1652 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1653 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1654
1655 return hstate.end ();
1656 }
1657
1658 bool
1659 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1660 {
1661 if (TREE_TYPE (x) != y.first)
1662 return false;
1663 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1664 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1665 return false;
1666 return true;
1667 }
1668
1669 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1670 The elements must also have type TYPE. */
1671
tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  /* Canonicalize the coefficients to TYPE's precision first, so equal
     values always hash and compare the same.  */
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  if (*slot == NULL_TREE)
    {
      /* Not previously interned: build INTEGER_CST coefficients and the
	 POLY_INT_CST node, and remember it for sharing.  */
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1695
1696 /* Create a constant tree with value VALUE in type TYPE. */
1697
1698 tree
1699 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1700 {
1701 if (value.is_constant ())
1702 return wide_int_to_tree_1 (type, value.coeffs[0]);
1703 return build_poly_int_cst (type, value);
1704 }
1705
/* Record the INTEGER_CST T in the shared-constant caches, mirroring the
   slot selection done by wide_int_to_tree_1.  T must not have
   TREE_OVERFLOW set.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  /* IX/LIMIT select a slot in the per-type small-value vector when the
     value qualifies; otherwise the big hash table is used.  */
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokey, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N; slot 0 holds -1, slot K holds K-1.  */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must still be empty: each small value is cached once.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1809
1810
1811 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1812 and the rest are zeros. */
1813
1814 tree
1815 build_low_bits_mask (tree type, unsigned bits)
1816 {
1817 gcc_assert (bits <= TYPE_PRECISION (type));
1818
1819 return wide_int_to_tree (type, wi::mask (bits, false,
1820 TYPE_PRECISION (type)));
1821 }
1822
1823 /* Checks that X is integer constant that can be expressed in (unsigned)
1824 HOST_WIDE_INT without loss of precision. */
1825
1826 bool
1827 cst_and_fits_in_hwi (const_tree x)
1828 {
1829 return (TREE_CODE (x) == INTEGER_CST
1830 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1831 }
1832
1833 /* Build a newly constructed VECTOR_CST with the given values of
1834 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1835
1836 tree
1837 make_vector (unsigned log2_npatterns,
1838 unsigned int nelts_per_pattern MEM_STAT_DECL)
1839 {
1840 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1841 tree t;
1842 unsigned npatterns = 1 << log2_npatterns;
1843 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1844 unsigned length = (sizeof (struct tree_vector)
1845 + (encoded_nelts - 1) * sizeof (tree));
1846
1847 record_node_allocation_statistics (VECTOR_CST, length);
1848
1849 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1850
1851 TREE_SET_CODE (t, VECTOR_CST);
1852 TREE_CONSTANT (t) = 1;
1853 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1854 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1855
1856 return t;
1857 }
1858
1859 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1860 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1861
tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* Inline the elements of a nested VECTOR_CST.
	     If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Pad any elements the constructor left uninitialized with zero.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
1888
1889 /* Build a vector of type VECTYPE where all the elements are SCs. */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* Constant splat: a VECTOR_CST with a single one-element pattern.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    /* Non-constant scalar with a variable-length vector type: only
       VEC_DUPLICATE_EXPR can express this.  */
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Non-constant scalar, fixed element count: build a CONSTRUCTOR
	 that repeats SC.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
1924
1925 /* If TYPE is not a vector type, just return SC, otherwise return
1926 build_vector_from_val (TYPE, SC). */
1927
1928 tree
1929 build_uniform_cst (tree type, tree sc)
1930 {
1931 if (!VECTOR_TYPE_P (type))
1932 return sc;
1933
1934 return build_vector_from_val (type, sc);
1935 }
1936
1937 /* Build a vector series of type TYPE in which element I has the value
1938 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1939 and a VEC_SERIES_EXPR otherwise. */
1940
1941 tree
1942 build_vec_series (tree type, tree base, tree step)
1943 {
1944 if (integer_zerop (step))
1945 return build_vector_from_val (type, base);
1946 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1947 {
1948 tree_vector_builder builder (type, 1, 3);
1949 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1950 wi::to_wide (base) + wi::to_wide (step));
1951 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1952 wi::to_wide (elt1) + wi::to_wide (step));
1953 builder.quick_push (base);
1954 builder.quick_push (elt1);
1955 builder.quick_push (elt2);
1956 return builder.build ();
1957 }
1958 return build2 (VEC_SERIES_EXPR, type, base, step);
1959 }
1960
1961 /* Return a vector with the same number of units and number of bits
1962 as VEC_TYPE, but in which the elements are a linear series of unsigned
1963 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1964
tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      /* Switch to an unsigned integer element of the same bit width,
	 preserving the number of units.  */
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Push the first three series values as one pattern; the remaining
     elements presumably follow from the encoding — see
     tree_vector_builder.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
1983
1984 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1985 elements are A and the rest are B. */
1986
tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case: with an even fixed element count,
     halving COUNT gives a smaller encoding (COUNT patterns of two
     elements each — NOTE(review): relies on tree_vector_builder
     encoding semantics; confirm there.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
2000
2001 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2002 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2003
2004 void
2005 recompute_constructor_flags (tree c)
2006 {
2007 unsigned int i;
2008 tree val;
2009 bool constant_p = true;
2010 bool side_effects_p = false;
2011 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2012
2013 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2014 {
2015 /* Mostly ctors will have elts that don't have side-effects, so
2016 the usual case is to scan all the elements. Hence a single
2017 loop for both const and side effects, rather than one loop
2018 each (with early outs). */
2019 if (!TREE_CONSTANT (val))
2020 constant_p = false;
2021 if (TREE_SIDE_EFFECTS (val))
2022 side_effects_p = true;
2023 }
2024
2025 TREE_SIDE_EFFECTS (c) = side_effects_p;
2026 TREE_CONSTANT (c) = constant_p;
2027 }
2028
2029 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2030 CONSTRUCTOR C. */
2031
2032 void
2033 verify_constructor_flags (tree c)
2034 {
2035 unsigned int i;
2036 tree val;
2037 bool constant_p = TREE_CONSTANT (c);
2038 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2039 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2040
2041 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2042 {
2043 if (constant_p && !TREE_CONSTANT (val))
2044 internal_error ("non-constant element in constant CONSTRUCTOR");
2045 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2046 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2047 }
2048 }
2049
2050 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2051 are in the vec pointed to by VALS. */
2052 tree
2053 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2054 {
2055 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2056
2057 TREE_TYPE (c) = type;
2058 CONSTRUCTOR_ELTS (c) = vals;
2059
2060 recompute_constructor_flags (c);
2061
2062 return c;
2063 }
2064
2065 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2066 INDEX and VALUE. */
2067 tree
2068 build_constructor_single (tree type, tree index, tree value)
2069 {
2070 vec<constructor_elt, va_gc> *v;
2071 constructor_elt elt = {index, value};
2072
2073 vec_alloc (v, 1);
2074 v->quick_push (elt);
2075
2076 return build_constructor (type, v);
2077 }
2078
2079
2080 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2081 are in a list pointed to by VALS. */
2082 tree
2083 build_constructor_from_list (tree type, tree vals)
2084 {
2085 tree t;
2086 vec<constructor_elt, va_gc> *v = NULL;
2087
2088 if (vals)
2089 {
2090 vec_alloc (v, list_length (vals));
2091 for (t = vals; t; t = TREE_CHAIN (t))
2092 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2093 }
2094
2095 return build_constructor (type, v);
2096 }
2097
2098 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2099 of elements, provided as index/value pairs. */
2100
2101 tree
2102 build_constructor_va (tree type, int nelts, ...)
2103 {
2104 vec<constructor_elt, va_gc> *v = NULL;
2105 va_list p;
2106
2107 va_start (p, nelts);
2108 vec_alloc (v, nelts);
2109 while (nelts--)
2110 {
2111 tree index = va_arg (p, tree);
2112 tree value = va_arg (p, tree);
2113 CONSTRUCTOR_APPEND_ELT (v, index, value);
2114 }
2115 va_end (p);
2116 return build_constructor (type, v);
2117 }
2118
2119 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2120
2121 tree
2122 build_clobber (tree type)
2123 {
2124 tree clobber = build_constructor (type, NULL);
2125 TREE_THIS_VOLATILE (clobber) = true;
2126 return clobber;
2127 }
2128
2129 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2130
2131 tree
2132 build_fixed (tree type, FIXED_VALUE_TYPE f)
2133 {
2134 tree v;
2135 FIXED_VALUE_TYPE *fp;
2136
2137 v = make_node (FIXED_CST);
2138 fp = ggc_alloc<fixed_value> ();
2139 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2140
2141 TREE_TYPE (v) = type;
2142 TREE_FIXED_CST_PTR (v) = fp;
2143 return v;
2144 }
2145
2146 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2147
2148 tree
2149 build_real (tree type, REAL_VALUE_TYPE d)
2150 {
2151 tree v;
2152 REAL_VALUE_TYPE *dp;
2153 int overflow = 0;
2154
2155 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2156 Consider doing it via real_convert now. */
2157
2158 v = make_node (REAL_CST);
2159 dp = ggc_alloc<real_value> ();
2160 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2161
2162 TREE_TYPE (v) = type;
2163 TREE_REAL_CST_PTR (v) = dp;
2164 TREE_OVERFLOW (v) = overflow;
2165 return v;
2166 }
2167
2168 /* Like build_real, but first truncate D to the type. */
2169
2170 tree
2171 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2172 {
2173 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2174 }
2175
2176 /* Return a new REAL_CST node whose type is TYPE
2177 and whose value is the integer value of the INTEGER_CST node I. */
2178
2179 REAL_VALUE_TYPE
2180 real_value_from_int_cst (const_tree type, const_tree i)
2181 {
2182 REAL_VALUE_TYPE d;
2183
2184 /* Clear all bits of the real value type so that we can later do
2185 bitwise comparisons to see if two values are the same. */
2186 memset (&d, 0, sizeof d);
2187
2188 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2189 TYPE_SIGN (TREE_TYPE (i)));
2190 return d;
2191 }
2192
2193 /* Given a tree representing an integer constant I, return a tree
2194 representing the same value as a floating-point constant of type TYPE. */
2195
2196 tree
2197 build_real_from_int_cst (tree type, const_tree i)
2198 {
2199 tree v;
2200 int overflow = TREE_OVERFLOW (i);
2201
2202 v = build_real (type, real_value_from_int_cst (type, i));
2203
2204 TREE_OVERFLOW (v) |= overflow;
2205 return v;
2206 }
2207
2208 /* Return a newly constructed STRING_CST node whose value is
2209 the LEN characters at STR.
2210 Note that for a C string literal, LEN should include the trailing NUL.
2211 The TREE_TYPE is not initialized. */
2212
2213 tree
2214 build_string (int len, const char *str)
2215 {
2216 tree s;
2217 size_t length;
2218
2219 /* Do not waste bytes provided by padding of struct tree_string. */
2220 length = len + offsetof (struct tree_string, str) + 1;
2221
2222 record_node_allocation_statistics (STRING_CST, length);
2223
2224 s = (tree) ggc_internal_alloc (length);
2225
2226 memset (s, 0, sizeof (struct tree_typed));
2227 TREE_SET_CODE (s, STRING_CST);
2228 TREE_CONSTANT (s) = 1;
2229 TREE_STRING_LENGTH (s) = len;
2230 memcpy (s->string.str, str, len);
2231 s->string.str[len] = '\0';
2232
2233 return s;
2234 }
2235
2236 /* Return a newly constructed COMPLEX_CST node whose value is
2237 specified by the real and imaginary parts REAL and IMAG.
2238 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2239 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2240
2241 tree
2242 build_complex (tree type, tree real, tree imag)
2243 {
2244 gcc_assert (CONSTANT_CLASS_P (real));
2245 gcc_assert (CONSTANT_CLASS_P (imag));
2246
2247 tree t = make_node (COMPLEX_CST);
2248
2249 TREE_REALPART (t) = real;
2250 TREE_IMAGPART (t) = imag;
2251 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2252 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2253 return t;
2254 }
2255
2256 /* Build a complex (inf +- 0i), such as for the result of cproj.
2257 TYPE is the complex tree type of the result. If NEG is true, the
2258 imaginary zero is negative. */
2259
2260 tree
2261 build_complex_inf (tree type, bool neg)
2262 {
2263 REAL_VALUE_TYPE rinf, rzero = dconst0;
2264
2265 real_inf (&rinf);
2266 rzero.sign = neg;
2267 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2268 build_real (TREE_TYPE (type), rzero));
2269 }
2270
2271 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2272 element is set to 1. In particular, this is 1 + i for complex types. */
2273
2274 tree
2275 build_each_one_cst (tree type)
2276 {
2277 if (TREE_CODE (type) == COMPLEX_TYPE)
2278 {
2279 tree scalar = build_one_cst (TREE_TYPE (type));
2280 return build_complex (type, scalar, scalar);
2281 }
2282 else
2283 return build_one_cst (type);
2284 }
2285
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  Note that for complex
   types this is 1 + 0i, not 1 + 1i (see build_each_one_cst).  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Broadcast the scalar one across all lanes.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The multiplicative identity of the complex numbers: 1 + 0i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2323
2324 /* Return an integer of type TYPE containing all 1's in as much precision as
2325 it contains, or a complex or vector whose subparts are such integers. */
2326
2327 tree
2328 build_all_ones_cst (tree type)
2329 {
2330 if (TREE_CODE (type) == COMPLEX_TYPE)
2331 {
2332 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2333 return build_complex (type, scalar, scalar);
2334 }
2335 else
2336 return build_minus_one_cst (type);
2337 }
2338
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.
   For complex types this is -1 + 0i (see build_all_ones_cst for the
   variant whose imaginary part is also -1).  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate -1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Broadcast the scalar -1 across all lanes.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* -1 + 0i.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2378
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Broadcast the scalar zero across all lanes.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	/* 0 + 0i.  */
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Aggregates use an empty CONSTRUCTOR; anything else is folded
	 from the shared integer zero node.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2419
2420
/* Build a TREE_BINFO node with room for BASE_BINFOS base-binfo
   entries in its embedded vector.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vector is embedded at the tail of the node.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only clear the header; the vector is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2442
2443 /* Create a CASE_LABEL_EXPR tree node and return it. */
2444
2445 tree
2446 build_case_label (tree low_value, tree high_value, tree label_decl)
2447 {
2448 tree t = make_node (CASE_LABEL_EXPR);
2449
2450 TREE_TYPE (t) = void_type_node;
2451 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2452
2453 CASE_LOW (t) = low_value;
2454 CASE_HIGH (t) = high_value;
2455 CASE_LABEL (t) = label_decl;
2456 CASE_CHAIN (t) = NULL_TREE;
2457
2458 return t;
2459 }
2460
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.
   LEN must be at least 1.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* One HOST_WIDE_INT element is already part of struct tree_int_cst.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2492
2493 /* Build a newly constructed TREE_VEC node of length LEN. */
2494
2495 tree
2496 make_tree_vec (int len MEM_STAT_DECL)
2497 {
2498 tree t;
2499 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2500
2501 record_node_allocation_statistics (TREE_VEC, length);
2502
2503 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2504
2505 TREE_SET_CODE (t, TREE_VEC);
2506 TREE_VEC_LENGTH (t) = len;
2507
2508 return t;
2509 }
2510
/* Grow a TREE_VEC node V to new length LEN, which must be strictly
   greater than the current length.  Returns the (possibly moved)
   node; callers must use the returned pointer.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  gcc_assert (len > oldlen);

  /* One tree slot is already part of struct tree_vec.  */
  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Only account for the growth, not the whole new size.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2532 \f
2533 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2534 fixed, and scalar, complex or vector. */
2535
2536 bool
2537 zerop (const_tree expr)
2538 {
2539 return (integer_zerop (expr)
2540 || real_zerop (expr)
2541 || fixed_zerop (expr));
2542 }
2543
2544 /* Return 1 if EXPR is the integer constant zero or a complex constant
2545 of zero, or a location wrapper for such a constant. */
2546
2547 bool
2548 integer_zerop (const_tree expr)
2549 {
2550 STRIP_ANY_LOCATION_WRAPPER (expr);
2551
2552 switch (TREE_CODE (expr))
2553 {
2554 case INTEGER_CST:
2555 return wi::to_wide (expr) == 0;
2556 case COMPLEX_CST:
2557 return (integer_zerop (TREE_REALPART (expr))
2558 && integer_zerop (TREE_IMAGPART (expr)));
2559 case VECTOR_CST:
2560 return (VECTOR_CST_NPATTERNS (expr) == 1
2561 && VECTOR_CST_DUPLICATE_P (expr)
2562 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2563 default:
2564 return false;
2565 }
2566 }
2567
2568 /* Return 1 if EXPR is the integer constant one or the corresponding
2569 complex constant, or a location wrapper for such a constant. */
2570
2571 bool
2572 integer_onep (const_tree expr)
2573 {
2574 STRIP_ANY_LOCATION_WRAPPER (expr);
2575
2576 switch (TREE_CODE (expr))
2577 {
2578 case INTEGER_CST:
2579 return wi::eq_p (wi::to_widest (expr), 1);
2580 case COMPLEX_CST:
2581 return (integer_onep (TREE_REALPART (expr))
2582 && integer_zerop (TREE_IMAGPART (expr)));
2583 case VECTOR_CST:
2584 return (VECTOR_CST_NPATTERNS (expr) == 1
2585 && VECTOR_CST_DUPLICATE_P (expr)
2586 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2587 default:
2588 return false;
2589 }
2590 }
2591
2592 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2593 return 1 if every piece is the integer constant one.
2594 Also return 1 for location wrappers for such a constant. */
2595
2596 bool
2597 integer_each_onep (const_tree expr)
2598 {
2599 STRIP_ANY_LOCATION_WRAPPER (expr);
2600
2601 if (TREE_CODE (expr) == COMPLEX_CST)
2602 return (integer_onep (TREE_REALPART (expr))
2603 && integer_onep (TREE_IMAGPART (expr)));
2604 else
2605 return integer_onep (expr);
2606 }
2607
2608 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2609 it contains, or a complex or vector whose subparts are such integers,
2610 or a location wrapper for such a constant. */
2611
2612 bool
2613 integer_all_onesp (const_tree expr)
2614 {
2615 STRIP_ANY_LOCATION_WRAPPER (expr);
2616
2617 if (TREE_CODE (expr) == COMPLEX_CST
2618 && integer_all_onesp (TREE_REALPART (expr))
2619 && integer_all_onesp (TREE_IMAGPART (expr)))
2620 return true;
2621
2622 else if (TREE_CODE (expr) == VECTOR_CST)
2623 return (VECTOR_CST_NPATTERNS (expr) == 1
2624 && VECTOR_CST_DUPLICATE_P (expr)
2625 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2626
2627 else if (TREE_CODE (expr) != INTEGER_CST)
2628 return false;
2629
2630 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2631 == wi::to_wide (expr));
2632 }
2633
2634 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2635 for such a constant. */
2636
2637 bool
2638 integer_minus_onep (const_tree expr)
2639 {
2640 STRIP_ANY_LOCATION_WRAPPER (expr);
2641
2642 if (TREE_CODE (expr) == COMPLEX_CST)
2643 return (integer_all_onesp (TREE_REALPART (expr))
2644 && integer_zerop (TREE_IMAGPART (expr)));
2645 else
2646 return integer_all_onesp (expr);
2647 }
2648
2649 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2650 one bit on), or a location wrapper for such a constant. */
2651
2652 bool
2653 integer_pow2p (const_tree expr)
2654 {
2655 STRIP_ANY_LOCATION_WRAPPER (expr);
2656
2657 if (TREE_CODE (expr) == COMPLEX_CST
2658 && integer_pow2p (TREE_REALPART (expr))
2659 && integer_zerop (TREE_IMAGPART (expr)))
2660 return true;
2661
2662 if (TREE_CODE (expr) != INTEGER_CST)
2663 return false;
2664
2665 return wi::popcount (wi::to_wide (expr)) == 1;
2666 }
2667
2668 /* Return 1 if EXPR is an integer constant other than zero or a
2669 complex constant other than zero, or a location wrapper for such a
2670 constant. */
2671
2672 bool
2673 integer_nonzerop (const_tree expr)
2674 {
2675 STRIP_ANY_LOCATION_WRAPPER (expr);
2676
2677 return ((TREE_CODE (expr) == INTEGER_CST
2678 && wi::to_wide (expr) != 0)
2679 || (TREE_CODE (expr) == COMPLEX_CST
2680 && (integer_nonzerop (TREE_REALPART (expr))
2681 || integer_nonzerop (TREE_IMAGPART (expr)))));
2682 }
2683
2684 /* Return 1 if EXPR is the integer constant one. For vector,
2685 return 1 if every piece is the integer constant minus one
2686 (representing the value TRUE).
2687 Also return 1 for location wrappers for such a constant. */
2688
2689 bool
2690 integer_truep (const_tree expr)
2691 {
2692 STRIP_ANY_LOCATION_WRAPPER (expr);
2693
2694 if (TREE_CODE (expr) == VECTOR_CST)
2695 return integer_all_onesp (expr);
2696 return integer_onep (expr);
2697 }
2698
2699 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2700 for such a constant. */
2701
2702 bool
2703 fixed_zerop (const_tree expr)
2704 {
2705 STRIP_ANY_LOCATION_WRAPPER (expr);
2706
2707 return (TREE_CODE (expr) == FIXED_CST
2708 && TREE_FIXED_CST (expr).data.is_zero ());
2709 }
2710
2711 /* Return the power of two represented by a tree node known to be a
2712 power of two. */
2713
2714 int
2715 tree_log2 (const_tree expr)
2716 {
2717 if (TREE_CODE (expr) == COMPLEX_CST)
2718 return tree_log2 (TREE_REALPART (expr));
2719
2720 return wi::exact_log2 (wi::to_wide (expr));
2721 }
2722
2723 /* Similar, but return the largest integer Y such that 2 ** Y is less
2724 than or equal to EXPR. */
2725
2726 int
2727 tree_floor_log2 (const_tree expr)
2728 {
2729 if (TREE_CODE (expr) == COMPLEX_CST)
2730 return tree_log2 (TREE_REALPART (expr));
2731
2732 return wi::floor_log2 (wi::to_wide (expr));
2733 }
2734
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  The result is a
   conservative lower bound computed recursively over the expression.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have meaningful trailing zeros.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use value-range information recorded on the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve the minimum of both operands' trailing zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only add trailing zeros, so take the maximum.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product add up, capped at the precision.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* A constant in-range shift count adds that many trailing zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A constant in-range shift count removes that many trailing
	 zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two behaves like a right
	 shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the whole narrower operand is zero, the conversion result is
	 zero too, so all PREC bits are trailing zeros.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must guarantee the trailing zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* Only the second operand is the value.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2845
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* Both +0.0 and -0.0 compare equal to dconst0 here; decimal
	 modes are excluded because their trailing zeroes are
	 significant.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
2877
2878 /* Return 1 if EXPR is the real constant one in real or complex form.
2879 Trailing zeroes matter for decimal float constants, so don't return
2880 1 for them.
2881 Also return 1 for location wrappers around such a constant. */
2882
2883 bool
2884 real_onep (const_tree expr)
2885 {
2886 STRIP_ANY_LOCATION_WRAPPER (expr);
2887
2888 switch (TREE_CODE (expr))
2889 {
2890 case REAL_CST:
2891 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2892 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2893 case COMPLEX_CST:
2894 return real_onep (TREE_REALPART (expr))
2895 && real_zerop (TREE_IMAGPART (expr));
2896 case VECTOR_CST:
2897 return (VECTOR_CST_NPATTERNS (expr) == 1
2898 && VECTOR_CST_DUPLICATE_P (expr)
2899 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2900 default:
2901 return false;
2902 }
2903 }
2904
/* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
   matter for decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      /* -1 + 0i.  */
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* A splat of -1.0 is encoded as one duplicated element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2930
2931 /* Nonzero if EXP is a constant or a cast of a constant. */
2932
2933 bool
2934 really_constant_p (const_tree exp)
2935 {
2936 /* This is not quite the same as STRIP_NOPS. It does more. */
2937 while (CONVERT_EXPR_P (exp)
2938 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2939 exp = TREE_OPERAND (exp, 0);
2940 return TREE_CONSTANT (exp);
2941 }
2942
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Check all coefficients first so that *VALUE is only written
	 when the whole constant fits.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
2973
/* Return T as a signed poly_int64.  T must satisfy
   tree_fits_poly_int64_p.  */

poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  return TREE_INT_CST_LOW (t);
}
2982
/* Return T as an unsigned poly_uint64.  T must satisfy
   tree_fits_poly_uint64_p.  */

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  return TREE_INT_CST_LOW (t);
}
2991 \f
2992 /* Return first list element whose TREE_VALUE is ELEM.
2993 Return 0 if ELEM is not in LIST. */
2994
2995 tree
2996 value_member (tree elem, tree list)
2997 {
2998 while (list)
2999 {
3000 if (elem == TREE_VALUE (list))
3001 return list;
3002 list = TREE_CHAIN (list);
3003 }
3004 return NULL_TREE;
3005 }
3006
3007 /* Return first list element whose TREE_PURPOSE is ELEM.
3008 Return 0 if ELEM is not in LIST. */
3009
3010 tree
3011 purpose_member (const_tree elem, tree list)
3012 {
3013 while (list)
3014 {
3015 if (elem == TREE_PURPOSE (list))
3016 return list;
3017 list = TREE_CHAIN (list);
3018 }
3019 return NULL_TREE;
3020 }
3021
3022 /* Return true if ELEM is in V. */
3023
3024 bool
3025 vec_member (const_tree elem, vec<tree, va_gc> *v)
3026 {
3027 unsigned ix;
3028 tree t;
3029 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3030 if (elem == t)
3031 return true;
3032 return false;
3033 }
3034
3035 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3036 NULL_TREE. */
3037
3038 tree
3039 chain_index (int idx, tree chain)
3040 {
3041 for (; chain && idx > 0; --idx)
3042 chain = TREE_CHAIN (chain);
3043 return chain;
3044 }
3045
3046 /* Return nonzero if ELEM is part of the chain CHAIN. */
3047
3048 bool
3049 chain_member (const_tree elem, const_tree chain)
3050 {
3051 while (chain)
3052 {
3053 if (elem == chain)
3054 return true;
3055 chain = DECL_CHAIN (chain);
3056 }
3057
3058 return false;
3059 }
3060
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half the speed of P; if the fast pointer ever catches
     the slow one, the chain is circular (tortoise-and-hare check).  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3087
3088 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3089 UNION_TYPE TYPE, or NULL_TREE if none. */
3090
3091 tree
3092 first_field (const_tree type)
3093 {
3094 tree t = TYPE_FIELDS (type);
3095 while (t && TREE_CODE (t) != FIELD_DECL)
3096 t = TREE_CHAIN (t);
3097 return t;
3098 }
3099
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  Destructive: OP1's last
   node is rewritten in place; the result shares structure with
   both arguments.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Find the last node of OP1 and splice OP2 onto it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the splice did not create a cycle: OP2 must not already
       contain the node we just rewrote.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3128
3129 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3130
3131 tree
3132 tree_last (tree chain)
3133 {
3134 tree next;
3135 if (chain)
3136 while ((next = TREE_CHAIN (chain)))
3137 chain = next;
3138 return chain;
3139 }
3140
3141 /* Reverse the order of elements in the chain T,
3142 and return the new head of the chain (old last element). */
3143
3144 tree
3145 nreverse (tree t)
3146 {
3147 tree prev = 0, decl, next;
3148 for (decl = t; decl; decl = next)
3149 {
3150 /* We shouldn't be using this function to reverse BLOCK chains; we
3151 have blocks_nreverse for that. */
3152 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3153 next = TREE_CHAIN (decl);
3154 TREE_CHAIN (decl) = prev;
3155 prev = decl;
3156 }
3157 return prev;
3158 }
3159 \f
3160 /* Return a newly created TREE_LIST node whose
3161 purpose and value fields are PARM and VALUE. */
3162
3163 tree
3164 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3165 {
3166 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3167 TREE_PURPOSE (t) = parm;
3168 TREE_VALUE (t) = value;
3169 return t;
3170 }
3171
3172 /* Build a chain of TREE_LIST nodes from a vector. */
3173
3174 tree
3175 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3176 {
3177 tree ret = NULL_TREE;
3178 tree *pp = &ret;
3179 unsigned int i;
3180 tree t;
3181 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3182 {
3183 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3184 pp = &TREE_CHAIN (*pp);
3185 }
3186 return ret;
3187 }
3188
3189 /* Return a newly created TREE_LIST node whose
3190 purpose and value fields are PURPOSE and VALUE
3191 and whose TREE_CHAIN is CHAIN. */
3192
3193 tree
3194 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3195 {
3196 tree node;
3197
3198 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3199 memset (node, 0, sizeof (struct tree_common));
3200
3201 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3202
3203 TREE_SET_CODE (node, TREE_LIST);
3204 TREE_CHAIN (node) = chain;
3205 TREE_PURPOSE (node) = purpose;
3206 TREE_VALUE (node) = value;
3207 return node;
3208 }
3209
/* Return the values of the elements of a CONSTRUCTOR CTOR as a vector
   of trees, in element order.  The indices/purposes are dropped.  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  /* Preallocate to the exact element count so quick_push is safe.  */
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
3226 \f
3227 /* Return the size nominally occupied by an object of type TYPE
3228 when it resides in memory. The value is measured in units of bytes,
3229 and its data type is that normally used for type sizes
3230 (which is the first type created by make_signed_type or
3231 make_unsigned_type). */
3232
3233 tree
3234 size_in_bytes_loc (location_t loc, const_tree type)
3235 {
3236 tree t;
3237
3238 if (type == error_mark_node)
3239 return integer_zero_node;
3240
3241 type = TYPE_MAIN_VARIANT (type);
3242 t = TYPE_SIZE_UNIT (type);
3243
3244 if (t == 0)
3245 {
3246 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3247 return size_zero_node;
3248 }
3249
3250 return t;
3251 }
3252
3253 /* Return the size of TYPE (in bytes) as a wide integer
3254 or return -1 if the size can vary or is larger than an integer. */
3255
3256 HOST_WIDE_INT
3257 int_size_in_bytes (const_tree type)
3258 {
3259 tree t;
3260
3261 if (type == error_mark_node)
3262 return 0;
3263
3264 type = TYPE_MAIN_VARIANT (type);
3265 t = TYPE_SIZE_UNIT (type);
3266
3267 if (t && tree_fits_uhwi_p (t))
3268 return TREE_INT_CST_LOW (t);
3269 else
3270 return -1;
3271 }
3272
3273 /* Return the maximum size of TYPE (in bytes) as a wide integer
3274 or return -1 if the size can vary or is larger than an integer. */
3275
3276 HOST_WIDE_INT
3277 max_int_size_in_bytes (const_tree type)
3278 {
3279 HOST_WIDE_INT size = -1;
3280 tree size_tree;
3281
3282 /* If this is an array type, check for a possible MAX_SIZE attached. */
3283
3284 if (TREE_CODE (type) == ARRAY_TYPE)
3285 {
3286 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3287
3288 if (size_tree && tree_fits_uhwi_p (size_tree))
3289 size = tree_to_uhwi (size_tree);
3290 }
3291
3292 /* If we still haven't been able to get a size, see if the language
3293 can compute a maximum size. */
3294
3295 if (size == -1)
3296 {
3297 size_tree = lang_hooks.types.max_size (type);
3298
3299 if (size_tree && tree_fits_uhwi_p (size_tree))
3300 size = tree_to_uhwi (size_tree);
3301 }
3302
3303 return size;
3304 }
3305 \f
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  Combines the byte offset and the
   within-byte bit offset of the FIELD_DECL.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
3315 \f
/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  Combines the byte offset and the
   within-byte bit offset of the FIELD_DECL.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
3325
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3335 \f
3336 /* Return the strictest alignment, in bits, that T is known to have. */
3337
unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
    case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL: case CONST_DECL:
    case VAR_DECL: case PARM_DECL: case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
3380 \f
3381 /* Return, as a tree node, the number of elements for TYPE (which is an
3382 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3383
3384 tree
3385 array_type_nelts (const_tree type)
3386 {
3387 tree index_type, min, max;
3388
3389 /* If they did it with unspecified bounds, then we should have already
3390 given an error about it before we got here. */
3391 if (! TYPE_DOMAIN (type))
3392 return error_mark_node;
3393
3394 index_type = TYPE_DOMAIN (type);
3395 min = TYPE_MIN_VALUE (index_type);
3396 max = TYPE_MAX_VALUE (index_type);
3397
3398 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3399 if (!max)
3400 return error_mark_node;
3401
3402 return (integer_zerop (min)
3403 ? max
3404 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3405 }
3406 \f
3407 /* If arg is static -- a reference to an object in static storage -- then
3408 return the object. This is not the same as the C meaning of `static'.
3409 If arg isn't static, return NULL. */
3410
tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* A variable is static if it lives in static storage (or is an
	 external reference) and is neither thread-local nor dllimport'ed.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* An array element is static if the base array is, the element
	 size is a compile-time constant and so is the index.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3472
3473 \f
3474
3475
3476 /* Return whether OP is a DECL whose address is function-invariant. */
3477
3478 bool
3479 decl_address_invariant_p (const_tree op)
3480 {
3481 /* The conditions below are slightly less strict than the one in
3482 staticp. */
3483
3484 switch (TREE_CODE (op))
3485 {
3486 case PARM_DECL:
3487 case RESULT_DECL:
3488 case LABEL_DECL:
3489 case FUNCTION_DECL:
3490 return true;
3491
3492 case VAR_DECL:
3493 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3494 || DECL_THREAD_LOCAL_P (op)
3495 || DECL_CONTEXT (op) == current_function_decl
3496 || decl_function_context (op) == current_function_decl)
3497 return true;
3498 break;
3499
3500 case CONST_DECL:
3501 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3502 || decl_function_context (op) == current_function_decl)
3503 return true;
3504 break;
3505
3506 default:
3507 break;
3508 }
3509
3510 return false;
3511 }
3512
3513 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3514
3515 bool
3516 decl_address_ip_invariant_p (const_tree op)
3517 {
3518 /* The conditions below are slightly less strict than the one in
3519 staticp. */
3520
3521 switch (TREE_CODE (op))
3522 {
3523 case LABEL_DECL:
3524 case FUNCTION_DECL:
3525 case STRING_CST:
3526 return true;
3527
3528 case VAR_DECL:
3529 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3530 && !DECL_DLLIMPORT_P (op))
3531 || DECL_THREAD_LOCAL_P (op))
3532 return true;
3533 break;
3534
3535 case CONST_DECL:
3536 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3537 return true;
3538 break;
3539
3540 default:
3541 break;
3542 }
3543
3544 return false;
3545 }
3546
3547
3548 /* Return true if T is function-invariant (internal function, does
3549 not handle arithmetic; that's handled in skip_simple_arithmetic and
3550 tree_invariant_p). */
3551
static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants, and read-only trees without side effects, are invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* Walk down the reference chain; the address is invariant only if
	 every index on the way is invariant and no component carries
	 non-default operand 2/3 overrides (which would make the offset
	 computation depend on context).  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* The base itself must be a constant or an invariant-address DECL.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3598
3599 /* Return true if T is function-invariant. */
3600
3601 bool
3602 tree_invariant_p (tree t)
3603 {
3604 tree inner = skip_simple_arithmetic (t);
3605 return tree_invariant_p_1 (inner);
3606 }
3607
3608 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3609 Do this to any expression which may be used in more than one place,
3610 but must be evaluated only once.
3611
3612 Normally, expand_expr would reevaluate the expression each time.
3613 Calling save_expr produces something that is evaluated and recorded
3614 the first time expand_expr is called on it. Subsequent calls to
3615 expand_expr just reuse the recorded value.
3616
3617 The call to expand_expr that generates code that actually computes
3618 the value is the first call *at compile time*. Subsequent calls
3619 *at compile time* generate code to use the saved value.
3620 This produces correct result provided that *at run time* control
3621 always flows through the insns made by the first expand_expr
3622 before reaching the other places where the save_expr was evaluated.
3623 You, the caller of save_expr, must make sure this is so.
3624
3625 Constants, and certain read-only nodes, are returned with no
3626 SAVE_EXPR because that is safe. Expressions containing placeholders
3627 are not touched; see tree.def for an explanation of what these
3628 are used for. */
3629
3630 tree
3631 save_expr (tree expr)
3632 {
3633 tree inner;
3634
3635 /* If the tree evaluates to a constant, then we don't want to hide that
3636 fact (i.e. this allows further folding, and direct checks for constants).
3637 However, a read-only object that has side effects cannot be bypassed.
3638 Since it is no problem to reevaluate literals, we just return the
3639 literal node. */
3640 inner = skip_simple_arithmetic (expr);
3641 if (TREE_CODE (inner) == ERROR_MARK)
3642 return inner;
3643
3644 if (tree_invariant_p_1 (inner))
3645 return expr;
3646
3647 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3648 it means that the size or offset of some field of an object depends on
3649 the value within another field.
3650
3651 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3652 and some variable since it would then need to be both evaluated once and
3653 evaluated more than once. Front-ends must assure this case cannot
3654 happen by surrounding any such subexpressions in their own SAVE_EXPR
3655 and forcing evaluation at the proper time. */
3656 if (contains_placeholder_p (inner))
3657 return expr;
3658
3659 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3660
3661 /* This expression might be placed ahead of a jump to ensure that the
3662 value was computed on both sides of the jump. So make sure it isn't
3663 eliminated as dead. */
3664 TREE_SIDE_EFFECTS (expr) = 1;
3665 return expr;
3666 }
3667
3668 /* Look inside EXPR into any simple arithmetic operations. Return the
3669 outermost non-arithmetic or non-invariant node. */
3670
3671 tree
3672 skip_simple_arithmetic (tree expr)
3673 {
3674 /* We don't care about whether this can be used as an lvalue in this
3675 context. */
3676 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3677 expr = TREE_OPERAND (expr, 0);
3678
3679 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3680 a constant, it will be more efficient to not make another SAVE_EXPR since
3681 it will allow better simplification and GCSE will be able to merge the
3682 computations if they actually occur. */
3683 while (true)
3684 {
3685 if (UNARY_CLASS_P (expr))
3686 expr = TREE_OPERAND (expr, 0);
3687 else if (BINARY_CLASS_P (expr))
3688 {
3689 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3690 expr = TREE_OPERAND (expr, 0);
3691 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3692 expr = TREE_OPERAND (expr, 1);
3693 else
3694 break;
3695 }
3696 else
3697 break;
3698 }
3699
3700 return expr;
3701 }
3702
3703 /* Look inside EXPR into simple arithmetic operations involving constants.
3704 Return the outermost non-arithmetic or non-constant node. */
3705
3706 tree
3707 skip_simple_constant_arithmetic (tree expr)
3708 {
3709 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3710 expr = TREE_OPERAND (expr, 0);
3711
3712 while (true)
3713 {
3714 if (UNARY_CLASS_P (expr))
3715 expr = TREE_OPERAND (expr, 0);
3716 else if (BINARY_CLASS_P (expr))
3717 {
3718 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3719 expr = TREE_OPERAND (expr, 0);
3720 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3721 expr = TREE_OPERAND (expr, 1);
3722 else
3723 break;
3724 }
3725 else
3726 break;
3727 }
3728
3729 return expr;
3730 }
3731
3732 /* Return which tree structure is used by T. */
3733
3734 enum tree_node_structure_enum
3735 tree_node_structure (const_tree t)
3736 {
3737 const enum tree_code code = TREE_CODE (t);
3738 return tree_node_structure_for_code (code);
3739 }
3740
3741 /* Set various status flags when building a CALL_EXPR object T. */
3742
3743 static void
3744 process_call_operands (tree t)
3745 {
3746 bool side_effects = TREE_SIDE_EFFECTS (t);
3747 bool read_only = false;
3748 int i = call_expr_flags (t);
3749
3750 /* Calls have side-effects, except those to const or pure functions. */
3751 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3752 side_effects = true;
3753 /* Propagate TREE_READONLY of arguments for const functions. */
3754 if (i & ECF_CONST)
3755 read_only = true;
3756
3757 if (!side_effects || read_only)
3758 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3759 {
3760 tree op = TREE_OPERAND (t, i);
3761 if (op && TREE_SIDE_EFFECTS (op))
3762 side_effects = true;
3763 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3764 read_only = false;
3765 }
3766
3767 TREE_SIDE_EFFECTS (t) = side_effects;
3768 TREE_READONLY (t) = read_only;
3769 }
3770 \f
3771 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3772 size or offset that depends on a field within a record. */
3773
3774 bool
3775 contains_placeholder_p (const_tree exp)
3776 {
3777 enum tree_code code;
3778
3779 if (!exp)
3780 return 0;
3781
3782 code = TREE_CODE (exp);
3783 if (code == PLACEHOLDER_EXPR)
3784 return 1;
3785
3786 switch (TREE_CODE_CLASS (code))
3787 {
3788 case tcc_reference:
3789 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3790 position computations since they will be converted into a
3791 WITH_RECORD_EXPR involving the reference, which will assume
3792 here will be valid. */
3793 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3794
3795 case tcc_exceptional:
3796 if (code == TREE_LIST)
3797 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3798 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3799 break;
3800
3801 case tcc_unary:
3802 case tcc_binary:
3803 case tcc_comparison:
3804 case tcc_expression:
3805 switch (code)
3806 {
3807 case COMPOUND_EXPR:
3808 /* Ignoring the first operand isn't quite right, but works best. */
3809 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3810
3811 case COND_EXPR:
3812 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3813 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3814 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3815
3816 case SAVE_EXPR:
3817 /* The save_expr function never wraps anything containing
3818 a PLACEHOLDER_EXPR. */
3819 return 0;
3820
3821 default:
3822 break;
3823 }
3824
3825 switch (TREE_CODE_LENGTH (code))
3826 {
3827 case 1:
3828 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3829 case 2:
3830 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3831 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3832 default:
3833 return 0;
3834 }
3835
3836 case tcc_vl_exp:
3837 switch (code)
3838 {
3839 case CALL_EXPR:
3840 {
3841 const_tree arg;
3842 const_call_expr_arg_iterator iter;
3843 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3844 if (CONTAINS_PLACEHOLDER_P (arg))
3845 return 1;
3846 return 0;
3847 }
3848 default:
3849 return 0;
3850 }
3851
3852 default:
3853 return 0;
3854 }
3855 return 0;
3856 }
3857
3858 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3859 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3860 field positions. */
3861
static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* These carry no per-type trees beyond what was checked above.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (for QUAL_UNION_TYPE
	   only) and type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3925
3926 /* Wrapper around above function used to cache its result. */
3927
bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* The cache field encodes: 0 = not yet computed, 1 = false, 2 = true.
     If it has been initialized, then we know the answer.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
3950 \f
3951 /* Push tree EXP onto vector QUEUE if it is not already present. */
3952
static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Scan for a structurally equal entry; FOR_EACH_VEC_ELT leaves ITER
     null when the vector is exhausted without a match.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  if (!iter)
    queue->safe_push (exp);
}
3966
3967 /* Given a tree EXP, find all occurrences of references to fields
3968 in a PLACEHOLDER_EXPR and place them in vector REFS without
3969 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3970 we assume here that EXP contains only arithmetic expressions
3971 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3972 argument list. */
3973
void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk to the base of the reference chain to see whether this
	 component is ultimately read out of a PLACEHOLDER_EXPR.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Operand 0 of a variadic node is its stored operand count;
	   start scanning at operand 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4040
4041 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4042 return a tree with all occurrences of references to F in a
4043 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4044 CONST_DECLs. Note that we assume here that EXP contains only
4045 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4046 occurring only in their argument list. */
4047
tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      /* Return the original node unchanged if nothing was substituted.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute in each operand; if nothing changed, return the
	   original node so identity can be used as a cheap "no change"
	   test by our callers.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* The call's actual arguments start at operand 3.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Copy the node lazily: only if some operand actually changed.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original node on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4236
4237 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4238 for it within OBJ, a tree that is an object or a chain of references. */
4239
tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an element of the chain whose type matches
	 the placeholder's type directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: look for a pointer to an object of the needed type
	 and dereference it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Substitute in each operand; if nothing changed, return the
	   original node so callers can use pointer identity as a cheap
	   "no change" test.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Copy the node lazily: only if some operand actually changed.
	     Operand 0 of a variadic node is its stored operand count.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original node on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4407 \f
4408
4409 /* Subroutine of stabilize_reference; this is called for subtrees of
4410 references. Any expression with side-effects must be put in a SAVE_EXPR
4411 to ensure that it is only evaluated once.
4412
4413 We don't put SAVE_EXPR nodes around everything, because assigning very
4414 simple expressions to temporaries causes us to miss good opportunities
4415 for optimizations. Among other things, the opportunity to fold in the
4416 addition of a constant into an addressing mode often gets lost, e.g.
4417 "y[i+1] += x;". In general, we take the approach that we should not make
4418 an assignment unless we are forced into it - i.e., that any non-side effect
4419 operator should be allowed, and that cse should take care of coalescing
4420 multiple utterances of the same expression should that prove fruitful. */
4421
static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt does not set type or flags; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4494
4495 /* Stabilize a reference so that we can use it any number of times
4496 without causing its operands to be evaluated more than once.
4497 Returns the stabilized reference. This works by means of save_expr,
4498 so see the caveats in the comments about save_expr.
4499
4500 Also allows conversion expressions whose operands are references.
4501 Any other kind of expression is returned unchanged. */
4502
4503 tree
4504 stabilize_reference (tree ref)
4505 {
4506 tree result;
4507 enum tree_code code = TREE_CODE (ref);
4508
4509 switch (code)
4510 {
4511 case VAR_DECL:
4512 case PARM_DECL:
4513 case RESULT_DECL:
4514 /* No action is needed in this case. */
4515 return ref;
4516
4517 CASE_CONVERT:
4518 case FLOAT_EXPR:
4519 case FIX_TRUNC_EXPR:
4520 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4521 break;
4522
4523 case INDIRECT_REF:
4524 result = build_nt (INDIRECT_REF,
4525 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4526 break;
4527
4528 case COMPONENT_REF:
4529 result = build_nt (COMPONENT_REF,
4530 stabilize_reference (TREE_OPERAND (ref, 0)),
4531 TREE_OPERAND (ref, 1), NULL_TREE);
4532 break;
4533
4534 case BIT_FIELD_REF:
4535 result = build_nt (BIT_FIELD_REF,
4536 stabilize_reference (TREE_OPERAND (ref, 0)),
4537 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4538 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4539 break;
4540
4541 case ARRAY_REF:
4542 result = build_nt (ARRAY_REF,
4543 stabilize_reference (TREE_OPERAND (ref, 0)),
4544 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4545 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4546 break;
4547
4548 case ARRAY_RANGE_REF:
4549 result = build_nt (ARRAY_RANGE_REF,
4550 stabilize_reference (TREE_OPERAND (ref, 0)),
4551 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4552 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4553 break;
4554
4555 case COMPOUND_EXPR:
4556 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4557 it wouldn't be ignored. This matters when dealing with
4558 volatiles. */
4559 return stabilize_reference_1 (ref);
4560
4561 /* If arg isn't a kind of lvalue we recognize, make no change.
4562 Caller should recognize the error for an invalid lvalue. */
4563 default:
4564 return ref;
4565
4566 case ERROR_MARK:
4567 return error_mark_node;
4568 }
4569
4570 TREE_TYPE (result) = TREE_TYPE (ref);
4571 TREE_READONLY (result) = TREE_READONLY (ref);
4572 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4573 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4574
4575 return result;
4576 }
4577 \f
4578 /* Low-level constructors for expressions. */
4579
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Fold NODE's constancy and side-effect flags into the TC and SE
   accumulators; a null NODE contributes nothing.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  /* Walk down the chain of handled components in operand 0, examining the
     variable parts (indices and offsets) of each.  */
  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Operand 1 is the index; 2 and 3 are the optional lower bound
	     and element size.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  /* Operand 2 is the optional field offset.  */
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Give the front end a chance to resolve NODE to a decl.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4656
4657 /* Build an expression of code CODE, data type TYPE, and operands as
4658 specified. Expressions and reference nodes can be created this way.
4659 Constants, decls, types and misc nodes cannot be.
4660
4661 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4662 enough for all extant tree codes. */
4663
4664 tree
4665 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4666 {
4667 tree t;
4668
4669 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4670
4671 t = make_node (code PASS_MEM_STAT);
4672 TREE_TYPE (t) = tt;
4673
4674 return t;
4675 }
4676
/* Build a unary expression with code CODE, type TYPE and operand NODE;
   see the family comment above build0.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  /* Every unary expression fits in a tree_exp.  */
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common header needs clearing; everything else is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  /* Inherit side-effects and readonly-ness from the operand, unless the
     operand is a type node (types do not carry these expression flags).  */
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      /* Statements have side effects by definition, except debug markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      /* Taking an address requires recomputing invariance/constancy from
	 the whole component chain underneath.  */
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      /* References to volatile storage are themselves volatile.  */
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4740
/* Helper used by build2 through build5: store arg<N> as operand N of the
   node T and fold the argument's flags into the local accumulators
   side_effects, read_only and constant.  Type nodes are skipped because
   they do not carry these expression flags.  The (void) casts silence
   set-but-unused warnings in callers that do not consume every local.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4755
/* Build a binary expression; see the family comment above build0.
   Asserts pointer-arithmetic invariants and derives the constant,
   readonly and volatile flags of the result from the operands.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* PLUS/MINUS/MULT on a pointer-typed result is only allowed on
     INTEGER_CST operands (constant folding); real pointer arithmetic must
     use POINTER_PLUS_EXPR.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  /* POINTER_PLUS_EXPR takes a pointer and an offset of pointer-offset
     type.  */
  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* Remember division/modulo by literal zero; such a node must not be
	 marked TREE_CONSTANT below.  */
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &X takes X's readonly and volatile flags.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4834
4835
/* Build a ternary expression; see the family comment above build0.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  /* CONSTANT is written by PROCESS_ARG but not consumed here.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR propagates readonly-ness from its operands here.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* References to volatile storage are themselves volatile.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4876
/* Build a quaternary expression; see the family comment above build0.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are written by PROCESS_ARG but not consumed
     here.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* References to volatile storage are themselves volatile.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4903
/* Build a five-operand expression; see the family comment above build0.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are written by PROCESS_ARG but not consumed
     here.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* A TARGET_MEM_REF of &X takes X's readonly and volatile flags.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    /* References to volatile storage are themselves volatile.  */
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4941
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's offset into ours and use its base
	     pointer directly.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Operand 1 is the byte offset, built as a constant of the original
     pointer type.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
4973
4974 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4975
4976 poly_offset_int
4977 mem_ref_offset (const_tree t)
4978 {
4979 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4980 SIGNED);
4981 }
4982
4983 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4984 offsetted by OFFSET units. */
4985
4986 tree
4987 build_invariant_address (tree type, tree base, poly_int64 offset)
4988 {
4989 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4990 build_fold_addr_expr (base),
4991 build_int_cst (ptr_type_node, offset));
4992 tree addr = build1 (ADDR_EXPR, type, ref);
4993 recompute_tree_invariant_for_addr_expr (addr);
4994 return addr;
4995 }
4996
4997 /* Similar except don't specify the TREE_TYPE
4998 and leave the TREE_SIDE_EFFECTS as 0.
4999 It is permissible for arguments to be null,
5000 or even garbage if their values do not matter. */
5001
5002 tree
5003 build_nt (enum tree_code code, ...)
5004 {
5005 tree t;
5006 int length;
5007 int i;
5008 va_list p;
5009
5010 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5011
5012 va_start (p, code);
5013
5014 t = make_node (code);
5015 length = TREE_CODE_LENGTH (code);
5016
5017 for (i = 0; i < length; i++)
5018 TREE_OPERAND (t, i) = va_arg (p, tree);
5019
5020 va_end (p);
5021 return t;
5022 }
5023
/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  /* The "+ 3" accounts for the fixed CALL_EXPR slots preceding the
     arguments (the callee and static chain are set just below).  */
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  /* Copy each argument (ARGS may be null, meaning a zero-argument call).  */
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
5040 \f
5041 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5042 and data type TYPE.
5043 We do NOT enter this node in any sort of symbol table.
5044
5045 LOC is the location of the decl.
5046
5047 layout_decl is used to set up the decl's storage layout.
5048 Other slots are initialized to 0 or null pointers. */
5049
5050 tree
5051 build_decl (location_t loc, enum tree_code code, tree name,
5052 tree type MEM_STAT_DECL)
5053 {
5054 tree t;
5055
5056 t = make_node (code PASS_MEM_STAT);
5057 DECL_SOURCE_LOCATION (t) = loc;
5058
5059 /* if (type == error_mark_node)
5060 type = integer_type_node; */
5061 /* That is not done, deliberately, so that having error_mark_node
5062 as the type can suppress useless errors in the use of this variable. */
5063
5064 DECL_NAME (t) = name;
5065 TREE_TYPE (t) = type;
5066
5067 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5068 layout_decl (t, 0);
5069
5070 return t;
5071 }
5072
5073 /* Builds and returns function declaration with NAME and TYPE. */
5074
5075 tree
5076 build_fn_decl (const char *name, tree type)
5077 {
5078 tree id = get_identifier (name);
5079 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5080
5081 DECL_EXTERNAL (decl) = 1;
5082 TREE_PUBLIC (decl) = 1;
5083 DECL_ARTIFICIAL (decl) = 1;
5084 TREE_NOTHROW (decl) = 1;
5085
5086 return decl;
5087 }
5088
/* Global vector of every translation-unit decl built so far; filled by
   build_translation_unit_decl below.  */
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
			name, NULL_TREE);
  /* Record the front end's language name on the decl.  */
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);
  return tu;
}
5103
5104 \f
5105 /* BLOCK nodes are used to represent the structure of binding contours
5106 and declarations, once those contours have been exited and their contents
5107 compiled. This information is used for outputting debugging info. */
5108
5109 tree
5110 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5111 {
5112 tree block = make_node (BLOCK);
5113
5114 BLOCK_VARS (block) = vars;
5115 BLOCK_SUBBLOCKS (block) = subblocks;
5116 BLOCK_SUPERCONTEXT (block) = supercontext;
5117 BLOCK_CHAIN (block) = chain;
5118 return block;
5119 }
5120
5121 \f
5122 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5123
5124 LOC is the location to use in tree T. */
5125
5126 void
5127 protected_set_expr_location (tree t, location_t loc)
5128 {
5129 if (CAN_HAVE_LOCATION_P (t))
5130 SET_EXPR_LOCATION (t, loc);
5131 }
5132
/* Data used when collecting DECLs and TYPEs for language data removal.  */

class free_lang_data_d
{
public:
  /* Pre-reserve room for 100 decls and 100 types.  */
  free_lang_data_d () : decls (100), types (100) {}

  /* Worklist to avoid excessive recursion.  */
  auto_vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  auto_vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  auto_vec<tree> types;
};
5152
5153
5154 /* Add type or decl T to one of the list of tree nodes that need their
5155 language data removed. The lists are held inside FLD. */
5156
5157 static void
5158 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5159 {
5160 if (DECL_P (t))
5161 fld->decls.safe_push (t);
5162 else if (TYPE_P (t))
5163 fld->types.safe_push (t);
5164 else
5165 gcc_unreachable ();
5166 }
5167
5168 /* Push tree node T into FLD->WORKLIST. */
5169
5170 static inline void
5171 fld_worklist_push (tree t, class free_lang_data_d *fld)
5172 {
5173 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5174 fld->worklist.safe_push ((t));
5175 }
5176
5177
5178 \f
/* Return simplified TYPE_NAME of TYPE.  */

static tree
fld_simplified_type_name (tree type)
{
  /* Only a TYPE_DECL name can be simplified.  */
  if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
    return TYPE_NAME (type);
  /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
     TYPE_DECL if the type doesn't have linkage (i.e. it is a variant, or
     has no mangled assembler name and is not a dynamic RECORD_TYPE with a
     vtable).  NOTE(review): the original comment here was truncated
     ("this must match fld_"); presumably this condition must stay in sync
     with the other fld_* helpers -- confirm before changing it.  */
  if (type != TYPE_MAIN_VARIANT (type)
      || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
	  && (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
    return DECL_NAME (TYPE_NAME (type));
  return TYPE_NAME (type);
}
5197
/* Do same comparison as check_qualified_type skipping lang part of type
   and be more permissive about type names: we only care that names are
   same (for diagnostics) and that ODR names are the same.
   If INNER_TYPE is non-NULL, be sure that TREE_TYPE match it.  */

static bool
fld_type_variant_equal_p (tree t, tree v, tree inner_type)
{
  /* Mismatch on qualifiers, alignment (unless comparing an incomplete
     aggregate against a complete one), simplified name, attributes, or
     the requested inner type means the variants differ.  */
  if (TYPE_QUALS (t) != TYPE_QUALS (v)
      /* We want to match incomplete variants with complete types.
	 In this case we need to ignore alignment.  */
      || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
	  && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
	      || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
      || fld_simplified_type_name (t) != fld_simplified_type_name (v)
      || !attribute_list_equal (TYPE_ATTRIBUTES (t),
				TYPE_ATTRIBUTES (v))
      || (inner_type && TREE_TYPE (v) != inner_type))
    return false;

  return true;
}
5220
/* Find variant of FIRST that match T and create new one if necessary.
   Set TREE_TYPE to INNER_TYPE if non-NULL.  */

static tree
fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
		  tree inner_type = NULL)
{
  /* If T already belongs to FIRST's variant list, return it unchanged.  */
  if (first == TYPE_MAIN_VARIANT (t))
    return t;
  /* Reuse an existing equivalent variant if one is on the chain.  */
  for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
    if (fld_type_variant_equal_p (t, v, inner_type))
      return v;
  /* Otherwise build a fresh variant of FIRST, mirroring T's qualifiers,
     name, attributes and canonical type.  */
  tree v = build_variant_type_copy (first);
  TYPE_READONLY (v) = TYPE_READONLY (t);
  TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
  TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
  TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
  TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
  TYPE_NAME (v) = TYPE_NAME (t);
  TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
  TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
  /* Variants of incomplete types should have alignment
     set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
  if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
    {
      SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
      TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
    }
  if (inner_type)
    TREE_TYPE (v) = inner_type;
  gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
  /* The new variant may not have been seen by free_lang_data yet;
     queue it for processing.  */
  if (!fld->pset.add (v))
    add_tree_to_fld_list (v, fld);
  return v;
}
5256
/* Map complete types to their artificial incomplete variants, so
   fld_incomplete_type_of builds at most one copy per type.  */

static hash_map<tree, tree> *fld_incomplete_types;

/* Likewise, map types to their simplified counterparts for
   fld_simplified_type.  */

static hash_map<tree, tree> *fld_simplified_types;
5264
/* Produce variant of T whose TREE_TYPE is T2.  If it is main variant,
   use MAP to prevent duplicates.  */

static tree
fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
			class free_lang_data_d *fld)
{
  /* Element type unchanged: nothing to rebuild.  */
  if (TREE_TYPE (t) == t2)
    return t;

  /* For a non-main variant, process the main variant first, then look up
     or build the matching variant of the result.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      return fld_type_variant
	       (fld_process_array_type (TYPE_MAIN_VARIANT (t),
					TYPE_MAIN_VARIANT (t2), map, fld),
		t, fld, t2);
    }

  /* Main variant: build (at most once per T, via MAP) an array of T2 over
     the same domain.  */
  bool existed;
  tree &array
     = map->get_or_insert (t, &existed);
  if (!existed)
    {
      array = build_array_type_1 (t2, TYPE_DOMAIN (t),
				  TYPE_TYPELESS_STORAGE (t), false);
      TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
      /* The new array type may not have been seen by free_lang_data yet;
	 queue it for processing.  */
      if (!fld->pset.add (array))
	add_tree_to_fld_list (array, fld);
    }
  return array;
}
5296
5297 /* Return CTX after removal of contexts that are not relevant */
5298
5299 static tree
5300 fld_decl_context (tree ctx)
5301 {
5302 /* Variably modified types are needed for tree_is_indexable to decide
5303 whether the type needs to go to local or global section.
5304 This code is semi-broken but for now it is easiest to keep contexts
5305 as expected. */
5306 if (ctx && TYPE_P (ctx)
5307 && !variably_modified_type_p (ctx, NULL_TREE))
5308 {
5309 while (ctx && TYPE_P (ctx))
5310 ctx = TYPE_CONTEXT (ctx);
5311 }
5312 return ctx;
5313 }
5314
/* For T being aggregate type try to turn it into a incomplete variant.
   Return T if no simplification is possible.  */

static tree
fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return NULL;
  if (POINTER_TYPE_P (t))
    {
      /* Simplify the pointed-to type; if that changed, rebuild the
	 pointer/reference type around the simplified result.  */
      tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
      if (t2 != TREE_TYPE (t))
	{
	  tree first;
	  if (TREE_CODE (t) == POINTER_TYPE)
	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
						 TYPE_REF_CAN_ALIAS_ALL (t));
	  else
	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
						   TYPE_REF_CAN_ALIAS_ALL (t));
	  gcc_assert (TYPE_CANONICAL (t2) != t2
		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
	  /* The rebuilt pointer type may not have been seen by
	     free_lang_data yet; queue it.  */
	  if (!fld->pset.add (first))
	    add_tree_to_fld_list (first, fld);
	  return fld_type_variant (first, t, fld);
	}
      return t;
    }
  if (TREE_CODE (t) == ARRAY_TYPE)
    return fld_process_array_type (t,
				   fld_incomplete_type_of (TREE_TYPE (t), fld),
				   fld_incomplete_types, fld);
  /* Only complete records, unions and enums can be made incomplete.  */
  if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
      || !COMPLETE_TYPE_P (t))
    return t;
  if (TYPE_MAIN_VARIANT (t) == t)
    {
      /* Build (at most once per T, via fld_incomplete_types) a distinct
	 copy stripped of everything that makes it complete.  */
      bool existed;
      tree &copy
	 = fld_incomplete_types->get_or_insert (t, &existed);

      if (!existed)
	{
	  copy = build_distinct_type_copy (t);

	  /* It is possible that type was not seen by free_lang_data yet.  */
	  if (!fld->pset.add (copy))
	    add_tree_to_fld_list (copy, fld);
	  /* Clear size, layout and member information.  */
	  TYPE_SIZE (copy) = NULL;
	  TYPE_USER_ALIGN (copy) = 0;
	  TYPE_SIZE_UNIT (copy) = NULL;
	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
	  TREE_ADDRESSABLE (copy) = 0;
	  if (AGGREGATE_TYPE_P (t))
	    {
	      SET_TYPE_MODE (copy, VOIDmode);
	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
	      TYPE_TYPELESS_STORAGE (copy) = 0;
	      TYPE_FIELDS (copy) = NULL;
	      TYPE_BINFO (copy) = NULL;
	    }
	  else
	    TYPE_VALUES (copy) = NULL;

	  /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
	     This is needed for ODR violation warnings to come out right (we
	     want duplicate TYPE_DECLs whenever the type is duplicated because
	     of ODR violation.  Because lang data in the TYPE_DECL may not
	     have been freed yet, rebuild it from scratch and copy relevant
	     fields.  */
	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
	  tree name = TYPE_NAME (copy);

	  if (name && TREE_CODE (name) == TYPE_DECL)
	    {
	      gcc_checking_assert (TREE_TYPE (name) == t);
	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
				       DECL_NAME (name), copy);
	      if (DECL_ASSEMBLER_NAME_SET_P (name))
		SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
	      SET_DECL_ALIGN (name2, 0);
	      DECL_CONTEXT (name2) = fld_decl_context
					 (DECL_CONTEXT (name));
	      TYPE_NAME (copy) = name2;
	    }
	}
      return copy;
    }
  /* Non-main variant: simplify the main variant, then find or build the
     matching variant of the result.  */
  return (fld_type_variant
	  (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
}
5406
/* Simplify type T for scenarios where we do not need complete pointer
   types.  */

static tree
fld_simplified_type (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return t;
  /* Pointers and references are fine with an incomplete pointed-to
     type.  */
  if (POINTER_TYPE_P (t))
    return fld_incomplete_type_of (t, fld);
  /* FIXME: This triggers verification error, see PR88140.  */
  /* The "&& 0" deliberately disables this path until the PR is fixed.  */
  if (TREE_CODE (t) == ARRAY_TYPE && 0)
    return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
				   fld_simplified_types, fld);
  return t;
}
5423
5424 /* Reset the expression *EXPR_P, a size or position.
5425
5426 ??? We could reset all non-constant sizes or positions. But it's cheap
5427 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5428
5429 We need to reset self-referential sizes or positions because they cannot
5430 be gimplified and thus can contain a CALL_EXPR after the gimplification
5431 is finished, which will run afoul of LTO streaming. And they need to be
5432 reset to something essentially dummy but not constant, so as to preserve
5433 the properties of the object they are attached to. */
5434
5435 static inline void
5436 free_lang_data_in_one_sizepos (tree *expr_p)
5437 {
5438 tree expr = *expr_p;
5439 if (CONTAINS_PLACEHOLDER_P (expr))
5440 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5441 }
5442
5443
5444 /* Reset all the fields in a binfo node BINFO. We only keep
5445 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5446
5447 static void
5448 free_lang_data_in_binfo (tree binfo)
5449 {
5450 unsigned i;
5451 tree t;
5452
5453 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5454
5455 BINFO_VIRTUALS (binfo) = NULL_TREE;
5456 BINFO_BASE_ACCESSES (binfo) = NULL;
5457 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5458 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5459 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5460
5461 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5462 free_lang_data_in_binfo (t);
5463 }
5464
5465
/* Reset all language specific information still present in TYPE.
   FLD carries the pass-wide state (visited set, type caches).  Called
   once per collected type from free_lang_data_in_cgraph, after all
   assembler names have been assigned.  */

static void
free_lang_data_in_type (tree type, class free_lang_data_d *fld)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  /* Clear all frontend-private flag bits.  */
  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  TYPE_NEEDS_CONSTRUCTING (type) = 0;

  /* Purge non-marked variants from the variants chain, so that they
     don't reappear in the IL after free_lang_data.  Variants in
     FLD->pset were reached by the collection walk and survive.  */
  while (TYPE_NEXT_VARIANT (type)
	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
    {
      tree t = TYPE_NEXT_VARIANT (type);
      TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
      /* Turn the removed types into distinct types.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_NEXT_VARIANT (t) = NULL_TREE;
    }

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      /* Remove the const and volatile qualifiers from arguments.  The
	 C++ front end removes them, but the C front end does not,
	 leading to false ODR violation errors when merging two
	 instances of the same function signature compiled by
	 different front ends.  */
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  tree arg_type = TREE_VALUE (p);

	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
	    {
	      int quals = TYPE_QUALS (arg_type)
			  & ~TYPE_QUAL_CONST
			  & ~TYPE_QUAL_VOLATILE;
	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
	      /* hash_set::add returns true when the entry already
		 existed, so we only recurse into the freshly built
		 qualified type the first time we see it.  */
	      if (!fld->pset.add (TREE_VALUE (p)))
		free_lang_data_in_type (TREE_VALUE (p), fld);
	    }
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Remove members that are not FIELD_DECLs from the field list
	 of an aggregate.  These occur in C++.  */
      for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
	if (TREE_CODE (member) == FIELD_DECL)
	  prev = &DECL_CHAIN (member);
	else
	  *prev = DECL_CHAIN (member);

      TYPE_VFIELD (type) = NULL_TREE;

      if (TYPE_BINFO (type))
	{
	  free_lang_data_in_binfo (TYPE_BINFO (type));
	  /* We need to preserve link to bases and virtual table for all
	     polymorphic types to make devirtualization machinery working.  */
	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
	    TYPE_BINFO (type) = NULL;
	}
    }
  else if (INTEGRAL_TYPE_P (type)
	   || SCALAR_FLOAT_TYPE_P (type)
	   || FIXED_POINT_TYPE_P (type))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE)
	{
	  /* Type values are used only for C++ ODR checking.  Drop them
	     for all type variants and non-ODR types.
	     For ODR types the data is freed in free_odr_warning_data.  */
	  if (TYPE_MAIN_VARIANT (type) != type
	      || !type_with_linkage_p (type))
	    TYPE_VALUES (type) = NULL;
	  else
	    /* Simplify representation by recording only values rather
	       than const decls.  */
	    for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
	      if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
		TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
	}
      /* Min/max bounds may be self-referential; see the comment on
	 free_lang_data_in_one_sizepos.  */
      free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
      free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
    }

  TYPE_LANG_SLOT_1 (type) = NULL_TREE;

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  /* BLOCK contexts are not useful after this point; hoist the context
     to the innermost enclosing non-BLOCK scope.  */
  if (TYPE_CONTEXT (type)
      && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
    {
      tree ctx = TYPE_CONTEXT (type);
      do
	{
	  ctx = BLOCK_SUPERCONTEXT (ctx);
	}
      while (ctx && TREE_CODE (ctx) == BLOCK);
      TYPE_CONTEXT (type) = ctx;
    }

  TYPE_STUB_DECL (type) = NULL;
  TYPE_NAME (type) = fld_simplified_type_name (type);
}
5598
5599
/* Return true if DECL may need an assembler name to be set.  Used by
   assign_assembler_name_if_needed before language data is freed,
   since mangling needs FE data that free_lang_data_in_decl destroys.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the canonical TYPE_DECL of a named, non-artificial
	 main-variant type is a candidate for a mangled type name; for
	 records/unions additionally only ODR types, and only types
	 with linkage (or INTEGER_TYPEs, see above).  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5678
5679
/* Reset all language specific information still present in symbol
   DECL.  FLD carries the pass-wide state.  Note this may set the
   assembler-name-invalidating fields, so it must run only after
   assign_assembler_name_if_needed has processed every decl.  */

static void
free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  /* Clear all frontend-private flag bits.  */
  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  /* Sizes and offsets may be self-referential; see the comment on
     free_lang_data_in_one_sizepos.  */
  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      DECL_FCONTEXT (decl) = NULL;
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Frontends do not set TREE_ADDRESSABLE on public variables even though
	 the address may be taken in other unit, so this flag has no practical
	 use for middle-end.

	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
	 for public objects that indeed cannot be addressed, but it is not
	 the case.  Set the flag to true so we do not get merge failures for
	 i.e. virtual tables between units that take address of it and
	 units that don't.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      /* Functions with no definition and no clones keeping the body
	 alive can release their bodies now.  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  /* Fall back to the global defaults when no per-function
	     target/optimization nodes were recorded.  */
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.
	 Otherwise dwarf2out.c will ICE as we splice functions out of
	 TYPE_FIELDS and thus the origin will not be output
	 correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (VAR_P (decl))
    {
      /* See the comment in the FUNCTION_DECL case above for why we set
	 the flag on public decls.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      /* Drop initializers for externals (unless static and readonly)
	 and for function-local non-static variables.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (decl) = 0;
      TREE_PUBLIC (decl) = 0;
      TREE_PRIVATE (decl) = 0;
      DECL_ARTIFICIAL (decl) = 0;
      TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
      DECL_MODE (decl) = VOIDmode;
      SET_DECL_ALIGN (decl, 0);
      /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
    }
  else if (TREE_CODE (decl) == FIELD_DECL)
    {
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (fndecl_built_in_p (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
  /* We need to keep field decls associated with their trees.  Otherwise tree
     merging may merge some fields and keep others disjoint which in turn will
     not do well with TREE_CHAIN pointers linking them.

     Also do not drop containing types for virtual methods and tables because
     these are needed by devirtualization.
     C++ destructors are special because C++ frontends sometimes produce
     virtual destructor as an alias of non-virtual destructor.  In
     devirtualization code we always walk through aliases and we need
     context to be preserved too.  See PR89335  */
  if (TREE_CODE (decl) != FIELD_DECL
      && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
	  || (!DECL_VIRTUAL_P (decl)
	      && (TREE_CODE (decl) != FUNCTION_DECL
		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
    DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
}
5839
5840
/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered.  WS is the walk_tree "walk
   subtrees" flag; DATA is the free_lang_data_d state.  Records every
   decl and type reached and pushes further trees onto FLD's worklist
   rather than recursing through walk_tree (*WS is cleared for decls
   and types so walk_tree does not descend itself).  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  class free_lang_data_d *fld = (class free_lang_data_d *) data;

  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      /* FIELD_DECL and TYPE_DECL chains are walked via their
	 containing type instead.  */
      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
	 lists, we may look types up in these lists and use them while
	 optimizing the function body.  Thus we need to free lang data
	 in them.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
      if (TREE_CODE (t) == REFERENCE_TYPE)
	fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
      /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      fld_worklist_push (TYPE_CANONICAL (t), fld);

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  unsigned i;
	  tree tem;
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL)
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}
      if (FUNC_OR_METHOD_TYPE_P (t))
	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      /* Keep only in-function automatic VAR_DECLs in BLOCK_VARS;
	 splice everything else out of the chain.  */
      for (tree *tem = &BLOCK_VARS (t); *tem; )
	{
	  if (TREE_CODE (*tem) != VAR_DECL
	      || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem)))
	    {
	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
			  && TREE_CODE (*tem) != PARM_DECL);
	      *tem = TREE_CHAIN (*tem);
	    }
	  else
	    {
	      fld_worklist_push (*tem, fld);
	      tem = &TREE_CHAIN (*tem);
	    }
	}
      for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
5999
6000
6001 /* Find decls and types in T. */
6002
6003 static void
6004 find_decls_types (tree t, class free_lang_data_d *fld)
6005 {
6006 while (1)
6007 {
6008 if (!fld->pset.contains (t))
6009 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6010 if (fld->worklist.is_empty ())
6011 break;
6012 t = fld->worklist.pop ();
6013 }
6014 }
6015
6016 /* Translate all the types in LIST with the corresponding runtime
6017 types. */
6018
6019 static tree
6020 get_eh_types_for_runtime (tree list)
6021 {
6022 tree head, prev;
6023
6024 if (list == NULL_TREE)
6025 return NULL_TREE;
6026
6027 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6028 prev = head;
6029 list = TREE_CHAIN (list);
6030 while (list)
6031 {
6032 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6033 TREE_CHAIN (prev) = n;
6034 prev = TREE_CHAIN (prev);
6035 list = TREE_CHAIN (list);
6036 }
6037
6038 return head;
6039 }
6040
6041
/* Find decls and types referenced in EH region R and store them in
   FLD->DECLS and FLD->TYPES.  As a side effect, replaces FE type
   lists in R with their runtime equivalents so no frontend types
   remain in the region.  */

static void
find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
{
  /* No default case: all ERT_* values are handled explicitly, and a
     cleanup region references no types at all.  */
  switch (r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch c;

	/* The types referenced in each catch must first be changed to the
	   EH types used at runtime.  This removes references to FE types
	   in the region.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    c->type_list = get_eh_types_for_runtime (c->type_list);
	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      r->u.allowed.type_list
	= get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
		 find_decls_types_r, fld, &fld->pset);
      break;
    }
}
6080
6081
6082 /* Find decls and types referenced in cgraph node N and store them in
6083 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6084 look for *every* kind of DECL and TYPE node reachable from N,
6085 including those embedded inside types and decls (i.e,, TYPE_DECLs,
6086 NAMESPACE_DECLs, etc). */
6087
6088 static void
6089 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6090 {
6091 basic_block bb;
6092 struct function *fn;
6093 unsigned ix;
6094 tree t;
6095
6096 find_decls_types (n->decl, fld);
6097
6098 if (!gimple_has_body_p (n->decl))
6099 return;
6100
6101 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6102
6103 fn = DECL_STRUCT_FUNCTION (n->decl);
6104
6105 /* Traverse locals. */
6106 FOR_EACH_LOCAL_DECL (fn, ix, t)
6107 find_decls_types (t, fld);
6108
6109 /* Traverse EH regions in FN. */
6110 {
6111 eh_region r;
6112 FOR_ALL_EH_REGION_FN (r, fn)
6113 find_decls_types_in_eh_region (r, fld);
6114 }
6115
6116 /* Traverse every statement in FN. */
6117 FOR_EACH_BB_FN (bb, fn)
6118 {
6119 gphi_iterator psi;
6120 gimple_stmt_iterator si;
6121 unsigned i;
6122
6123 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6124 {
6125 gphi *phi = psi.phi ();
6126
6127 for (i = 0; i < gimple_phi_num_args (phi); i++)
6128 {
6129 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6130 find_decls_types (*arg_p, fld);
6131 }
6132 }
6133
6134 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6135 {
6136 gimple *stmt = gsi_stmt (si);
6137
6138 if (is_gimple_call (stmt))
6139 find_decls_types (gimple_call_fntype (stmt), fld);
6140
6141 for (i = 0; i < gimple_num_ops (stmt); i++)
6142 {
6143 tree arg = gimple_op (stmt, i);
6144 find_decls_types (arg, fld);
6145 }
6146 }
6147 }
6148 }
6149
6150
/* Find decls and types referenced in varpool node V and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from V,
   including those embedded inside types and decls (i.e,, TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
{
  /* A variable has no body; walking its decl covers everything.  */
  find_decls_types (v->decl, fld);
}
6162
6163 /* If T needs an assembler name, have one created for it. */
6164
6165 void
6166 assign_assembler_name_if_needed (tree t)
6167 {
6168 if (need_assembler_name_p (t))
6169 {
6170 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6171 diagnostics that use input_location to show locus
6172 information. The problem here is that, at this point,
6173 input_location is generally anchored to the end of the file
6174 (since the parser is long gone), so we don't have a good
6175 position to pin it to.
6176
6177 To alleviate this problem, this uses the location of T's
6178 declaration. Examples of this are
6179 testsuite/g++.dg/template/cond2.C and
6180 testsuite/g++.dg/template/pr35240.C. */
6181 location_t saved_location = input_location;
6182 input_location = DECL_SOURCE_LOCATION (t);
6183
6184 decl_assembler_name (t);
6185
6186 input_location = saved_location;
6187 }
6188 }
6189
6190
6191 /* Free language specific information for every operand and expression
6192 in every node of the call graph. This process operates in three stages:
6193
6194 1- Every callgraph node and varpool node is traversed looking for
6195 decls and types embedded in them. This is a more exhaustive
6196 search than that done by find_referenced_vars, because it will
6197 also collect individual fields, decls embedded in types, etc.
6198
6199 2- All the decls found are sent to free_lang_data_in_decl.
6200
6201 3- All the types found are sent to free_lang_data_in_type.
6202
6203 The ordering between decls and types is important because
6204 free_lang_data_in_decl sets assembler names, which includes
6205 mangling. So types cannot be freed up until assembler names have
6206 been set up. */
6207
6208 static void
6209 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6210 {
6211 struct cgraph_node *n;
6212 varpool_node *v;
6213 tree t;
6214 unsigned i;
6215 alias_pair *p;
6216
6217 /* Find decls and types in the body of every function in the callgraph. */
6218 FOR_EACH_FUNCTION (n)
6219 find_decls_types_in_node (n, fld);
6220
6221 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6222 find_decls_types (p->decl, fld);
6223
6224 /* Find decls and types in every varpool symbol. */
6225 FOR_EACH_VARIABLE (v)
6226 find_decls_types_in_var (v, fld);
6227
6228 /* Set the assembler name on every decl found. We need to do this
6229 now because free_lang_data_in_decl will invalidate data needed
6230 for mangling. This breaks mangling on interdependent decls. */
6231 FOR_EACH_VEC_ELT (fld->decls, i, t)
6232 assign_assembler_name_if_needed (t);
6233
6234 /* Traverse every decl found freeing its language data. */
6235 FOR_EACH_VEC_ELT (fld->decls, i, t)
6236 free_lang_data_in_decl (t, fld);
6237
6238 /* Traverse every type found freeing its language data. */
6239 FOR_EACH_VEC_ELT (fld->types, i, t)
6240 free_lang_data_in_type (t, fld);
6241 }
6242
6243
/* Free resources that are used by FE but are not needed once they are done.
   Entry point of the *free_lang_data IPA pass.  Returns the pass' TODO
   flags (always 0).  */

static unsigned
free_lang_data (void)
{
  unsigned i;
  class free_lang_data_d fld;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    {
      /* Rebuild type inheritance graph even when not doing LTO to get
	 consistent profile data.  */
      rebuild_type_inheritance_graph ();
      return 0;
    }

  /* Caches used by fld_incomplete_type_of / fld_simplified_type;
     freed again at the end of this function.  */
  fld_incomplete_types = new hash_map<tree, tree>;
  fld_simplified_types = new hash_map<tree, tree>;

  /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
  if (vec_safe_is_empty (all_translation_units))
    build_translation_unit_decl (NULL_TREE);

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph (&fld);

  /* Create gimple variants for common types.  */
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;
  lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
  lang_hooks.print_xnode = lhd_print_tree_nothing;
  lang_hooks.print_decl = lhd_print_tree_nothing;
  lang_hooks.print_type = lhd_print_tree_nothing;
  lang_hooks.print_identifier = lhd_print_tree_nothing;

  lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;

  if (flag_checking)
    {
      /* NOTE: this I intentionally shadows the function-scope unsigned I.  */
      int i;
      tree t;

      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  rebuild_type_inheritance_graph ();

  delete fld_incomplete_types;
  delete fld_simplified_types;

  return 0;
}
6324
6325
namespace {

/* Pass descriptor for the *free_lang_data simple IPA pass; the leading
   '*' in the name suppresses dump-file output for this pass.  */

const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass whose execute hook just delegates to free_lang_data;
   see that function for what the pass actually does.  */

class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
6354
/* Factory for the *free_lang_data pass; called by the pass manager
   when constructing the pass pipeline.  Caller owns the result.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
6360 \f
6361 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6362 of the various TYPE_QUAL values. */
6363
6364 static void
6365 set_type_quals (tree type, int type_quals)
6366 {
6367 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6368 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6369 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6370 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6371 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6372 }
6373
6374 /* Returns true iff CAND and BASE have equivalent language-specific
6375 qualifiers. */
6376
6377 bool
6378 check_lang_type (const_tree cand, const_tree base)
6379 {
6380 if (lang_hooks.types.type_hash_eq == NULL)
6381 return true;
6382 /* type_hash_eq currently only applies to these types. */
6383 if (TREE_CODE (cand) != FUNCTION_TYPE
6384 && TREE_CODE (cand) != METHOD_TYPE)
6385 return true;
6386 return lang_hooks.types.type_hash_eq (cand, base);
6387 }
6388
6389 /* This function checks to see if TYPE matches the size one of the built-in
6390 atomic types, and returns that core atomic type. */
6391
6392 static tree
6393 find_atomic_core_type (const_tree type)
6394 {
6395 tree base_atomic_type;
6396
6397 /* Only handle complete types. */
6398 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6399 return NULL_TREE;
6400
6401 switch (tree_to_uhwi (TYPE_SIZE (type)))
6402 {
6403 case 8:
6404 base_atomic_type = atomicQI_type_node;
6405 break;
6406
6407 case 16:
6408 base_atomic_type = atomicHI_type_node;
6409 break;
6410
6411 case 32:
6412 base_atomic_type = atomicSI_type_node;
6413 break;
6414
6415 case 64:
6416 base_atomic_type = atomicDI_type_node;
6417 break;
6418
6419 case 128:
6420 base_atomic_type = atomicTI_type_node;
6421 break;
6422
6423 default:
6424 base_atomic_type = NULL_TREE;
6425 }
6426
6427 return base_atomic_type;
6428 }
6429
6430 /* Returns true iff unqualified CAND and BASE are equivalent. */
6431
6432 bool
6433 check_base_type (const_tree cand, const_tree base)
6434 {
6435 if (TYPE_NAME (cand) != TYPE_NAME (base)
6436 /* Apparently this is needed for Objective-C. */
6437 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6438 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6439 TYPE_ATTRIBUTES (base)))
6440 return false;
6441 /* Check alignment. */
6442 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6443 return true;
6444 /* Atomic types increase minimal alignment. We must to do so as well
6445 or we get duplicated canonical types. See PR88686. */
6446 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6447 {
6448 /* See if this object can map to a basic atomic type. */
6449 tree atomic_type = find_atomic_core_type (cand);
6450 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6451 return true;
6452 }
6453 return false;
6454 }
6455
6456 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6457
6458 bool
6459 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6460 {
6461 return (TYPE_QUALS (cand) == type_quals
6462 && check_base_type (cand, base)
6463 && check_lang_type (cand, base));
6464 }
6465
6466 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6467
6468 static bool
6469 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6470 {
6471 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6472 && TYPE_NAME (cand) == TYPE_NAME (base)
6473 /* Apparently this is needed for Objective-C. */
6474 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6475 /* Check alignment. */
6476 && TYPE_ALIGN (cand) == align
6477 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6478 TYPE_ATTRIBUTES (base))
6479 && check_lang_type (cand, base));
6480 }
6481
6482 /* Return a version of the TYPE, qualified as indicated by the
6483 TYPE_QUALS, if one exists. If no qualified version exists yet,
6484 return NULL_TREE. */
6485
tree
get_qualified_type (tree type, int type_quals)
{
  /* TYPE itself may already carry exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* The main variant is the most likely candidate, so try it first.  */
  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	/* Unlink T from its current position in the chain ...  */
	*tp = TYPE_NEXT_VARIANT (t);
	/* ... and re-insert it directly after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
6514
6515 /* Like get_qualified_type, but creates the type if it does not
6516 exist. This function never returns NULL_TREE. */
6517
6518 tree
6519 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6520 {
6521 tree t;
6522
6523 /* See if we already have the appropriate qualified variant. */
6524 t = get_qualified_type (type, type_quals);
6525
6526 /* If not, build it. */
6527 if (!t)
6528 {
6529 t = build_variant_type_copy (type PASS_MEM_STAT);
6530 set_type_quals (t, type_quals);
6531
6532 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6533 {
6534 /* See if this object can map to a basic atomic type. */
6535 tree atomic_type = find_atomic_core_type (type);
6536 if (atomic_type)
6537 {
6538 /* Ensure the alignment of this type is compatible with
6539 the required alignment of the atomic type. */
6540 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6541 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6542 }
6543 }
6544
6545 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6546 /* Propagate structural equality. */
6547 SET_TYPE_STRUCTURAL_EQUALITY (t);
6548 else if (TYPE_CANONICAL (type) != type)
6549 /* Build the underlying canonical type, since it is different
6550 from TYPE. */
6551 {
6552 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6553 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6554 }
6555 else
6556 /* T is its own canonical type. */
6557 TYPE_CANONICAL (t) = t;
6558
6559 }
6560
6561 return t;
6562 }
6563
6564 /* Create a variant of type T with alignment ALIGN. */
6565
6566 tree
6567 build_aligned_type (tree type, unsigned int align)
6568 {
6569 tree t;
6570
6571 if (TYPE_PACKED (type)
6572 || TYPE_ALIGN (type) == align)
6573 return type;
6574
6575 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6576 if (check_aligned_type (t, type, align))
6577 return t;
6578
6579 t = build_variant_type_copy (type);
6580 SET_TYPE_ALIGN (t, align);
6581 TYPE_USER_ALIGN (t) = 1;
6582
6583 return t;
6584 }
6585
6586 /* Create a new distinct copy of TYPE. The new type is made its own
6587 MAIN_VARIANT. If TYPE requires structural equality checks, the
6588 resulting type requires structural equality checks; otherwise, its
6589 TYPE_CANONICAL points to itself. */
6590
6591 tree
6592 build_distinct_type_copy (tree type MEM_STAT_DECL)
6593 {
6594 tree t = copy_node (type PASS_MEM_STAT);
6595
6596 TYPE_POINTER_TO (t) = 0;
6597 TYPE_REFERENCE_TO (t) = 0;
6598
6599 /* Set the canonical type either to a new equivalence class, or
6600 propagate the need for structural equality checks. */
6601 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6602 SET_TYPE_STRUCTURAL_EQUALITY (t);
6603 else
6604 TYPE_CANONICAL (t) = t;
6605
6606 /* Make it its own variant. */
6607 TYPE_MAIN_VARIANT (t) = t;
6608 TYPE_NEXT_VARIANT (t) = 0;
6609
6610 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6611 whose TREE_TYPE is not t. This can also happen in the Ada
6612 frontend when using subtypes. */
6613
6614 return t;
6615 }
6616
6617 /* Create a new variant of TYPE, equivalent but distinct. This is so
6618 the caller can modify it. TYPE_CANONICAL for the return type will
6619 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6620 are considered equal by the language itself (or that both types
6621 require structural equality checks). */
6622
6623 tree
6624 build_variant_type_copy (tree type MEM_STAT_DECL)
6625 {
6626 tree t, m = TYPE_MAIN_VARIANT (type);
6627
6628 t = build_distinct_type_copy (type PASS_MEM_STAT);
6629
6630 /* Since we're building a variant, assume that it is a non-semantic
6631 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6632 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6633 /* Type variants have no alias set defined. */
6634 TYPE_ALIAS_SET (t) = -1;
6635
6636 /* Add the new type to the chain of variants of TYPE. */
6637 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6638 TYPE_NEXT_VARIANT (m) = t;
6639 TYPE_MAIN_VARIANT (t) = m;
6640
6641 return t;
6642 }
6643 \f
6644 /* Return true if the from tree in both tree maps are equal. */
6645
6646 int
6647 tree_map_base_eq (const void *va, const void *vb)
6648 {
6649 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6650 *const b = (const struct tree_map_base *) vb;
6651 return (a->from == b->from);
6652 }
6653
6654 /* Hash a from tree in a tree_base_map. */
6655
6656 unsigned int
6657 tree_map_base_hash (const void *item)
6658 {
6659 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6660 }
6661
6662 /* Return true if this tree map structure is marked for garbage collection
6663 purposes. We simply return true if the from tree is marked, so that this
6664 structure goes away when the from tree goes away. */
6665
6666 int
6667 tree_map_base_marked_p (const void *p)
6668 {
6669 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6670 }
6671
6672 /* Hash a from tree in a tree_map. */
6673
6674 unsigned int
6675 tree_map_hash (const void *item)
6676 {
6677 return (((const struct tree_map *) item)->hash);
6678 }
6679
6680 /* Hash a from tree in a tree_decl_map. */
6681
6682 unsigned int
6683 tree_decl_map_hash (const void *item)
6684 {
6685 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6686 }
6687
6688 /* Return the initialization priority for DECL. */
6689
6690 priority_type
6691 decl_init_priority_lookup (tree decl)
6692 {
6693 symtab_node *snode = symtab_node::get (decl);
6694
6695 if (!snode)
6696 return DEFAULT_INIT_PRIORITY;
6697 return
6698 snode->get_init_priority ();
6699 }
6700
6701 /* Return the finalization priority for DECL. */
6702
6703 priority_type
6704 decl_fini_priority_lookup (tree decl)
6705 {
6706 cgraph_node *node = cgraph_node::get (decl);
6707
6708 if (!node)
6709 return DEFAULT_INIT_PRIORITY;
6710 return
6711 node->get_fini_priority ();
6712 }
6713
6714 /* Set the initialization priority for DECL to PRIORITY. */
6715
6716 void
6717 decl_init_priority_insert (tree decl, priority_type priority)
6718 {
6719 struct symtab_node *snode;
6720
6721 if (priority == DEFAULT_INIT_PRIORITY)
6722 {
6723 snode = symtab_node::get (decl);
6724 if (!snode)
6725 return;
6726 }
6727 else if (VAR_P (decl))
6728 snode = varpool_node::get_create (decl);
6729 else
6730 snode = cgraph_node::get_create (decl);
6731 snode->set_init_priority (priority);
6732 }
6733
6734 /* Set the finalization priority for DECL to PRIORITY. */
6735
6736 void
6737 decl_fini_priority_insert (tree decl, priority_type priority)
6738 {
6739 struct cgraph_node *node;
6740
6741 if (priority == DEFAULT_INIT_PRIORITY)
6742 {
6743 node = cgraph_node::get (decl);
6744 if (!node)
6745 return;
6746 }
6747 else
6748 node = cgraph_node::get_create (decl);
6749 node->set_fini_priority (priority);
6750 }
6751
6752 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6753
6754 static void
6755 print_debug_expr_statistics (void)
6756 {
6757 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6758 (long) debug_expr_for_decl->size (),
6759 (long) debug_expr_for_decl->elements (),
6760 debug_expr_for_decl->collisions ());
6761 }
6762
6763 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6764
6765 static void
6766 print_value_expr_statistics (void)
6767 {
6768 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6769 (long) value_expr_for_decl->size (),
6770 (long) value_expr_for_decl->elements (),
6771 value_expr_for_decl->collisions ());
6772 }
6773
6774 /* Lookup a debug expression for FROM, and return it if we find one. */
6775
6776 tree
6777 decl_debug_expr_lookup (tree from)
6778 {
6779 struct tree_decl_map *h, in;
6780 in.base.from = from;
6781
6782 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6783 if (h)
6784 return h->to;
6785 return NULL_TREE;
6786 }
6787
6788 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6789
6790 void
6791 decl_debug_expr_insert (tree from, tree to)
6792 {
6793 struct tree_decl_map *h;
6794
6795 h = ggc_alloc<tree_decl_map> ();
6796 h->base.from = from;
6797 h->to = to;
6798 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6799 }
6800
6801 /* Lookup a value expression for FROM, and return it if we find one. */
6802
6803 tree
6804 decl_value_expr_lookup (tree from)
6805 {
6806 struct tree_decl_map *h, in;
6807 in.base.from = from;
6808
6809 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6810 if (h)
6811 return h->to;
6812 return NULL_TREE;
6813 }
6814
6815 /* Insert a mapping FROM->TO in the value expression hashtable. */
6816
6817 void
6818 decl_value_expr_insert (tree from, tree to)
6819 {
6820 struct tree_decl_map *h;
6821
6822 h = ggc_alloc<tree_decl_map> ();
6823 h->base.from = from;
6824 h->to = to;
6825 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6826 }
6827
6828 /* Lookup a vector of debug arguments for FROM, and return it if we
6829 find one. */
6830
6831 vec<tree, va_gc> **
6832 decl_debug_args_lookup (tree from)
6833 {
6834 struct tree_vec_map *h, in;
6835
6836 if (!DECL_HAS_DEBUG_ARGS_P (from))
6837 return NULL;
6838 gcc_checking_assert (debug_args_for_decl != NULL);
6839 in.base.from = from;
6840 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6841 if (h)
6842 return &h->to;
6843 return NULL;
6844 }
6845
6846 /* Insert a mapping FROM->empty vector of debug arguments in the value
6847 expression hashtable. */
6848
6849 vec<tree, va_gc> **
6850 decl_debug_args_insert (tree from)
6851 {
6852 struct tree_vec_map *h;
6853 tree_vec_map **loc;
6854
6855 if (DECL_HAS_DEBUG_ARGS_P (from))
6856 return decl_debug_args_lookup (from);
6857 if (debug_args_for_decl == NULL)
6858 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6859 h = ggc_alloc<tree_vec_map> ();
6860 h->base.from = from;
6861 h->to = NULL;
6862 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6863 *loc = h;
6864 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6865 return &h->to;
6866 }
6867
6868 /* Hashing of types so that we don't make duplicates.
6869 The entry point is `type_hash_canon'. */
6870
6871 /* Generate the default hash code for TYPE. This is designed for
6872 speed, rather than maximum entropy. */
6873
hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  /* Every type contributes its tree code ...  */
  hstate.add_int (TREE_CODE (type));

  /* ... the hash of its underlying/element type, if any ...  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  /* ... plus code-specific distinguishing data.  */
  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Mix in each argument type; error_mark_node entries are skipped
	 so erroneous signatures still hash consistently.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE only participates for non-aggregate
	   element types (see the matching logic in
	   type_cache_hasher::equal).  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bounds: prefer the max value, fall back to the min.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
6943
6944 /* These are the Hashtable callback functions. */
6945
6946 /* Returns true iff the types are equivalent. */
6947
bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				 TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Then apply code-specific equality requirements.  Cases that `break'
     rather than `return' additionally defer to the language hook at the
     bottom.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* Fully determined by the shared checks above.  */
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* Value lists may be shared, pointer-identical, or structurally
	 equal TREE_LISTs.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds compare equal when pointer-identical or numerically
	 equal (tree_int_cst_equal also accepts two NULLs).  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Reached from the METHOD_TYPE/FUNCTION_TYPE `break's: give the
     front end a final veto via its type_hash_eq hook.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
7066
7067 /* Given TYPE, and HASHCODE its hash code, return the canonical
7068 object for an identical type if one already exists.
7069 Otherwise, return TYPE, and record it as the canonical object.
7070
7071 To use this function, first create a type of the sort you want.
7072 Then compute its hash code from the fields of the type that
7073 make it different from other similar types.
7074 Then call this function and use the value. */
7075
tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type is already interned: discard TYPE and hand
	 back the existing canonical node.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* If TYPE received the most recently allocated UID, recycle it.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  /* Only free bounds that belong to TYPE itself (their
	     TREE_TYPE is TYPE); shared bounds stay alive.  */
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* TYPE is new: record it as the canonical representative.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
7138
7139 static void
7140 print_type_hash_statistics (void)
7141 {
7142 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7143 (long) type_hash_table->size (),
7144 (long) type_hash_table->elements (),
7145 type_hash_table->collisions ());
7146 }
7147
7148 /* Given two lists of types
7149 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7150 return 1 if the lists contain the same types in the same order.
7151 Also, the TREE_PURPOSEs must match. */
7152
7153 bool
7154 type_list_equal (const_tree l1, const_tree l2)
7155 {
7156 const_tree t1, t2;
7157
7158 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7159 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7160 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7161 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7162 && (TREE_TYPE (TREE_PURPOSE (t1))
7163 == TREE_TYPE (TREE_PURPOSE (t2))))))
7164 return false;
7165
7166 return t1 == t2;
7167 }
7168
7169 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7170 given by TYPE. If the argument list accepts variable arguments,
7171 then this function counts only the ordinary arguments. */
7172
7173 int
7174 type_num_arguments (const_tree fntype)
7175 {
7176 int i = 0;
7177
7178 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7179 /* If the function does not take a variable number of arguments,
7180 the last element in the list will have type `void'. */
7181 if (VOID_TYPE_P (TREE_VALUE (t)))
7182 break;
7183 else
7184 ++i;
7185
7186 return i;
7187 }
7188
7189 /* Return the type of the function TYPE's argument ARGNO if known.
7190 For vararg function's where ARGNO refers to one of the variadic
7191 arguments return null. Otherwise, return a void_type_node for
7192 out-of-bounds ARGNO. */
7193
tree
type_argument_type (const_tree fntype, unsigned argno)
{
  /* Treat zero the same as an out-of-bounds argument number.  */
  if (!argno)
    return void_type_node;

  function_args_iterator iter;

  tree argtype;
  /* Argument numbers are 1-based.  */
  unsigned i = 1;
  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
    {
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (!argtype)
	break;

      if (i == argno || VOID_TYPE_P (argtype))
	return argtype;

      ++i;
    }

  /* ARGNO referred to a variadic argument.  */
  return NULL_TREE;
}
7222
7223 /* Nonzero if integer constants T1 and T2
7224 represent the same constant value. */
7225
7226 int
7227 tree_int_cst_equal (const_tree t1, const_tree t2)
7228 {
7229 if (t1 == t2)
7230 return 1;
7231
7232 if (t1 == 0 || t2 == 0)
7233 return 0;
7234
7235 STRIP_ANY_LOCATION_WRAPPER (t1);
7236 STRIP_ANY_LOCATION_WRAPPER (t2);
7237
7238 if (TREE_CODE (t1) == INTEGER_CST
7239 && TREE_CODE (t2) == INTEGER_CST
7240 && wi::to_widest (t1) == wi::to_widest (t2))
7241 return 1;
7242
7243 return 0;
7244 }
7245
7246 /* Return true if T is an INTEGER_CST whose numerical value (extended
7247 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7248
7249 bool
7250 tree_fits_shwi_p (const_tree t)
7251 {
7252 return (t != NULL_TREE
7253 && TREE_CODE (t) == INTEGER_CST
7254 && wi::fits_shwi_p (wi::to_widest (t)));
7255 }
7256
7257 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7258 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7259
7260 bool
7261 tree_fits_poly_int64_p (const_tree t)
7262 {
7263 if (t == NULL_TREE)
7264 return false;
7265 if (POLY_INT_CST_P (t))
7266 {
7267 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7268 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7269 return false;
7270 return true;
7271 }
7272 return (TREE_CODE (t) == INTEGER_CST
7273 && wi::fits_shwi_p (wi::to_widest (t)));
7274 }
7275
7276 /* Return true if T is an INTEGER_CST whose numerical value (extended
7277 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7278
7279 bool
7280 tree_fits_uhwi_p (const_tree t)
7281 {
7282 return (t != NULL_TREE
7283 && TREE_CODE (t) == INTEGER_CST
7284 && wi::fits_uhwi_p (wi::to_widest (t)));
7285 }
7286
7287 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7288 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7289
7290 bool
7291 tree_fits_poly_uint64_p (const_tree t)
7292 {
7293 if (t == NULL_TREE)
7294 return false;
7295 if (POLY_INT_CST_P (t))
7296 {
7297 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7298 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7299 return false;
7300 return true;
7301 }
7302 return (TREE_CODE (t) == INTEGER_CST
7303 && wi::fits_uhwi_p (wi::to_widest (t)));
7304 }
7305
7306 /* T is an INTEGER_CST whose numerical value (extended according to
7307 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7308 HOST_WIDE_INT. */
7309
7310 HOST_WIDE_INT
7311 tree_to_shwi (const_tree t)
7312 {
7313 gcc_assert (tree_fits_shwi_p (t));
7314 return TREE_INT_CST_LOW (t);
7315 }
7316
7317 /* T is an INTEGER_CST whose numerical value (extended according to
7318 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7319 HOST_WIDE_INT. */
7320
7321 unsigned HOST_WIDE_INT
7322 tree_to_uhwi (const_tree t)
7323 {
7324 gcc_assert (tree_fits_uhwi_p (t));
7325 return TREE_INT_CST_LOW (t);
7326 }
7327
7328 /* Return the most significant (sign) bit of T. */
7329
7330 int
7331 tree_int_cst_sign_bit (const_tree t)
7332 {
7333 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7334
7335 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7336 }
7337
7338 /* Return an indication of the sign of the integer constant T.
7339 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7340 Note that -1 will never be returned if T's type is unsigned. */
7341
7342 int
7343 tree_int_cst_sgn (const_tree t)
7344 {
7345 if (wi::to_wide (t) == 0)
7346 return 0;
7347 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7348 return 1;
7349 else if (wi::neg_p (wi::to_wide (t)))
7350 return -1;
7351 else
7352 return 1;
7353 }
7354
7355 /* Return the minimum number of bits needed to represent VALUE in a
7356 signed or unsigned type, UNSIGNEDP says which. */
7357
7358 unsigned int
7359 tree_int_cst_min_precision (tree value, signop sgn)
7360 {
7361 /* If the value is negative, compute its negative minus 1. The latter
7362 adjustment is because the absolute value of the largest negative value
7363 is one larger than the largest positive value. This is equivalent to
7364 a bit-wise negation, so use that operation instead. */
7365
7366 if (tree_int_cst_sgn (value) < 0)
7367 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7368
7369 /* Return the number of bits needed, taking into account the fact
7370 that we need one more bit for a signed than unsigned type.
7371 If value is 0 or -1, the minimum precision is 1 no matter
7372 whether unsignedp is true or false. */
7373
7374 if (integer_zerop (value))
7375 return 1;
7376 else
7377 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7378 }
7379
7380 /* Return truthvalue of whether T1 is the same tree structure as T2.
7381 Return 1 if they are the same.
7382 Return 0 if they are understandably different.
7383 Return -1 if either contains tree structure not understood by
7384 this function. */
7385
7386 int
7387 simple_cst_equal (const_tree t1, const_tree t2)
7388 {
7389 enum tree_code code1, code2;
7390 int cmp;
7391 int i;
7392
7393 if (t1 == t2)
7394 return 1;
7395 if (t1 == 0 || t2 == 0)
7396 return 0;
7397
7398 /* For location wrappers to be the same, they must be at the same
7399 source location (and wrap the same thing). */
7400 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7401 {
7402 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7403 return 0;
7404 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7405 }
7406
7407 code1 = TREE_CODE (t1);
7408 code2 = TREE_CODE (t2);
7409
7410 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7411 {
7412 if (CONVERT_EXPR_CODE_P (code2)
7413 || code2 == NON_LVALUE_EXPR)
7414 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7415 else
7416 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7417 }
7418
7419 else if (CONVERT_EXPR_CODE_P (code2)
7420 || code2 == NON_LVALUE_EXPR)
7421 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7422
7423 if (code1 != code2)
7424 return 0;
7425
7426 switch (code1)
7427 {
7428 case INTEGER_CST:
7429 return wi::to_widest (t1) == wi::to_widest (t2);
7430
7431 case REAL_CST:
7432 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7433
7434 case FIXED_CST:
7435 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7436
7437 case STRING_CST:
7438 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7439 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7440 TREE_STRING_LENGTH (t1)));
7441
7442 case CONSTRUCTOR:
7443 {
7444 unsigned HOST_WIDE_INT idx;
7445 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7446 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7447
7448 if (vec_safe_length (v1) != vec_safe_length (v2))
7449 return false;
7450
7451 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7452 /* ??? Should we handle also fields here? */
7453 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7454 return false;
7455 return true;
7456 }
7457
7458 case SAVE_EXPR:
7459 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7460
7461 case CALL_EXPR:
7462 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7463 if (cmp <= 0)
7464 return cmp;
7465 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7466 return 0;
7467 {
7468 const_tree arg1, arg2;
7469 const_call_expr_arg_iterator iter1, iter2;
7470 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7471 arg2 = first_const_call_expr_arg (t2, &iter2);
7472 arg1 && arg2;
7473 arg1 = next_const_call_expr_arg (&iter1),
7474 arg2 = next_const_call_expr_arg (&iter2))
7475 {
7476 cmp = simple_cst_equal (arg1, arg2);
7477 if (cmp <= 0)
7478 return cmp;
7479 }
7480 return arg1 == arg2;
7481 }
7482
7483 case TARGET_EXPR:
7484 /* Special case: if either target is an unallocated VAR_DECL,
7485 it means that it's going to be unified with whatever the
7486 TARGET_EXPR is really supposed to initialize, so treat it
7487 as being equivalent to anything. */
7488 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7489 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7490 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7491 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7492 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7493 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7494 cmp = 1;
7495 else
7496 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7497
7498 if (cmp <= 0)
7499 return cmp;
7500
7501 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7502
7503 case WITH_CLEANUP_EXPR:
7504 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7505 if (cmp <= 0)
7506 return cmp;
7507
7508 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7509
7510 case COMPONENT_REF:
7511 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7512 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7513
7514 return 0;
7515
7516 case VAR_DECL:
7517 case PARM_DECL:
7518 case CONST_DECL:
7519 case FUNCTION_DECL:
7520 return 0;
7521
7522 default:
7523 if (POLY_INT_CST_P (t1))
7524 /* A false return means maybe_ne rather than known_ne. */
7525 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7526 TYPE_SIGN (TREE_TYPE (t1))),
7527 poly_widest_int::from (poly_int_cst_value (t2),
7528 TYPE_SIGN (TREE_TYPE (t2))));
7529 break;
7530 }
7531
7532 /* This general rule works for most tree codes. All exceptions should be
7533 handled above. If this is a language-specific tree code, we can't
7534 trust what might be in the operand, so say we don't know
7535 the situation. */
7536 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7537 return -1;
7538
7539 switch (TREE_CODE_CLASS (code1))
7540 {
7541 case tcc_unary:
7542 case tcc_binary:
7543 case tcc_comparison:
7544 case tcc_expression:
7545 case tcc_reference:
7546 case tcc_statement:
7547 cmp = 1;
7548 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7549 {
7550 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7551 if (cmp <= 0)
7552 return cmp;
7553 }
7554
7555 return cmp;
7556
7557 default:
7558 return -1;
7559 }
7560 }
7561
7562 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7563 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7564 than U, respectively. */
7565
7566 int
7567 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7568 {
7569 if (tree_int_cst_sgn (t) < 0)
7570 return -1;
7571 else if (!tree_fits_uhwi_p (t))
7572 return 1;
7573 else if (TREE_INT_CST_LOW (t) == u)
7574 return 0;
7575 else if (TREE_INT_CST_LOW (t) < u)
7576 return -1;
7577 else
7578 return 1;
7579 }
7580
7581 /* Return true if SIZE represents a constant size that is in bounds of
7582 what the middle-end and the backend accepts (covering not more than
7583 half of the address-space).
7584 When PERR is non-null, set *PERR on failure to the description of
7585 why SIZE is not valid. */
7586
7587 bool
7588 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7589 {
7590 if (POLY_INT_CST_P (size))
7591 {
7592 if (TREE_OVERFLOW (size))
7593 return false;
7594 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7595 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7596 return false;
7597 return true;
7598 }
7599
7600 cst_size_error error;
7601 if (!perr)
7602 perr = &error;
7603
7604 if (TREE_CODE (size) != INTEGER_CST)
7605 {
7606 *perr = cst_size_not_constant;
7607 return false;
7608 }
7609
7610 if (TREE_OVERFLOW_P (size))
7611 {
7612 *perr = cst_size_overflow;
7613 return false;
7614 }
7615
7616 if (tree_int_cst_sgn (size) < 0)
7617 {
7618 *perr = cst_size_negative;
7619 return false;
7620 }
7621 if (!tree_fits_uhwi_p (size)
7622 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7623 < wi::to_widest (size) * 2))
7624 {
7625 *perr = cst_size_too_big;
7626 return false;
7627 }
7628
7629 return true;
7630 }
7631
7632 /* Return the precision of the type, or for a complex or vector type the
7633 precision of the type of its elements. */
7634
7635 unsigned int
7636 element_precision (const_tree type)
7637 {
7638 if (!TYPE_P (type))
7639 type = TREE_TYPE (type);
7640 enum tree_code code = TREE_CODE (type);
7641 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7642 type = TREE_TYPE (type);
7643
7644 return TYPE_PRECISION (type);
7645 }
7646
7647 /* Return true if CODE represents an associative tree code. Otherwise
7648 return false. */
7649 bool
7650 associative_tree_code (enum tree_code code)
7651 {
7652 switch (code)
7653 {
7654 case BIT_IOR_EXPR:
7655 case BIT_AND_EXPR:
7656 case BIT_XOR_EXPR:
7657 case PLUS_EXPR:
7658 case MULT_EXPR:
7659 case MIN_EXPR:
7660 case MAX_EXPR:
7661 return true;
7662
7663 default:
7664 break;
7665 }
7666 return false;
7667 }
7668
7669 /* Return true if CODE represents a commutative tree code. Otherwise
7670 return false. */
7671 bool
7672 commutative_tree_code (enum tree_code code)
7673 {
7674 switch (code)
7675 {
7676 case PLUS_EXPR:
7677 case MULT_EXPR:
7678 case MULT_HIGHPART_EXPR:
7679 case MIN_EXPR:
7680 case MAX_EXPR:
7681 case BIT_IOR_EXPR:
7682 case BIT_XOR_EXPR:
7683 case BIT_AND_EXPR:
7684 case NE_EXPR:
7685 case EQ_EXPR:
7686 case UNORDERED_EXPR:
7687 case ORDERED_EXPR:
7688 case UNEQ_EXPR:
7689 case LTGT_EXPR:
7690 case TRUTH_AND_EXPR:
7691 case TRUTH_XOR_EXPR:
7692 case TRUTH_OR_EXPR:
7693 case WIDEN_MULT_EXPR:
7694 case VEC_WIDEN_MULT_HI_EXPR:
7695 case VEC_WIDEN_MULT_LO_EXPR:
7696 case VEC_WIDEN_MULT_EVEN_EXPR:
7697 case VEC_WIDEN_MULT_ODD_EXPR:
7698 return true;
7699
7700 default:
7701 break;
7702 }
7703 return false;
7704 }
7705
7706 /* Return true if CODE represents a ternary tree code for which the
7707 first two operands are commutative. Otherwise return false. */
7708 bool
7709 commutative_ternary_tree_code (enum tree_code code)
7710 {
7711 switch (code)
7712 {
7713 case WIDEN_MULT_PLUS_EXPR:
7714 case WIDEN_MULT_MINUS_EXPR:
7715 case DOT_PROD_EXPR:
7716 return true;
7717
7718 default:
7719 break;
7720 }
7721 return false;
7722 }
7723
7724 /* Returns true if CODE can overflow. */
7725
7726 bool
7727 operation_can_overflow (enum tree_code code)
7728 {
7729 switch (code)
7730 {
7731 case PLUS_EXPR:
7732 case MINUS_EXPR:
7733 case MULT_EXPR:
7734 case LSHIFT_EXPR:
7735 /* Can overflow in various ways. */
7736 return true;
7737 case TRUNC_DIV_EXPR:
7738 case EXACT_DIV_EXPR:
7739 case FLOOR_DIV_EXPR:
7740 case CEIL_DIV_EXPR:
7741 /* For INT_MIN / -1. */
7742 return true;
7743 case NEGATE_EXPR:
7744 case ABS_EXPR:
7745 /* For -INT_MIN. */
7746 return true;
7747 default:
7748 /* These operators cannot overflow. */
7749 return false;
7750 }
7751 }
7752
7753 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7754 ftrapv doesn't generate trapping insns for CODE. */
7755
7756 bool
7757 operation_no_trapping_overflow (tree type, enum tree_code code)
7758 {
7759 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7760
7761 /* We don't generate instructions that trap on overflow for complex or vector
7762 types. */
7763 if (!INTEGRAL_TYPE_P (type))
7764 return true;
7765
7766 if (!TYPE_OVERFLOW_TRAPS (type))
7767 return true;
7768
7769 switch (code)
7770 {
7771 case PLUS_EXPR:
7772 case MINUS_EXPR:
7773 case MULT_EXPR:
7774 case NEGATE_EXPR:
7775 case ABS_EXPR:
7776 /* These operators can overflow, and -ftrapv generates trapping code for
7777 these. */
7778 return false;
7779 case TRUNC_DIV_EXPR:
7780 case EXACT_DIV_EXPR:
7781 case FLOOR_DIV_EXPR:
7782 case CEIL_DIV_EXPR:
7783 case LSHIFT_EXPR:
7784 /* These operators can overflow, but -ftrapv does not generate trapping
7785 code for these. */
7786 return true;
7787 default:
7788 /* These operators cannot overflow. */
7789 return true;
7790 }
7791 }
7792
7793 namespace inchash
7794 {
7795
/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  */
void
add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  /* Hash NULL and error_mark_node as a constant zero.  */
  if (t == NULL_TREE || t == error_mark_node)
    {
      hstate.merge_hash (0);
      return;
    }

  STRIP_ANY_LOCATION_WRAPPER (t);

  /* Strip nop conversions except when hashing an address, where they are
     significant.  */
  if (!(flags & OEP_ADDRESS_OF))
    STRIP_NOPS (t);

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2;
	/* Hash any zero the same when signed zeros are not honored, so
	   +0.0 and -0.0 (which operand_equal_p treats alike) match.  */
	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
	  val2 = rvc_zero;
	else
	  val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      hstate.add ((const void *) TREE_STRING_POINTER (t),
		  TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      inchash::add_expr (TREE_REALPART (t), hstate, flags);
      inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
      return;
    case VECTOR_CST:
      {
	/* Hash the compressed encoding, not the expanded elements.  */
	hstate.add_int (VECTOR_CST_NPATTERNS (t));
	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_hwi (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case BLOCK:
    case OMP_CLAUSE:
      /* Ignore.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	inchash::add_expr (TREE_VALUE (t), hstate, flags);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	flags &= ~OEP_ADDRESS_OF;
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    inchash::add_expr (field, hstate, flags);
	    inchash::add_expr (value, hstate, flags);
	  }
	return;
      }
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (CONST_CAST_TREE (t));
	     !tsi_end_p (i); tsi_next (&i))
	  inchash::add_expr (tsi_stmt (i), hstate, flags);
	return;
      }
    case TREE_VEC:
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
      return;
    case IDENTIFIER_NODE:
      hstate.add_object (IDENTIFIER_HASH_VALUE (t));
      return;
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      if (POLY_INT_CST_P (t))
	{
	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
	  return;
	}
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECL's have a unique ID */
	  hstate.add_hwi (DECL_UID (t));
	}
      else if (tclass == tcc_comparison && !commutative_tree_code (code))
	{
	  /* For comparisons that can be swapped, use the lower
	     tree code.  */
	  enum tree_code ccode = swap_tree_comparison (code);
	  if (code < ccode)
	    ccode = code;
	  hstate.add_object (ccode);
	  /* Hash the operands in the order matching the chosen code.  */
	  inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
	  inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
	}
      else if (CONVERT_EXPR_CODE_P (code))
	{
	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
	     operand_equal_p.  */
	  enum tree_code ccode = NOP_EXPR;
	  hstate.add_object (ccode);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  Make sure to include signedness
	     in the hash computation.  */
	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	  inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
	}
      /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl.  */
      else if (code == MEM_REF
	       && (flags & OEP_ADDRESS_OF) != 0
	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
	       && integer_zerop (TREE_OPERAND (t, 1)))
	inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
			   hstate, flags);
      /* Don't ICE on FE specific trees, or their arguments etc.
	 during operand_equal_p hash verification.  */
      else if (!IS_EXPR_CODE_CLASS (tclass))
	gcc_assert (flags & OEP_HASH_CHECK);
      else
	{
	  /* SFLAGS is used for all operands except operand 0; see the
	     loop at the end of this block.  */
	  unsigned int sflags = flags;

	  hstate.add_object (code);

	  switch (code)
	    {
	    case ADDR_EXPR:
	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	      flags |= OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case INDIRECT_REF:
	    case MEM_REF:
	    case TARGET_MEM_REF:
	      flags &= ~OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	    case COMPONENT_REF:
	    case BIT_FIELD_REF:
	      sflags &= ~OEP_ADDRESS_OF;
	      break;

	    case COND_EXPR:
	      flags &= ~OEP_ADDRESS_OF;
	      break;

	    case WIDEN_MULT_PLUS_EXPR:
	    case WIDEN_MULT_MINUS_EXPR:
	      {
		/* The multiplication operands are commutative.  */
		inchash::hash one, two;
		inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
		inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
		hstate.add_commutative (one, two);
		inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
		return;
	      }

	    case CALL_EXPR:
	      /* Internal-function calls have no fndecl; hash the
		 internal function number instead.  */
	      if (CALL_EXPR_FN (t) == NULL_TREE)
		hstate.add_int (CALL_EXPR_IFN (t));
	      break;

	    case TARGET_EXPR:
	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
		 Usually different TARGET_EXPRs just should use
		 different temporaries in their slots.  */
	      inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
	      return;

	    default:
	      break;
	    }

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signedness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
	      inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
	      hstate.add_commutative (one, two);
	    }
	  else
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      inchash::add_expr (TREE_OPERAND (t, i), hstate,
				 i == 0 ? flags : sflags);
	}
      return;
    }
}
8063
8064 }
8065
8066 /* Constructors for pointer, array and function types.
8067 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8068 constructed by language-dependent code, not here.) */
8069
8070 /* Construct, lay out and return the type of pointers to TO_TYPE with
8071 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8072 reference all of memory. If such a type has already been
8073 constructed, reuse it. */
8074
8075 tree
8076 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8077 bool can_alias_all)
8078 {
8079 tree t;
8080 bool could_alias = can_alias_all;
8081
8082 if (to_type == error_mark_node)
8083 return error_mark_node;
8084
8085 /* If the pointed-to type has the may_alias attribute set, force
8086 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8087 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8088 can_alias_all = true;
8089
8090 /* In some cases, languages will have things that aren't a POINTER_TYPE
8091 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8092 In that case, return that type without regard to the rest of our
8093 operands.
8094
8095 ??? This is a kludge, but consistent with the way this function has
8096 always operated and there doesn't seem to be a good way to avoid this
8097 at the moment. */
8098 if (TYPE_POINTER_TO (to_type) != 0
8099 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8100 return TYPE_POINTER_TO (to_type);
8101
8102 /* First, if we already have a type for pointers to TO_TYPE and it's
8103 the proper mode, use it. */
8104 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8105 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8106 return t;
8107
8108 t = make_node (POINTER_TYPE);
8109
8110 TREE_TYPE (t) = to_type;
8111 SET_TYPE_MODE (t, mode);
8112 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8113 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8114 TYPE_POINTER_TO (to_type) = t;
8115
8116 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8117 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8118 SET_TYPE_STRUCTURAL_EQUALITY (t);
8119 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8120 TYPE_CANONICAL (t)
8121 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8122 mode, false);
8123
8124 /* Lay out the type. This function has many callers that are concerned
8125 with expression-construction, and this simplifies them all. */
8126 layout_type (t);
8127
8128 return t;
8129 }
8130
8131 /* By default build pointers in ptr_mode. */
8132
8133 tree
8134 build_pointer_type (tree to_type)
8135 {
8136 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8137 : TYPE_ADDR_SPACE (to_type);
8138 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8139 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8140 }
8141
8142 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8143
8144 tree
8145 build_reference_type_for_mode (tree to_type, machine_mode mode,
8146 bool can_alias_all)
8147 {
8148 tree t;
8149 bool could_alias = can_alias_all;
8150
8151 if (to_type == error_mark_node)
8152 return error_mark_node;
8153
8154 /* If the pointed-to type has the may_alias attribute set, force
8155 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8156 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8157 can_alias_all = true;
8158
8159 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8160 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8161 In that case, return that type without regard to the rest of our
8162 operands.
8163
8164 ??? This is a kludge, but consistent with the way this function has
8165 always operated and there doesn't seem to be a good way to avoid this
8166 at the moment. */
8167 if (TYPE_REFERENCE_TO (to_type) != 0
8168 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8169 return TYPE_REFERENCE_TO (to_type);
8170
8171 /* First, if we already have a type for pointers to TO_TYPE and it's
8172 the proper mode, use it. */
8173 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8174 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8175 return t;
8176
8177 t = make_node (REFERENCE_TYPE);
8178
8179 TREE_TYPE (t) = to_type;
8180 SET_TYPE_MODE (t, mode);
8181 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8182 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8183 TYPE_REFERENCE_TO (to_type) = t;
8184
8185 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8186 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8187 SET_TYPE_STRUCTURAL_EQUALITY (t);
8188 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8189 TYPE_CANONICAL (t)
8190 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8191 mode, false);
8192
8193 layout_type (t);
8194
8195 return t;
8196 }
8197
8198
8199 /* Build the node for the type of references-to-TO_TYPE by default
8200 in ptr_mode. */
8201
8202 tree
8203 build_reference_type (tree to_type)
8204 {
8205 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8206 : TYPE_ADDR_SPACE (to_type);
8207 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8208 return build_reference_type_for_mode (to_type, pointer_mode, false);
8209 }
8210
8211 #define MAX_INT_CACHED_PREC \
8212 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8213 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8214
8215 /* Builds a signed or unsigned integer type of precision PRECISION.
8216 Used for C bitfields whose precision does not match that of
8217 built-in target types. */
8218 tree
8219 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8220 int unsignedp)
8221 {
8222 tree itype, ret;
8223
8224 if (unsignedp)
8225 unsignedp = MAX_INT_CACHED_PREC + 1;
8226
8227 if (precision <= MAX_INT_CACHED_PREC)
8228 {
8229 itype = nonstandard_integer_type_cache[precision + unsignedp];
8230 if (itype)
8231 return itype;
8232 }
8233
8234 itype = make_node (INTEGER_TYPE);
8235 TYPE_PRECISION (itype) = precision;
8236
8237 if (unsignedp)
8238 fixup_unsigned_type (itype);
8239 else
8240 fixup_signed_type (itype);
8241
8242 inchash::hash hstate;
8243 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8244 ret = type_hash_canon (hstate.end (), itype);
8245 if (precision <= MAX_INT_CACHED_PREC)
8246 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8247
8248 return ret;
8249 }
8250
8251 #define MAX_BOOL_CACHED_PREC \
8252 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8253 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8254
8255 /* Builds a boolean type of precision PRECISION.
8256 Used for boolean vectors to choose proper vector element size. */
8257 tree
8258 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8259 {
8260 tree type;
8261
8262 if (precision <= MAX_BOOL_CACHED_PREC)
8263 {
8264 type = nonstandard_boolean_type_cache[precision];
8265 if (type)
8266 return type;
8267 }
8268
8269 type = make_node (BOOLEAN_TYPE);
8270 TYPE_PRECISION (type) = precision;
8271 fixup_signed_type (type);
8272
8273 if (precision <= MAX_INT_CACHED_PREC)
8274 nonstandard_boolean_type_cache[precision] = type;
8275
8276 return type;
8277 }
8278
8279 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8280 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8281 is true, reuse such a type that has already been constructed. */
8282
8283 static tree
8284 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8285 {
8286 tree itype = make_node (INTEGER_TYPE);
8287
8288 TREE_TYPE (itype) = type;
8289
8290 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8291 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8292
8293 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8294 SET_TYPE_MODE (itype, TYPE_MODE (type));
8295 TYPE_SIZE (itype) = TYPE_SIZE (type);
8296 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8297 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8298 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8299 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8300
8301 if (!shared)
8302 return itype;
8303
8304 if ((TYPE_MIN_VALUE (itype)
8305 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8306 || (TYPE_MAX_VALUE (itype)
8307 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8308 {
8309 /* Since we cannot reliably merge this type, we need to compare it using
8310 structural equality checks. */
8311 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8312 return itype;
8313 }
8314
8315 hashval_t hash = type_hash_canon_hash (itype);
8316 itype = type_hash_canon (hash, itype);
8317
8318 return itype;
8319 }
8320
/* Wrapper around build_range_type_1 with SHARED set to true: the result
   is canonicalized through the type hash table, so equivalent requests
   can return the same node.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, true);
}
8328
/* Wrapper around build_range_type_1 with SHARED set to false: a fresh
   node is built on every call and never merged via the type hash
   table.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, false);
}
8336
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).  The resulting domain is
   the sizetype range [0, MAXVAL].

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  return build_range_type (sizetype, size_zero_node, maxval);
}
8351
8352 /* Return true if the debug information for TYPE, a subtype, should be emitted
8353 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8354 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8355 debug info and doesn't reflect the source code. */
8356
8357 bool
8358 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8359 {
8360 tree base_type = TREE_TYPE (type), low, high;
8361
8362 /* Subrange types have a base type which is an integral type. */
8363 if (!INTEGRAL_TYPE_P (base_type))
8364 return false;
8365
8366 /* Get the real bounds of the subtype. */
8367 if (lang_hooks.types.get_subrange_bounds)
8368 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8369 else
8370 {
8371 low = TYPE_MIN_VALUE (type);
8372 high = TYPE_MAX_VALUE (type);
8373 }
8374
8375 /* If the type and its base type have the same representation and the same
8376 name, then the type is not a subrange but a copy of the base type. */
8377 if ((TREE_CODE (base_type) == INTEGER_TYPE
8378 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8379 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8380 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8381 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8382 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8383 return false;
8384
8385 if (lowval)
8386 *lowval = low;
8387 if (highval)
8388 *highval = high;
8389 return true;
8390 }
8391
8392 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8393 and number of elements specified by the range of values of INDEX_TYPE.
8394 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8395 If SHARED is true, reuse such a type that has already been constructed. */
8396
8397 static tree
8398 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8399 bool shared)
8400 {
8401 tree t;
8402
8403 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8404 {
8405 error ("arrays of functions are not meaningful");
8406 elt_type = integer_type_node;
8407 }
8408
8409 t = make_node (ARRAY_TYPE);
8410 TREE_TYPE (t) = elt_type;
8411 TYPE_DOMAIN (t) = index_type;
8412 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8413 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8414 layout_type (t);
8415
8416 /* If the element type is incomplete at this point we get marked for
8417 structural equality. Do not record these types in the canonical
8418 type hashtable. */
8419 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8420 return t;
8421
8422 if (shared)
8423 {
8424 hashval_t hash = type_hash_canon_hash (t);
8425 t = type_hash_canon (hash, t);
8426 }
8427
8428 if (TYPE_CANONICAL (t) == t)
8429 {
8430 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8431 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8432 || in_lto_p)
8433 SET_TYPE_STRUCTURAL_EQUALITY (t);
8434 else if (TYPE_CANONICAL (elt_type) != elt_type
8435 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8436 TYPE_CANONICAL (t)
8437 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8438 index_type
8439 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8440 typeless_storage, shared);
8441 }
8442
8443 return t;
8444 }
8445
/* Wrapper around build_array_type_1 with SHARED set to true: equivalent
   array types are merged through the type hash table.  */

tree
build_array_type (tree elt_type, tree index_type, bool typeless_storage)
{
  return build_array_type_1 (elt_type, index_type, typeless_storage, true);
}
8453
/* Wrapper around build_array_type_1 with SHARED set to false: a fresh
   node is built on every call and never merged via the type hash
   table.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false, false);
}
8461
8462 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8463 sizetype. */
8464
8465 tree
8466 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8467 {
8468 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8469 }
8470
8471 /* Recursively examines the array elements of TYPE, until a non-array
8472 element type is found. */
8473
8474 tree
8475 strip_array_types (tree type)
8476 {
8477 while (TREE_CODE (type) == ARRAY_TYPE)
8478 type = TREE_TYPE (type);
8479
8480 return type;
8481 }
8482
8483 /* Computes the canonical argument types from the argument type list
8484 ARGTYPES.
8485
8486 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8487 on entry to this function, or if any of the ARGTYPES are
8488 structural.
8489
8490 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8491 true on entry to this function, or if any of the ARGTYPES are
8492 non-canonical.
8493
8494 Returns a canonical argument list, which may be ARGTYPES when the
8495 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8496 true) or would not differ from ARGTYPES. */
8497
8498 static tree
8499 maybe_canonicalize_argtypes (tree argtypes,
8500 bool *any_structural_p,
8501 bool *any_noncanonical_p)
8502 {
8503 tree arg;
8504 bool any_noncanonical_argtypes_p = false;
8505
8506 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8507 {
8508 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8509 /* Fail gracefully by stating that the type is structural. */
8510 *any_structural_p = true;
8511 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8512 *any_structural_p = true;
8513 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8514 || TREE_PURPOSE (arg))
8515 /* If the argument has a default argument, we consider it
8516 non-canonical even though the type itself is canonical.
8517 That way, different variants of function and method types
8518 with default arguments will all point to the variant with
8519 no defaults as their canonical type. */
8520 any_noncanonical_argtypes_p = true;
8521 }
8522
8523 if (*any_structural_p)
8524 return argtypes;
8525
8526 if (any_noncanonical_argtypes_p)
8527 {
8528 /* Build the canonical list of argument types. */
8529 tree canon_argtypes = NULL_TREE;
8530 bool is_void = false;
8531
8532 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8533 {
8534 if (arg == void_list_node)
8535 is_void = true;
8536 else
8537 canon_argtypes = tree_cons (NULL_TREE,
8538 TYPE_CANONICAL (TREE_VALUE (arg)),
8539 canon_argtypes);
8540 }
8541
8542 canon_argtypes = nreverse (canon_argtypes);
8543 if (is_void)
8544 canon_argtypes = chainon (canon_argtypes, void_list_node);
8545
8546 /* There is a non-canonical type. */
8547 *any_noncanonical_p = true;
8548 return canon_argtypes;
8549 }
8550
8551 /* The canonical argument types are the same as ARGTYPES. */
8552 return argtypes;
8553 }
8554
8555 /* Construct, lay out and return
8556 the type of functions returning type VALUE_TYPE
8557 given arguments of types ARG_TYPES.
8558 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8559 are data type nodes for the arguments of the function.
8560 If such a type has already been constructed, reuse it. */
8561
8562 tree
8563 build_function_type (tree value_type, tree arg_types)
8564 {
8565 tree t;
8566 inchash::hash hstate;
8567 bool any_structural_p, any_noncanonical_p;
8568 tree canon_argtypes;
8569
8570 gcc_assert (arg_types != error_mark_node);
8571
8572 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8573 {
8574 error ("function return type cannot be function");
8575 value_type = integer_type_node;
8576 }
8577
8578 /* Make a node of the sort we want. */
8579 t = make_node (FUNCTION_TYPE);
8580 TREE_TYPE (t) = value_type;
8581 TYPE_ARG_TYPES (t) = arg_types;
8582
8583 /* If we already have such a type, use the old one. */
8584 hashval_t hash = type_hash_canon_hash (t);
8585 t = type_hash_canon (hash, t);
8586
8587 /* Set up the canonical type. */
8588 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8589 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8590 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8591 &any_structural_p,
8592 &any_noncanonical_p);
8593 if (any_structural_p)
8594 SET_TYPE_STRUCTURAL_EQUALITY (t);
8595 else if (any_noncanonical_p)
8596 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8597 canon_argtypes);
8598
8599 if (!COMPLETE_TYPE_P (t))
8600 layout_type (t);
8601 return t;
8602 }
8603
8604 /* Build a function type. The RETURN_TYPE is the type returned by the
8605 function. If VAARGS is set, no void_type_node is appended to the
8606 list. ARGP must be always be terminated be a NULL_TREE. */
8607
8608 static tree
8609 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8610 {
8611 tree t, args, last;
8612
8613 t = va_arg (argp, tree);
8614 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8615 args = tree_cons (NULL_TREE, t, args);
8616
8617 if (vaargs)
8618 {
8619 last = args;
8620 if (args != NULL_TREE)
8621 args = nreverse (args);
8622 gcc_assert (last != void_list_node);
8623 }
8624 else if (args == NULL_TREE)
8625 args = void_list_node;
8626 else
8627 {
8628 last = args;
8629 args = nreverse (args);
8630 TREE_CHAIN (last) = void_list_node;
8631 }
8632 args = build_function_type (return_type, args);
8633
8634 return args;
8635 }
8636
8637 /* Build a function type. The RETURN_TYPE is the type returned by the
8638 function. If additional arguments are provided, they are
8639 additional argument types. The list of argument types must always
8640 be terminated by NULL_TREE. */
8641
8642 tree
8643 build_function_type_list (tree return_type, ...)
8644 {
8645 tree args;
8646 va_list p;
8647
8648 va_start (p, return_type);
8649 args = build_function_type_list_1 (false, return_type, p);
8650 va_end (p);
8651 return args;
8652 }
8653
8654 /* Build a variable argument function type. The RETURN_TYPE is the
8655 type returned by the function. If additional arguments are provided,
8656 they are additional argument types. The list of argument types must
8657 always be terminated by NULL_TREE. */
8658
8659 tree
8660 build_varargs_function_type_list (tree return_type, ...)
8661 {
8662 tree args;
8663 va_list p;
8664
8665 va_start (p, return_type);
8666 args = build_function_type_list_1 (true, return_type, p);
8667 va_end (p);
8668
8669 return args;
8670 }
8671
8672 /* Build a function type. RETURN_TYPE is the type returned by the
8673 function; VAARGS indicates whether the function takes varargs. The
8674 function takes N named arguments, the types of which are provided in
8675 ARG_TYPES. */
8676
8677 static tree
8678 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8679 tree *arg_types)
8680 {
8681 int i;
8682 tree t = vaargs ? NULL_TREE : void_list_node;
8683
8684 for (i = n - 1; i >= 0; i--)
8685 t = tree_cons (NULL_TREE, arg_types[i], t);
8686
8687 return build_function_type (return_type, t);
8688 }
8689
/* Build a function type.  RETURN_TYPE is the type returned by the
   function.  The function takes N named arguments, the types of which
   are provided in ARG_TYPES.  The resulting argument list is
   terminated by void_list_node, i.e. the type is not variadic.  */

tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (false, return_type, n, arg_types);
}
8699
/* Build a variable argument function type.  RETURN_TYPE is the type
   returned by the function.  The function takes N named arguments, the
   types of which are provided in ARG_TYPES.  The argument list is not
   terminated by void_list_node, marking the type as variadic.  */

tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (true, return_type, n, arg_types);
}
8709
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Canonicalize the explicit argument types only: TREE_CHAIN skips the
     hidden "this" pointer consed on above, whose canonicality follows
     from BASETYPE, already accounted for.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    /* Recursively build the canonical variant of this method type.  */
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
8763
/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments and values are described by TYPE.
   If that type exists already, reuse it.
   TYPE must be a FUNCTION_TYPE node.  */

tree
build_method_type (tree basetype, tree type)
{
  /* Only a FUNCTION_TYPE can be converted into a METHOD_TYPE.  */
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  return build_method_type_directly (basetype,
				     TREE_TYPE (type),
				     TYPE_ARG_TYPES (type));
}
8778
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute a canonical type while T is its own canonical type; a
     type returned from the hash table may already point elsewhere.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
8815
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Complex types are only defined over integral, real-float and
     fixed-point component types.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  /* Hash on the unqualified component; qualifiers are re-applied at
     the end via build_qualified_type.  */
  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Re-apply COMPONENT_TYPE's qualifiers to the (main-variant) result.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
8890
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : EXCESS_PRECISION_TYPE_STANDARD);

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* float16_type_node may not exist on all targets; VOIDmode then never
     matches any real mode below.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  /* Map TYPE's mode to the narrowest type the target is willing to
     evaluate it in, for both the real and complex cases.  */
  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex types with a real-float component participate.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
8994 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  /* Peel off nested conversions, tracking in WIN the innermost
     expression it is safe to strip down to.  */
  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means it is a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
9091 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  /* Peel nested NOP_EXPRs, remembering in UNS the kind of the first
     extension stripped so later extensions must agree with it.  */
  while (TREE_CODE (op) == NOP_EXPR)
    {
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  /* A COMPONENT_REF of a field narrower than its nominal type can be
     read directly in the narrower type, as if the extension to the
     nominal type had been stripped.  */
  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
9185 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more
     we can check ourselves here.  Look at the base type if we have one and
     it has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
9282
9283 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9284 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9285 represented (assuming two's-complement arithmetic) within the bit
9286 precision of the type are returned instead. */
9287
9288 void
9289 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9290 {
9291 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9292 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9293 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9294 else
9295 {
9296 if (TYPE_UNSIGNED (type))
9297 mpz_set_ui (min, 0);
9298 else
9299 {
9300 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9301 wi::to_mpz (mn, min, SIGNED);
9302 }
9303 }
9304
9305 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9306 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9307 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9308 else
9309 {
9310 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9311 wi::to_mpz (mn, max, TYPE_SIGN (type));
9312 }
9313 }
9314
9315 /* Return true if VAR is an automatic variable. */
9316
9317 bool
9318 auto_var_p (const_tree var)
9319 {
9320 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9321 || TREE_CODE (var) == PARM_DECL)
9322 && ! TREE_STATIC (var))
9323 || TREE_CODE (var) == RESULT_DECL);
9324 }
9325
9326 /* Return true if VAR is an automatic variable defined in function FN. */
9327
9328 bool
9329 auto_var_in_fn_p (const_tree var, const_tree fn)
9330 {
9331 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9332 && (auto_var_p (var)
9333 || TREE_CODE (var) == LABEL_DECL));
9334 }
9335
9336 /* Subprogram of following function. Called by walk_tree.
9337
9338 Return *TP if it is an automatic variable or parameter of the
9339 function passed in as DATA. */
9340
9341 static tree
9342 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9343 {
9344 tree fn = (tree) data;
9345
9346 if (TYPE_P (*tp))
9347 *walk_subtrees = 0;
9348
9349 else if (DECL_P (*tp)
9350 && auto_var_in_fn_p (*tp, fn))
9351 return *tp;
9352
9353 return NULL_TREE;
9354 }
9355
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly.
	 Use TREE_VISITED as a cycle guard during the recursion.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
9471
9472 /* Given a DECL or TYPE, return the scope in which it was declared, or
9473 NULL_TREE if there is no containing scope. */
9474
9475 tree
9476 get_containing_scope (const_tree t)
9477 {
9478 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9479 }
9480
9481 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9482
9483 const_tree
9484 get_ultimate_context (const_tree decl)
9485 {
9486 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9487 {
9488 if (TREE_CODE (decl) == BLOCK)
9489 decl = BLOCK_SUPERCONTEXT (decl);
9490 else
9491 decl = get_containing_scope (decl);
9492 }
9493 return decl;
9494 }
9495
/* Return the innermost context enclosing DECL that is
   a FUNCTION_DECL, or zero if none.  */

tree
decl_function_context (const_tree decl)
{
  tree context;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;

  /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
     where we look up the function at runtime.  Such functions always take
     a first argument of type 'pointer to real context'.

     C++ should really be fixed to use DECL_CONTEXT for the real context,
     and use something else for the "virtual context".  */
  else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
    context
      = TYPE_MAIN_VARIANT
	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  /* Walk outwards through BLOCKs and other scopes until a FUNCTION_DECL
     (or nothing) is found.  */
  while (context && TREE_CODE (context) != FUNCTION_DECL)
    {
      if (TREE_CODE (context) == BLOCK)
	context = BLOCK_SUPERCONTEXT (context);
      else
	context = get_containing_scope (context);
    }

  return context;
}
9530
/* Return the innermost context enclosing DECL that is
   a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
   TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */

tree
decl_type_context (const_tree decl)
{
  tree context = DECL_CONTEXT (decl);

  while (context)
    switch (TREE_CODE (context))
      {
      case NAMESPACE_DECL:
      case TRANSLATION_UNIT_DECL:
	/* Reached a scope that cannot itself be nested inside an
	   aggregate type: no enclosing type exists.  */
	return NULL_TREE;

      case RECORD_TYPE:
      case UNION_TYPE:
      case QUAL_UNION_TYPE:
	return context;

      case TYPE_DECL:
      case FUNCTION_DECL:
	/* Transparent: skip over them and keep walking outward.  */
	context = DECL_CONTEXT (context);
	break;

      case BLOCK:
	context = BLOCK_SUPERCONTEXT (context);
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
9567
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  /* Propagate errors quietly rather than asserting below.  */
  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  (This is the internal
     function call case.)  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
9609
9610 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9611 return the associated function code, otherwise return CFN_LAST. */
9612
9613 combined_fn
9614 get_call_combined_fn (const_tree call)
9615 {
9616 /* It's invalid to call this function with anything but a CALL_EXPR. */
9617 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9618
9619 if (!CALL_EXPR_FN (call))
9620 return as_combined_fn (CALL_EXPR_IFN (call));
9621
9622 tree fndecl = get_callee_fndecl (call);
9623 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9624 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9625
9626 return CFN_LAST;
9627 }
9628
9629 /* Comparator of indices based on tree_node_counts. */
9630
9631 static int
9632 tree_nodes_cmp (const void *p1, const void *p2)
9633 {
9634 const unsigned *n1 = (const unsigned *)p1;
9635 const unsigned *n2 = (const unsigned *)p2;
9636
9637 return tree_node_counts[*n1] - tree_node_counts[*n2];
9638 }
9639
9640 /* Comparator of indices based on tree_code_counts. */
9641
9642 static int
9643 tree_codes_cmp (const void *p1, const void *p2)
9644 {
9645 const unsigned *n1 = (const unsigned *)p1;
9646 const unsigned *n2 = (const unsigned *)p2;
9647
9648 return tree_code_counts[*n1] - tree_code_counts[*n2];
9649 }
9650
9651 #define TREE_MEM_USAGE_SPACES 40
9652
9653 /* Print debugging information about tree nodes generated during the compile,
9654 and any language-specific information. */
9655
9656 void
9657 dump_tree_statistics (void)
9658 {
9659 if (GATHER_STATISTICS)
9660 {
9661 uint64_t total_nodes, total_bytes;
9662 fprintf (stderr, "\nKind Nodes Bytes\n");
9663 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9664 total_nodes = total_bytes = 0;
9665
9666 {
9667 auto_vec<unsigned> indices (all_kinds);
9668 for (unsigned i = 0; i < all_kinds; i++)
9669 indices.quick_push (i);
9670 indices.qsort (tree_nodes_cmp);
9671
9672 for (unsigned i = 0; i < (int) all_kinds; i++)
9673 {
9674 unsigned j = indices[i];
9675 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9676 tree_node_kind_names[i], SIZE_AMOUNT (tree_node_counts[j]),
9677 SIZE_AMOUNT (tree_node_sizes[j]));
9678 total_nodes += tree_node_counts[j];
9679 total_bytes += tree_node_sizes[j];
9680 }
9681 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9682 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9683 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9684 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9685 }
9686
9687 {
9688 fprintf (stderr, "Code Nodes\n");
9689 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9690
9691 auto_vec<unsigned> indices (MAX_TREE_CODES);
9692 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9693 indices.quick_push (i);
9694 indices.qsort (tree_codes_cmp);
9695
9696 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9697 {
9698 unsigned j = indices[i];
9699 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9700 get_tree_code_name ((enum tree_code) j),
9701 SIZE_AMOUNT (tree_code_counts[j]));
9702 }
9703 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9704 fprintf (stderr, "\n");
9705 ssanames_print_statistics ();
9706 fprintf (stderr, "\n");
9707 phinodes_print_statistics ();
9708 fprintf (stderr, "\n");
9709 }
9710 }
9711 else
9712 fprintf (stderr, "(No per-node statistics)\n");
9713
9714 print_type_hash_statistics ();
9715 print_debug_expr_statistics ();
9716 print_value_expr_statistics ();
9717 lang_hooks.print_statistics ();
9718 }
9719 \f
9720 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9721
/* Generate a crc32 of the low BYTES bytes of VALUE.  Processes the
   value one nibble at a time, MSB first, using a 16-entry syndrome
   table for the 802.3 polynomial 0x04c11db7.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes so the MSB nibble is processed
     first.  */
  value <<= (32 - bytes * 8);

  /* Two nibbles per byte.  */
  unsigned nibbles = bytes * 2;
  while (nibbles--)
    {
      chksum = (chksum << 4) ^ syndromes[((value ^ chksum) >> 28) & 0xf];
      value <<= 4;
    }

  return chksum;
}
9751
9752 /* Generate a crc32 of a string. */
9753
9754 unsigned
9755 crc32_string (unsigned chksum, const char *string)
9756 {
9757 do
9758 chksum = crc32_byte (chksum, *string);
9759 while (*string++);
9760 return chksum;
9761 }
9762
9763 /* P is a string that will be used in a symbol. Mask out any characters
9764 that are not valid in that context. */
9765
9766 void
9767 clean_symbol_name (char *p)
9768 {
9769 for (; *p; p++)
9770 if (! (ISALNUM (*p)
9771 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9772 || *p == '$'
9773 #endif
9774 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9775 || *p == '.'
9776 #endif
9777 ))
9778 *p = '_';
9779 }
9780
/* Counter for anonymous identifiers; GC-rooted so it round-trips
   through precompiled headers.  */
static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */

/* Create a unique anonymous identifier.  The identifier is still a
   valid assembly label.  */

tree
make_anon_name ()
{
  /* Prefer a separator character that cannot appear in a user-written
     C identifier ('.' or '$'), falling back to '_' only when the
     target's label syntax allows neither.  */
  const char *fmt =
#if !defined (NO_DOT_IN_LABEL)
    "."
#elif !defined (NO_DOLLAR_IN_LABEL)
    "$"
#else
    "_"
#endif
    "_anon_%d";

  /* 24 bytes comfortably holds the prefix plus the decimal rendering
     of any 32-bit counter value.  */
  char buf[24];
  int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
  gcc_checking_assert (len < int (sizeof (buf)));

  tree id = get_identifier_with_length (buf, len);
  IDENTIFIER_ANON_P (id) = true;

  return id;
}
9808
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 presumably covers "_%08X" plus slack and 19 covers "_" plus a
	 0x-prefixed 64-bit hex seed — NOTE(review): verify against the
	 snprintf format below if either format string changes.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace characters not valid in a symbol with '_'.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9883 \f
9884 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9885
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: compute an upper bound on the length
     of the "expected ..." message (4 covers a " or " separator per
     code).  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build the message into a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* Empty code list: caller only wanted a vague message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9929
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass: size the message buffer (4 covers a " or " separator
     per code).  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the " or "-separated list of forbidden codes.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9966
/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  /* Report both the expected class and NODE's actual class and code.  */
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9980
/* Similar to tree_check_failed, except that instead of specifying a
   dozen codes, use the knowledge that they're all sequential.  */

void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* First pass: size the buffer (4 covers a " or " separator per
     code; the "expected " prefix is added below).  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Second pass: build "expected A or B or ..." over the inclusive
     range [C1, C2].  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
10014
10015
/* Similar to tree_check_failed, except that we check that a tree does
   not have the specified code, given in CL.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  /* Report the forbidden class together with NODE's actual class and
     code.  */
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
10029
10030
/* Similar to tree_check_failed but applied to OMP_CLAUSE codes.
   CODE is the single clause code that was expected.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
		  "in %s, at %s:%d",
		  omp_clause_code_name[code],
		  get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
10043
10044
10045 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
10046
10047 void
10048 omp_clause_range_check_failed (const_tree node, const char *file, int line,
10049 const char *function, enum omp_clause_code c1,
10050 enum omp_clause_code c2)
10051 {
10052 char *buffer;
10053 unsigned length = 0;
10054 unsigned int c;
10055
10056 for (c = c1; c <= c2; ++c)
10057 length += 4 + strlen (omp_clause_code_name[c]);
10058
10059 length += strlen ("expected ");
10060 buffer = (char *) alloca (length);
10061 length = 0;
10062
10063 for (c = c1; c <= c2; ++c)
10064 {
10065 const char *prefix = length ? " or " : "expected ";
10066
10067 strcpy (buffer + length, prefix);
10068 length += strlen (prefix);
10069 strcpy (buffer + length, omp_clause_code_name[c]);
10070 length += strlen (omp_clause_code_name[c]);
10071 }
10072
10073 internal_error ("tree check: %s, have %s in %s, at %s:%d",
10074 buffer, omp_clause_code_name[TREE_CODE (node)],
10075 function, trim_filename (file), line);
10076 }
10077
10078
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names for the tree_node_structure_enum values, generated
   by expanding treestruct.def with the DEFTREESTRUCT above.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
10088
/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
10103
10104
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) element vector.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
10117
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  IDX is the out-of-range index, LEN the
   vector's actual length.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
10129
/* Similar to above, except that the check is for the bounds of the operand
   vector of an expression node EXP.  */

void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}
10143
/* Similar to above, except that the check is for the number of
   operands of an OMP_CLAUSE node T.  */

void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
				 int line, const char *function)
{
  internal_error
    ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
10157 #endif /* ENABLE_TREE_CHECKING */
10158 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  /* Build the vector over the main variant of the element type; any
     qualified/attributed variant is recreated at the end.  */
  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* The canonical form is a VOIDmode vector over the canonical
       element type (recursion terminates because that call takes the
       branch above or builds with VOIDmode and a canonical element).  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share an existing identical type node if one was already built.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
10197
/* Return an integer type with SIZE bits of precision and signedness
   given by UNSIGNEDP, reusing one of the standard C type nodes (or an
   enabled __intN type) when the size matches; otherwise build a fresh
   type.  */
static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  /* Try the target's enabled __intN types next.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  /* No standard node of this size: build one.  */
  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
10226
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  SATP
   selects the saturating variants of the fixed-point fract types.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No pre-built node of this size; create a fresh fract type.  */
  return make_fract_type (size, unsignedp, satp);
}
10263
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  SATP
   selects the saturating variants of the fixed-point accum types.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No pre-built node of this size; create a fresh accum type.  */
  return make_accum_type (size, unsignedp, satp);
}
10300
10301
/* Create an atomic variant node for TYPE.  This routine is called
   during initialization of data types to create the 5 basic atomic
   types.  The generic build_variant_type function requires these to
   already be set up in order to function properly, so cannot be
   called from there.  If ALIGN is non-zero, then ensure alignment is
   overridden to this value.  */

static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  /* Apply the target-requested alignment override, if any.  */
  if (align)
    SET_TYPE_ALIGN (t, align);

  return t;
}
10326
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry is
   { N, extended }: N is the bit count in the type's name, and the flag
   distinguishes the extended _FloatNx types from plain _FloatN.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
{
  { 16, false },   /* _Float16 */
  { 32, false },   /* _Float32 */
  { 64, false },   /* _Float64 */
  { 128, false },  /* _Float128 */
  { 32, true },    /* _Float32x */
  { 64, true },    /* _Float64x */
  { 128, true },   /* _Float128x */
};
10339
10340
10341 /* Create nodes for all integer types (and error_mark_node) using the sizes
10342 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10343
10344 void
10345 build_common_tree_nodes (bool signed_char)
10346 {
10347 int i;
10348
10349 error_mark_node = make_node (ERROR_MARK);
10350 TREE_TYPE (error_mark_node) = error_mark_node;
10351
10352 initialize_sizetypes ();
10353
10354 /* Define both `signed char' and `unsigned char'. */
10355 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10356 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10357 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10358 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10359
10360 /* Define `char', which is like either `signed char' or `unsigned char'
10361 but not the same as either. */
10362 char_type_node
10363 = (signed_char
10364 ? make_signed_type (CHAR_TYPE_SIZE)
10365 : make_unsigned_type (CHAR_TYPE_SIZE));
10366 TYPE_STRING_FLAG (char_type_node) = 1;
10367
10368 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10369 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10370 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10371 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10372 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10373 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10374 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10375 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10376
10377 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10378 {
10379 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10380 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10381
10382 if (int_n_enabled_p[i])
10383 {
10384 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10385 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10386 }
10387 }
10388
10389 /* Define a boolean type. This type only represents boolean values but
10390 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10391 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10392 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10393 TYPE_PRECISION (boolean_type_node) = 1;
10394 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10395
10396 /* Define what type to use for size_t. */
10397 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10398 size_type_node = unsigned_type_node;
10399 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10400 size_type_node = long_unsigned_type_node;
10401 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10402 size_type_node = long_long_unsigned_type_node;
10403 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10404 size_type_node = short_unsigned_type_node;
10405 else
10406 {
10407 int i;
10408
10409 size_type_node = NULL_TREE;
10410 for (i = 0; i < NUM_INT_N_ENTS; i++)
10411 if (int_n_enabled_p[i])
10412 {
10413 char name[50], altname[50];
10414 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10415 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10416
10417 if (strcmp (name, SIZE_TYPE) == 0
10418 || strcmp (altname, SIZE_TYPE) == 0)
10419 {
10420 size_type_node = int_n_trees[i].unsigned_type;
10421 }
10422 }
10423 if (size_type_node == NULL_TREE)
10424 gcc_unreachable ();
10425 }
10426
10427 /* Define what type to use for ptrdiff_t. */
10428 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10429 ptrdiff_type_node = integer_type_node;
10430 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10431 ptrdiff_type_node = long_integer_type_node;
10432 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10433 ptrdiff_type_node = long_long_integer_type_node;
10434 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10435 ptrdiff_type_node = short_integer_type_node;
10436 else
10437 {
10438 ptrdiff_type_node = NULL_TREE;
10439 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10440 if (int_n_enabled_p[i])
10441 {
10442 char name[50], altname[50];
10443 sprintf (name, "__int%d", int_n_data[i].bitsize);
10444 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10445
10446 if (strcmp (name, PTRDIFF_TYPE) == 0
10447 || strcmp (altname, PTRDIFF_TYPE) == 0)
10448 ptrdiff_type_node = int_n_trees[i].signed_type;
10449 }
10450 if (ptrdiff_type_node == NULL_TREE)
10451 gcc_unreachable ();
10452 }
10453
10454 /* Fill in the rest of the sized types. Reuse existing type nodes
10455 when possible. */
10456 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10457 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10458 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10459 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10460 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10461
10462 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10463 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10464 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10465 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10466 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10467
10468 /* Don't call build_qualified type for atomics. That routine does
10469 special processing for atomics, and until they are initialized
10470 it's better not to make that call.
10471
10472 Check to see if there is a target override for atomic types. */
10473
10474 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10475 targetm.atomic_align_for_mode (QImode));
10476 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10477 targetm.atomic_align_for_mode (HImode));
10478 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10479 targetm.atomic_align_for_mode (SImode));
10480 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10481 targetm.atomic_align_for_mode (DImode));
10482 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10483 targetm.atomic_align_for_mode (TImode));
10484
10485 access_public_node = get_identifier ("public");
10486 access_protected_node = get_identifier ("protected");
10487 access_private_node = get_identifier ("private");
10488
10489   /* Define these next since types below may use them.  */
10490 integer_zero_node = build_int_cst (integer_type_node, 0);
10491 integer_one_node = build_int_cst (integer_type_node, 1);
10492 integer_three_node = build_int_cst (integer_type_node, 3);
10493 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10494
10495 size_zero_node = size_int (0);
10496 size_one_node = size_int (1);
10497 bitsize_zero_node = bitsize_int (0);
10498 bitsize_one_node = bitsize_int (1);
10499 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10500
10501 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10502 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10503
10504 void_type_node = make_node (VOID_TYPE);
10505 layout_type (void_type_node);
10506
10507 /* We are not going to have real types in C with less than byte alignment,
10508 so we might as well not have any types that claim to have it. */
10509 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10510 TYPE_USER_ALIGN (void_type_node) = 0;
10511
10512 void_node = make_node (VOID_CST);
10513 TREE_TYPE (void_node) = void_type_node;
10514
10515 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10516 layout_type (TREE_TYPE (null_pointer_node));
10517
10518 ptr_type_node = build_pointer_type (void_type_node);
10519 const_ptr_type_node
10520 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10521 for (unsigned i = 0;
10522 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10523 ++i)
10524 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10525
10526 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10527
10528 float_type_node = make_node (REAL_TYPE);
10529 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10530 layout_type (float_type_node);
10531
10532 double_type_node = make_node (REAL_TYPE);
10533 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10534 layout_type (double_type_node);
10535
10536 long_double_type_node = make_node (REAL_TYPE);
10537 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10538 layout_type (long_double_type_node);
10539
10540 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10541 {
10542 int n = floatn_nx_types[i].n;
10543 bool extended = floatn_nx_types[i].extended;
10544 scalar_float_mode mode;
10545 if (!targetm.floatn_mode (n, extended).exists (&mode))
10546 continue;
10547 int precision = GET_MODE_PRECISION (mode);
10548 /* Work around the rs6000 KFmode having precision 113 not
10549 128. */
10550 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10551 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10552 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10553 if (!extended)
10554 gcc_assert (min_precision == n);
10555 if (precision < min_precision)
10556 precision = min_precision;
10557 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10558 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10559 layout_type (FLOATN_NX_TYPE_NODE (i));
10560 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10561 }
10562
10563 float_ptr_type_node = build_pointer_type (float_type_node);
10564 double_ptr_type_node = build_pointer_type (double_type_node);
10565 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10566 integer_ptr_type_node = build_pointer_type (integer_type_node);
10567
10568 /* Fixed size integer types. */
10569 uint16_type_node = make_or_reuse_type (16, 1);
10570 uint32_type_node = make_or_reuse_type (32, 1);
10571 uint64_type_node = make_or_reuse_type (64, 1);
10572
10573 /* Decimal float types. */
10574 dfloat32_type_node = make_node (REAL_TYPE);
10575 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10576 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10577 layout_type (dfloat32_type_node);
10578 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10579
10580 dfloat64_type_node = make_node (REAL_TYPE);
10581 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10582 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10583 layout_type (dfloat64_type_node);
10584 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10585
10586 dfloat128_type_node = make_node (REAL_TYPE);
10587 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10588 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10589 layout_type (dfloat128_type_node);
10590 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10591
10592 complex_integer_type_node = build_complex_type (integer_type_node, true);
10593 complex_float_type_node = build_complex_type (float_type_node, true);
10594 complex_double_type_node = build_complex_type (double_type_node, true);
10595 complex_long_double_type_node = build_complex_type (long_double_type_node,
10596 true);
10597
10598 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10599 {
10600 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10601 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10602 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10603 }
10604
10605 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10606 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10607 sat_ ## KIND ## _type_node = \
10608 make_sat_signed_ ## KIND ## _type (SIZE); \
10609 sat_unsigned_ ## KIND ## _type_node = \
10610 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10611 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10612 unsigned_ ## KIND ## _type_node = \
10613 make_unsigned_ ## KIND ## _type (SIZE);
10614
10615 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10616 sat_ ## WIDTH ## KIND ## _type_node = \
10617 make_sat_signed_ ## KIND ## _type (SIZE); \
10618 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10619 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10620 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10621 unsigned_ ## WIDTH ## KIND ## _type_node = \
10622 make_unsigned_ ## KIND ## _type (SIZE);
10623
10624 /* Make fixed-point type nodes based on four different widths. */
10625 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10626 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10627 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10628 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10629 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10630
10631 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10632 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10633 NAME ## _type_node = \
10634 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10635 u ## NAME ## _type_node = \
10636 make_or_reuse_unsigned_ ## KIND ## _type \
10637 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10638 sat_ ## NAME ## _type_node = \
10639 make_or_reuse_sat_signed_ ## KIND ## _type \
10640 (GET_MODE_BITSIZE (MODE ## mode)); \
10641 sat_u ## NAME ## _type_node = \
10642 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10643 (GET_MODE_BITSIZE (U ## MODE ## mode));
10644
10645 /* Fixed-point type and mode nodes. */
10646 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10647 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10648 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10649 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10650 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10651 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10652 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10653 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10654 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10655 MAKE_FIXED_MODE_NODE (accum, da, DA)
10656 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10657
10658 {
10659 tree t = targetm.build_builtin_va_list ();
10660
10661 /* Many back-ends define record types without setting TYPE_NAME.
10662 If we copied the record type here, we'd keep the original
10663 record type without a name. This breaks name mangling. So,
10664 don't copy record types and let c_common_nodes_and_builtins()
10665 declare the type to be __builtin_va_list. */
10666 if (TREE_CODE (t) != RECORD_TYPE)
10667 t = build_variant_type_copy (t);
10668
10669 va_list_type_node = t;
10670 }
10671
10672 /* SCEV analyzer global shared trees. */
10673 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10674 TREE_TYPE (chrec_dont_know) = void_type_node;
10675 chrec_known = make_node (SCEV_KNOWN);
10676 TREE_TYPE (chrec_known) = void_type_node;
10677 }
10678
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  /* Map each ECF_* bit onto the corresponding decl flag or attribute;
     bits not handled here are silently ignored.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* "Does not return" is recorded as TREE_THIS_VOLATILE on the decl.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* LEAF, COLD and RET1 have no dedicated decl bit; they are represented
     as attributes prepended to DECL_ATTRIBUTES.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* RET1 means "returns its first argument", expressed as fn spec "1".  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (1, "1")),
		   DECL_ATTRIBUTES (decl));
  /* TM_PURE only makes sense when transactional memory is enabled.  */
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
10720
10721
10722 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10723
10724 static void
10725 local_define_builtin (const char *name, tree type, enum built_in_function code,
10726 const char *library_name, int ecf_flags)
10727 {
10728 tree decl;
10729
10730 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10731 library_name, NULL_TREE);
10732 set_call_expr_flags (decl, ecf_flags);
10733
10734 set_builtin_decl (code, decl, true);
10735 }
10736
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Control-flow sinks the middle end may synthesize calls to.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Block-memory primitives; ECF_RET1 records that they return their
     first argument.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  /* Internal alloca variants carrying alignment (and maximum-size)
     operands; these are always defined.  */
  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Nested-function trampoline and function-descriptor support.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  /* Nonlocal goto and setjmp/longjmp lowering helpers.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  /* Stack save/restore used for variable-sized locals and inlining.  */
  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Equality-only comparison variants, used when only the zero/nonzero
     result matters.  The same FTYPE is reused for all three.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
     alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library entry point depends on whether SJLJ or unwind-table
     exception handling is in use.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions entry/exit hooks.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	/* __mulMC3 / __divMC3 take the real and imaginary parts of both
	   operands separately and return the full complex value.  */
	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Build the lower-cased mode name used in the libfunc name.  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
11012
11013 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
11014 better way.
11015
11016 If we requested a pointer to a vector, build up the pointers that
11017 we stripped off while looking for the inner type. Similarly for
11018 return values from functions.
11019
11020 The argument TYPE is the top of the chain, and BOTTOM is the
11021 new type which we will point to. */
11022
11023 tree
11024 reconstruct_complex_type (tree type, tree bottom)
11025 {
11026 tree inner, outer;
11027
11028 if (TREE_CODE (type) == POINTER_TYPE)
11029 {
11030 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11031 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
11032 TYPE_REF_CAN_ALIAS_ALL (type));
11033 }
11034 else if (TREE_CODE (type) == REFERENCE_TYPE)
11035 {
11036 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11037 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
11038 TYPE_REF_CAN_ALIAS_ALL (type));
11039 }
11040 else if (TREE_CODE (type) == ARRAY_TYPE)
11041 {
11042 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11043 outer = build_array_type (inner, TYPE_DOMAIN (type));
11044 }
11045 else if (TREE_CODE (type) == FUNCTION_TYPE)
11046 {
11047 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11048 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
11049 }
11050 else if (TREE_CODE (type) == METHOD_TYPE)
11051 {
11052 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11053 /* The build_method_type_directly() routine prepends 'this' to argument list,
11054 so we must compensate by getting rid of it. */
11055 outer
11056 = build_method_type_directly
11057 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
11058 inner,
11059 TREE_CHAIN (TYPE_ARG_TYPES (type)));
11060 }
11061 else if (TREE_CODE (type) == OFFSET_TYPE)
11062 {
11063 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11064 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
11065 }
11066 else
11067 return bottom;
11068
11069 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
11070 TYPE_QUALS (type));
11071 }
11072
/* Returns a vector tree node given a mode (integer or vector) and
   the inner type.  Any other mode class trips the assertion below.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      /* A true vector mode carries its own element count.  */
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* An integer mode is treated as N copies of INNERTYPE.
	 Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
11106
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  /* VOIDmode defers mode selection to make_vector_type.  */
  return make_vector_type (innertype, nunits, VOIDmode);
}
11115
11116 /* Build truth vector with specified length and number of units. */
11117
11118 tree
11119 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
11120 {
11121 machine_mode mask_mode
11122 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
11123
11124 poly_uint64 vsize;
11125 if (mask_mode == BLKmode)
11126 vsize = vector_size * BITS_PER_UNIT;
11127 else
11128 vsize = GET_MODE_BITSIZE (mask_mode);
11129
11130 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
11131
11132 tree bool_type = build_nonstandard_boolean_type (esize);
11133
11134 return make_vector_type (bool_type, nunits, mask_mode);
11135 }
11136
11137 /* Returns a vector type corresponding to a comparison of VECTYPE. */
11138
11139 tree
11140 build_same_sized_truth_vector_type (tree vectype)
11141 {
11142 if (VECTOR_BOOLEAN_TYPE_P (vectype))
11143 return vectype;
11144
11145 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
11146
11147 if (known_eq (size, 0U))
11148 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
11149
11150 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
11151 }
11152
/* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set.  */

tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  /* Splice CAND into T's variant list immediately after T, preserving
     the invariant relied on above.  */
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
11177
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.
     V2 is the encoded element with count 2 within its pattern, so element
     I (count COUNT in the same pattern) lies COUNT - 2 steps past it.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
  return wi::to_wide (v2) + (count - 2) * diff;
}
11205
11206 /* Return the value of element I of VECTOR_CST T. */
11207
11208 tree
11209 vector_cst_elt (const_tree t, unsigned int i)
11210 {
11211 /* First handle elements that are directly encoded. */
11212 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11213 if (i < encoded_nelts)
11214 return VECTOR_CST_ENCODED_ELT (t, i);
11215
11216 /* If there are no steps, the final encoded value is the right one. */
11217 if (!VECTOR_CST_STEPPED_P (t))
11218 {
11219 /* Identify the pattern that contains element I and work out the index of
11220 the last encoded element for that pattern. */
11221 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11222 unsigned int pattern = i % npatterns;
11223 unsigned int final_i = encoded_nelts - npatterns + pattern;
11224 return VECTOR_CST_ENCODED_ELT (t, final_i);
11225 }
11226
11227 /* Otherwise work out the value from the last two encoded elements. */
11228 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11229 vector_cst_int_elt (t, i));
11230 }
11231
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  /* Point NONZERO at a scratch location so the rest of the function
     can store through it unconditionally.  */
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset set by the MEM_REF case before falling through to
     STRING_CST.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Both parts must be zero, and for reals neither may be -0.0.  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* A vector that duplicates one scalar is all zeros iff that
	 scalar is.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber carries no value: give the "don't know" answer.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* All recorded elements must be zero; NONZERO is propagated so a
	   definitely-nonzero element marks the whole aggregate.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Look through *(&"string" + offset): retry on the STRING_CST
	   with OFF holding the constant byte offset.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".
	   NOTE(review): the scan starts at 0 rather than OFF, so a nonzero
	   byte before OFF yields false/*NONZERO even if the bytes at and
	   after OFF are zero — conservative for the return value; confirm
	   that *NONZERO consumers tolerate this.  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
11359
11360 /* Return true if EXPR is an initializer expression in which every element
11361 is a constant that is numerically equal to 0 or 1. The elements do not
11362 need to be equal to each other. */
11363
11364 bool
11365 initializer_each_zero_or_onep (const_tree expr)
11366 {
11367 STRIP_ANY_LOCATION_WRAPPER (expr);
11368
11369 switch (TREE_CODE (expr))
11370 {
11371 case INTEGER_CST:
11372 return integer_zerop (expr) || integer_onep (expr);
11373
11374 case REAL_CST:
11375 return real_zerop (expr) || real_onep (expr);
11376
11377 case VECTOR_CST:
11378 {
11379 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11380 if (VECTOR_CST_STEPPED_P (expr)
11381 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11382 return false;
11383
11384 for (unsigned int i = 0; i < nelts; ++i)
11385 {
11386 tree elt = vector_cst_elt (expr, i);
11387 if (!initializer_each_zero_or_onep (elt))
11388 return false;
11389 }
11390
11391 return true;
11392 }
11393
11394 default:
11395 return false;
11396 }
11397 }
11398
11399 /* Given an initializer INIT for a TYPE, return true if INIT is zero
11400 so that it can be replaced by value initialization. This function
   distinguishes between empty strings as initializers for arrays and
11402 for pointers (which make it return false). */
11403
bool
type_initializer_zero_p (tree type, tree init)
{
  if (type == error_mark_node || init == error_mark_node)
    return false;

  STRIP_NOPS (init);

  /* An empty string is a valid zero initializer for an array but not
     for a pointer, so reject STRING_CST initializers for pointers.  */
  if (POINTER_TYPE_P (type))
    return TREE_CODE (init) != STRING_CST && initializer_zerop (init);

  if (TREE_CODE (init) != CONSTRUCTOR)
    return initializer_zerop (init);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree elt_type = TREE_TYPE (type);
      elt_type = TYPE_MAIN_VARIANT (elt_type);
      /* Plain char arrays may be string-initialized, which
	 initializer_zerop handles directly.  */
      if (elt_type == char_type_node)
	return initializer_zerop (init);

      /* Otherwise recurse element-wise with the array element type.  */
      tree elt_init;
      unsigned HOST_WIDE_INT i;
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, elt_init)
	if (!type_initializer_zero_p (elt_type, elt_init))
	  return false;
      return true;
    }

  if (TREE_CODE (type) != RECORD_TYPE)
    return initializer_zerop (init);

  /* Walk the record's fields in parallel with the constructor's
     initializer values.  */
  tree fld = TYPE_FIELDS (type);

  tree fld_init;
  unsigned HOST_WIDE_INT i;
  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, fld_init)
    {
      /* Advance to the next member, skipping over everything that
	 cannot be initialized (including unnamed bit-fields).  */
      while (TREE_CODE (fld) != FIELD_DECL
	     || DECL_ARTIFICIAL (fld)
	     || (DECL_BIT_FIELD (fld) && !DECL_NAME (fld)))
	{
	  fld = DECL_CHAIN (fld);
	  if (!fld)
	    return true;
	  continue;
	}

      tree fldtype = TREE_TYPE (fld);
      if (!type_initializer_zero_p (fldtype, fld_init))
	return false;

      fld = DECL_CHAIN (fld);
      if (!fld)
	break;
    }

  return true;
}
11465
11466 /* Check if vector VEC consists of all the equal elements and
11467 that the number of elements corresponds to the type of VEC.
11468 The function returns first element of the vector
11469 or NULL_TREE if the vector is not uniform. */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* A VECTOR_CST is uniform iff it is encoded as a single
	 duplicated pattern.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* After the loop I holds the number of elements seen; the
	 constructor must populate the whole vector.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
11514
11515 /* If the argument is INTEGER_CST, return it. If the argument is vector
11516 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11517 return NULL_TREE.
11518 Look through location wrappers. */
11519
11520 tree
11521 uniform_integer_cst_p (tree t)
11522 {
11523 STRIP_ANY_LOCATION_WRAPPER (t);
11524
11525 if (TREE_CODE (t) == INTEGER_CST)
11526 return t;
11527
11528 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11529 {
11530 t = uniform_vector_p (t);
11531 if (t && TREE_CODE (t) == INTEGER_CST)
11532 return t;
11533 }
11534
11535 return NULL_TREE;
11536 }
11537
11538 /* If VECTOR_CST T has a single nonzero element, return the index of that
11539 element, otherwise return -1. */
11540
int
single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    /* Constant number of elements: every index is a candidate.  */
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      /* Variable-length vector whose tail repeats: it is enough to scan
	 the encoded elements, but a nonzero element is only acceptable
	 in the leading (non-repeated) part, i.e. at an index below the
	 number of patterns; anything in the repeated part would occur
	 more than once.  */
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* Fail on a second nonzero element, or on one that would be
	     repeated.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
11569
11570 /* Build an empty statement at location LOC. */
11571
11572 tree
11573 build_empty_stmt (location_t loc)
11574 {
11575 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11576 SET_EXPR_LOCATION (t, loc);
11577 return t;
11578 }
11579
11580
11581 /* Build an OpenMP clause with code CODE. LOC is the location of the
11582 clause. */
11583
11584 tree
11585 build_omp_clause (location_t loc, enum omp_clause_code code)
11586 {
11587 tree t;
11588 int size, length;
11589
11590 length = omp_clause_num_ops[code];
11591 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11592
11593 record_node_allocation_statistics (OMP_CLAUSE, size);
11594
11595 t = (tree) ggc_internal_alloc (size);
11596 memset (t, 0, size);
11597 TREE_SET_CODE (t, OMP_CLAUSE);
11598 OMP_CLAUSE_SET_CODE (t, code);
11599 OMP_CLAUSE_LOCATION (t) = loc;
11600
11601 return t;
11602 }
11603
11604 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11605 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11606 Except for the CODE and operand count field, other storage for the
11607 object is initialized to zeros. */
11608
11609 tree
11610 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11611 {
11612 tree t;
11613 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11614
11615 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11616 gcc_assert (len >= 1);
11617
11618 record_node_allocation_statistics (code, length);
11619
11620 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11621
11622 TREE_SET_CODE (t, code);
11623
11624 /* Can't use TREE_OPERAND to store the length because if checking is
11625 enabled, it will try to check the length before we store it. :-P */
11626 t->exp.operands[0] = build_int_cst (sizetype, len);
11627
11628 return t;
11629 }
11630
11631 /* Helper function for build_call_* functions; build a CALL_EXPR with
11632 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11633 the argument slots. */
11634
11635 static tree
11636 build_call_1 (tree return_type, tree fn, int nargs)
11637 {
11638 tree t;
11639
11640 t = build_vl_exp (CALL_EXPR, nargs + 3);
11641 TREE_TYPE (t) = return_type;
11642 CALL_EXPR_FN (t) = fn;
11643 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11644
11645 return t;
11646 }
11647
11648 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11649 FN and a null static chain slot. NARGS is the number of call arguments
11650 which are specified as "..." arguments. */
11651
11652 tree
11653 build_call_nary (tree return_type, tree fn, int nargs, ...)
11654 {
11655 tree ret;
11656 va_list args;
11657 va_start (args, nargs);
11658 ret = build_call_valist (return_type, fn, nargs, args);
11659 va_end (args);
11660 return ret;
11661 }
11662
11663 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11664 FN and a null static chain slot. NARGS is the number of call arguments
11665 which are specified as a va_list ARGS. */
11666
11667 tree
11668 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11669 {
11670 tree t;
11671 int i;
11672
11673 t = build_call_1 (return_type, fn, nargs);
11674 for (i = 0; i < nargs; i++)
11675 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11676 process_call_operands (t);
11677 return t;
11678 }
11679
11680 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11681 FN and a null static chain slot. NARGS is the number of call arguments
11682 which are specified as a tree array ARGS. */
11683
11684 tree
11685 build_call_array_loc (location_t loc, tree return_type, tree fn,
11686 int nargs, const tree *args)
11687 {
11688 tree t;
11689 int i;
11690
11691 t = build_call_1 (return_type, fn, nargs);
11692 for (i = 0; i < nargs; i++)
11693 CALL_EXPR_ARG (t, i) = args[i];
11694 process_call_operands (t);
11695 SET_EXPR_LOCATION (t, loc);
11696 return t;
11697 }
11698
11699 /* Like build_call_array, but takes a vec. */
11700
11701 tree
11702 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11703 {
11704 tree ret, t;
11705 unsigned int ix;
11706
11707 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11708 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11709 CALL_EXPR_ARG (ret, ix) = t;
11710 process_call_operands (ret);
11711 return ret;
11712 }
11713
11714 /* Conveniently construct a function call expression. FNDECL names the
11715 function to be called and N arguments are passed in the array
11716 ARGARRAY. */
11717
11718 tree
11719 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11720 {
11721 tree fntype = TREE_TYPE (fndecl);
11722 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11723
11724 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11725 }
11726
11727 /* Conveniently construct a function call expression. FNDECL names the
11728 function to be called and the arguments are passed in the vector
11729 VEC. */
11730
11731 tree
11732 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11733 {
11734 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11735 vec_safe_address (vec));
11736 }
11737
11738
11739 /* Conveniently construct a function call expression. FNDECL names the
11740 function to be called, N is the number of arguments, and the "..."
11741 parameters are the argument expressions. */
11742
11743 tree
11744 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11745 {
11746 va_list ap;
11747 tree *argarray = XALLOCAVEC (tree, n);
11748 int i;
11749
11750 va_start (ap, n);
11751 for (i = 0; i < n; i++)
11752 argarray[i] = va_arg (ap, tree);
11753 va_end (ap);
11754 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11755 }
11756
11757 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11758 varargs macros aren't supported by all bootstrap compilers. */
11759
11760 tree
11761 build_call_expr (tree fndecl, int n, ...)
11762 {
11763 va_list ap;
11764 tree *argarray = XALLOCAVEC (tree, n);
11765 int i;
11766
11767 va_start (ap, n);
11768 for (i = 0; i < n; i++)
11769 argarray[i] = va_arg (ap, tree);
11770 va_end (ap);
11771 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11772 }
11773
11774 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11775 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11776 It will get gimplified later into an ordinary internal function. */
11777
11778 tree
11779 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11780 tree type, int n, const tree *args)
11781 {
11782 tree t = build_call_1 (type, NULL_TREE, n);
11783 for (int i = 0; i < n; ++i)
11784 CALL_EXPR_ARG (t, i) = args[i];
11785 SET_EXPR_LOCATION (t, loc);
11786 CALL_EXPR_IFN (t) = ifn;
11787 return t;
11788 }
11789
11790 /* Build internal call expression. This is just like CALL_EXPR, except
11791 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11792 internal function. */
11793
11794 tree
11795 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11796 tree type, int n, ...)
11797 {
11798 va_list ap;
11799 tree *argarray = XALLOCAVEC (tree, n);
11800 int i;
11801
11802 va_start (ap, n);
11803 for (i = 0; i < n; i++)
11804 argarray[i] = va_arg (ap, tree);
11805 va_end (ap);
11806 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11807 }
11808
11809 /* Return a function call to FN, if the target is guaranteed to support it,
11810 or null otherwise.
11811
11812 N is the number of arguments, passed in the "...", and TYPE is the
11813 type of the return value. */
11814
11815 tree
11816 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11817 int n, ...)
11818 {
11819 va_list ap;
11820 tree *argarray = XALLOCAVEC (tree, n);
11821 int i;
11822
11823 va_start (ap, n);
11824 for (i = 0; i < n; i++)
11825 argarray[i] = va_arg (ap, tree);
11826 va_end (ap);
11827 if (internal_fn_p (fn))
11828 {
11829 internal_fn ifn = as_internal_fn (fn);
11830 if (direct_internal_fn_p (ifn))
11831 {
11832 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11833 if (!direct_internal_fn_supported_p (ifn, types,
11834 OPTIMIZE_FOR_BOTH))
11835 return NULL_TREE;
11836 }
11837 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11838 }
11839 else
11840 {
11841 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11842 if (!fndecl)
11843 return NULL_TREE;
11844 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11845 }
11846 }
11847
11848 /* Return a function call to the appropriate builtin alloca variant.
11849
11850 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11851 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11852 bound for SIZE in case it is not a fixed value. */
11853
11854 tree
11855 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11856 {
11857 if (max_size >= 0)
11858 {
11859 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11860 return
11861 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11862 }
11863 else if (align > 0)
11864 {
11865 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11866 return build_call_expr (t, 2, size, size_int (align));
11867 }
11868 else
11869 {
11870 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11871 return build_call_expr (t, 1, size);
11872 }
11873 }
11874
11875 /* Create a new constant string literal consisting of elements of type
11876 ELTYPE and return a tree node representing char* pointer to it as
11877 an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). The STRING_CST value is
11878 the LEN bytes at STR (the representation of the string, which may
11879 be wide). */
11880
11881 tree
11882 build_string_literal (int len, const char *str,
11883 tree eltype /* = char_type_node */)
11884 {
11885 tree t = build_string (len, str);
11886 tree index = build_index_type (size_int (len - 1));
11887 eltype = build_type_variant (eltype, 1, 0);
11888 tree type = build_array_type (eltype, index);
11889 TREE_TYPE (t) = type;
11890 TREE_CONSTANT (t) = 1;
11891 TREE_READONLY (t) = 1;
11892 TREE_STATIC (t) = 1;
11893
11894 type = build_pointer_type (eltype);
11895 t = build1 (ADDR_EXPR, type,
11896 build4 (ARRAY_REF, eltype,
11897 t, integer_zero_node, NULL_TREE, NULL_TREE));
11898 return t;
11899 }
11900
11901
11902
11903 /* Return true if T (assumed to be a DECL) must be assigned a memory
11904 location. */
11905
11906 bool
11907 needs_to_live_in_memory (const_tree t)
11908 {
11909 return (TREE_ADDRESSABLE (t)
11910 || is_global_var (t)
11911 || (TREE_CODE (t) == RESULT_DECL
11912 && !DECL_BY_REFERENCE (t)
11913 && aggregate_value_p (t, current_function_decl)));
11914 }
11915
11916 /* Return value of a constant X and sign-extend it. */
11917
HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Extend from BITS to the full HOST_WIDE_INT width according to
	 the value's sign bit: fill the upper bits with ones for a
	 negative value, clear them otherwise.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
11938
11939 /* If TYPE is an integral or pointer type, return an integer type with
11940 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11941 if TYPE is already an integer type of signedness UNSIGNEDP.
11942 If TYPE is a floating-point type, return an integer type with the same
11943 bitsize and with the signedness given by UNSIGNEDP; this is useful
11944 when doing bit-level operations on a floating-point value. */
11945
11946 tree
11947 signed_or_unsigned_type_for (int unsignedp, tree type)
11948 {
11949 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11950 return type;
11951
11952 if (TREE_CODE (type) == VECTOR_TYPE)
11953 {
11954 tree inner = TREE_TYPE (type);
11955 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11956 if (!inner2)
11957 return NULL_TREE;
11958 if (inner == inner2)
11959 return type;
11960 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11961 }
11962
11963 if (TREE_CODE (type) == COMPLEX_TYPE)
11964 {
11965 tree inner = TREE_TYPE (type);
11966 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11967 if (!inner2)
11968 return NULL_TREE;
11969 if (inner == inner2)
11970 return type;
11971 return build_complex_type (inner2);
11972 }
11973
11974 unsigned int bits;
11975 if (INTEGRAL_TYPE_P (type)
11976 || POINTER_TYPE_P (type)
11977 || TREE_CODE (type) == OFFSET_TYPE)
11978 bits = TYPE_PRECISION (type);
11979 else if (TREE_CODE (type) == REAL_TYPE)
11980 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11981 else
11982 return NULL_TREE;
11983
11984 return build_nonstandard_integer_type (bits, unsignedp);
11985 }
11986
11987 /* If TYPE is an integral or pointer type, return an integer type with
11988 the same precision which is unsigned, or itself if TYPE is already an
11989 unsigned integer type. If TYPE is a floating-point type, return an
11990 unsigned integer type with the same bitsize as TYPE. */
11991
11992 tree
11993 unsigned_type_for (tree type)
11994 {
11995 return signed_or_unsigned_type_for (1, type);
11996 }
11997
11998 /* If TYPE is an integral or pointer type, return an integer type with
11999 the same precision which is signed, or itself if TYPE is already a
12000 signed integer type. If TYPE is a floating-point type, return a
12001 signed integer type with the same bitsize as TYPE. */
12002
12003 tree
12004 signed_type_for (tree type)
12005 {
12006 return signed_or_unsigned_type_for (0, type);
12007 }
12008
12009 /* If TYPE is a vector type, return a signed integer vector type with the
12010 same width and number of subparts. Otherwise return boolean_type_node. */
12011
12012 tree
12013 truth_type_for (tree type)
12014 {
12015 if (TREE_CODE (type) == VECTOR_TYPE)
12016 {
12017 if (VECTOR_BOOLEAN_TYPE_P (type))
12018 return type;
12019 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
12020 GET_MODE_SIZE (TYPE_MODE (type)));
12021 }
12022 else
12023 return boolean_type_node;
12024 }
12025
12026 /* Returns the largest value obtainable by casting something in INNER type to
12027 OUTER type. */
12028
tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination of relative
     precision and the signedness of OUTER and INNER.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use: the bound is 2^PREC - 1.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* 2^PREC - 1 expressed as a mask of PREC low-order set bits in
     OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
12078
12079 /* Returns the smallest value obtainable by casting something in INNER type to
12080 OUTER type. */
12081
tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      /* -2^(PREC-1) expressed as a mask of set high-order bits.  */
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
12108
12109 /* Return nonzero if two operands that are suitable for PHI nodes are
12110 necessarily equal. Specifically, both ARG0 and ARG1 must be either
12111 SSA_NAME or invariant. Note that this is strictly an optimization.
12112 That is, callers of this function can directly call operand_equal_p
12113 and get the same result, only slower. */
12114
12115 int
12116 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
12117 {
12118 if (arg0 == arg1)
12119 return 1;
12120 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
12121 return 0;
12122 return operand_equal_p (arg0, arg1, 0);
12123 }
12124
12125 /* Returns number of zeros at the end of binary representation of X. */
12126
12127 tree
12128 num_ending_zeros (const_tree x)
12129 {
12130 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
12131 }
12132
12133
/* Walk NODE as a subtree of the current walk; if the walk of the
   subtree produced a non-NULL result, propagate it straight back to
   the enclosing function's caller, terminating the traversal.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
12142
12143 /* This is a subroutine of walk_tree that walks field of TYPE that are to
12144 be walked whenever a type is seen in the tree. Rest of operands and return
12145 value are as for walk_tree. */
12146
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      /* Walk the return type, then each argument type.  */
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
12222
12223 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
12224 called with the DATA and the address of each sub-tree. If FUNC returns a
12225 non-NULL value, the traversal is stopped, and the value returned by FUNC
12226 is returned. If PSET is non-NULL it is used to record the nodes visited,
12227 and to avoid visiting a node more than once. */
12228
12229 tree
12230 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
12231 hash_set<tree> *pset, walk_tree_lh lh)
12232 {
12233 enum tree_code code;
12234 int walk_subtrees;
12235 tree result;
12236
12237 #define WALK_SUBTREE_TAIL(NODE) \
12238 do \
12239 { \
12240 tp = & (NODE); \
12241 goto tail_recurse; \
12242 } \
12243 while (0)
12244
12245 tail_recurse:
12246 /* Skip empty subtrees. */
12247 if (!*tp)
12248 return NULL_TREE;
12249
12250 /* Don't walk the same tree twice, if the user has requested
12251 that we avoid doing so. */
12252 if (pset && pset->add (*tp))
12253 return NULL_TREE;
12254
12255 /* Call the function. */
12256 walk_subtrees = 1;
12257 result = (*func) (tp, &walk_subtrees, data);
12258
12259 /* If we found something, return it. */
12260 if (result)
12261 return result;
12262
12263 code = TREE_CODE (*tp);
12264
12265 /* Even if we didn't, FUNC may have decided that there was nothing
12266 interesting below this point in the tree. */
12267 if (!walk_subtrees)
12268 {
12269 /* But we still need to check our siblings. */
12270 if (code == TREE_LIST)
12271 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12272 else if (code == OMP_CLAUSE)
12273 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12274 else
12275 return NULL_TREE;
12276 }
12277
12278 if (lh)
12279 {
12280 result = (*lh) (tp, &walk_subtrees, func, data, pset);
12281 if (result || !walk_subtrees)
12282 return result;
12283 }
12284
12285 switch (code)
12286 {
12287 case ERROR_MARK:
12288 case IDENTIFIER_NODE:
12289 case INTEGER_CST:
12290 case REAL_CST:
12291 case FIXED_CST:
12292 case VECTOR_CST:
12293 case STRING_CST:
12294 case BLOCK:
12295 case PLACEHOLDER_EXPR:
12296 case SSA_NAME:
12297 case FIELD_DECL:
12298 case RESULT_DECL:
12299 /* None of these have subtrees other than those already walked
12300 above. */
12301 break;
12302
12303 case TREE_LIST:
12304 WALK_SUBTREE (TREE_VALUE (*tp));
12305 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12306 break;
12307
12308 case TREE_VEC:
12309 {
12310 int len = TREE_VEC_LENGTH (*tp);
12311
12312 if (len == 0)
12313 break;
12314
12315 /* Walk all elements but the first. */
12316 while (--len)
12317 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12318
12319 /* Now walk the first one as a tail call. */
12320 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12321 }
12322
12323 case COMPLEX_CST:
12324 WALK_SUBTREE (TREE_REALPART (*tp));
12325 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12326
12327 case CONSTRUCTOR:
12328 {
12329 unsigned HOST_WIDE_INT idx;
12330 constructor_elt *ce;
12331
12332 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12333 idx++)
12334 WALK_SUBTREE (ce->value);
12335 }
12336 break;
12337
12338 case SAVE_EXPR:
12339 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12340
12341 case BIND_EXPR:
12342 {
12343 tree decl;
12344 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12345 {
12346 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12347 into declarations that are just mentioned, rather than
12348 declared; they don't really belong to this part of the tree.
12349 And, we can see cycles: the initializer for a declaration
12350 can refer to the declaration itself. */
12351 WALK_SUBTREE (DECL_INITIAL (decl));
12352 WALK_SUBTREE (DECL_SIZE (decl));
12353 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12354 }
12355 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12356 }
12357
12358 case STATEMENT_LIST:
12359 {
12360 tree_stmt_iterator i;
12361 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12362 WALK_SUBTREE (*tsi_stmt_ptr (i));
12363 }
12364 break;
12365
12366 case OMP_CLAUSE:
12367 switch (OMP_CLAUSE_CODE (*tp))
12368 {
12369 case OMP_CLAUSE_GANG:
12370 case OMP_CLAUSE__GRIDDIM_:
12371 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12372 /* FALLTHRU */
12373
12374 case OMP_CLAUSE_ASYNC:
12375 case OMP_CLAUSE_WAIT:
12376 case OMP_CLAUSE_WORKER:
12377 case OMP_CLAUSE_VECTOR:
12378 case OMP_CLAUSE_NUM_GANGS:
12379 case OMP_CLAUSE_NUM_WORKERS:
12380 case OMP_CLAUSE_VECTOR_LENGTH:
12381 case OMP_CLAUSE_PRIVATE:
12382 case OMP_CLAUSE_SHARED:
12383 case OMP_CLAUSE_FIRSTPRIVATE:
12384 case OMP_CLAUSE_COPYIN:
12385 case OMP_CLAUSE_COPYPRIVATE:
12386 case OMP_CLAUSE_FINAL:
12387 case OMP_CLAUSE_IF:
12388 case OMP_CLAUSE_NUM_THREADS:
12389 case OMP_CLAUSE_SCHEDULE:
12390 case OMP_CLAUSE_UNIFORM:
12391 case OMP_CLAUSE_DEPEND:
12392 case OMP_CLAUSE_NONTEMPORAL:
12393 case OMP_CLAUSE_NUM_TEAMS:
12394 case OMP_CLAUSE_THREAD_LIMIT:
12395 case OMP_CLAUSE_DEVICE:
12396 case OMP_CLAUSE_DIST_SCHEDULE:
12397 case OMP_CLAUSE_SAFELEN:
12398 case OMP_CLAUSE_SIMDLEN:
12399 case OMP_CLAUSE_ORDERED:
12400 case OMP_CLAUSE_PRIORITY:
12401 case OMP_CLAUSE_GRAINSIZE:
12402 case OMP_CLAUSE_NUM_TASKS:
12403 case OMP_CLAUSE_HINT:
12404 case OMP_CLAUSE_TO_DECLARE:
12405 case OMP_CLAUSE_LINK:
12406 case OMP_CLAUSE_USE_DEVICE_PTR:
12407 case OMP_CLAUSE_USE_DEVICE_ADDR:
12408 case OMP_CLAUSE_IS_DEVICE_PTR:
12409 case OMP_CLAUSE_INCLUSIVE:
12410 case OMP_CLAUSE_EXCLUSIVE:
12411 case OMP_CLAUSE__LOOPTEMP_:
12412 case OMP_CLAUSE__REDUCTEMP_:
12413 case OMP_CLAUSE__CONDTEMP_:
12414 case OMP_CLAUSE__SCANTEMP_:
12415 case OMP_CLAUSE__SIMDUID_:
12416 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12417 /* FALLTHRU */
12418
12419 case OMP_CLAUSE_INDEPENDENT:
12420 case OMP_CLAUSE_NOWAIT:
12421 case OMP_CLAUSE_DEFAULT:
12422 case OMP_CLAUSE_UNTIED:
12423 case OMP_CLAUSE_MERGEABLE:
12424 case OMP_CLAUSE_PROC_BIND:
12425 case OMP_CLAUSE_DEVICE_TYPE:
12426 case OMP_CLAUSE_INBRANCH:
12427 case OMP_CLAUSE_NOTINBRANCH:
12428 case OMP_CLAUSE_FOR:
12429 case OMP_CLAUSE_PARALLEL:
12430 case OMP_CLAUSE_SECTIONS:
12431 case OMP_CLAUSE_TASKGROUP:
12432 case OMP_CLAUSE_NOGROUP:
12433 case OMP_CLAUSE_THREADS:
12434 case OMP_CLAUSE_SIMD:
12435 case OMP_CLAUSE_DEFAULTMAP:
12436 case OMP_CLAUSE_ORDER:
12437 case OMP_CLAUSE_BIND:
12438 case OMP_CLAUSE_AUTO:
12439 case OMP_CLAUSE_SEQ:
12440 case OMP_CLAUSE_TILE:
12441 case OMP_CLAUSE__SIMT_:
12442 case OMP_CLAUSE_IF_PRESENT:
12443 case OMP_CLAUSE_FINALIZE:
12444 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12445
12446 case OMP_CLAUSE_LASTPRIVATE:
12447 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12448 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12449 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12450
12451 case OMP_CLAUSE_COLLAPSE:
12452 {
12453 int i;
12454 for (i = 0; i < 3; i++)
12455 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12456 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12457 }
12458
12459 case OMP_CLAUSE_LINEAR:
12460 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12461 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12462 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12463 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12464
12465 case OMP_CLAUSE_ALIGNED:
12466 case OMP_CLAUSE_FROM:
12467 case OMP_CLAUSE_TO:
12468 case OMP_CLAUSE_MAP:
12469 case OMP_CLAUSE__CACHE_:
12470 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12471 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12472 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12473
12474 case OMP_CLAUSE_REDUCTION:
12475 case OMP_CLAUSE_TASK_REDUCTION:
12476 case OMP_CLAUSE_IN_REDUCTION:
12477 {
12478 int i;
12479 for (i = 0; i < 5; i++)
12480 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12481 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12482 }
12483
12484 default:
12485 gcc_unreachable ();
12486 }
12487 break;
12488
12489 case TARGET_EXPR:
12490 {
12491 int i, len;
12492
12493 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12494 But, we only want to walk once. */
12495 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12496 for (i = 0; i < len; ++i)
12497 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12498 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12499 }
12500
12501 case DECL_EXPR:
12502 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12503 defining. We only want to walk into these fields of a type in this
12504 case and not in the general case of a mere reference to the type.
12505
12506 The criterion is as follows: if the field can be an expression, it
12507 must be walked only here. This should be in keeping with the fields
12508 that are directly gimplified in gimplify_type_sizes in order for the
12509 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12510 variable-sized types.
12511
12512 Note that DECLs get walked as part of processing the BIND_EXPR. */
12513 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12514 {
12515 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12516 if (TREE_CODE (*type_p) == ERROR_MARK)
12517 return NULL_TREE;
12518
12519 /* Call the function for the type. See if it returns anything or
12520 doesn't want us to continue. If we are to continue, walk both
12521 the normal fields and those for the declaration case. */
12522 result = (*func) (type_p, &walk_subtrees, data);
12523 if (result || !walk_subtrees)
12524 return result;
12525
12526 /* But do not walk a pointed-to type since it may itself need to
12527 be walked in the declaration case if it isn't anonymous. */
12528 if (!POINTER_TYPE_P (*type_p))
12529 {
12530 result = walk_type_fields (*type_p, func, data, pset, lh);
12531 if (result)
12532 return result;
12533 }
12534
12535 /* If this is a record type, also walk the fields. */
12536 if (RECORD_OR_UNION_TYPE_P (*type_p))
12537 {
12538 tree field;
12539
12540 for (field = TYPE_FIELDS (*type_p); field;
12541 field = DECL_CHAIN (field))
12542 {
12543 /* We'd like to look at the type of the field, but we can
12544 easily get infinite recursion. So assume it's pointed
12545 to elsewhere in the tree. Also, ignore things that
12546 aren't fields. */
12547 if (TREE_CODE (field) != FIELD_DECL)
12548 continue;
12549
12550 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12551 WALK_SUBTREE (DECL_SIZE (field));
12552 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12553 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12554 WALK_SUBTREE (DECL_QUALIFIER (field));
12555 }
12556 }
12557
12558 /* Same for scalar types. */
12559 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12560 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12561 || TREE_CODE (*type_p) == INTEGER_TYPE
12562 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12563 || TREE_CODE (*type_p) == REAL_TYPE)
12564 {
12565 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12566 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12567 }
12568
12569 WALK_SUBTREE (TYPE_SIZE (*type_p));
12570 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12571 }
12572 /* FALLTHRU */
12573
12574 default:
12575 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12576 {
12577 int i, len;
12578
12579 /* Walk over all the sub-trees of this operand. */
12580 len = TREE_OPERAND_LENGTH (*tp);
12581
12582 /* Go through the subtrees. We need to do this in forward order so
12583 that the scope of a FOR_EXPR is handled properly. */
12584 if (len)
12585 {
12586 for (i = 0; i < len - 1; ++i)
12587 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12588 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12589 }
12590 }
12591 /* If this is a type, walk the needed fields in the type. */
12592 else if (TYPE_P (*tp))
12593 return walk_type_fields (*tp, func, data, pset, lh);
12594 break;
12595 }
12596
12597 /* We didn't find what we were looking for. */
12598 return NULL_TREE;
12599
12600 #undef WALK_SUBTREE_TAIL
12601 }
12602 #undef WALK_SUBTREE
12603
12604 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12605
12606 tree
12607 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12608 walk_tree_lh lh)
12609 {
12610 tree result;
12611
12612 hash_set<tree> pset;
12613 result = walk_tree_1 (tp, func, data, &pset, lh);
12614 return result;
12615 }
12616
12617
12618 tree
12619 tree_block (tree t)
12620 {
12621 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12622
12623 if (IS_EXPR_CODE_CLASS (c))
12624 return LOCATION_BLOCK (t->exp.locus);
12625 gcc_unreachable ();
12626 return NULL;
12627 }
12628
12629 void
12630 tree_set_block (tree t, tree b)
12631 {
12632 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12633
12634 if (IS_EXPR_CODE_CLASS (c))
12635 {
12636 t->exp.locus = set_block (t->exp.locus, b);
12637 }
12638 else
12639 gcc_unreachable ();
12640 }
12641
12642 /* Create a nameless artificial label and put it in the current
12643 function context. The label has a location of LOC. Returns the
12644 newly created label. */
12645
12646 tree
12647 create_artificial_label (location_t loc)
12648 {
12649 tree lab = build_decl (loc,
12650 LABEL_DECL, NULL_TREE, void_type_node);
12651
12652 DECL_ARTIFICIAL (lab) = 1;
12653 DECL_IGNORED_P (lab) = 1;
12654 DECL_CONTEXT (lab) = current_function_decl;
12655 return lab;
12656 }
12657
12658 /* Given a tree, try to return a useful variable name that we can use
12659 to prefix a temporary that is being assigned the value of the tree.
12660 I.E. given <temp> = &A, return A. */
12661
12662 const char *
12663 get_name (tree t)
12664 {
12665 tree stripped_decl;
12666
12667 stripped_decl = t;
12668 STRIP_NOPS (stripped_decl);
12669 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12670 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12671 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12672 {
12673 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12674 if (!name)
12675 return NULL;
12676 return IDENTIFIER_POINTER (name);
12677 }
12678 else
12679 {
12680 switch (TREE_CODE (stripped_decl))
12681 {
12682 case ADDR_EXPR:
12683 return get_name (TREE_OPERAND (stripped_decl, 0));
12684 default:
12685 return NULL;
12686 }
12687 }
12688 }
12689
12690 /* Return true if TYPE has a variable argument list. */
12691
12692 bool
12693 stdarg_p (const_tree fntype)
12694 {
12695 function_args_iterator args_iter;
12696 tree n = NULL_TREE, t;
12697
12698 if (!fntype)
12699 return false;
12700
12701 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12702 {
12703 n = t;
12704 }
12705
12706 return n != NULL_TREE && n != void_type_node;
12707 }
12708
12709 /* Return true if TYPE has a prototype. */
12710
12711 bool
12712 prototype_p (const_tree fntype)
12713 {
12714 tree t;
12715
12716 gcc_assert (fntype != NULL_TREE);
12717
12718 t = TYPE_ARG_TYPES (fntype);
12719 return (t != NULL_TREE);
12720 }
12721
12722 /* If BLOCK is inlined from an __attribute__((__artificial__))
12723 routine, return pointer to location from where it has been
12724 called. */
12725 location_t *
12726 block_nonartificial_location (tree block)
12727 {
12728 location_t *ret = NULL;
12729
12730 while (block && TREE_CODE (block) == BLOCK
12731 && BLOCK_ABSTRACT_ORIGIN (block))
12732 {
12733 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12734 if (TREE_CODE (ao) == FUNCTION_DECL)
12735 {
12736 /* If AO is an artificial inline, point RET to the
12737 call site locus at which it has been inlined and continue
12738 the loop, in case AO's caller is also an artificial
12739 inline. */
12740 if (DECL_DECLARED_INLINE_P (ao)
12741 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12742 ret = &BLOCK_SOURCE_LOCATION (block);
12743 else
12744 break;
12745 }
12746 else if (TREE_CODE (ao) != BLOCK)
12747 break;
12748
12749 block = BLOCK_SUPERCONTEXT (block);
12750 }
12751 return ret;
12752 }
12753
12754
12755 /* If EXP is inlined from an __attribute__((__artificial__))
12756 function, return the location of the original call expression. */
12757
12758 location_t
12759 tree_nonartificial_location (tree exp)
12760 {
12761 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12762
12763 if (loc)
12764 return *loc;
12765 else
12766 return EXPR_LOCATION (exp);
12767 }
12768
12769
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */
12772
12773 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
12774
12775 hashval_t
12776 cl_option_hasher::hash (tree x)
12777 {
12778 const_tree const t = x;
12779 const char *p;
12780 size_t i;
12781 size_t len = 0;
12782 hashval_t hash = 0;
12783
12784 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12785 {
12786 p = (const char *)TREE_OPTIMIZATION (t);
12787 len = sizeof (struct cl_optimization);
12788 }
12789
12790 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12791 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12792
12793 else
12794 gcc_unreachable ();
12795
12796 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12797 something else. */
12798 for (i = 0; i < len; i++)
12799 if (p[i])
12800 hash = (hash << 4) ^ ((i << 2) | p[i]);
12801
12802 return hash;
12803 }
12804
12805 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12806 TARGET_OPTION tree node) is the same as that given by *Y, which is the
12807 same. */
12808
12809 bool
12810 cl_option_hasher::equal (tree x, tree y)
12811 {
12812 const_tree const xt = x;
12813 const_tree const yt = y;
12814
12815 if (TREE_CODE (xt) != TREE_CODE (yt))
12816 return 0;
12817
12818 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12819 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12820 TREE_OPTIMIZATION (yt));
12821 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12822 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12823 TREE_TARGET_OPTION (yt));
12824 else
12825 gcc_unreachable ();
12826 }
12827
12828 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12829
12830 tree
12831 build_optimization_node (struct gcc_options *opts)
12832 {
12833 tree t;
12834
12835 /* Use the cache of optimization nodes. */
12836
12837 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12838 opts);
12839
12840 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12841 t = *slot;
12842 if (!t)
12843 {
12844 /* Insert this one into the hash table. */
12845 t = cl_optimization_node;
12846 *slot = t;
12847
12848 /* Make a new node for next time round. */
12849 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12850 }
12851
12852 return t;
12853 }
12854
12855 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12856
12857 tree
12858 build_target_option_node (struct gcc_options *opts)
12859 {
12860 tree t;
12861
12862 /* Use the cache of optimization nodes. */
12863
12864 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12865 opts);
12866
12867 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12868 t = *slot;
12869 if (!t)
12870 {
12871 /* Insert this one into the hash table. */
12872 t = cl_target_option_node;
12873 *slot = t;
12874
12875 /* Make a new node for next time round. */
12876 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12877 }
12878
12879 return t;
12880 }
12881
12882 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12883 so that they aren't saved during PCH writing. */
12884
12885 void
12886 prepare_target_option_nodes_for_pch (void)
12887 {
12888 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12889 for (; iter != cl_option_hash_table->end (); ++iter)
12890 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12891 TREE_TARGET_GLOBALS (*iter) = NULL;
12892 }
12893
12894 /* Determine the "ultimate origin" of a block. */
12895
12896 tree
12897 block_ultimate_origin (const_tree block)
12898 {
12899 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12900
12901 if (origin == NULL_TREE)
12902 return NULL_TREE;
12903 else
12904 {
12905 gcc_checking_assert ((DECL_P (origin)
12906 && DECL_ORIGIN (origin) == origin)
12907 || BLOCK_ORIGIN (origin) == origin);
12908 return origin;
12909 }
12910 }
12911
12912 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12913 no instruction. */
12914
12915 bool
12916 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12917 {
12918 /* Do not strip casts into or out of differing address spaces. */
12919 if (POINTER_TYPE_P (outer_type)
12920 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12921 {
12922 if (!POINTER_TYPE_P (inner_type)
12923 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12924 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12925 return false;
12926 }
12927 else if (POINTER_TYPE_P (inner_type)
12928 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12929 {
12930 /* We already know that outer_type is not a pointer with
12931 a non-generic address space. */
12932 return false;
12933 }
12934
12935 /* Use precision rather then machine mode when we can, which gives
12936 the correct answer even for submode (bit-field) types. */
12937 if ((INTEGRAL_TYPE_P (outer_type)
12938 || POINTER_TYPE_P (outer_type)
12939 || TREE_CODE (outer_type) == OFFSET_TYPE)
12940 && (INTEGRAL_TYPE_P (inner_type)
12941 || POINTER_TYPE_P (inner_type)
12942 || TREE_CODE (inner_type) == OFFSET_TYPE))
12943 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12944
12945 /* Otherwise fall back on comparing machine modes (e.g. for
12946 aggregate types, floats). */
12947 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12948 }
12949
12950 /* Return true iff conversion in EXP generates no instruction. Mark
12951 it inline so that we fully inline into the stripping functions even
12952 though we have two uses of this function. */
12953
12954 static inline bool
12955 tree_nop_conversion (const_tree exp)
12956 {
12957 tree outer_type, inner_type;
12958
12959 if (location_wrapper_p (exp))
12960 return true;
12961 if (!CONVERT_EXPR_P (exp)
12962 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12963 return false;
12964
12965 outer_type = TREE_TYPE (exp);
12966 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12967 if (!inner_type || inner_type == error_mark_node)
12968 return false;
12969
12970 return tree_nop_conversion_p (outer_type, inner_type);
12971 }
12972
12973 /* Return true iff conversion in EXP generates no instruction. Don't
12974 consider conversions changing the signedness. */
12975
12976 static bool
12977 tree_sign_nop_conversion (const_tree exp)
12978 {
12979 tree outer_type, inner_type;
12980
12981 if (!tree_nop_conversion (exp))
12982 return false;
12983
12984 outer_type = TREE_TYPE (exp);
12985 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12986
12987 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12988 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12989 }
12990
12991 /* Strip conversions from EXP according to tree_nop_conversion and
12992 return the resulting expression. */
12993
12994 tree
12995 tree_strip_nop_conversions (tree exp)
12996 {
12997 while (tree_nop_conversion (exp))
12998 exp = TREE_OPERAND (exp, 0);
12999 return exp;
13000 }
13001
13002 /* Strip conversions from EXP according to tree_sign_nop_conversion
13003 and return the resulting expression. */
13004
13005 tree
13006 tree_strip_sign_nop_conversions (tree exp)
13007 {
13008 while (tree_sign_nop_conversion (exp))
13009 exp = TREE_OPERAND (exp, 0);
13010 return exp;
13011 }
13012
13013 /* Avoid any floating point extensions from EXP. */
13014 tree
13015 strip_float_extensions (tree exp)
13016 {
13017 tree sub, expt, subt;
13018
13019 /* For floating point constant look up the narrowest type that can hold
13020 it properly and handle it like (type)(narrowest_type)constant.
13021 This way we can optimize for instance a=a*2.0 where "a" is float
13022 but 2.0 is double constant. */
13023 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
13024 {
13025 REAL_VALUE_TYPE orig;
13026 tree type = NULL;
13027
13028 orig = TREE_REAL_CST (exp);
13029 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
13030 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
13031 type = float_type_node;
13032 else if (TYPE_PRECISION (TREE_TYPE (exp))
13033 > TYPE_PRECISION (double_type_node)
13034 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
13035 type = double_type_node;
13036 if (type)
13037 return build_real_truncate (type, orig);
13038 }
13039
13040 if (!CONVERT_EXPR_P (exp))
13041 return exp;
13042
13043 sub = TREE_OPERAND (exp, 0);
13044 subt = TREE_TYPE (sub);
13045 expt = TREE_TYPE (exp);
13046
13047 if (!FLOAT_TYPE_P (subt))
13048 return exp;
13049
13050 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
13051 return exp;
13052
13053 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
13054 return exp;
13055
13056 return strip_float_extensions (sub);
13057 }
13058
13059 /* Strip out all handled components that produce invariant
13060 offsets. */
13061
13062 const_tree
13063 strip_invariant_refs (const_tree op)
13064 {
13065 while (handled_component_p (op))
13066 {
13067 switch (TREE_CODE (op))
13068 {
13069 case ARRAY_REF:
13070 case ARRAY_RANGE_REF:
13071 if (!is_gimple_constant (TREE_OPERAND (op, 1))
13072 || TREE_OPERAND (op, 2) != NULL_TREE
13073 || TREE_OPERAND (op, 3) != NULL_TREE)
13074 return NULL;
13075 break;
13076
13077 case COMPONENT_REF:
13078 if (TREE_OPERAND (op, 2) != NULL_TREE)
13079 return NULL;
13080 break;
13081
13082 default:;
13083 }
13084 op = TREE_OPERAND (op, 0);
13085 }
13086
13087 return op;
13088 }
13089
13090 static GTY(()) tree gcc_eh_personality_decl;
13091
13092 /* Return the GCC personality function decl. */
13093
13094 tree
13095 lhd_gcc_personality (void)
13096 {
13097 if (!gcc_eh_personality_decl)
13098 gcc_eh_personality_decl = build_personality_function ("gcc");
13099 return gcc_eh_personality_decl;
13100 }
13101
13102 /* TARGET is a call target of GIMPLE call statement
13103 (obtained by gimple_call_fn). Return true if it is
13104 OBJ_TYPE_REF representing an virtual call of C++ method.
13105 (As opposed to OBJ_TYPE_REF representing objc calls
13106 through a cast where middle-end devirtualization machinery
13107 can't apply.) */
13108
13109 bool
13110 virtual_method_call_p (const_tree target)
13111 {
13112 if (TREE_CODE (target) != OBJ_TYPE_REF)
13113 return false;
13114 tree t = TREE_TYPE (target);
13115 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
13116 t = TREE_TYPE (t);
13117 if (TREE_CODE (t) == FUNCTION_TYPE)
13118 return false;
13119 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
13120 /* If we do not have BINFO associated, it means that type was built
13121 without devirtualization enabled. Do not consider this a virtual
13122 call. */
13123 if (!TYPE_BINFO (obj_type_ref_class (target)))
13124 return false;
13125 return true;
13126 }
13127
13128 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
13129
13130 static tree
13131 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
13132 {
13133 unsigned int i;
13134 tree base_binfo, b;
13135
13136 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13137 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
13138 && types_same_for_odr (TREE_TYPE (base_binfo), type))
13139 return base_binfo;
13140 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
13141 return b;
13142 return NULL;
13143 }
13144
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  OFFSET is in bits.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      /* Found the expected type at the current offset: done.  */
      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial field (a base subobject) of TYPE whose bit
	 range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* First look for a direct base at exactly that byte offset.  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    /* Otherwise search recursively among indirect bases.
	       NOTE(review): this can presumably yield NULL, after which
	       the loop relies on the next types_same_for_odr check not
	       matching — confirm callers/inputs guarantee termination.  */
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the selected base subobject and retry with the
	 offset rebased to it.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
13206
13207 /* Returns true if X is a typedef decl. */
13208
13209 bool
13210 is_typedef_decl (const_tree x)
13211 {
13212 return (x && TREE_CODE (x) == TYPE_DECL
13213 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
13214 }
13215
/* Returns true iff TYPE is a type variant created for a typedef.  */

bool
typedef_variant_p (const_tree type)
{
  /* Such a variant's TYPE_NAME is the typedef's TYPE_DECL.  */
  return is_typedef_decl (TYPE_NAME (type));
}
13223
/* A class to handle converting a string that might contain
   control characters, (eg newline, form-feed, etc), into one
   in which contains escape sequences instead.  */

class escaped_string
{
 public:
  /* Start with no string and nothing owned.  */
  escaped_string () { m_owned = false; m_str = NULL; };
  /* Free the escaped copy, if escape() had to allocate one.  */
  ~escaped_string () { if (m_owned) free (m_str); }
  /* Implicit conversion so the object can be passed directly to
     printf-style diagnostics; may be NULL if escape() was never called
     or was given NULL.  */
  operator const char *() const { return (const char *) m_str; }
  void escape (const char *);
 private:
  char *m_str;   /* Either the caller's string or an escaped copy.  */
  bool m_owned;  /* True when m_str was allocated here and must be freed.  */
};
13239
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Discard any previously-built escaped copy.  */
  if (m_owned)
    free (m_str);

  /* By default alias the input; only allocate a copy lazily, if a
     control character actually needs replacing.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      /* Ordinary characters are copied verbatim (only once a copy
	 exists at all).  */
      if (!ISCNTRL (c))
	{
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Keep literal newlines when the diagnostics printer wraps lines
	 (-fmessage-length non-zero); escape every other control char.  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Worst case each input byte expands
		 to two output bytes, hence len * 2 + 1.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default: escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      /* A copy was made; terminate it, hand it to the object and mark
	 it owned so the destructor frees it.  */
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
13313
/* Warn about a use of an identifier which was marked deprecated.  Returns
   whether a warning was given.  NODE is the deprecated decl or type;
   ATTR, when non-NULL, is the attribute list in which to look for the
   "deprecated" attribute, otherwise it is derived from NODE.  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* No attribute list supplied: take the decl's attributes, or for a
     type look on its stub decl.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The attribute's optional message argument may contain control
     characters; escape them before printing.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      /* Prefer a printable name for the type, if one exists.  */
      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      /* Point at the type's declaration when we have one.  */
      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
13397
13398 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13399 somewhere in it. */
13400
13401 bool
13402 contains_bitfld_component_ref_p (const_tree ref)
13403 {
13404 while (handled_component_p (ref))
13405 {
13406 if (TREE_CODE (ref) == COMPONENT_REF
13407 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13408 return true;
13409 ref = TREE_OPERAND (ref, 0);
13410 }
13411
13412 return false;
13413 }
13414
13415 /* Try to determine whether a TRY_CATCH expression can fall through.
13416 This is a subroutine of block_may_fallthru. */
13417
13418 static bool
13419 try_catch_may_fallthru (const_tree stmt)
13420 {
13421 tree_stmt_iterator i;
13422
13423 /* If the TRY block can fall through, the whole TRY_CATCH can
13424 fall through. */
13425 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13426 return true;
13427
13428 i = tsi_start (TREE_OPERAND (stmt, 1));
13429 switch (TREE_CODE (tsi_stmt (i)))
13430 {
13431 case CATCH_EXPR:
13432 /* We expect to see a sequence of CATCH_EXPR trees, each with a
13433 catch expression and a body. The whole TRY_CATCH may fall
13434 through iff any of the catch bodies falls through. */
13435 for (; !tsi_end_p (i); tsi_next (&i))
13436 {
13437 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13438 return true;
13439 }
13440 return false;
13441
13442 case EH_FILTER_EXPR:
13443 /* The exception filter expression only matters if there is an
13444 exception. If the exception does not match EH_FILTER_TYPES,
13445 we will execute EH_FILTER_FAILURE, and we will fall through
13446 if that falls through. If the exception does match
13447 EH_FILTER_TYPES, the stack unwinder will continue up the
13448 stack, so we will not fall through. We don't know whether we
13449 will throw an exception which matches EH_FILTER_TYPES or not,
13450 so we just ignore EH_FILTER_TYPES and assume that we might
13451 throw an exception which doesn't match. */
13452 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13453
13454 default:
13455 /* This case represents statements to be executed when an
13456 exception occurs. Those statements are implicitly followed
13457 by a RESX statement to resume execution after the exception.
13458 So in this case the TRY_CATCH never falls through. */
13459 return false;
13460 }
13461 }
13462
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block (no last statement) is handled like ERROR_MARK:
     conservatively assume it falls through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      /* Only the first operand decides fallthru here.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment whose RHS is a call is analyzed as the call
	 itself; any other assignment may fall through.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      /* Analyze the initializer (operand 1).  */
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      /* Be conservative on erroneous or empty input.  */
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
13541
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  Once set, the flag is never cleared again.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
13559
13560 /* Wrapper for tree_code_name to ensure that tree code is valid */
13561 const char *
13562 get_tree_code_name (enum tree_code code)
13563 {
13564 const char *invalid = "<invalid tree code>";
13565
13566 if (code >= MAX_TREE_CODES)
13567 {
13568 if (code == 0xa5a5)
13569 return "ggc_freed";
13570 return invalid;
13571 }
13572
13573 return tree_code_name[code];
13574 }
13575
/* Drops the TREE_OVERFLOW flag from T.  Because constants may be shared,
   the result is always a rebuilt or copied node; T itself is never
   mutated.  T must have TREE_OVERFLOW set on entry.  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      /* Only the encoded elements need fixing; the builder re-derives
	 the rest of the vector from them.  */
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
13621
13622 /* Given a memory reference expression T, return its base address.
13623 The base address of a memory reference expression is the main
13624 object being referenced. For instance, the base address for
13625 'array[i].fld[j]' is 'array'. You can think of this as stripping
13626 away the offset part from a memory address.
13627
13628 This function calls handled_component_p to strip away all the inner
13629 parts of the memory reference until it reaches the base object. */
13630
13631 tree
13632 get_base_address (tree t)
13633 {
13634 while (handled_component_p (t))
13635 t = TREE_OPERAND (t, 0);
13636
13637 if ((TREE_CODE (t) == MEM_REF
13638 || TREE_CODE (t) == TARGET_MEM_REF)
13639 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13640 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13641
13642 /* ??? Either the alias oracle or all callers need to properly deal
13643 with WITH_SIZE_EXPRs before we can look through those. */
13644 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13645 return NULL_TREE;
13646
13647 return t;
13648 }
13649
13650 /* Return a tree of sizetype representing the size, in bytes, of the element
13651 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13652
13653 tree
13654 array_ref_element_size (tree exp)
13655 {
13656 tree aligned_size = TREE_OPERAND (exp, 3);
13657 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13658 location_t loc = EXPR_LOCATION (exp);
13659
13660 /* If a size was specified in the ARRAY_REF, it's the size measured
13661 in alignment units of the element type. So multiply by that value. */
13662 if (aligned_size)
13663 {
13664 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13665 sizetype from another type of the same width and signedness. */
13666 if (TREE_TYPE (aligned_size) != sizetype)
13667 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13668 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13669 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13670 }
13671
13672 /* Otherwise, take the size from that of the element type. Substitute
13673 any PLACEHOLDER_EXPR that we have. */
13674 else
13675 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13676 }
13677
13678 /* Return a tree representing the lower bound of the array mentioned in
13679 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13680
13681 tree
13682 array_ref_low_bound (tree exp)
13683 {
13684 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13685
13686 /* If a lower bound is specified in EXP, use it. */
13687 if (TREE_OPERAND (exp, 2))
13688 return TREE_OPERAND (exp, 2);
13689
13690 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13691 substituting for a PLACEHOLDER_EXPR as needed. */
13692 if (domain_type && TYPE_MIN_VALUE (domain_type))
13693 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13694
13695 /* Otherwise, return a zero of the appropriate type. */
13696 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13697 }
13698
13699 /* Return a tree representing the upper bound of the array mentioned in
13700 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13701
13702 tree
13703 array_ref_up_bound (tree exp)
13704 {
13705 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13706
13707 /* If there is a domain type and it has an upper bound, use it, substituting
13708 for a PLACEHOLDER_EXPR as needed. */
13709 if (domain_type && TYPE_MAX_VALUE (domain_type))
13710 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13711
13712 /* Otherwise fail. */
13713 return NULL_TREE;
13714 }
13715
/* Returns true if REF is an array reference or a component reference
   to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies (e.g. a trailing flexible array
   member).  */

bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  /* Pick up the array type and step REF to the containing object.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else
    return false;

  /* A string literal is a fixed-size object, never a flexarray.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Remember the outermost reference for the offset check below.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Look for any following FIELD_DECL, skipping other
		 kinds of declarations on the chain.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* Look through a MEM_REF of an address to the underlying decl.  */
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  if (DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
	 padding.  */
      poly_int64 offset;
      /* Without constant bounds and element size be conservative.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
13820
13821 /* Return a tree representing the offset, in bytes, of the field referenced
13822 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13823
13824 tree
13825 component_ref_field_offset (tree exp)
13826 {
13827 tree aligned_offset = TREE_OPERAND (exp, 2);
13828 tree field = TREE_OPERAND (exp, 1);
13829 location_t loc = EXPR_LOCATION (exp);
13830
13831 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13832 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13833 value. */
13834 if (aligned_offset)
13835 {
13836 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13837 sizetype from another type of the same width and signedness. */
13838 if (TREE_TYPE (aligned_offset) != sizetype)
13839 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13840 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13841 size_int (DECL_OFFSET_ALIGN (field)
13842 / BITS_PER_UNIT));
13843 }
13844
13845 /* Otherwise, take the offset from that of the field. Substitute
13846 any PLACEHOLDER_EXPR that we have. */
13847 else
13848 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13849 }
13850
13851 /* Return the machine mode of T. For vectors, returns the mode of the
13852 inner type. The main use case is to feed the result to HONOR_NANS,
13853 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13854
13855 machine_mode
13856 element_mode (const_tree t)
13857 {
13858 if (!TYPE_P (t))
13859 t = TREE_TYPE (t);
13860 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13861 t = TREE_TYPE (t);
13862 return TYPE_MODE (t);
13863 }
13864
13865 /* Vector types need to re-check the target flags each time we report
13866 the machine mode. We need to do this because attribute target can
13867 change the result of vector_mode_supported_p and have_regs_of_mode
13868 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13869 change on a per-function basis. */
13870 /* ??? Possibly a better solution is to run through all the types
13871 referenced by a function and re-compute the TYPE_MODE once, rather
13872 than make the TYPE_MODE macro call a function. */
13873
13874 machine_mode
13875 vector_type_mode (const_tree t)
13876 {
13877 machine_mode mode;
13878
13879 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13880
13881 mode = t->type_common.mode;
13882 if (VECTOR_MODE_P (mode)
13883 && (!targetm.vector_mode_supported_p (mode)
13884 || !have_regs_of_mode[mode]))
13885 {
13886 scalar_int_mode innermode;
13887
13888 /* For integers, try mapping it to a same-sized scalar mode. */
13889 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13890 {
13891 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13892 * GET_MODE_BITSIZE (innermode));
13893 scalar_int_mode mode;
13894 if (int_mode_for_size (size, 0).exists (&mode)
13895 && have_regs_of_mode[mode])
13896 return mode;
13897 }
13898
13899 return BLKmode;
13900 }
13901
13902 return mode;
13903 }
13904
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).
   Returns false (after emitting diagnostics) on the first mismatch.  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that lists variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* PLACEHOLDER_EXPR sizes cannot be compared structurally.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
    }
  verify_variant_match (TYPE_PRECISION);
  verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
       || TREE_CODE (t) == INTEGER_TYPE
       || TREE_CODE (t) == BOOLEAN_TYPE
       || TREE_CODE (t) == REAL_TYPE
       || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dump TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
14139
14140
14141 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14142 the middle-end types_compatible_p function. It needs to avoid
14143 claiming types are different for types that should be treated
14144 the same with respect to TBAA. Canonical types are also used
14145 for IL consistency checks via the useless_type_conversion_p
14146 predicate which does not handle all type kinds itself but falls
14147 back to pointer-comparison of TYPE_CANONICAL for aggregates
14148 for example. */
14149
14150 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14151 type calculation because we need to allow inter-operability between signed
14152 and unsigned variants. */
14153
14154 bool
14155 type_with_interoperable_signedness (const_tree type)
14156 {
14157 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
14158 signed char and unsigned char. Similarly fortran FE builds
14159 C_SIZE_T as signed type, while C defines it unsigned. */
14160
14161 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14162 == INTEGER_TYPE
14163 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14164 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14165 }
14166
14167 /* Return true iff T1 and T2 are structurally identical for what
14168 TBAA is concerned.
14169 This function is used both by lto.c canonical type merging and by the
14170 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
14171 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
14172 only for LTO because only in these cases TYPE_CANONICAL equivalence
14173 correspond to one defined by gimple_canonical_types_compatible_p. */
14174
14175 bool
14176 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14177 bool trust_type_canonical)
14178 {
14179 /* Type variants should be same as the main variant. When not doing sanity
14180 checking to verify this fact, go to main variants and save some work. */
14181 if (trust_type_canonical)
14182 {
14183 t1 = TYPE_MAIN_VARIANT (t1);
14184 t2 = TYPE_MAIN_VARIANT (t2);
14185 }
14186
14187 /* Check first for the obvious case of pointer identity. */
14188 if (t1 == t2)
14189 return true;
14190
14191 /* Check that we have two types to compare. */
14192 if (t1 == NULL_TREE || t2 == NULL_TREE)
14193 return false;
14194
14195 /* We consider complete types always compatible with incomplete type.
14196 This does not make sense for canonical type calculation and thus we
14197 need to ensure that we are never called on it.
14198
14199 FIXME: For more correctness the function probably should have three modes
14200 1) mode assuming that types are complete mathcing their structure
14201 2) mode allowing incomplete types but producing equivalence classes
14202 and thus ignoring all info from complete types
14203 3) mode allowing incomplete types to match complete but checking
14204 compatibility between complete types.
14205
14206 1 and 2 can be used for canonical type calculation. 3 is the real
14207 definition of type compatibility that can be used i.e. for warnings during
14208 declaration merging. */
14209
14210 gcc_assert (!trust_type_canonical
14211 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14212
14213 /* If the types have been previously registered and found equal
14214 they still are. */
14215
14216 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14217 && trust_type_canonical)
14218 {
14219 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
14220 they are always NULL, but they are set to non-NULL for types
14221 constructed by build_pointer_type and variants. In this case the
14222 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
14223 all pointers are considered equal. Be sure to not return false
14224 negatives. */
14225 gcc_checking_assert (canonical_type_used_p (t1)
14226 && canonical_type_used_p (t2));
14227 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14228 }
14229
14230 /* For types where we do ODR based TBAA the canonical type is always
14231 set correctly, so we know that types are different if their
14232 canonical types does not match. */
14233 if (trust_type_canonical
14234 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14235 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14236 return false;
14237
14238 /* Can't be the same type if the types don't have the same code. */
14239 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14240 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14241 return false;
14242
14243 /* Qualifiers do not matter for canonical type comparison purposes. */
14244
14245 /* Void types and nullptr types are always the same. */
14246 if (TREE_CODE (t1) == VOID_TYPE
14247 || TREE_CODE (t1) == NULLPTR_TYPE)
14248 return true;
14249
14250 /* Can't be the same type if they have different mode. */
14251 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14252 return false;
14253
14254 /* Non-aggregate types can be handled cheaply. */
14255 if (INTEGRAL_TYPE_P (t1)
14256 || SCALAR_FLOAT_TYPE_P (t1)
14257 || FIXED_POINT_TYPE_P (t1)
14258 || TREE_CODE (t1) == VECTOR_TYPE
14259 || TREE_CODE (t1) == COMPLEX_TYPE
14260 || TREE_CODE (t1) == OFFSET_TYPE
14261 || POINTER_TYPE_P (t1))
14262 {
14263 /* Can't be the same type if they have different recision. */
14264 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14265 return false;
14266
14267 /* In some cases the signed and unsigned types are required to be
14268 inter-operable. */
14269 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14270 && !type_with_interoperable_signedness (t1))
14271 return false;
14272
14273 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14274 interoperable with "signed char". Unless all frontends are revisited
14275 to agree on these types, we must ignore the flag completely. */
14276
14277 /* Fortran standard define C_PTR type that is compatible with every
14278 C pointer. For this reason we need to glob all pointers into one.
14279 Still pointers in different address spaces are not compatible. */
14280 if (POINTER_TYPE_P (t1))
14281 {
14282 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14283 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14284 return false;
14285 }
14286
14287 /* Tail-recurse to components. */
14288 if (TREE_CODE (t1) == VECTOR_TYPE
14289 || TREE_CODE (t1) == COMPLEX_TYPE)
14290 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14291 TREE_TYPE (t2),
14292 trust_type_canonical);
14293
14294 return true;
14295 }
14296
14297 /* Do type-specific comparisons. */
14298 switch (TREE_CODE (t1))
14299 {
14300 case ARRAY_TYPE:
14301 /* Array types are the same if the element types are the same and
14302 the number of elements are the same. */
14303 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14304 trust_type_canonical)
14305 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14306 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14307 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14308 return false;
14309 else
14310 {
14311 tree i1 = TYPE_DOMAIN (t1);
14312 tree i2 = TYPE_DOMAIN (t2);
14313
14314 /* For an incomplete external array, the type domain can be
14315 NULL_TREE. Check this condition also. */
14316 if (i1 == NULL_TREE && i2 == NULL_TREE)
14317 return true;
14318 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14319 return false;
14320 else
14321 {
14322 tree min1 = TYPE_MIN_VALUE (i1);
14323 tree min2 = TYPE_MIN_VALUE (i2);
14324 tree max1 = TYPE_MAX_VALUE (i1);
14325 tree max2 = TYPE_MAX_VALUE (i2);
14326
14327 /* The minimum/maximum values have to be the same. */
14328 if ((min1 == min2
14329 || (min1 && min2
14330 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14331 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14332 || operand_equal_p (min1, min2, 0))))
14333 && (max1 == max2
14334 || (max1 && max2
14335 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14336 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14337 || operand_equal_p (max1, max2, 0)))))
14338 return true;
14339 else
14340 return false;
14341 }
14342 }
14343
14344 case METHOD_TYPE:
14345 case FUNCTION_TYPE:
14346 /* Function types are the same if the return type and arguments types
14347 are the same. */
14348 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14349 trust_type_canonical))
14350 return false;
14351
14352 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14353 return true;
14354 else
14355 {
14356 tree parms1, parms2;
14357
14358 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14359 parms1 && parms2;
14360 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14361 {
14362 if (!gimple_canonical_types_compatible_p
14363 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14364 trust_type_canonical))
14365 return false;
14366 }
14367
14368 if (parms1 || parms2)
14369 return false;
14370
14371 return true;
14372 }
14373
14374 case RECORD_TYPE:
14375 case UNION_TYPE:
14376 case QUAL_UNION_TYPE:
14377 {
14378 tree f1, f2;
14379
14380 /* Don't try to compare variants of an incomplete type, before
14381 TYPE_FIELDS has been copied around. */
14382 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14383 return true;
14384
14385
14386 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14387 return false;
14388
14389 /* For aggregate types, all the fields must be the same. */
14390 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14391 f1 || f2;
14392 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14393 {
14394 /* Skip non-fields and zero-sized fields. */
14395 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14396 || (DECL_SIZE (f1)
14397 && integer_zerop (DECL_SIZE (f1)))))
14398 f1 = TREE_CHAIN (f1);
14399 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14400 || (DECL_SIZE (f2)
14401 && integer_zerop (DECL_SIZE (f2)))))
14402 f2 = TREE_CHAIN (f2);
14403 if (!f1 || !f2)
14404 break;
14405 /* The fields must have the same name, offset and type. */
14406 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14407 || !gimple_compare_field_offset (f1, f2)
14408 || !gimple_canonical_types_compatible_p
14409 (TREE_TYPE (f1), TREE_TYPE (f2),
14410 trust_type_canonical))
14411 return false;
14412 }
14413
14414 /* If one aggregate has more fields than the other, they
14415 are not the same. */
14416 if (f1 || f2)
14417 return false;
14418
14419 return true;
14420 }
14421
14422 default:
14423 /* Consider all types with language specific trees in them mutually
14424 compatible. This is executed only from verify_type and false
14425 positives can be tolerated. */
14426 gcc_assert (!in_lto_p);
14427 return true;
14428 }
14429 }
14430
14431 /* Verify type T. */
14432
14433 void
14434 verify_type (const_tree t)
14435 {
14436 bool error_found = false;
14437 tree mv = TYPE_MAIN_VARIANT (t);
14438 if (!mv)
14439 {
14440 error ("main variant is not defined");
14441 error_found = true;
14442 }
14443 else if (mv != TYPE_MAIN_VARIANT (mv))
14444 {
14445 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14446 debug_tree (mv);
14447 error_found = true;
14448 }
14449 else if (t != mv && !verify_type_variant (t, mv))
14450 error_found = true;
14451
14452 tree ct = TYPE_CANONICAL (t);
14453 if (!ct)
14454 ;
14455 else if (TYPE_CANONICAL (t) != ct)
14456 {
14457 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14458 debug_tree (ct);
14459 error_found = true;
14460 }
14461 /* Method and function types cannot be used to address memory and thus
14462 TYPE_CANONICAL really matters only for determining useless conversions.
14463
14464 FIXME: C++ FE produce declarations of builtin functions that are not
14465 compatible with main variants. */
14466 else if (TREE_CODE (t) == FUNCTION_TYPE)
14467 ;
14468 else if (t != ct
14469 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14470 with variably sized arrays because their sizes possibly
14471 gimplified to different variables. */
14472 && !variably_modified_type_p (ct, NULL)
14473 && !gimple_canonical_types_compatible_p (t, ct, false)
14474 && COMPLETE_TYPE_P (t))
14475 {
14476 error ("%<TYPE_CANONICAL%> is not compatible");
14477 debug_tree (ct);
14478 error_found = true;
14479 }
14480
14481 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14482 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14483 {
14484 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14485 debug_tree (ct);
14486 error_found = true;
14487 }
14488 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14489 {
14490 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14491 debug_tree (ct);
14492 debug_tree (TYPE_MAIN_VARIANT (ct));
14493 error_found = true;
14494 }
14495
14496
14497 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14498 if (RECORD_OR_UNION_TYPE_P (t))
14499 {
14500 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14501 and danagle the pointer from time to time. */
14502 if (TYPE_VFIELD (t)
14503 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14504 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14505 {
14506 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14507 debug_tree (TYPE_VFIELD (t));
14508 error_found = true;
14509 }
14510 }
14511 else if (TREE_CODE (t) == POINTER_TYPE)
14512 {
14513 if (TYPE_NEXT_PTR_TO (t)
14514 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14515 {
14516 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14517 debug_tree (TYPE_NEXT_PTR_TO (t));
14518 error_found = true;
14519 }
14520 }
14521 else if (TREE_CODE (t) == REFERENCE_TYPE)
14522 {
14523 if (TYPE_NEXT_REF_TO (t)
14524 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14525 {
14526 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14527 debug_tree (TYPE_NEXT_REF_TO (t));
14528 error_found = true;
14529 }
14530 }
14531 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14532 || TREE_CODE (t) == FIXED_POINT_TYPE)
14533 {
14534 /* FIXME: The following check should pass:
14535 useless_type_conversion_p (const_cast <tree> (t),
14536 TREE_TYPE (TYPE_MIN_VALUE (t))
14537 but does not for C sizetypes in LTO. */
14538 }
14539
14540 /* Check various uses of TYPE_MAXVAL_RAW. */
14541 if (RECORD_OR_UNION_TYPE_P (t))
14542 {
14543 if (!TYPE_BINFO (t))
14544 ;
14545 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14546 {
14547 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14548 debug_tree (TYPE_BINFO (t));
14549 error_found = true;
14550 }
14551 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14552 {
14553 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14554 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14555 error_found = true;
14556 }
14557 }
14558 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14559 {
14560 if (TYPE_METHOD_BASETYPE (t)
14561 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14562 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14563 {
14564 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14565 debug_tree (TYPE_METHOD_BASETYPE (t));
14566 error_found = true;
14567 }
14568 }
14569 else if (TREE_CODE (t) == OFFSET_TYPE)
14570 {
14571 if (TYPE_OFFSET_BASETYPE (t)
14572 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14573 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14574 {
14575 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14576 debug_tree (TYPE_OFFSET_BASETYPE (t));
14577 error_found = true;
14578 }
14579 }
14580 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14581 || TREE_CODE (t) == FIXED_POINT_TYPE)
14582 {
14583 /* FIXME: The following check should pass:
14584 useless_type_conversion_p (const_cast <tree> (t),
14585 TREE_TYPE (TYPE_MAX_VALUE (t))
14586 but does not for C sizetypes in LTO. */
14587 }
14588 else if (TREE_CODE (t) == ARRAY_TYPE)
14589 {
14590 if (TYPE_ARRAY_MAX_SIZE (t)
14591 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14592 {
14593 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14594 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14595 error_found = true;
14596 }
14597 }
14598 else if (TYPE_MAX_VALUE_RAW (t))
14599 {
14600 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14601 debug_tree (TYPE_MAX_VALUE_RAW (t));
14602 error_found = true;
14603 }
14604
14605 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14606 {
14607 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14608 debug_tree (TYPE_LANG_SLOT_1 (t));
14609 error_found = true;
14610 }
14611
14612 /* Check various uses of TYPE_VALUES_RAW. */
14613 if (TREE_CODE (t) == ENUMERAL_TYPE)
14614 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14615 {
14616 tree value = TREE_VALUE (l);
14617 tree name = TREE_PURPOSE (l);
14618
14619 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14620 CONST_DECL of ENUMERAL TYPE. */
14621 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14622 {
14623 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14624 debug_tree (value);
14625 debug_tree (name);
14626 error_found = true;
14627 }
14628 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14629 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14630 {
14631 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14632 "to the enum");
14633 debug_tree (value);
14634 debug_tree (name);
14635 error_found = true;
14636 }
14637 if (TREE_CODE (name) != IDENTIFIER_NODE)
14638 {
14639 error ("enum value name is not %<IDENTIFIER_NODE%>");
14640 debug_tree (value);
14641 debug_tree (name);
14642 error_found = true;
14643 }
14644 }
14645 else if (TREE_CODE (t) == ARRAY_TYPE)
14646 {
14647 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14648 {
14649 error ("array %<TYPE_DOMAIN%> is not integer type");
14650 debug_tree (TYPE_DOMAIN (t));
14651 error_found = true;
14652 }
14653 }
14654 else if (RECORD_OR_UNION_TYPE_P (t))
14655 {
14656 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14657 {
14658 error ("%<TYPE_FIELDS%> defined in incomplete type");
14659 error_found = true;
14660 }
14661 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14662 {
14663 /* TODO: verify properties of decls. */
14664 if (TREE_CODE (fld) == FIELD_DECL)
14665 ;
14666 else if (TREE_CODE (fld) == TYPE_DECL)
14667 ;
14668 else if (TREE_CODE (fld) == CONST_DECL)
14669 ;
14670 else if (VAR_P (fld))
14671 ;
14672 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14673 ;
14674 else if (TREE_CODE (fld) == USING_DECL)
14675 ;
14676 else if (TREE_CODE (fld) == FUNCTION_DECL)
14677 ;
14678 else
14679 {
14680 error ("wrong tree in %<TYPE_FIELDS%> list");
14681 debug_tree (fld);
14682 error_found = true;
14683 }
14684 }
14685 }
14686 else if (TREE_CODE (t) == INTEGER_TYPE
14687 || TREE_CODE (t) == BOOLEAN_TYPE
14688 || TREE_CODE (t) == OFFSET_TYPE
14689 || TREE_CODE (t) == REFERENCE_TYPE
14690 || TREE_CODE (t) == NULLPTR_TYPE
14691 || TREE_CODE (t) == POINTER_TYPE)
14692 {
14693 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14694 {
14695 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14696 "is %p",
14697 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14698 error_found = true;
14699 }
14700 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14701 {
14702 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14703 debug_tree (TYPE_CACHED_VALUES (t));
14704 error_found = true;
14705 }
14706 /* Verify just enough of cache to ensure that no one copied it to new type.
14707 All copying should go by copy_node that should clear it. */
14708 else if (TYPE_CACHED_VALUES_P (t))
14709 {
14710 int i;
14711 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14712 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14713 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14714 {
14715 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14716 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14717 error_found = true;
14718 break;
14719 }
14720 }
14721 }
14722 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14723 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14724 {
14725 /* C++ FE uses TREE_PURPOSE to store initial values. */
14726 if (TREE_PURPOSE (l) && in_lto_p)
14727 {
14728 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14729 debug_tree (l);
14730 error_found = true;
14731 }
14732 if (!TYPE_P (TREE_VALUE (l)))
14733 {
14734 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14735 debug_tree (l);
14736 error_found = true;
14737 }
14738 }
14739 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14740 {
14741 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14742 debug_tree (TYPE_VALUES_RAW (t));
14743 error_found = true;
14744 }
14745 if (TREE_CODE (t) != INTEGER_TYPE
14746 && TREE_CODE (t) != BOOLEAN_TYPE
14747 && TREE_CODE (t) != OFFSET_TYPE
14748 && TREE_CODE (t) != REFERENCE_TYPE
14749 && TREE_CODE (t) != NULLPTR_TYPE
14750 && TREE_CODE (t) != POINTER_TYPE
14751 && TYPE_CACHED_VALUES_P (t))
14752 {
14753 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14754 error_found = true;
14755 }
14756
14757 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14758 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14759 of a type. */
14760 if (TREE_CODE (t) == METHOD_TYPE
14761 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14762 {
14763 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14764 error_found = true;
14765 }
14766
14767 if (error_found)
14768 {
14769 debug_tree (const_cast <tree> (t));
14770 internal_error ("%qs failed", __func__);
14771 }
14772 }
14773
14774
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  /* A constant can be classified directly from its sign bit.  */
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Peel off non-widening conversions; each step narrows (or keeps)
     the precision we classify against.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      /* Cap the walk so pathological conversion chains can't loop long.  */
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  wide_int arg_min, arg_max;
  /* No recorded range for ARG itself: follow conversion definitions in
     the SSA web (same peeling rules as above) until one has a range.  */
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  /* Classify the recorded [arg_min, arg_max] range, sign-extended to
     PREC so the comparison matches the "interpreted as signed" contract.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
14853
14854
14855
14856
14857 /* Return true if ARG is marked with the nonnull attribute in the
14858 current function signature. */
14859
14860 bool
14861 nonnull_arg_p (const_tree arg)
14862 {
14863 tree t, attrs, fntype;
14864 unsigned HOST_WIDE_INT arg_num;
14865
14866 gcc_assert (TREE_CODE (arg) == PARM_DECL
14867 && (POINTER_TYPE_P (TREE_TYPE (arg))
14868 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14869
14870 /* The static chain decl is always non null. */
14871 if (arg == cfun->static_chain_decl)
14872 return true;
14873
14874 /* THIS argument of method is always non-NULL. */
14875 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14876 && arg == DECL_ARGUMENTS (cfun->decl)
14877 && flag_delete_null_pointer_checks)
14878 return true;
14879
14880 /* Values passed by reference are always non-NULL. */
14881 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14882 && flag_delete_null_pointer_checks)
14883 return true;
14884
14885 fntype = TREE_TYPE (cfun->decl);
14886 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14887 {
14888 attrs = lookup_attribute ("nonnull", attrs);
14889
14890 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14891 if (attrs == NULL_TREE)
14892 return false;
14893
14894 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14895 if (TREE_VALUE (attrs) == NULL_TREE)
14896 return true;
14897
14898 /* Get the position number for ARG in the function signature. */
14899 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14900 t;
14901 t = DECL_CHAIN (t), arg_num++)
14902 {
14903 if (t == arg)
14904 break;
14905 }
14906
14907 gcc_assert (t == arg);
14908
14909 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14910 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14911 {
14912 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14913 return true;
14914 }
14915 }
14916
14917 return false;
14918 }
14919
14920 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14921 information. */
14922
14923 location_t
14924 set_block (location_t loc, tree block)
14925 {
14926 location_t pure_loc = get_pure_location (loc);
14927 source_range src_range = get_range_from_loc (line_table, loc);
14928 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14929 }
14930
14931 location_t
14932 set_source_range (tree expr, location_t start, location_t finish)
14933 {
14934 source_range src_range;
14935 src_range.m_start = start;
14936 src_range.m_finish = finish;
14937 return set_source_range (expr, src_range);
14938 }
14939
14940 location_t
14941 set_source_range (tree expr, source_range src_range)
14942 {
14943 if (!EXPR_P (expr))
14944 return UNKNOWN_LOCATION;
14945
14946 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14947 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14948 pure_loc,
14949 src_range,
14950 NULL);
14951 SET_EXPR_LOCATION (expr, adhoc);
14952 return adhoc;
14953 }
14954
14955 /* Return EXPR, potentially wrapped with a node expression LOC,
14956 if !CAN_HAVE_LOCATION_P (expr).
14957
14958 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14959 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14960
14961 Wrapper nodes can be identified using location_wrapper_p. */
14962
14963 tree
14964 maybe_wrap_with_location (tree expr, location_t loc)
14965 {
14966 if (expr == NULL)
14967 return NULL;
14968 if (loc == UNKNOWN_LOCATION)
14969 return expr;
14970 if (CAN_HAVE_LOCATION_P (expr))
14971 return expr;
14972 /* We should only be adding wrappers for constants and for decls,
14973 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14974 gcc_assert (CONSTANT_CLASS_P (expr)
14975 || DECL_P (expr)
14976 || EXCEPTIONAL_CLASS_P (expr));
14977
14978 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14979 any impact of the wrapper nodes. */
14980 if (EXCEPTIONAL_CLASS_P (expr))
14981 return expr;
14982
14983 /* If any auto_suppress_location_wrappers are active, don't create
14984 wrappers. */
14985 if (suppress_location_wrappers > 0)
14986 return expr;
14987
14988 tree_code code
14989 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14990 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14991 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14992 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14993 /* Mark this node as being a wrapper. */
14994 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14995 return wrapper;
14996 }
14997
14998 int suppress_location_wrappers;
14999
15000 /* Return the name of combined function FN, for debugging purposes. */
15001
15002 const char *
15003 combined_fn_name (combined_fn fn)
15004 {
15005 if (builtin_fn_p (fn))
15006 {
15007 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
15008 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
15009 }
15010 else
15011 return internal_fn_name (as_internal_fn (fn));
15012 }
15013
15014 /* Return a bitmap with a bit set corresponding to each argument in
15015 a function call type FNTYPE declared with attribute nonnull,
15016 or null if none of the function's argument are nonnull. The caller
15017 must free the bitmap. */
15018
15019 bitmap
15020 get_nonnull_args (const_tree fntype)
15021 {
15022 if (fntype == NULL_TREE)
15023 return NULL;
15024
15025 tree attrs = TYPE_ATTRIBUTES (fntype);
15026 if (!attrs)
15027 return NULL;
15028
15029 bitmap argmap = NULL;
15030
15031 /* A function declaration can specify multiple attribute nonnull,
15032 each with zero or more arguments. The loop below creates a bitmap
15033 representing a union of all the arguments. An empty (but non-null)
15034 bitmap means that all arguments have been declaraed nonnull. */
15035 for ( ; attrs; attrs = TREE_CHAIN (attrs))
15036 {
15037 attrs = lookup_attribute ("nonnull", attrs);
15038 if (!attrs)
15039 break;
15040
15041 if (!argmap)
15042 argmap = BITMAP_ALLOC (NULL);
15043
15044 if (!TREE_VALUE (attrs))
15045 {
15046 /* Clear the bitmap in case a previous attribute nonnull
15047 set it and this one overrides it for all arguments. */
15048 bitmap_clear (argmap);
15049 return argmap;
15050 }
15051
15052 /* Iterate over the indices of the format arguments declared nonnull
15053 and set a bit for each. */
15054 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15055 {
15056 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15057 bitmap_set_bit (argmap, val);
15058 }
15059 }
15060
15061 return argmap;
15062 }
15063
15064 /* Returns true if TYPE is a type where it and all of its subobjects
15065 (recursively) are of structure, union, or array type. */
15066
15067 static bool
15068 default_is_empty_type (tree type)
15069 {
15070 if (RECORD_OR_UNION_TYPE_P (type))
15071 {
15072 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15073 if (TREE_CODE (field) == FIELD_DECL
15074 && !DECL_PADDING_P (field)
15075 && !default_is_empty_type (TREE_TYPE (field)))
15076 return false;
15077 return true;
15078 }
15079 else if (TREE_CODE (type) == ARRAY_TYPE)
15080 return (integer_minus_onep (array_type_nelts (type))
15081 || TYPE_DOMAIN (type) == NULL_TREE
15082 || default_is_empty_type (TREE_TYPE (type)));
15083 return false;
15084 }
15085
15086 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15087 that shouldn't be passed via stack. */
15088
15089 bool
15090 default_is_empty_record (const_tree type)
15091 {
15092 if (!abi_version_at_least (12))
15093 return false;
15094
15095 if (type == error_mark_node)
15096 return false;
15097
15098 if (TREE_ADDRESSABLE (type))
15099 return false;
15100
15101 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15102 }
15103
15104 /* Like int_size_in_bytes, but handle empty records specially. */
15105
15106 HOST_WIDE_INT
15107 arg_int_size_in_bytes (const_tree type)
15108 {
15109 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15110 }
15111
15112 /* Like size_in_bytes, but handle empty records specially. */
15113
15114 tree
15115 arg_size_in_bytes (const_tree type)
15116 {
15117 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15118 }
15119
15120 /* Return true if an expression with CODE has to have the same result type as
15121 its first operand. */
15122
15123 bool
15124 expr_type_first_operand_type_p (tree_code code)
15125 {
15126 switch (code)
15127 {
15128 case NEGATE_EXPR:
15129 case ABS_EXPR:
15130 case BIT_NOT_EXPR:
15131 case PAREN_EXPR:
15132 case CONJ_EXPR:
15133
15134 case PLUS_EXPR:
15135 case MINUS_EXPR:
15136 case MULT_EXPR:
15137 case TRUNC_DIV_EXPR:
15138 case CEIL_DIV_EXPR:
15139 case FLOOR_DIV_EXPR:
15140 case ROUND_DIV_EXPR:
15141 case TRUNC_MOD_EXPR:
15142 case CEIL_MOD_EXPR:
15143 case FLOOR_MOD_EXPR:
15144 case ROUND_MOD_EXPR:
15145 case RDIV_EXPR:
15146 case EXACT_DIV_EXPR:
15147 case MIN_EXPR:
15148 case MAX_EXPR:
15149 case BIT_IOR_EXPR:
15150 case BIT_XOR_EXPR:
15151 case BIT_AND_EXPR:
15152
15153 case LSHIFT_EXPR:
15154 case RSHIFT_EXPR:
15155 case LROTATE_EXPR:
15156 case RROTATE_EXPR:
15157 return true;
15158
15159 default:
15160 return false;
15161 }
15162 }
15163
15164 /* Return a typenode for the "standard" C type with a given name. */
15165 tree
15166 get_typenode_from_name (const char *name)
15167 {
15168 if (name == NULL || *name == '\0')
15169 return NULL_TREE;
15170
15171 if (strcmp (name, "char") == 0)
15172 return char_type_node;
15173 if (strcmp (name, "unsigned char") == 0)
15174 return unsigned_char_type_node;
15175 if (strcmp (name, "signed char") == 0)
15176 return signed_char_type_node;
15177
15178 if (strcmp (name, "short int") == 0)
15179 return short_integer_type_node;
15180 if (strcmp (name, "short unsigned int") == 0)
15181 return short_unsigned_type_node;
15182
15183 if (strcmp (name, "int") == 0)
15184 return integer_type_node;
15185 if (strcmp (name, "unsigned int") == 0)
15186 return unsigned_type_node;
15187
15188 if (strcmp (name, "long int") == 0)
15189 return long_integer_type_node;
15190 if (strcmp (name, "long unsigned int") == 0)
15191 return long_unsigned_type_node;
15192
15193 if (strcmp (name, "long long int") == 0)
15194 return long_long_integer_type_node;
15195 if (strcmp (name, "long long unsigned int") == 0)
15196 return long_long_unsigned_type_node;
15197
15198 gcc_unreachable ();
15199 }
15200
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Each entry is { node, base, str }: the distinct pointer type node, the
   plain pointer type it corresponds to, and the struct tag name
   (presumably used to match the real declaration when it appears --
   see the builtin_structptr_type definition in tree.h).

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
15214
15215 /* Return the maximum object size. */
15216
15217 tree
15218 max_object_size (void)
15219 {
15220 /* To do: Make this a configurable parameter. */
15221 return TYPE_MAX_VALUE (ptrdiff_type_node);
15222 }
15223
15224 #if CHECKING_P
15225
15226 namespace selftest {
15227
15228 /* Selftests for tree. */
15229
15230 /* Verify that integer constants are sane. */
15231
15232 static void
15233 test_integer_constants ()
15234 {
15235 ASSERT_TRUE (integer_type_node != NULL);
15236 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15237
15238 tree type = integer_type_node;
15239
15240 tree zero = build_zero_cst (type);
15241 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15242 ASSERT_EQ (type, TREE_TYPE (zero));
15243
15244 tree one = build_int_cst (type, 1);
15245 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15246 ASSERT_EQ (type, TREE_TYPE (zero));
15247 }
15248
15249 /* Verify identifiers. */
15250
15251 static void
15252 test_identifiers ()
15253 {
15254 tree identifier = get_identifier ("foo");
15255 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15256 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15257 }
15258
15259 /* Verify LABEL_DECL. */
15260
15261 static void
15262 test_labels ()
15263 {
15264 tree identifier = get_identifier ("err");
15265 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15266 identifier, void_type_node);
15267 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15268 ASSERT_FALSE (FORCED_LABEL (label_decl));
15269 }
15270
15271 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15272 are given by VALS. */
15273
15274 static tree
15275 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15276 {
15277 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15278 tree_vector_builder builder (type, vals.length (), 1);
15279 builder.splice (vals);
15280 return builder.build ();
15281 }
15282
15283 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15284
15285 static void
15286 check_vector_cst (vec<tree> expected, tree actual)
15287 {
15288 ASSERT_KNOWN_EQ (expected.length (),
15289 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15290 for (unsigned int i = 0; i < expected.length (); ++i)
15291 ASSERT_EQ (wi::to_wide (expected[i]),
15292 wi::to_wide (vector_cst_elt (actual, i)));
15293 }
15294
15295 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15296 and that its elements match EXPECTED. */
15297
15298 static void
15299 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15300 unsigned int npatterns)
15301 {
15302 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15303 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15304 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15305 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15306 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15307 check_vector_cst (expected, actual);
15308 }
15309
15310 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15311 and NPATTERNS background elements, and that its elements match
15312 EXPECTED. */
15313
15314 static void
15315 check_vector_cst_fill (vec<tree> expected, tree actual,
15316 unsigned int npatterns)
15317 {
15318 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15319 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15320 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15321 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15322 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15323 check_vector_cst (expected, actual);
15324 }
15325
/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_stepped (vec<tree> expected, tree actual,
			  unsigned int npatterns)
{
  /* A stepped encoding stores three elements per pattern (the base
     value plus the first two elements of the series), giving
     3 * NPATTERNS encoded elements in total.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  /* Stepped constants are not duplicates, and must be flagged as
     stepped.  */
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  /* Finally check the decoded element values themselves.  */
  check_vector_cst (expected, actual);
}
15340
/* Test the creation of VECTOR_CSTs, exercising each of the encodings
   (stepped, duplicate and fill) via an 8-element vector of unsigned
   16-bit integers.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around the 16-bit limit:
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
15424
15425 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15426 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15427 modifying its argument in-place. */
15428
15429 static void
15430 check_strip_nops (tree node, tree expected)
15431 {
15432 STRIP_NOPS (node);
15433 ASSERT_EQ (expected, node);
15434 }
15435
/* Verify location wrappers: that maybe_wrap_with_location wraps the
   right kinds of node, that the wrappers record the requested location,
   and that the various stripping routines remove them again.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* Wrapping NULL_TREE must be a no-op.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  /* The wrapper must carry LOC and strip back to the original node.  */
  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  /* STRING_CST wrappers are expected to use VIEW_CONVERT_EXPR.  */
  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper: a hand-built NON_LVALUE_EXPR around a decl must not be
     mistaken for one, and stripping it must be a no-op.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
15498
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results: for each predicate, the wrapped constant (wr_*)
   must give the same answer as the underlying constant.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  /* Integer constants 0, 1 and -1, each with a location wrapper.  */
  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  /* Float constants 0.0, 1.0 and -1.0, each with a location wrapper.  */
  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants with integer parts: 0+0i, 1+0i and -1+0i.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  /* Complex constants with float parts: 0.0+0.0i, 1.0+0.0i, -1.0+0.0i.  */
  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  /* 1+0i counts as one...  */
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  /* ...but 1.0+0.0i does not.  */
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  /* -1 in two's complement is all-ones.  */
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  /* -1+0i counts as minus one, unlike the float complex case below.  */
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  /* 1+0i fails because the imaginary part is not one.  */
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  Note that complex constants are
     never considered nonnegative here, even 1+0i.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
15815
/* Check that string escaping works correctly, both with unlimited
   message length and with -fmessage-length enabled.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  /* Strings without special characters pass through unchanged.  */
  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  The global
     diagnostic context is modified here and restored at the end.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  /* With unlimited line length, control characters become escapes.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15858
/* Run all of the selftests within this file.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15872
15873 } // namespace selftest
15874
15875 #endif /* CHECKING_P */
15876
15877 #include "gt-tree.h"