1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "tree.h"
35 #include "gimple.h"
36 #include "rtl.h"
37 #include "ssa.h"
38 #include "flags.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "calls.h"
43 #include "attribs.h"
44 #include "varasm.h"
45 #include "tm_p.h"
46 #include "obstack.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "filenames.h"
49 #include "output.h"
50 #include "target.h"
51 #include "common/common-target.h"
52 #include "langhooks.h"
53 #include "tree-inline.h"
54 #include "tree-iterator.h"
55 #include "internal-fn.h"
56 #include "gimple-iterator.h"
57 #include "gimplify.h"
58 #include "cgraph.h"
59 #include "insn-config.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "tree-dfa.h"
67 #include "params.h"
68 #include "tree-pass.h"
69 #include "langhooks-def.h"
70 #include "diagnostic.h"
71 #include "tree-diagnostic.h"
72 #include "tree-pretty-print.h"
73 #include "except.h"
74 #include "debug.h"
75 #include "intl.h"
76 #include "builtins.h"
77 #include "print-tree.h"
78 #include "ipa-utils.h"
79
80 /* Tree code classes. */
81
82 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
83 #define END_OF_BASE_TREE_CODES tcc_exceptional,
84
85 const enum tree_code_class tree_code_type[] = {
86 #include "all-tree.def"
87 };
88
89 #undef DEFTREECODE
90 #undef END_OF_BASE_TREE_CODES
91
92 /* Table indexed by tree code giving number of expression
93 operands beyond the fixed part of the node structure.
94 Not used for types or decls. */
95
96 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
97 #define END_OF_BASE_TREE_CODES 0,
98
99 const unsigned char tree_code_length[] = {
100 #include "all-tree.def"
101 };
102
103 #undef DEFTREECODE
104 #undef END_OF_BASE_TREE_CODES
105
106 /* Names of tree components.
107 Used for printing out the tree and error messages. */
108 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
109 #define END_OF_BASE_TREE_CODES "@dummy",
110
111 static const char *const tree_code_name[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Each tree code class has an associated string representation.
119 These must correspond to the tree_code_class entries. */
120
121 const char *const tree_code_class_strings[] =
122 {
123 "exceptional",
124 "constant",
125 "type",
126 "declaration",
127 "reference",
128 "comparison",
129 "unary",
130 "binary",
131 "statement",
132 "vl_exp",
133 "expression"
134 };
135
136 /* obstack.[ch] explicitly declined to prototype this. */
137 extern int _obstack_allocated_p (struct obstack *h, void *obj);
138
139 /* Statistics-gathering stuff. */
140
141 static int tree_code_counts[MAX_TREE_CODES];
142 int tree_node_counts[(int) all_kinds];
143 int tree_node_sizes[(int) all_kinds];
144
145 /* Keep in sync with tree.h:enum tree_node_kind. */
146 static const char * const tree_node_kind_names[] = {
147 "decls",
148 "types",
149 "blocks",
150 "stmts",
151 "refs",
152 "exprs",
153 "constants",
154 "identifiers",
155 "vecs",
156 "binfos",
157 "ssa names",
158 "constructors",
159 "random kinds",
160 "lang_decl kinds",
161 "lang_type kinds",
162 "omp clauses",
163 };
164
165 /* Unique id for next decl created. */
166 static GTY(()) int next_decl_uid;
167 /* Unique id for next type created. */
168 static GTY(()) int next_type_uid = 1;
169 /* Unique id for next debug decl created. Use negative numbers,
170 to catch erroneous uses. */
171 static GTY(()) int next_debug_decl_uid;
172
173 /* Since we cannot rehash a type after it is in the table, we have to
174 keep the hash code. */
175
176 struct GTY((for_user)) type_hash {
177 unsigned long hash;
178 tree type;
179 };
180
181 /* Initial size of the hash table (rounded to next prime). */
182 #define TYPE_HASH_INITIAL_SIZE 1000
183
184 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
185 {
186 static hashval_t hash (type_hash *t) { return t->hash; }
187 static bool equal (type_hash *a, type_hash *b);
188
189 static int
190 keep_cache_entry (type_hash *&t)
191 {
192 return ggc_marked_p (t->type);
193 }
194 };
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
204
205 /* Hash table and temporary node for larger integer const values. */
206 static GTY (()) tree int_cst_node;
207
208 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
209 {
210 static hashval_t hash (tree t);
211 static bool equal (tree x, tree y);
212 };
213
214 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
215
216 /* Hash table for optimization flags and target option flags. Use the same
217 hash table for both sets of options. Nodes for building the current
218 optimization and target option nodes. The assumption is most of the time
219 the options created will already be in the hash table, so we avoid
220 allocating and freeing up a node repeatedly. */
221 static GTY (()) tree cl_optimization_node;
222 static GTY (()) tree cl_target_option_node;
223
224 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
225 {
226 static hashval_t hash (tree t);
227 static bool equal (tree x, tree y);
228 };
229
230 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
231
232 /* General tree->tree mapping structure for use in hash tables. */
233
234
235 static GTY ((cache))
236 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
237
238 static GTY ((cache))
239 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
240
241 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
242 {
243 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
244
245 static bool
246 equal (tree_vec_map *a, tree_vec_map *b)
247 {
248 return a->base.from == b->base.from;
249 }
250
251 static int
252 keep_cache_entry (tree_vec_map *&m)
253 {
254 return ggc_marked_p (m->base.from);
255 }
256 };
257
258 static GTY ((cache))
259 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
260
261 static void set_type_quals (tree, int);
262 static void print_type_hash_statistics (void);
263 static void print_debug_expr_statistics (void);
264 static void print_value_expr_statistics (void);
265 static void type_hash_list (const_tree, inchash::hash &);
266 static void attribute_hash_list (const_tree, inchash::hash &);
267
268 tree global_trees[TI_MAX];
269 tree integer_types[itk_none];
270
271 bool int_n_enabled_p[NUM_INT_N_ENTS];
272 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
273
274 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
275
276 /* Number of operands for each OpenMP clause. */
277 unsigned const char omp_clause_num_ops[] =
278 {
279 0, /* OMP_CLAUSE_ERROR */
280 1, /* OMP_CLAUSE_PRIVATE */
281 1, /* OMP_CLAUSE_SHARED */
282 1, /* OMP_CLAUSE_FIRSTPRIVATE */
283 2, /* OMP_CLAUSE_LASTPRIVATE */
284 4, /* OMP_CLAUSE_REDUCTION */
285 1, /* OMP_CLAUSE_COPYIN */
286 1, /* OMP_CLAUSE_COPYPRIVATE */
287 3, /* OMP_CLAUSE_LINEAR */
288 2, /* OMP_CLAUSE_ALIGNED */
289 1, /* OMP_CLAUSE_DEPEND */
290 1, /* OMP_CLAUSE_UNIFORM */
291 2, /* OMP_CLAUSE_FROM */
292 2, /* OMP_CLAUSE_TO */
293 2, /* OMP_CLAUSE_MAP */
294 2, /* OMP_CLAUSE__CACHE_ */
295 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
296 1, /* OMP_CLAUSE_USE_DEVICE */
297 2, /* OMP_CLAUSE_GANG */
298 1, /* OMP_CLAUSE_ASYNC */
299 1, /* OMP_CLAUSE_WAIT */
300 0, /* OMP_CLAUSE_AUTO */
301 0, /* OMP_CLAUSE_SEQ */
302 1, /* OMP_CLAUSE__LOOPTEMP_ */
303 1, /* OMP_CLAUSE_IF */
304 1, /* OMP_CLAUSE_NUM_THREADS */
305 1, /* OMP_CLAUSE_SCHEDULE */
306 0, /* OMP_CLAUSE_NOWAIT */
307 0, /* OMP_CLAUSE_ORDERED */
308 0, /* OMP_CLAUSE_DEFAULT */
309 3, /* OMP_CLAUSE_COLLAPSE */
310 0, /* OMP_CLAUSE_UNTIED */
311 1, /* OMP_CLAUSE_FINAL */
312 0, /* OMP_CLAUSE_MERGEABLE */
313 1, /* OMP_CLAUSE_DEVICE */
314 1, /* OMP_CLAUSE_DIST_SCHEDULE */
315 0, /* OMP_CLAUSE_INBRANCH */
316 0, /* OMP_CLAUSE_NOTINBRANCH */
317 1, /* OMP_CLAUSE_NUM_TEAMS */
318 1, /* OMP_CLAUSE_THREAD_LIMIT */
319 0, /* OMP_CLAUSE_PROC_BIND */
320 1, /* OMP_CLAUSE_SAFELEN */
321 1, /* OMP_CLAUSE_SIMDLEN */
322 0, /* OMP_CLAUSE_FOR */
323 0, /* OMP_CLAUSE_PARALLEL */
324 0, /* OMP_CLAUSE_SECTIONS */
325 0, /* OMP_CLAUSE_TASKGROUP */
326 1, /* OMP_CLAUSE__SIMDUID_ */
327 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
328 0, /* OMP_CLAUSE_INDEPENDENT */
329 1, /* OMP_CLAUSE_WORKER */
330 1, /* OMP_CLAUSE_VECTOR */
331 1, /* OMP_CLAUSE_NUM_GANGS */
332 1, /* OMP_CLAUSE_NUM_WORKERS */
333 1, /* OMP_CLAUSE_VECTOR_LENGTH */
334 };
335
336 const char * const omp_clause_code_name[] =
337 {
338 "error_clause",
339 "private",
340 "shared",
341 "firstprivate",
342 "lastprivate",
343 "reduction",
344 "copyin",
345 "copyprivate",
346 "linear",
347 "aligned",
348 "depend",
349 "uniform",
350 "from",
351 "to",
352 "map",
353 "_cache_",
354 "device_resident",
355 "use_device",
356 "gang",
357 "async",
358 "wait",
359 "auto",
360 "seq",
361 "_looptemp_",
362 "if",
363 "num_threads",
364 "schedule",
365 "nowait",
366 "ordered",
367 "default",
368 "collapse",
369 "untied",
370 "final",
371 "mergeable",
372 "device",
373 "dist_schedule",
374 "inbranch",
375 "notinbranch",
376 "num_teams",
377 "thread_limit",
378 "proc_bind",
379 "safelen",
380 "simdlen",
381 "for",
382 "parallel",
383 "sections",
384 "taskgroup",
385 "_simduid_",
386 "_Cilk_for_count_",
387 "independent",
388 "worker",
389 "vector",
390 "num_gangs",
391 "num_workers",
392 "vector_length"
393 };
394
395
396 /* Return the tree node structure used by tree code CODE. */
397
398 static inline enum tree_node_structure_enum
399 tree_node_structure_for_code (enum tree_code code)
400 {
401 switch (TREE_CODE_CLASS (code))
402 {
403 case tcc_declaration:
404 {
405 switch (code)
406 {
407 case FIELD_DECL:
408 return TS_FIELD_DECL;
409 case PARM_DECL:
410 return TS_PARM_DECL;
411 case VAR_DECL:
412 return TS_VAR_DECL;
413 case LABEL_DECL:
414 return TS_LABEL_DECL;
415 case RESULT_DECL:
416 return TS_RESULT_DECL;
417 case DEBUG_EXPR_DECL:
418 return TS_DECL_WRTL;
419 case CONST_DECL:
420 return TS_CONST_DECL;
421 case TYPE_DECL:
422 return TS_TYPE_DECL;
423 case FUNCTION_DECL:
424 return TS_FUNCTION_DECL;
425 case TRANSLATION_UNIT_DECL:
426 return TS_TRANSLATION_UNIT_DECL;
427 default:
428 return TS_DECL_NON_COMMON;
429 }
430 }
431 case tcc_type:
432 return TS_TYPE_NON_COMMON;
433 case tcc_reference:
434 case tcc_comparison:
435 case tcc_unary:
436 case tcc_binary:
437 case tcc_expression:
438 case tcc_statement:
439 case tcc_vl_exp:
440 return TS_EXP;
441 default: /* tcc_constant and tcc_exceptional */
442 break;
443 }
444 switch (code)
445 {
446 /* tcc_constant cases. */
447 case VOID_CST: return TS_TYPED;
448 case INTEGER_CST: return TS_INT_CST;
449 case REAL_CST: return TS_REAL_CST;
450 case FIXED_CST: return TS_FIXED_CST;
451 case COMPLEX_CST: return TS_COMPLEX;
452 case VECTOR_CST: return TS_VECTOR;
453 case STRING_CST: return TS_STRING;
454 /* tcc_exceptional cases. */
455 case ERROR_MARK: return TS_COMMON;
456 case IDENTIFIER_NODE: return TS_IDENTIFIER;
457 case TREE_LIST: return TS_LIST;
458 case TREE_VEC: return TS_VEC;
459 case SSA_NAME: return TS_SSA_NAME;
460 case PLACEHOLDER_EXPR: return TS_COMMON;
461 case STATEMENT_LIST: return TS_STATEMENT_LIST;
462 case BLOCK: return TS_BLOCK;
463 case CONSTRUCTOR: return TS_CONSTRUCTOR;
464 case TREE_BINFO: return TS_BINFO;
465 case OMP_CLAUSE: return TS_OMP_CLAUSE;
466 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
467 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
468
469 default:
470 gcc_unreachable ();
471 }
472 }
473
474
475 /* Initialize tree_contains_struct to describe the hierarchy of tree
476 nodes. */
477
478 static void
479 initialize_tree_contains_struct (void)
480 {
481 unsigned i;
482
483 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
484 {
485 enum tree_code code;
486 enum tree_node_structure_enum ts_code;
487
488 code = (enum tree_code) i;
489 ts_code = tree_node_structure_for_code (code);
490
491 /* Mark the TS structure itself. */
492 tree_contains_struct[code][ts_code] = 1;
493
494 /* Mark all the structures that TS is derived from. */
495 switch (ts_code)
496 {
497 case TS_TYPED:
498 case TS_BLOCK:
499 MARK_TS_BASE (code);
500 break;
501
502 case TS_COMMON:
503 case TS_INT_CST:
504 case TS_REAL_CST:
505 case TS_FIXED_CST:
506 case TS_VECTOR:
507 case TS_STRING:
508 case TS_COMPLEX:
509 case TS_SSA_NAME:
510 case TS_CONSTRUCTOR:
511 case TS_EXP:
512 case TS_STATEMENT_LIST:
513 MARK_TS_TYPED (code);
514 break;
515
516 case TS_IDENTIFIER:
517 case TS_DECL_MINIMAL:
518 case TS_TYPE_COMMON:
519 case TS_LIST:
520 case TS_VEC:
521 case TS_BINFO:
522 case TS_OMP_CLAUSE:
523 case TS_OPTIMIZATION:
524 case TS_TARGET_OPTION:
525 MARK_TS_COMMON (code);
526 break;
527
528 case TS_TYPE_WITH_LANG_SPECIFIC:
529 MARK_TS_TYPE_COMMON (code);
530 break;
531
532 case TS_TYPE_NON_COMMON:
533 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
534 break;
535
536 case TS_DECL_COMMON:
537 MARK_TS_DECL_MINIMAL (code);
538 break;
539
540 case TS_DECL_WRTL:
541 case TS_CONST_DECL:
542 MARK_TS_DECL_COMMON (code);
543 break;
544
545 case TS_DECL_NON_COMMON:
546 MARK_TS_DECL_WITH_VIS (code);
547 break;
548
549 case TS_DECL_WITH_VIS:
550 case TS_PARM_DECL:
551 case TS_LABEL_DECL:
552 case TS_RESULT_DECL:
553 MARK_TS_DECL_WRTL (code);
554 break;
555
556 case TS_FIELD_DECL:
557 MARK_TS_DECL_COMMON (code);
558 break;
559
560 case TS_VAR_DECL:
561 MARK_TS_DECL_WITH_VIS (code);
562 break;
563
564 case TS_TYPE_DECL:
565 case TS_FUNCTION_DECL:
566 MARK_TS_DECL_NON_COMMON (code);
567 break;
568
569 case TS_TRANSLATION_UNIT_DECL:
570 MARK_TS_DECL_COMMON (code);
571 break;
572
573 default:
574 gcc_unreachable ();
575 }
576 }
577
578 /* Basic consistency checks for attributes used in fold. */
579 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
580 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
581 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
582 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
583 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
584 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
585 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
586 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
587 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
588 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
589 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
590 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
591 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
592 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
593 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
594 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
595 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
596 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
597 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
598 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
599 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
600 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
601 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
602 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
603 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
604 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
605 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
606 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
607 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
608 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
609 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
610 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
611 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
612 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
613 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
614 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
615 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
616 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
617 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
618 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
619 }
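
/* Illustrative note (editorial addition, not part of the original source):
   after the loop above, tree_contains_struct[C][S] is nonzero exactly when
   a node with code C embeds tree structure S.  For instance the TS_VAR_DECL
   case marks, directly or transitively,

     tree_contains_struct[VAR_DECL][TS_VAR_DECL]
     tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]
     tree_contains_struct[VAR_DECL][TS_DECL_WRTL]
     tree_contains_struct[VAR_DECL][TS_DECL_COMMON]
     tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]

   which is exactly what the gcc_assert checks just above rely on.  */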
620
621
622 /* Init tree.c. */
623
624 void
625 init_ttree (void)
626 {
627 /* Initialize the hash table of types. */
628 type_hash_table
629 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
630
631 debug_expr_for_decl
632 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
633
634 value_expr_for_decl
635 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
636
637 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
638
639 int_cst_node = make_int_cst (1, 1);
640
641 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
642
643 cl_optimization_node = make_node (OPTIMIZATION_NODE);
644 cl_target_option_node = make_node (TARGET_OPTION_NODE);
645
646 /* Initialize the tree_contains_struct array. */
647 initialize_tree_contains_struct ();
648 lang_hooks.init_ts ();
649 }
650
651 \f
652 /* The name of the object as the assembler will see it (but before any
653 translations made by ASM_OUTPUT_LABELREF). Often this is the same
654 as DECL_NAME. It is an IDENTIFIER_NODE. */
655 tree
656 decl_assembler_name (tree decl)
657 {
658 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
659 lang_hooks.set_decl_assembler_name (decl);
660 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
661 }
662
663 /* When the target supports COMDAT groups, this indicates which group the
664 DECL is associated with. This can be either an IDENTIFIER_NODE or a
665 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
666 tree
667 decl_comdat_group (const_tree node)
668 {
669 struct symtab_node *snode = symtab_node::get (node);
670 if (!snode)
671 return NULL;
672 return snode->get_comdat_group ();
673 }
674
675 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
676 tree
677 decl_comdat_group_id (const_tree node)
678 {
679 struct symtab_node *snode = symtab_node::get (node);
680 if (!snode)
681 return NULL;
682 return snode->get_comdat_group_id ();
683 }
684
685 /* When the target supports named sections, return the name of the section
686 NODE is placed in, or NULL if it is in no section. */
687 const char *
688 decl_section_name (const_tree node)
689 {
690 struct symtab_node *snode = symtab_node::get (node);
691 if (!snode)
692 return NULL;
693 return snode->get_section ();
694 }
695
696 /* Set the section name of NODE to the string VALUE; a NULL VALUE removes
697 any existing section association. */
698 void
699 set_decl_section_name (tree node, const char *value)
700 {
701 struct symtab_node *snode;
702
703 if (value == NULL)
704 {
705 snode = symtab_node::get (node);
706 if (!snode)
707 return;
708 }
709 else if (TREE_CODE (node) == VAR_DECL)
710 snode = varpool_node::get_create (node);
711 else
712 snode = cgraph_node::get_create (node);
713 snode->set_section (value);
714 }
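
/* Example (editorial sketch, not part of the original source): a front end
   that wants to place a variable in a linker section of its own choosing
   might do something like

     set_decl_section_name (decl, ".my_data");      // hypothetical name
     ...
     const char *sect = decl_section_name (decl);   // ".my_data", or NULL

   where DECL is a VAR_DECL backed (or about to be backed) by a varpool
   node.  Passing NULL for the name removes the section association.  */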
715
716 /* Return TLS model of a variable NODE. */
717 enum tls_model
718 decl_tls_model (const_tree node)
719 {
720 struct varpool_node *snode = varpool_node::get (node);
721 if (!snode)
722 return TLS_MODEL_NONE;
723 return snode->tls_model;
724 }
725
726 /* Set TLS model of variable NODE to MODEL. */
727 void
728 set_decl_tls_model (tree node, enum tls_model model)
729 {
730 struct varpool_node *vnode;
731
732 if (model == TLS_MODEL_NONE)
733 {
734 vnode = varpool_node::get (node);
735 if (!vnode)
736 return;
737 }
738 else
739 vnode = varpool_node::get_create (node);
740 vnode->tls_model = model;
741 }
742
743 /* Compute the number of bytes occupied by a tree with code CODE.
744 This function cannot be used for nodes that have variable sizes,
745 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
746 size_t
747 tree_code_size (enum tree_code code)
748 {
749 switch (TREE_CODE_CLASS (code))
750 {
751 case tcc_declaration: /* A decl node */
752 {
753 switch (code)
754 {
755 case FIELD_DECL:
756 return sizeof (struct tree_field_decl);
757 case PARM_DECL:
758 return sizeof (struct tree_parm_decl);
759 case VAR_DECL:
760 return sizeof (struct tree_var_decl);
761 case LABEL_DECL:
762 return sizeof (struct tree_label_decl);
763 case RESULT_DECL:
764 return sizeof (struct tree_result_decl);
765 case CONST_DECL:
766 return sizeof (struct tree_const_decl);
767 case TYPE_DECL:
768 return sizeof (struct tree_type_decl);
769 case FUNCTION_DECL:
770 return sizeof (struct tree_function_decl);
771 case DEBUG_EXPR_DECL:
772 return sizeof (struct tree_decl_with_rtl);
773 case TRANSLATION_UNIT_DECL:
774 return sizeof (struct tree_translation_unit_decl);
775 case NAMESPACE_DECL:
776 case IMPORTED_DECL:
777 case NAMELIST_DECL:
778 return sizeof (struct tree_decl_non_common);
779 default:
780 return lang_hooks.tree_size (code);
781 }
782 }
783
784 case tcc_type: /* a type node */
785 return sizeof (struct tree_type_non_common);
786
787 case tcc_reference: /* a reference */
788 case tcc_expression: /* an expression */
789 case tcc_statement: /* an expression with side effects */
790 case tcc_comparison: /* a comparison expression */
791 case tcc_unary: /* a unary arithmetic expression */
792 case tcc_binary: /* a binary arithmetic expression */
793 return (sizeof (struct tree_exp)
794 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
795
796 case tcc_constant: /* a constant */
797 switch (code)
798 {
799 case VOID_CST: return sizeof (struct tree_typed);
800 case INTEGER_CST: gcc_unreachable ();
801 case REAL_CST: return sizeof (struct tree_real_cst);
802 case FIXED_CST: return sizeof (struct tree_fixed_cst);
803 case COMPLEX_CST: return sizeof (struct tree_complex);
804 case VECTOR_CST: return sizeof (struct tree_vector);
805 case STRING_CST: gcc_unreachable ();
806 default:
807 return lang_hooks.tree_size (code);
808 }
809
810 case tcc_exceptional: /* something random, like an identifier. */
811 switch (code)
812 {
813 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
814 case TREE_LIST: return sizeof (struct tree_list);
815
816 case ERROR_MARK:
817 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
818
819 case TREE_VEC:
820 case OMP_CLAUSE: gcc_unreachable ();
821
822 case SSA_NAME: return sizeof (struct tree_ssa_name);
823
824 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
825 case BLOCK: return sizeof (struct tree_block);
826 case CONSTRUCTOR: return sizeof (struct tree_constructor);
827 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
828 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
829
830 default:
831 return lang_hooks.tree_size (code);
832 }
833
834 default:
835 gcc_unreachable ();
836 }
837 }
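
/* Worked example (editorial addition, not in the original source): for a
   binary expression code such as PLUS_EXPR (class tcc_binary, with
   TREE_CODE_LENGTH (PLUS_EXPR) == 2) the computation above yields

     sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)

   i.e. the base expression node plus one extra operand slot beyond the one
   already embedded in struct tree_exp.  */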
838
839 /* Compute the number of bytes occupied by NODE. This routine only
840 looks at TREE_CODE, except for those nodes that have variable sizes. */
841 size_t
842 tree_size (const_tree node)
843 {
844 const enum tree_code code = TREE_CODE (node);
845 switch (code)
846 {
847 case INTEGER_CST:
848 return (sizeof (struct tree_int_cst)
849 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
850
851 case TREE_BINFO:
852 return (offsetof (struct tree_binfo, base_binfos)
853 + vec<tree, va_gc>
854 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
855
856 case TREE_VEC:
857 return (sizeof (struct tree_vec)
858 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
859
860 case VECTOR_CST:
861 return (sizeof (struct tree_vector)
862 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
863
864 case STRING_CST:
865 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
866
867 case OMP_CLAUSE:
868 return (sizeof (struct tree_omp_clause)
869 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
870 * sizeof (tree));
871
872 default:
873 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
874 return (sizeof (struct tree_exp)
875 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
876 else
877 return tree_code_size (code);
878 }
879 }
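
/* Worked example (editorial addition, not in the original source): a
   STRING_CST node for the C literal "hi" (length 3, counting the trailing
   NUL) has

     tree_size (node) == offsetof (struct tree_string, str) + 3 + 1

   matching the layout used by build_string below, which always stores one
   extra terminating '\0' past the TREE_STRING_LENGTH bytes.  */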
880
881 /* Record interesting allocation statistics for a tree node with CODE
882 and LENGTH. */
883
884 static void
885 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
886 size_t length ATTRIBUTE_UNUSED)
887 {
888 enum tree_code_class type = TREE_CODE_CLASS (code);
889 tree_node_kind kind;
890
891 if (!GATHER_STATISTICS)
892 return;
893
894 switch (type)
895 {
896 case tcc_declaration: /* A decl node */
897 kind = d_kind;
898 break;
899
900 case tcc_type: /* a type node */
901 kind = t_kind;
902 break;
903
904 case tcc_statement: /* an expression with side effects */
905 kind = s_kind;
906 break;
907
908 case tcc_reference: /* a reference */
909 kind = r_kind;
910 break;
911
912 case tcc_expression: /* an expression */
913 case tcc_comparison: /* a comparison expression */
914 case tcc_unary: /* a unary arithmetic expression */
915 case tcc_binary: /* a binary arithmetic expression */
916 kind = e_kind;
917 break;
918
919 case tcc_constant: /* a constant */
920 kind = c_kind;
921 break;
922
923 case tcc_exceptional: /* something random, like an identifier. */
924 switch (code)
925 {
926 case IDENTIFIER_NODE:
927 kind = id_kind;
928 break;
929
930 case TREE_VEC:
931 kind = vec_kind;
932 break;
933
934 case TREE_BINFO:
935 kind = binfo_kind;
936 break;
937
938 case SSA_NAME:
939 kind = ssa_name_kind;
940 break;
941
942 case BLOCK:
943 kind = b_kind;
944 break;
945
946 case CONSTRUCTOR:
947 kind = constr_kind;
948 break;
949
950 case OMP_CLAUSE:
951 kind = omp_clause_kind;
952 break;
953
954 default:
955 kind = x_kind;
956 break;
957 }
958 break;
959
960 case tcc_vl_exp:
961 kind = e_kind;
962 break;
963
964 default:
965 gcc_unreachable ();
966 }
967
968 tree_code_counts[(int) code]++;
969 tree_node_counts[(int) kind]++;
970 tree_node_sizes[(int) kind] += length;
971 }
972
973 /* Allocate and return a new UID from the DECL_UID namespace. */
974
975 int
976 allocate_decl_uid (void)
977 {
978 return next_decl_uid++;
979 }
980
981 /* Return a newly allocated node of code CODE. For decl and type
982 nodes, some other fields are initialized. The rest of the node is
983 initialized to zero. This function cannot be used for TREE_VEC,
984 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
985 tree_code_size.
986
987 Achoo! I got a code in the node. */
988
989 tree
990 make_node_stat (enum tree_code code MEM_STAT_DECL)
991 {
992 tree t;
993 enum tree_code_class type = TREE_CODE_CLASS (code);
994 size_t length = tree_code_size (code);
995
996 record_node_allocation_statistics (code, length);
997
998 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
999 TREE_SET_CODE (t, code);
1000
1001 switch (type)
1002 {
1003 case tcc_statement:
1004 TREE_SIDE_EFFECTS (t) = 1;
1005 break;
1006
1007 case tcc_declaration:
1008 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1009 {
1010 if (code == FUNCTION_DECL)
1011 {
1012 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1013 DECL_MODE (t) = FUNCTION_MODE;
1014 }
1015 else
1016 DECL_ALIGN (t) = 1;
1017 }
1018 DECL_SOURCE_LOCATION (t) = input_location;
1019 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1020 DECL_UID (t) = --next_debug_decl_uid;
1021 else
1022 {
1023 DECL_UID (t) = allocate_decl_uid ();
1024 SET_DECL_PT_UID (t, -1);
1025 }
1026 if (TREE_CODE (t) == LABEL_DECL)
1027 LABEL_DECL_UID (t) = -1;
1028
1029 break;
1030
1031 case tcc_type:
1032 TYPE_UID (t) = next_type_uid++;
1033 TYPE_ALIGN (t) = BITS_PER_UNIT;
1034 TYPE_USER_ALIGN (t) = 0;
1035 TYPE_MAIN_VARIANT (t) = t;
1036 TYPE_CANONICAL (t) = t;
1037
1038 /* Default to no attributes for type, but let target change that. */
1039 TYPE_ATTRIBUTES (t) = NULL_TREE;
1040 targetm.set_default_type_attributes (t);
1041
1042 /* We have not yet computed the alias set for this type. */
1043 TYPE_ALIAS_SET (t) = -1;
1044 break;
1045
1046 case tcc_constant:
1047 TREE_CONSTANT (t) = 1;
1048 break;
1049
1050 case tcc_expression:
1051 switch (code)
1052 {
1053 case INIT_EXPR:
1054 case MODIFY_EXPR:
1055 case VA_ARG_EXPR:
1056 case PREDECREMENT_EXPR:
1057 case PREINCREMENT_EXPR:
1058 case POSTDECREMENT_EXPR:
1059 case POSTINCREMENT_EXPR:
1060 /* All of these have side-effects, no matter what their
1061 operands are. */
1062 TREE_SIDE_EFFECTS (t) = 1;
1063 break;
1064
1065 default:
1066 break;
1067 }
1068 break;
1069
1070 case tcc_exceptional:
1071 switch (code)
1072 {
1073 case TARGET_OPTION_NODE:
1074 TREE_TARGET_OPTION(t)
1075 = ggc_cleared_alloc<struct cl_target_option> ();
1076 break;
1077
1078 case OPTIMIZATION_NODE:
1079 TREE_OPTIMIZATION (t)
1080 = ggc_cleared_alloc<struct cl_optimization> ();
1081 break;
1082
1083 default:
1084 break;
1085 }
1086 break;
1087
1088 default:
1089 /* Other classes need no special treatment. */
1090 break;
1091 }
1092
1093 return t;
1094 }
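
/* Example (editorial sketch, not part of the original source):

     tree t = make_node (INTEGER_TYPE);

   returns a zeroed node whose type-specific fields have been seeded as
   above: a fresh TYPE_UID, TYPE_ALIGN of BITS_PER_UNIT, itself as its own
   TYPE_MAIN_VARIANT and TYPE_CANONICAL, and an alias set of -1 meaning
   "not yet computed".  */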
1095 \f
1096 /* Return a new node with the same contents as NODE except that its
1097 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1098
1099 tree
1100 copy_node_stat (tree node MEM_STAT_DECL)
1101 {
1102 tree t;
1103 enum tree_code code = TREE_CODE (node);
1104 size_t length;
1105
1106 gcc_assert (code != STATEMENT_LIST);
1107
1108 length = tree_size (node);
1109 record_node_allocation_statistics (code, length);
1110 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1111 memcpy (t, node, length);
1112
1113 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1114 TREE_CHAIN (t) = 0;
1115 TREE_ASM_WRITTEN (t) = 0;
1116 TREE_VISITED (t) = 0;
1117
1118 if (TREE_CODE_CLASS (code) == tcc_declaration)
1119 {
1120 if (code == DEBUG_EXPR_DECL)
1121 DECL_UID (t) = --next_debug_decl_uid;
1122 else
1123 {
1124 DECL_UID (t) = allocate_decl_uid ();
1125 if (DECL_PT_UID_SET_P (node))
1126 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1127 }
1128 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1129 && DECL_HAS_VALUE_EXPR_P (node))
1130 {
1131 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1132 DECL_HAS_VALUE_EXPR_P (t) = 1;
1133 }
1134 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1135 if (TREE_CODE (node) == VAR_DECL)
1136 {
1137 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1138 t->decl_with_vis.symtab_node = NULL;
1139 }
1140 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1141 {
1142 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1143 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1144 }
1145 if (TREE_CODE (node) == FUNCTION_DECL)
1146 {
1147 DECL_STRUCT_FUNCTION (t) = NULL;
1148 t->decl_with_vis.symtab_node = NULL;
1149 }
1150 }
1151 else if (TREE_CODE_CLASS (code) == tcc_type)
1152 {
1153 TYPE_UID (t) = next_type_uid++;
1154 /* The following is so that the debug code for
1155 the copy is different from the original type.
1156 The two statements usually duplicate each other
1157 (because they clear fields of the same union),
1158 but the optimizer should catch that. */
1159 TYPE_SYMTAB_POINTER (t) = 0;
1160 TYPE_SYMTAB_ADDRESS (t) = 0;
1161
1162 /* Do not copy the values cache. */
1163 if (TYPE_CACHED_VALUES_P (t))
1164 {
1165 TYPE_CACHED_VALUES_P (t) = 0;
1166 TYPE_CACHED_VALUES (t) = NULL_TREE;
1167 }
1168 }
1169 else if (code == TARGET_OPTION_NODE)
1170 {
1171 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1172 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1173 sizeof (struct cl_target_option));
1174 }
1175 else if (code == OPTIMIZATION_NODE)
1176 {
1177 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1178 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1179 sizeof (struct cl_optimization));
1180 }
1181
1182 return t;
1183 }
1184
1185 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1186 For example, this can copy a list made of TREE_LIST nodes. */
1187
1188 tree
1189 copy_list (tree list)
1190 {
1191 tree head;
1192 tree prev, next;
1193
1194 if (list == 0)
1195 return 0;
1196
1197 head = prev = copy_node (list);
1198 next = TREE_CHAIN (list);
1199 while (next)
1200 {
1201 TREE_CHAIN (prev) = copy_node (next);
1202 prev = TREE_CHAIN (prev);
1203 next = TREE_CHAIN (next);
1204 }
1205 return head;
1206 }
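
/* Example (editorial sketch, not part of the original source; it assumes
   the usual tree_cons helper defined elsewhere in this file): given a
   two-element list

     tree l = tree_cons (NULL_TREE, first,
                         tree_cons (NULL_TREE, second, NULL_TREE));
     tree c = copy_list (l);

   C is a fresh chain of two TREE_LIST nodes whose TREE_PURPOSE and
   TREE_VALUE fields still point at the original FIRST and SECOND; only the
   list cells themselves are copied.  */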
1207
1208 \f
1209 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1210 INTEGER_CST with value CST and type TYPE. */
1211
1212 static unsigned int
1213 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1214 {
1215 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1216 /* We need an extra zero HWI if CST is an unsigned integer with its
1217 upper bit set, and if CST occupies a whole number of HWIs. */
1218 if (TYPE_UNSIGNED (type)
1219 && wi::neg_p (cst)
1220 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1221 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1222 return cst.get_len ();
1223 }
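
/* Worked example (editorial addition, not in the original source): on a
   host with 64-bit HOST_WIDE_INT, the value 0xffffffffffffffff of a 64-bit
   unsigned type is represented by a single HWI whose signed value is -1.
   Because the type is unsigned, the top bit is set and the precision is an
   exact multiple of HOST_BITS_PER_WIDE_INT, so the function returns
   64 / 64 + 1 == 2: one extra zero element is kept so that the extended
   value reads as a nonnegative number.  */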
1224
1225 /* Return a new INTEGER_CST with value CST and type TYPE. */
1226
1227 static tree
1228 build_new_int_cst (tree type, const wide_int &cst)
1229 {
1230 unsigned int len = cst.get_len ();
1231 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1232 tree nt = make_int_cst (len, ext_len);
1233
1234 if (len < ext_len)
1235 {
1236 --ext_len;
1237 TREE_INT_CST_ELT (nt, ext_len) = 0;
1238 for (unsigned int i = len; i < ext_len; ++i)
1239 TREE_INT_CST_ELT (nt, i) = -1;
1240 }
1241 else if (TYPE_UNSIGNED (type)
1242 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1243 {
1244 len--;
1245 TREE_INT_CST_ELT (nt, len)
1246 = zext_hwi (cst.elt (len),
1247 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1248 }
1249
1250 for (unsigned int i = 0; i < len; i++)
1251 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1252 TREE_TYPE (nt) = type;
1253 return nt;
1254 }
1255
1256 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1257
1258 tree
1259 build_int_cst (tree type, HOST_WIDE_INT low)
1260 {
1261 /* Support legacy code. */
1262 if (!type)
1263 type = integer_type_node;
1264
1265 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1266 }
1267
1268 tree
1269 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1270 {
1271 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1272 }
1273
1274 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1275
1276 tree
1277 build_int_cst_type (tree type, HOST_WIDE_INT low)
1278 {
1279 gcc_assert (type);
1280 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1281 }
1282
1283 /* Construct a tree of type TYPE with the value given by CST. The signedness
1284 of CST is assumed to be the same as the signedness of TYPE. */
1285
1286 tree
1287 double_int_to_tree (tree type, double_int cst)
1288 {
1289 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1290 }
1291
1292 /* Force the wide_int CST into the range of the type TYPE by sign- or
1293 zero-extending it. OVERFLOWABLE says how much overflow we care about:
1294 when > 0 we are only interested in signed overflow, when < 0 we are
1295 interested in any overflow. OVERFLOWED indicates whether overflow has
1296 already occurred. The value is forced into the range of TYPE by
1297 discarding the bits outside the type's precision. We set TREE_OVERFLOW
1298 on the result if
1299 OVERFLOWED is nonzero,
1300 or OVERFLOWABLE is > 0 and signed overflow occurs,
1301 or OVERFLOWABLE is < 0 and any overflow occurs.
1302 We return a tree node for the extended wide_int. The node is shared
1303 if no overflow flags are set (it comes from the shared constant caches);
1304 otherwise a fresh unshared node is built. */
1305
1306
1307 tree
1308 force_fit_type (tree type, const wide_int_ref &cst,
1309 int overflowable, bool overflowed)
1310 {
1311 signop sign = TYPE_SIGN (type);
1312
1313 /* If we need to set overflow flags, return a new unshared node. */
1314 if (overflowed || !wi::fits_to_tree_p (cst, type))
1315 {
1316 if (overflowed
1317 || overflowable < 0
1318 || (overflowable > 0 && sign == SIGNED))
1319 {
1320 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1321 tree t = build_new_int_cst (type, tmp);
1322 TREE_OVERFLOW (t) = 1;
1323 return t;
1324 }
1325 }
1326
1327 /* Else build a shared node. */
1328 return wide_int_to_tree (type, cst);
1329 }
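
/* Example (editorial sketch, not part of the original source): forcing the
   value 130 into an 8-bit signed type truncates it to -126.  If OVERFLOWED
   is set, OVERFLOWABLE is negative, or OVERFLOWABLE is positive and the
   type is signed, the caller gets a fresh unshared INTEGER_CST with
   TREE_OVERFLOW set; otherwise the shared node for -126 is returned via
   wide_int_to_tree.  */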
1330
1331 /* These are the hash table functions for the hash table of shared
1332 INTEGER_CST nodes. */
1333
1334 /* Return the hash code of X, an INTEGER_CST. */
1335
1336 hashval_t
1337 int_cst_hasher::hash (tree x)
1338 {
1339 const_tree const t = x;
1340 hashval_t code = TYPE_UID (TREE_TYPE (t));
1341 int i;
1342
1343 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1344 code ^= TREE_INT_CST_ELT (t, i);
1345
1346 return code;
1347 }
1348
1349 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1350 is the same as that represented by Y, also an INTEGER_CST tree node. */
1351
1352 bool
1353 int_cst_hasher::equal (tree x, tree y)
1354 {
1355 const_tree const xt = x;
1356 const_tree const yt = y;
1357
1358 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1359 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1360 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1361 return false;
1362
1363 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1364 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1365 return false;
1366
1367 return true;
1368 }
1369
1370 /* Create an INT_CST node of TYPE and value CST.
1371 The returned node is always shared. For small integers we use a
1372 per-type vector cache, for larger ones we use a single hash table.
1373 The value is extended from its precision according to the sign of
1374 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1375 the upper bits and ensures that hashing and value equality based
1376 upon the underlying HOST_WIDE_INTs works without masking. */
1377
1378 tree
1379 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1380 {
1381 tree t;
1382 int ix = -1;
1383 int limit = 0;
1384
1385 gcc_assert (type);
1386 unsigned int prec = TYPE_PRECISION (type);
1387 signop sgn = TYPE_SIGN (type);
1388
1389 /* Verify that everything is canonical. */
1390 int l = pcst.get_len ();
1391 if (l > 1)
1392 {
1393 if (pcst.elt (l - 1) == 0)
1394 gcc_checking_assert (pcst.elt (l - 2) < 0);
1395 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1396 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1397 }
1398
1399 wide_int cst = wide_int::from (pcst, prec, sgn);
1400 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1401
1402 if (ext_len == 1)
1403 {
1404 /* We just need to store a single HOST_WIDE_INT. */
1405 HOST_WIDE_INT hwi;
1406 if (TYPE_UNSIGNED (type))
1407 hwi = cst.to_uhwi ();
1408 else
1409 hwi = cst.to_shwi ();
1410
1411 switch (TREE_CODE (type))
1412 {
1413 case NULLPTR_TYPE:
1414 gcc_assert (hwi == 0);
1415 /* Fallthru. */
1416
1417 case POINTER_TYPE:
1418 case REFERENCE_TYPE:
1419 case POINTER_BOUNDS_TYPE:
1420 /* Cache NULL pointer and zero bounds. */
1421 if (hwi == 0)
1422 {
1423 limit = 1;
1424 ix = 0;
1425 }
1426 break;
1427
1428 case BOOLEAN_TYPE:
1429 /* Cache false or true. */
1430 limit = 2;
1431 if (hwi < 2)
1432 ix = hwi;
1433 break;
1434
1435 case INTEGER_TYPE:
1436 case OFFSET_TYPE:
1437 if (TYPE_SIGN (type) == UNSIGNED)
1438 {
1439 /* Cache [0, N). */
1440 limit = INTEGER_SHARE_LIMIT;
1441 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1442 ix = hwi;
1443 }
1444 else
1445 {
1446 /* Cache [-1, N). */
1447 limit = INTEGER_SHARE_LIMIT + 1;
1448 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1449 ix = hwi + 1;
1450 }
1451 break;
1452
1453 case ENUMERAL_TYPE:
1454 break;
1455
1456 default:
1457 gcc_unreachable ();
1458 }
1459
1460 if (ix >= 0)
1461 {
1462 /* Look for it in the type's vector of small shared ints. */
1463 if (!TYPE_CACHED_VALUES_P (type))
1464 {
1465 TYPE_CACHED_VALUES_P (type) = 1;
1466 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1467 }
1468
1469 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1470 if (t)
1471 /* Make sure no one is clobbering the shared constant. */
1472 gcc_checking_assert (TREE_TYPE (t) == type
1473 && TREE_INT_CST_NUNITS (t) == 1
1474 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1475 && TREE_INT_CST_EXT_NUNITS (t) == 1
1476 && TREE_INT_CST_ELT (t, 0) == hwi);
1477 else
1478 {
1479 /* Create a new shared int. */
1480 t = build_new_int_cst (type, cst);
1481 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1482 }
1483 }
1484 else
1485 {
1486 /* Use the cache of larger shared ints, using int_cst_node as
1487 a temporary. */
1488
1489 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1490 TREE_TYPE (int_cst_node) = type;
1491
1492 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1493 t = *slot;
1494 if (!t)
1495 {
1496 /* Insert this one into the hash table. */
1497 t = int_cst_node;
1498 *slot = t;
1499 /* Make a new node for next time round. */
1500 int_cst_node = make_int_cst (1, 1);
1501 }
1502 }
1503 }
1504 else
1505 {
1506 /* The value either hashes properly or we drop it on the floor
1507 for the gc to take care of. There will not be enough of them
1508 to worry about. */
1509
1510 tree nt = build_new_int_cst (type, cst);
1511 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1512 t = *slot;
1513 if (!t)
1514 {
1515 /* Insert this one into the hash table. */
1516 t = nt;
1517 *slot = t;
1518 }
1519 }
1520
1521 return t;
1522 }
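
/* Example (editorial sketch, not part of the original source): small
   values are shared per type, so

     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 1);

   yields A == B (the same node from TYPE_CACHED_VALUES), while a large
   value such as 123456789 is looked up in the global int_cst_hash_table
   instead and is likewise returned shared.  */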
1523
1524 void
1525 cache_integer_cst (tree t)
1526 {
1527 tree type = TREE_TYPE (t);
1528 int ix = -1;
1529 int limit = 0;
1530 int prec = TYPE_PRECISION (type);
1531
1532 gcc_assert (!TREE_OVERFLOW (t));
1533
1534 switch (TREE_CODE (type))
1535 {
1536 case NULLPTR_TYPE:
1537 gcc_assert (integer_zerop (t));
1538 /* Fallthru. */
1539
1540 case POINTER_TYPE:
1541 case REFERENCE_TYPE:
1542 /* Cache NULL pointer. */
1543 if (integer_zerop (t))
1544 {
1545 limit = 1;
1546 ix = 0;
1547 }
1548 break;
1549
1550 case BOOLEAN_TYPE:
1551 /* Cache false or true. */
1552 limit = 2;
1553 if (wi::ltu_p (t, 2))
1554 ix = TREE_INT_CST_ELT (t, 0);
1555 break;
1556
1557 case INTEGER_TYPE:
1558 case OFFSET_TYPE:
1559 if (TYPE_UNSIGNED (type))
1560 {
1561 /* Cache 0..N */
1562 limit = INTEGER_SHARE_LIMIT;
1563
1564 /* This is a little hokey, but if the prec is smaller than
1565 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1566 obvious test will not get the correct answer. */
1567 if (prec < HOST_BITS_PER_WIDE_INT)
1568 {
1569 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1570 ix = tree_to_uhwi (t);
1571 }
1572 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1573 ix = tree_to_uhwi (t);
1574 }
1575 else
1576 {
1577 /* Cache -1..N */
1578 limit = INTEGER_SHARE_LIMIT + 1;
1579
1580 if (integer_minus_onep (t))
1581 ix = 0;
1582 else if (!wi::neg_p (t))
1583 {
1584 if (prec < HOST_BITS_PER_WIDE_INT)
1585 {
1586 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1587 ix = tree_to_shwi (t) + 1;
1588 }
1589 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1590 ix = tree_to_shwi (t) + 1;
1591 }
1592 }
1593 break;
1594
1595 case ENUMERAL_TYPE:
1596 break;
1597
1598 default:
1599 gcc_unreachable ();
1600 }
1601
1602 if (ix >= 0)
1603 {
1604 /* Look for it in the type's vector of small shared ints. */
1605 if (!TYPE_CACHED_VALUES_P (type))
1606 {
1607 TYPE_CACHED_VALUES_P (type) = 1;
1608 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1609 }
1610
1611 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1612 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1613 }
1614 else
1615 {
1616 /* Use the cache of larger shared ints. */
1617 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1618 /* If there is already an entry for the number verify it's the
1619 same. */
1620 if (*slot)
1621 gcc_assert (wi::eq_p (tree (*slot), t));
1622 else
1623 /* Otherwise insert this one into the hash table. */
1624 *slot = t;
1625 }
1626 }
1627
1628
1629 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1630 and the rest are zeros. */
1631
1632 tree
1633 build_low_bits_mask (tree type, unsigned bits)
1634 {
1635 gcc_assert (bits <= TYPE_PRECISION (type));
1636
1637 return wide_int_to_tree (type, wi::mask (bits, false,
1638 TYPE_PRECISION (type)));
1639 }
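
/* Example (editorial addition, not in the original source):
   build_low_bits_mask (unsigned_type_node, 4) yields the constant 0xf, and
   passing BITS equal to TYPE_PRECISION (type) yields an all-ones value of
   that type.  */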
1640
1641 /* Check that X is an integer constant that can be expressed in an (unsigned)
1642 HOST_WIDE_INT without loss of precision. */
1643
1644 bool
1645 cst_and_fits_in_hwi (const_tree x)
1646 {
1647 if (TREE_CODE (x) != INTEGER_CST)
1648 return false;
1649
1650 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1651 return false;
1652
1653 return TREE_INT_CST_NUNITS (x) == 1;
1654 }
1655
1656 /* Build a newly constructed VECTOR_CST node of length LEN. */
1657
1658 tree
1659 make_vector_stat (unsigned len MEM_STAT_DECL)
1660 {
1661 tree t;
1662 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1663
1664 record_node_allocation_statistics (VECTOR_CST, length);
1665
1666 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1667
1668 TREE_SET_CODE (t, VECTOR_CST);
1669 TREE_CONSTANT (t) = 1;
1670
1671 return t;
1672 }
1673
1674 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1675 are given by the array pointed to by VALS. */
1676
1677 tree
1678 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1679 {
1680 int over = 0;
1681 unsigned cnt = 0;
1682 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1683 TREE_TYPE (v) = type;
1684
1685 /* Iterate through elements and check for overflow. */
1686 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1687 {
1688 tree value = vals[cnt];
1689
1690 VECTOR_CST_ELT (v, cnt) = value;
1691
1692 /* Don't crash if we get an address constant. */
1693 if (!CONSTANT_CLASS_P (value))
1694 continue;
1695
1696 over |= TREE_OVERFLOW (value);
1697 }
1698
1699 TREE_OVERFLOW (v) = over;
1700 return v;
1701 }
1702
1703 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1704 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1705
1706 tree
1707 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1708 {
1709 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1710 unsigned HOST_WIDE_INT idx;
1711 tree value;
1712
1713 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1714 vec[idx] = value;
1715 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1716 vec[idx] = build_zero_cst (TREE_TYPE (type));
1717
1718 return build_vector (type, vec);
1719 }
1720
1721 /* Build a vector of type VECTYPE in which every element is SC. */
1722 tree
1723 build_vector_from_val (tree vectype, tree sc)
1724 {
1725 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1726
1727 if (sc == error_mark_node)
1728 return sc;
1729
1730 /* Verify that the vector type is suitable for SC. Note that there
1731 is some inconsistency in the type-system with respect to restrict
1732 qualifications of pointers. Vector types always have a main-variant
1733 element type and the qualification is applied to the vector-type.
1734 So TREE_TYPE (vector-type) does not return a properly qualified
1735 vector element-type. */
1736 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1737 TREE_TYPE (vectype)));
1738
1739 if (CONSTANT_CLASS_P (sc))
1740 {
1741 tree *v = XALLOCAVEC (tree, nunits);
1742 for (i = 0; i < nunits; ++i)
1743 v[i] = sc;
1744 return build_vector (vectype, v);
1745 }
1746 else
1747 {
1748 vec<constructor_elt, va_gc> *v;
1749 vec_alloc (v, nunits);
1750 for (i = 0; i < nunits; ++i)
1751 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1752 return build_constructor (vectype, v);
1753 }
1754 }
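
/* Example (editorial sketch, not part of the original source): given a
   four-element integer vector type in a hypothetical variable V4SI_TYPE,

     tree elt = build_int_cst (TREE_TYPE (v4si_type), 7);
     tree splat = build_vector_from_val (v4si_type, elt);

   yields the VECTOR_CST {7, 7, 7, 7}; if the element were not a constant,
   a CONSTRUCTOR with four identical elements would be built instead.  */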
1755
1756 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1757 are in the vec pointed to by VALS. */
1758 tree
1759 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1760 {
1761 tree c = make_node (CONSTRUCTOR);
1762 unsigned int i;
1763 constructor_elt *elt;
1764 bool constant_p = true;
1765 bool side_effects_p = false;
1766
1767 TREE_TYPE (c) = type;
1768 CONSTRUCTOR_ELTS (c) = vals;
1769
1770 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1771 {
1772 /* Mostly ctors will have elts that don't have side-effects, so
1773 the usual case is to scan all the elements. Hence a single
1774 loop for both const and side effects, rather than one loop
1775 each (with early outs). */
1776 if (!TREE_CONSTANT (elt->value))
1777 constant_p = false;
1778 if (TREE_SIDE_EFFECTS (elt->value))
1779 side_effects_p = true;
1780 }
1781
1782 TREE_SIDE_EFFECTS (c) = side_effects_p;
1783 TREE_CONSTANT (c) = constant_p;
1784
1785 return c;
1786 }
1787
1788 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1789 INDEX and VALUE. */
1790 tree
1791 build_constructor_single (tree type, tree index, tree value)
1792 {
1793 vec<constructor_elt, va_gc> *v;
1794 constructor_elt elt = {index, value};
1795
1796 vec_alloc (v, 1);
1797 v->quick_push (elt);
1798
1799 return build_constructor (type, v);
1800 }
1801
1802
1803 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1804 are in a list pointed to by VALS. */
1805 tree
1806 build_constructor_from_list (tree type, tree vals)
1807 {
1808 tree t;
1809 vec<constructor_elt, va_gc> *v = NULL;
1810
1811 if (vals)
1812 {
1813 vec_alloc (v, list_length (vals));
1814 for (t = vals; t; t = TREE_CHAIN (t))
1815 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1816 }
1817
1818 return build_constructor (type, v);
1819 }
1820
1821 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1822 of elements, provided as index/value pairs. */
1823
1824 tree
1825 build_constructor_va (tree type, int nelts, ...)
1826 {
1827 vec<constructor_elt, va_gc> *v = NULL;
1828 va_list p;
1829
1830 va_start (p, nelts);
1831 vec_alloc (v, nelts);
1832 while (nelts--)
1833 {
1834 tree index = va_arg (p, tree);
1835 tree value = va_arg (p, tree);
1836 CONSTRUCTOR_APPEND_ELT (v, index, value);
1837 }
1838 va_end (p);
1839 return build_constructor (type, v);
1840 }
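
/* Example (editorial sketch, not part of the original source): building an
   initializer for a two-field RECORD_TYPE whose FIELD_DECLs are in the
   hypothetical variables F1 and F2 looks like

     tree init
       = build_constructor_va (rec_type, 2,
                               f1, build_int_cst (integer_type_node, 1),
                               f2, build_int_cst (integer_type_node, 2));

   i.e. NELTS index/value pairs follow the count.  */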
1841
1842 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1843
1844 tree
1845 build_fixed (tree type, FIXED_VALUE_TYPE f)
1846 {
1847 tree v;
1848 FIXED_VALUE_TYPE *fp;
1849
1850 v = make_node (FIXED_CST);
1851 fp = ggc_alloc<fixed_value> ();
1852 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1853
1854 TREE_TYPE (v) = type;
1855 TREE_FIXED_CST_PTR (v) = fp;
1856 return v;
1857 }
1858
1859 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1860
1861 tree
1862 build_real (tree type, REAL_VALUE_TYPE d)
1863 {
1864 tree v;
1865 REAL_VALUE_TYPE *dp;
1866 int overflow = 0;
1867
1868 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1869 Consider doing it via real_convert now. */
1870
1871 v = make_node (REAL_CST);
1872 dp = ggc_alloc<real_value> ();
1873 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1874
1875 TREE_TYPE (v) = type;
1876 TREE_REAL_CST_PTR (v) = dp;
1877 TREE_OVERFLOW (v) = overflow;
1878 return v;
1879 }
1880
1881 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1882 converted according to the mode of TYPE (or VOIDmode if TYPE is null). */
1883
1884 REAL_VALUE_TYPE
1885 real_value_from_int_cst (const_tree type, const_tree i)
1886 {
1887 REAL_VALUE_TYPE d;
1888
1889 /* Clear all bits of the real value type so that we can later do
1890 bitwise comparisons to see if two values are the same. */
1891 memset (&d, 0, sizeof d);
1892
1893 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1894 TYPE_SIGN (TREE_TYPE (i)));
1895 return d;
1896 }
1897
1898 /* Given a tree representing an integer constant I, return a tree
1899 representing the same value as a floating-point constant of type TYPE. */
1900
1901 tree
1902 build_real_from_int_cst (tree type, const_tree i)
1903 {
1904 tree v;
1905 int overflow = TREE_OVERFLOW (i);
1906
1907 v = build_real (type, real_value_from_int_cst (type, i));
1908
1909 TREE_OVERFLOW (v) |= overflow;
1910 return v;
1911 }
1912
1913 /* Return a newly constructed STRING_CST node whose value is
1914 the LEN characters at STR.
1915 Note that for a C string literal, LEN should include the trailing NUL.
1916 The TREE_TYPE is not initialized. */
1917
1918 tree
1919 build_string (int len, const char *str)
1920 {
1921 tree s;
1922 size_t length;
1923
1924 /* Do not waste bytes provided by padding of struct tree_string. */
1925 length = len + offsetof (struct tree_string, str) + 1;
1926
1927 record_node_allocation_statistics (STRING_CST, length);
1928
1929 s = (tree) ggc_internal_alloc (length);
1930
1931 memset (s, 0, sizeof (struct tree_typed));
1932 TREE_SET_CODE (s, STRING_CST);
1933 TREE_CONSTANT (s) = 1;
1934 TREE_STRING_LENGTH (s) = len;
1935 memcpy (s->string.str, str, len);
1936 s->string.str[len] = '\0';
1937
1938 return s;
1939 }
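
/* Example (editorial addition, not in the original source): for the C
   literal "hi" one would call build_string (3, "hi"), counting the trailing
   NUL as the comment above asks; TREE_STRING_LENGTH of the result is 3 and
   the stored bytes are 'h', 'i', '\0', plus the extra terminating '\0'
   added above.  */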
1940
1941 /* Return a newly constructed COMPLEX_CST node whose value is
1942 specified by the real and imaginary parts REAL and IMAG.
1943 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1944 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1945
1946 tree
1947 build_complex (tree type, tree real, tree imag)
1948 {
1949 tree t = make_node (COMPLEX_CST);
1950
1951 TREE_REALPART (t) = real;
1952 TREE_IMAGPART (t) = imag;
1953 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1954 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1955 return t;
1956 }
1957
1958 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
1959 element is set to 1. In particular, this is 1 + i for complex types. */
1960
1961 tree
1962 build_each_one_cst (tree type)
1963 {
1964 if (TREE_CODE (type) == COMPLEX_TYPE)
1965 {
1966 tree scalar = build_one_cst (TREE_TYPE (type));
1967 return build_complex (type, scalar, scalar);
1968 }
1969 else
1970 return build_one_cst (type);
1971 }
1972
1973 /* Return a constant of arithmetic type TYPE which is the
1974 multiplicative identity of the set TYPE. */
1975
1976 tree
1977 build_one_cst (tree type)
1978 {
1979 switch (TREE_CODE (type))
1980 {
1981 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1982 case POINTER_TYPE: case REFERENCE_TYPE:
1983 case OFFSET_TYPE:
1984 return build_int_cst (type, 1);
1985
1986 case REAL_TYPE:
1987 return build_real (type, dconst1);
1988
1989 case FIXED_POINT_TYPE:
1990 /* We can only generate 1 for accum types. */
1991 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1992 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1993
1994 case VECTOR_TYPE:
1995 {
1996 tree scalar = build_one_cst (TREE_TYPE (type));
1997
1998 return build_vector_from_val (type, scalar);
1999 }
2000
2001 case COMPLEX_TYPE:
2002 return build_complex (type,
2003 build_one_cst (TREE_TYPE (type)),
2004 build_zero_cst (TREE_TYPE (type)));
2005
2006 default:
2007 gcc_unreachable ();
2008 }
2009 }
2010
2011 /* Return an integer constant of type TYPE with all bits set in its full
2012 precision, or a complex or vector whose subparts are such integers. */
2013
2014 tree
2015 build_all_ones_cst (tree type)
2016 {
2017 if (TREE_CODE (type) == COMPLEX_TYPE)
2018 {
2019 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2020 return build_complex (type, scalar, scalar);
2021 }
2022 else
2023 return build_minus_one_cst (type);
2024 }
2025
2026 /* Return a constant of arithmetic type TYPE which is the
2027 opposite of the multiplicative identity of the set TYPE. */
2028
2029 tree
2030 build_minus_one_cst (tree type)
2031 {
2032 switch (TREE_CODE (type))
2033 {
2034 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2035 case POINTER_TYPE: case REFERENCE_TYPE:
2036 case OFFSET_TYPE:
2037 return build_int_cst (type, -1);
2038
2039 case REAL_TYPE:
2040 return build_real (type, dconstm1);
2041
2042 case FIXED_POINT_TYPE:
2043 /* We can only generate -1 for accum types. */
2044 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2045 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2046 TYPE_MODE (type)));
2047
2048 case VECTOR_TYPE:
2049 {
2050 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2051
2052 return build_vector_from_val (type, scalar);
2053 }
2054
2055 case COMPLEX_TYPE:
2056 return build_complex (type,
2057 build_minus_one_cst (TREE_TYPE (type)),
2058 build_zero_cst (TREE_TYPE (type)));
2059
2060 default:
2061 gcc_unreachable ();
2062 }
2063 }
2064
2065 /* Build 0 constant of type TYPE. This is used by constructor folding
2066 and thus the constant should be represented in memory by
2067 zero(es). */
2068
2069 tree
2070 build_zero_cst (tree type)
2071 {
2072 switch (TREE_CODE (type))
2073 {
2074 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2075 case POINTER_TYPE: case REFERENCE_TYPE:
2076 case OFFSET_TYPE: case NULLPTR_TYPE:
2077 return build_int_cst (type, 0);
2078
2079 case REAL_TYPE:
2080 return build_real (type, dconst0);
2081
2082 case FIXED_POINT_TYPE:
2083 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2084
2085 case VECTOR_TYPE:
2086 {
2087 tree scalar = build_zero_cst (TREE_TYPE (type));
2088
2089 return build_vector_from_val (type, scalar);
2090 }
2091
2092 case COMPLEX_TYPE:
2093 {
2094 tree zero = build_zero_cst (TREE_TYPE (type));
2095
2096 return build_complex (type, zero, zero);
2097 }
2098
2099 default:
2100 if (!AGGREGATE_TYPE_P (type))
2101 return fold_convert (type, integer_zero_node);
2102 return build_constructor (type, NULL);
2103 }
2104 }
2105
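/* Illustrative examples (not part of this file) of the constant builders
   above, using the standard global type nodes; the 255 result assumes the
   usual 8-bit char:

     build_zero_cst (double_type_node)               0.0 as a REAL_CST
     build_one_cst (integer_type_node)               the INTEGER_CST 1
     build_one_cst (complex_double_type_node)        1.0 + 0.0i
     build_each_one_cst (complex_double_type_node)   1.0 + 1.0i
     build_minus_one_cst (integer_type_node)         the INTEGER_CST -1
     build_all_ones_cst (unsigned_char_type_node)    all bits set, i.e. 255  */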
2106
2107 /* Build a BINFO with LEN language slots. */
2108
2109 tree
2110 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2111 {
2112 tree t;
2113 size_t length = (offsetof (struct tree_binfo, base_binfos)
2114 + vec<tree, va_gc>::embedded_size (base_binfos));
2115
2116 record_node_allocation_statistics (TREE_BINFO, length);
2117
2118 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2119
2120 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2121
2122 TREE_SET_CODE (t, TREE_BINFO);
2123
2124 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2125
2126 return t;
2127 }
2128
2129 /* Create a CASE_LABEL_EXPR tree node and return it. */
2130
2131 tree
2132 build_case_label (tree low_value, tree high_value, tree label_decl)
2133 {
2134 tree t = make_node (CASE_LABEL_EXPR);
2135
2136 TREE_TYPE (t) = void_type_node;
2137 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2138
2139 CASE_LOW (t) = low_value;
2140 CASE_HIGH (t) = high_value;
2141 CASE_LABEL (t) = label_decl;
2142 CASE_CHAIN (t) = NULL_TREE;
2143
2144 return t;
2145 }
2146
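/* Illustrative example (not part of this file): building the representation
   of "case 1 ... 5:".  LAB stands for a LABEL_DECL created elsewhere; a
   "default:" label would pass NULL_TREE for both LOW_VALUE and HIGH_VALUE.

     tree c = build_case_label (build_int_cst (integer_type_node, 1),
                                build_int_cst (integer_type_node, 5),
                                lab);  */
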
2147 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2148 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2149 The latter determines the length of the HOST_WIDE_INT vector. */
2150
2151 tree
2152 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2153 {
2154 tree t;
2155 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2156 + sizeof (struct tree_int_cst));
2157
2158 gcc_assert (len);
2159 record_node_allocation_statistics (INTEGER_CST, length);
2160
2161 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2162
2163 TREE_SET_CODE (t, INTEGER_CST);
2164 TREE_INT_CST_NUNITS (t) = len;
2165 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2166 /* to_offset can only be applied to trees that are offset_int-sized
2167 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2168 must be exactly the precision of offset_int and so LEN is correct. */
2169 if (ext_len <= OFFSET_INT_ELTS)
2170 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2171 else
2172 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2173
2174 TREE_CONSTANT (t) = 1;
2175
2176 return t;
2177 }
2178
2179 /* Build a newly constructed TREE_VEC node of length LEN. */
2180
2181 tree
2182 make_tree_vec_stat (int len MEM_STAT_DECL)
2183 {
2184 tree t;
2185 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2186
2187 record_node_allocation_statistics (TREE_VEC, length);
2188
2189 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2190
2191 TREE_SET_CODE (t, TREE_VEC);
2192 TREE_VEC_LENGTH (t) = len;
2193
2194 return t;
2195 }
2196
2197 /* Grow a TREE_VEC node to new length LEN. */
2198
2199 tree
2200 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2201 {
2202 gcc_assert (TREE_CODE (v) == TREE_VEC);
2203
2204 int oldlen = TREE_VEC_LENGTH (v);
2205 gcc_assert (len > oldlen);
2206
2207 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2208 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2209
2210 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2211
2212 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2213
2214 TREE_VEC_LENGTH (v) = len;
2215
2216 return v;
2217 }
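
/* Illustrative example (not part of this file): TREE_VEC usage.
   make_tree_vec and grow_tree_vec are the MEM_STAT-less wrappers for the
   *_stat functions above; note that growing may move the node, so the
   returned pointer must be used.

     tree v = make_tree_vec (2);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;
     v = grow_tree_vec (v, 4);
     gcc_assert (TREE_VEC_LENGTH (v) == 4);  */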
2218 \f
2219 /* Return 1 if EXPR is the integer constant zero or a complex constant
2220 of zero. */
2221
2222 int
2223 integer_zerop (const_tree expr)
2224 {
2225 STRIP_NOPS (expr);
2226
2227 switch (TREE_CODE (expr))
2228 {
2229 case INTEGER_CST:
2230 return wi::eq_p (expr, 0);
2231 case COMPLEX_CST:
2232 return (integer_zerop (TREE_REALPART (expr))
2233 && integer_zerop (TREE_IMAGPART (expr)));
2234 case VECTOR_CST:
2235 {
2236 unsigned i;
2237 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2238 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2239 return false;
2240 return true;
2241 }
2242 default:
2243 return false;
2244 }
2245 }
2246
2247 /* Return 1 if EXPR is the integer constant one or the corresponding
2248 complex constant. */
2249
2250 int
2251 integer_onep (const_tree expr)
2252 {
2253 STRIP_NOPS (expr);
2254
2255 switch (TREE_CODE (expr))
2256 {
2257 case INTEGER_CST:
2258 return wi::eq_p (wi::to_widest (expr), 1);
2259 case COMPLEX_CST:
2260 return (integer_onep (TREE_REALPART (expr))
2261 && integer_zerop (TREE_IMAGPART (expr)));
2262 case VECTOR_CST:
2263 {
2264 unsigned i;
2265 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2266 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2267 return false;
2268 return true;
2269 }
2270 default:
2271 return false;
2272 }
2273 }
2274
2275 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2276 return 1 if every piece is the integer constant one. */
2277
2278 int
2279 integer_each_onep (const_tree expr)
2280 {
2281 STRIP_NOPS (expr);
2282
2283 if (TREE_CODE (expr) == COMPLEX_CST)
2284 return (integer_onep (TREE_REALPART (expr))
2285 && integer_onep (TREE_IMAGPART (expr)));
2286 else
2287 return integer_onep (expr);
2288 }
2289
2290 /* Return 1 if EXPR is an integer constant with all of its bits set to 1, or
2291 a complex or vector whose subparts are such integers. */
2292
2293 int
2294 integer_all_onesp (const_tree expr)
2295 {
2296 STRIP_NOPS (expr);
2297
2298 if (TREE_CODE (expr) == COMPLEX_CST
2299 && integer_all_onesp (TREE_REALPART (expr))
2300 && integer_all_onesp (TREE_IMAGPART (expr)))
2301 return 1;
2302
2303 else if (TREE_CODE (expr) == VECTOR_CST)
2304 {
2305 unsigned i;
2306 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2307 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2308 return 0;
2309 return 1;
2310 }
2311
2312 else if (TREE_CODE (expr) != INTEGER_CST)
2313 return 0;
2314
2315 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2316 }
2317
2318 /* Return 1 if EXPR is the integer constant minus one. */
2319
2320 int
2321 integer_minus_onep (const_tree expr)
2322 {
2323 STRIP_NOPS (expr);
2324
2325 if (TREE_CODE (expr) == COMPLEX_CST)
2326 return (integer_all_onesp (TREE_REALPART (expr))
2327 && integer_zerop (TREE_IMAGPART (expr)));
2328 else
2329 return integer_all_onesp (expr);
2330 }
2331
2332 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2333 one bit on). */
2334
2335 int
2336 integer_pow2p (const_tree expr)
2337 {
2338 STRIP_NOPS (expr);
2339
2340 if (TREE_CODE (expr) == COMPLEX_CST
2341 && integer_pow2p (TREE_REALPART (expr))
2342 && integer_zerop (TREE_IMAGPART (expr)))
2343 return 1;
2344
2345 if (TREE_CODE (expr) != INTEGER_CST)
2346 return 0;
2347
2348 return wi::popcount (expr) == 1;
2349 }
2350
2351 /* Return 1 if EXPR is an integer constant other than zero or a
2352 complex constant other than zero. */
2353
2354 int
2355 integer_nonzerop (const_tree expr)
2356 {
2357 STRIP_NOPS (expr);
2358
2359 return ((TREE_CODE (expr) == INTEGER_CST
2360 && !wi::eq_p (expr, 0))
2361 || (TREE_CODE (expr) == COMPLEX_CST
2362 && (integer_nonzerop (TREE_REALPART (expr))
2363 || integer_nonzerop (TREE_IMAGPART (expr)))));
2364 }
2365
2366 /* Return 1 if EXPR is the integer constant one. For vector,
2367 return 1 if every piece is the integer constant minus one
2368 (representing the value TRUE). */
2369
2370 int
2371 integer_truep (const_tree expr)
2372 {
2373 STRIP_NOPS (expr);
2374
2375 if (TREE_CODE (expr) == VECTOR_CST)
2376 return integer_all_onesp (expr);
2377 return integer_onep (expr);
2378 }
2379
2380 /* Return 1 if EXPR is the fixed-point constant zero. */
2381
2382 int
2383 fixed_zerop (const_tree expr)
2384 {
2385 return (TREE_CODE (expr) == FIXED_CST
2386 && TREE_FIXED_CST (expr).data.is_zero ());
2387 }
2388
2389 /* Return the base-2 logarithm of a tree node known to be a
2390 power of two. */
2391
2392 int
2393 tree_log2 (const_tree expr)
2394 {
2395 STRIP_NOPS (expr);
2396
2397 if (TREE_CODE (expr) == COMPLEX_CST)
2398 return tree_log2 (TREE_REALPART (expr));
2399
2400 return wi::exact_log2 (expr);
2401 }
2402
2403 /* Similar, but return the largest integer Y such that 2 ** Y is less
2404 than or equal to EXPR. */
2405
2406 int
2407 tree_floor_log2 (const_tree expr)
2408 {
2409 STRIP_NOPS (expr);
2410
2411 if (TREE_CODE (expr) == COMPLEX_CST)
2412 return tree_log2 (TREE_REALPART (expr));
2413
2414 return wi::floor_log2 (expr);
2415 }
2416
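/* Illustrative examples (not part of this file) for the power-of-two
   predicate and the logarithm helpers above:

     integer_pow2p (build_int_cst (unsigned_type_node, 64))       1
     integer_pow2p (build_int_cst (unsigned_type_node, 96))       0
     tree_log2 (build_int_cst (unsigned_type_node, 64))           6
     tree_floor_log2 (build_int_cst (unsigned_type_node, 100))    6  */
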
2417 /* Return number of known trailing zero bits in EXPR, or, if the value of
2418 EXPR is known to be zero, the precision of its type. */
2419
2420 unsigned int
2421 tree_ctz (const_tree expr)
2422 {
2423 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2424 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2425 return 0;
2426
2427 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2428 switch (TREE_CODE (expr))
2429 {
2430 case INTEGER_CST:
2431 ret1 = wi::ctz (expr);
2432 return MIN (ret1, prec);
2433 case SSA_NAME:
2434 ret1 = wi::ctz (get_nonzero_bits (expr));
2435 return MIN (ret1, prec);
2436 case PLUS_EXPR:
2437 case MINUS_EXPR:
2438 case BIT_IOR_EXPR:
2439 case BIT_XOR_EXPR:
2440 case MIN_EXPR:
2441 case MAX_EXPR:
2442 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2443 if (ret1 == 0)
2444 return ret1;
2445 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2446 return MIN (ret1, ret2);
2447 case POINTER_PLUS_EXPR:
2448 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2449 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2450 /* Second operand is sizetype, which could in theory be
2451 wider than the pointer's precision. Make sure we never
2452 return more than prec. */
2453 ret2 = MIN (ret2, prec);
2454 return MIN (ret1, ret2);
2455 case BIT_AND_EXPR:
2456 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2457 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2458 return MAX (ret1, ret2);
2459 case MULT_EXPR:
2460 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2461 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2462 return MIN (ret1 + ret2, prec);
2463 case LSHIFT_EXPR:
2464 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2465 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2466 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2467 {
2468 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2469 return MIN (ret1 + ret2, prec);
2470 }
2471 return ret1;
2472 case RSHIFT_EXPR:
2473 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2474 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2475 {
2476 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2477 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2478 if (ret1 > ret2)
2479 return ret1 - ret2;
2480 }
2481 return 0;
2482 case TRUNC_DIV_EXPR:
2483 case CEIL_DIV_EXPR:
2484 case FLOOR_DIV_EXPR:
2485 case ROUND_DIV_EXPR:
2486 case EXACT_DIV_EXPR:
2487 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2488 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2489 {
2490 int l = tree_log2 (TREE_OPERAND (expr, 1));
2491 if (l >= 0)
2492 {
2493 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2494 ret2 = l;
2495 if (ret1 > ret2)
2496 return ret1 - ret2;
2497 }
2498 }
2499 return 0;
2500 CASE_CONVERT:
2501 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2502 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2503 ret1 = prec;
2504 return MIN (ret1, prec);
2505 case SAVE_EXPR:
2506 return tree_ctz (TREE_OPERAND (expr, 0));
2507 case COND_EXPR:
2508 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2509 if (ret1 == 0)
2510 return 0;
2511 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2512 return MIN (ret1, ret2);
2513 case COMPOUND_EXPR:
2514 return tree_ctz (TREE_OPERAND (expr, 1));
2515 case ADDR_EXPR:
2516 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2517 if (ret1 > BITS_PER_UNIT)
2518 {
2519 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2520 return MIN (ret1, prec);
2521 }
2522 return 0;
2523 default:
2524 return 0;
2525 }
2526 }
2527
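/* Illustrative examples (not part of this file) for tree_ctz:

     tree_ctz (build_int_cst (integer_type_node, 40))    3   (40 == 0b101000)

   Per the comment above, a value known to be zero yields the precision of
   its type (e.g. 32 for a 32-bit int).  For a MULT_EXPR the counts of the
   operands add: if one operand is known to have at least 2 trailing zero
   bits and the other at least 3, the product has at least 5.  */
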
2528 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2529 decimal float constants, so don't return 1 for them. */
2530
2531 int
2532 real_zerop (const_tree expr)
2533 {
2534 STRIP_NOPS (expr);
2535
2536 switch (TREE_CODE (expr))
2537 {
2538 case REAL_CST:
2539 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2540 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2541 case COMPLEX_CST:
2542 return real_zerop (TREE_REALPART (expr))
2543 && real_zerop (TREE_IMAGPART (expr));
2544 case VECTOR_CST:
2545 {
2546 unsigned i;
2547 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2548 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2549 return false;
2550 return true;
2551 }
2552 default:
2553 return false;
2554 }
2555 }
2556
2557 /* Return 1 if EXPR is the real constant one in real or complex form.
2558 Trailing zeroes matter for decimal float constants, so don't return
2559 1 for them. */
2560
2561 int
2562 real_onep (const_tree expr)
2563 {
2564 STRIP_NOPS (expr);
2565
2566 switch (TREE_CODE (expr))
2567 {
2568 case REAL_CST:
2569 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2570 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2571 case COMPLEX_CST:
2572 return real_onep (TREE_REALPART (expr))
2573 && real_zerop (TREE_IMAGPART (expr));
2574 case VECTOR_CST:
2575 {
2576 unsigned i;
2577 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2578 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2579 return false;
2580 return true;
2581 }
2582 default:
2583 return false;
2584 }
2585 }
2586
2587 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2588 matter for decimal float constants, so don't return 1 for them. */
2589
2590 int
2591 real_minus_onep (const_tree expr)
2592 {
2593 STRIP_NOPS (expr);
2594
2595 switch (TREE_CODE (expr))
2596 {
2597 case REAL_CST:
2598 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2599 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2600 case COMPLEX_CST:
2601 return real_minus_onep (TREE_REALPART (expr))
2602 && real_zerop (TREE_IMAGPART (expr));
2603 case VECTOR_CST:
2604 {
2605 unsigned i;
2606 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2607 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2608 return false;
2609 return true;
2610 }
2611 default:
2612 return false;
2613 }
2614 }
2615
2616 /* Nonzero if EXP is a constant or a cast of a constant. */
2617
2618 int
2619 really_constant_p (const_tree exp)
2620 {
2621 /* This is not quite the same as STRIP_NOPS. It does more. */
2622 while (CONVERT_EXPR_P (exp)
2623 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2624 exp = TREE_OPERAND (exp, 0);
2625 return TREE_CONSTANT (exp);
2626 }
2627 \f
2628 /* Return first list element whose TREE_VALUE is ELEM.
2629 Return 0 if ELEM is not in LIST. */
2630
2631 tree
2632 value_member (tree elem, tree list)
2633 {
2634 while (list)
2635 {
2636 if (elem == TREE_VALUE (list))
2637 return list;
2638 list = TREE_CHAIN (list);
2639 }
2640 return NULL_TREE;
2641 }
2642
2643 /* Return first list element whose TREE_PURPOSE is ELEM.
2644 Return 0 if ELEM is not in LIST. */
2645
2646 tree
2647 purpose_member (const_tree elem, tree list)
2648 {
2649 while (list)
2650 {
2651 if (elem == TREE_PURPOSE (list))
2652 return list;
2653 list = TREE_CHAIN (list);
2654 }
2655 return NULL_TREE;
2656 }
2657
2658 /* Return true if ELEM is in V. */
2659
2660 bool
2661 vec_member (const_tree elem, vec<tree, va_gc> *v)
2662 {
2663 unsigned ix;
2664 tree t;
2665 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2666 if (elem == t)
2667 return true;
2668 return false;
2669 }
2670
2671 /* Returns element number IDX (zero-origin) of chain CHAIN, or NULL_TREE
2672 if CHAIN does not have that many elements. */
2673
2674 tree
2675 chain_index (int idx, tree chain)
2676 {
2677 for (; chain && idx > 0; --idx)
2678 chain = TREE_CHAIN (chain);
2679 return chain;
2680 }
2681
2682 /* Return nonzero if ELEM is part of the chain CHAIN. */
2683
2684 int
2685 chain_member (const_tree elem, const_tree chain)
2686 {
2687 while (chain)
2688 {
2689 if (elem == chain)
2690 return 1;
2691 chain = DECL_CHAIN (chain);
2692 }
2693
2694 return 0;
2695 }
2696
2697 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2698 We expect a null pointer to mark the end of the chain.
2699 This is the Lisp primitive `length'. */
2700
2701 int
2702 list_length (const_tree t)
2703 {
2704 const_tree p = t;
2705 #ifdef ENABLE_TREE_CHECKING
2706 const_tree q = t;
2707 #endif
2708 int len = 0;
2709
2710 while (p)
2711 {
2712 p = TREE_CHAIN (p);
2713 #ifdef ENABLE_TREE_CHECKING
2714 if (len % 2)
2715 q = TREE_CHAIN (q);
2716 gcc_assert (p != q);
2717 #endif
2718 len++;
2719 }
2720
2721 return len;
2722 }
2723
2724 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2725 UNION_TYPE TYPE, or NULL_TREE if none. */
2726
2727 tree
2728 first_field (const_tree type)
2729 {
2730 tree t = TYPE_FIELDS (type);
2731 while (t && TREE_CODE (t) != FIELD_DECL)
2732 t = TREE_CHAIN (t);
2733 return t;
2734 }
2735
2736 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2737 by modifying the last node in chain 1 to point to chain 2.
2738 This is the Lisp primitive `nconc'. */
2739
2740 tree
2741 chainon (tree op1, tree op2)
2742 {
2743 tree t1;
2744
2745 if (!op1)
2746 return op2;
2747 if (!op2)
2748 return op1;
2749
2750 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2751 continue;
2752 TREE_CHAIN (t1) = op2;
2753
2754 #ifdef ENABLE_TREE_CHECKING
2755 {
2756 tree t2;
2757 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2758 gcc_assert (t2 != t1);
2759 }
2760 #endif
2761
2762 return op1;
2763 }
2764
2765 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2766
2767 tree
2768 tree_last (tree chain)
2769 {
2770 tree next;
2771 if (chain)
2772 while ((next = TREE_CHAIN (chain)))
2773 chain = next;
2774 return chain;
2775 }
2776
2777 /* Reverse the order of elements in the chain T,
2778 and return the new head of the chain (old last element). */
2779
2780 tree
2781 nreverse (tree t)
2782 {
2783 tree prev = 0, decl, next;
2784 for (decl = t; decl; decl = next)
2785 {
2786 /* We shouldn't be using this function to reverse BLOCK chains; we
2787 have blocks_nreverse for that. */
2788 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2789 next = TREE_CHAIN (decl);
2790 TREE_CHAIN (decl) = prev;
2791 prev = decl;
2792 }
2793 return prev;
2794 }
2795 \f
2796 /* Return a newly created TREE_LIST node whose
2797 purpose and value fields are PARM and VALUE. */
2798
2799 tree
2800 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2801 {
2802 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2803 TREE_PURPOSE (t) = parm;
2804 TREE_VALUE (t) = value;
2805 return t;
2806 }
2807
2808 /* Build a chain of TREE_LIST nodes from a vector. */
2809
2810 tree
2811 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2812 {
2813 tree ret = NULL_TREE;
2814 tree *pp = &ret;
2815 unsigned int i;
2816 tree t;
2817 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2818 {
2819 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2820 pp = &TREE_CHAIN (*pp);
2821 }
2822 return ret;
2823 }
2824
2825 /* Return a newly created TREE_LIST node whose
2826 purpose and value fields are PURPOSE and VALUE
2827 and whose TREE_CHAIN is CHAIN. */
2828
2829 tree
2830 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2831 {
2832 tree node;
2833
2834 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2835 memset (node, 0, sizeof (struct tree_common));
2836
2837 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2838
2839 TREE_SET_CODE (node, TREE_LIST);
2840 TREE_CHAIN (node) = chain;
2841 TREE_PURPOSE (node) = purpose;
2842 TREE_VALUE (node) = value;
2843 return node;
2844 }
2845
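/* Illustrative example (not part of this file) tying together the TREE_LIST
   builders above and the chain utilities earlier in this file:

     tree l = NULL_TREE;
     l = tree_cons (NULL_TREE, integer_zero_node, l);
     l = tree_cons (NULL_TREE, integer_one_node, l);    now (1 0)
     l = nreverse (l);                                  now (0 1)
     gcc_assert (list_length (l) == 2);
     l = chainon (l, build_tree_list (NULL_TREE, size_zero_node));
     gcc_assert (TREE_VALUE (tree_last (l)) == size_zero_node);  */
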
2846 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2847 trees. */
2848
2849 vec<tree, va_gc> *
2850 ctor_to_vec (tree ctor)
2851 {
2852 vec<tree, va_gc> *vec;
2853 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2854 unsigned int ix;
2855 tree val;
2856
2857 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2858 vec->quick_push (val);
2859
2860 return vec;
2861 }
2862 \f
2863 /* Return the size nominally occupied by an object of type TYPE
2864 when it resides in memory. The value is measured in units of bytes,
2865 and its data type is that normally used for type sizes
2866 (which is the first type created by make_signed_type or
2867 make_unsigned_type). */
2868
2869 tree
2870 size_in_bytes (const_tree type)
2871 {
2872 tree t;
2873
2874 if (type == error_mark_node)
2875 return integer_zero_node;
2876
2877 type = TYPE_MAIN_VARIANT (type);
2878 t = TYPE_SIZE_UNIT (type);
2879
2880 if (t == 0)
2881 {
2882 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2883 return size_zero_node;
2884 }
2885
2886 return t;
2887 }
2888
2889 /* Return the size of TYPE (in bytes) as a wide integer
2890 or return -1 if the size can vary or is larger than an integer. */
2891
2892 HOST_WIDE_INT
2893 int_size_in_bytes (const_tree type)
2894 {
2895 tree t;
2896
2897 if (type == error_mark_node)
2898 return 0;
2899
2900 type = TYPE_MAIN_VARIANT (type);
2901 t = TYPE_SIZE_UNIT (type);
2902
2903 if (t && tree_fits_uhwi_p (t))
2904 return TREE_INT_CST_LOW (t);
2905 else
2906 return -1;
2907 }
2908
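/* Illustrative examples (not part of this file); the concrete numbers
   assume a typical target with 32-bit int:

     int_size_in_bytes (char_type_node)       1
     int_size_in_bytes (integer_type_node)    4

   For a type whose size is not a compile-time constant (e.g. a C99
   variable-length array type) the result is -1, since TYPE_SIZE_UNIT is
   then not a usable INTEGER_CST.  */
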
2909 /* Return the maximum size of TYPE (in bytes) as a wide integer
2910 or return -1 if the size can vary or is larger than an integer. */
2911
2912 HOST_WIDE_INT
2913 max_int_size_in_bytes (const_tree type)
2914 {
2915 HOST_WIDE_INT size = -1;
2916 tree size_tree;
2917
2918 /* If this is an array type, check for a possible MAX_SIZE attached. */
2919
2920 if (TREE_CODE (type) == ARRAY_TYPE)
2921 {
2922 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2923
2924 if (size_tree && tree_fits_uhwi_p (size_tree))
2925 size = tree_to_uhwi (size_tree);
2926 }
2927
2928 /* If we still haven't been able to get a size, see if the language
2929 can compute a maximum size. */
2930
2931 if (size == -1)
2932 {
2933 size_tree = lang_hooks.types.max_size (type);
2934
2935 if (size_tree && tree_fits_uhwi_p (size_tree))
2936 size = tree_to_uhwi (size_tree);
2937 }
2938
2939 return size;
2940 }
2941 \f
2942 /* Return the bit position of FIELD, in bits from the start of the record.
2943 This is a tree of type bitsizetype. */
2944
2945 tree
2946 bit_position (const_tree field)
2947 {
2948 return bit_from_pos (DECL_FIELD_OFFSET (field),
2949 DECL_FIELD_BIT_OFFSET (field));
2950 }
2951 \f
2952 /* Return the byte position of FIELD, in bytes from the start of the record.
2953 This is a tree of type sizetype. */
2954
2955 tree
2956 byte_position (const_tree field)
2957 {
2958 return byte_from_pos (DECL_FIELD_OFFSET (field),
2959 DECL_FIELD_BIT_OFFSET (field));
2960 }
2961
2962 /* Likewise, but return as an integer. It must be representable in
2963 that way (since it could be a signed value, we don't have the
2964 option of returning -1 like int_size_in_bytes can). */
2965
2966 HOST_WIDE_INT
2967 int_byte_position (const_tree field)
2968 {
2969 return tree_to_shwi (byte_position (field));
2970 }
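
/* Illustrative example (not part of this file): for the C type
   struct S { int i; char c; } on a typical target with 32-bit int, the
   FIELD_DECL for "c" (called c_field below, standing for the second entry
   of TYPE_FIELDS of the RECORD_TYPE) would give

     byte_position (c_field)        a sizetype INTEGER_CST of value 4
     bit_position (c_field)         a bitsizetype INTEGER_CST of value 32
     int_byte_position (c_field)    the HOST_WIDE_INT 4  */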
2971 \f
2972 /* Return the strictest alignment, in bits, that T is known to have. */
2973
2974 unsigned int
2975 expr_align (const_tree t)
2976 {
2977 unsigned int align0, align1;
2978
2979 switch (TREE_CODE (t))
2980 {
2981 CASE_CONVERT: case NON_LVALUE_EXPR:
2982 /* If we have conversions, we know that the alignment of the
2983 object must meet each of the alignments of the types. */
2984 align0 = expr_align (TREE_OPERAND (t, 0));
2985 align1 = TYPE_ALIGN (TREE_TYPE (t));
2986 return MAX (align0, align1);
2987
2988 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2989 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2990 case CLEANUP_POINT_EXPR:
2991 /* These don't change the alignment of an object. */
2992 return expr_align (TREE_OPERAND (t, 0));
2993
2994 case COND_EXPR:
2995 /* The best we can do is say that the alignment is the least aligned
2996 of the two arms. */
2997 align0 = expr_align (TREE_OPERAND (t, 1));
2998 align1 = expr_align (TREE_OPERAND (t, 2));
2999 return MIN (align0, align1);
3000
3001 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3002 meaningfully, it's always 1. */
3003 case LABEL_DECL: case CONST_DECL:
3004 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3005 case FUNCTION_DECL:
3006 gcc_assert (DECL_ALIGN (t) != 0);
3007 return DECL_ALIGN (t);
3008
3009 default:
3010 break;
3011 }
3012
3013 /* Otherwise take the alignment from that of the type. */
3014 return TYPE_ALIGN (TREE_TYPE (t));
3015 }
3016 \f
3017 /* Return, as a tree node, the number of elements for TYPE (which is an
3018 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3019
3020 tree
3021 array_type_nelts (const_tree type)
3022 {
3023 tree index_type, min, max;
3024
3025 /* If they did it with unspecified bounds, then we should have already
3026 given an error about it before we got here. */
3027 if (! TYPE_DOMAIN (type))
3028 return error_mark_node;
3029
3030 index_type = TYPE_DOMAIN (type);
3031 min = TYPE_MIN_VALUE (index_type);
3032 max = TYPE_MAX_VALUE (index_type);
3033
3034 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3035 if (!max)
3036 return error_mark_node;
3037
3038 return (integer_zerop (min)
3039 ? max
3040 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3041 }
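
/* Illustrative example (not part of this file): for the C type int a[10],
   TYPE_DOMAIN is the index type [0, 9], so array_type_nelts returns the
   INTEGER_CST 9, i.e. the number of elements minus one, as documented
   above.  */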
3042 \f
3043 /* If arg is static -- a reference to an object in static storage -- then
3044 return the object. This is not the same as the C meaning of `static'.
3045 If arg isn't static, return NULL. */
3046
3047 tree
3048 staticp (tree arg)
3049 {
3050 switch (TREE_CODE (arg))
3051 {
3052 case FUNCTION_DECL:
3053 /* Nested functions are static, even though taking their address will
3054 involve a trampoline as we unnest the nested function and create
3055 the trampoline on the tree level. */
3056 return arg;
3057
3058 case VAR_DECL:
3059 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3060 && ! DECL_THREAD_LOCAL_P (arg)
3061 && ! DECL_DLLIMPORT_P (arg)
3062 ? arg : NULL);
3063
3064 case CONST_DECL:
3065 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3066 ? arg : NULL);
3067
3068 case CONSTRUCTOR:
3069 return TREE_STATIC (arg) ? arg : NULL;
3070
3071 case LABEL_DECL:
3072 case STRING_CST:
3073 return arg;
3074
3075 case COMPONENT_REF:
3076 /* If the thing being referenced is not a field, then it is
3077 something language specific. */
3078 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3079
3080 /* If we are referencing a bitfield, we can't evaluate an
3081 ADDR_EXPR at compile time and so it isn't a constant. */
3082 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3083 return NULL;
3084
3085 return staticp (TREE_OPERAND (arg, 0));
3086
3087 case BIT_FIELD_REF:
3088 return NULL;
3089
3090 case INDIRECT_REF:
3091 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3092
3093 case ARRAY_REF:
3094 case ARRAY_RANGE_REF:
3095 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3096 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3097 return staticp (TREE_OPERAND (arg, 0));
3098 else
3099 return NULL;
3100
3101 case COMPOUND_LITERAL_EXPR:
3102 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3103
3104 default:
3105 return NULL;
3106 }
3107 }
3108
3109 \f
3110
3111
3112 /* Return whether OP is a DECL whose address is function-invariant. */
3113
3114 bool
3115 decl_address_invariant_p (const_tree op)
3116 {
3117 /* The conditions below are slightly less strict than the one in
3118 staticp. */
3119
3120 switch (TREE_CODE (op))
3121 {
3122 case PARM_DECL:
3123 case RESULT_DECL:
3124 case LABEL_DECL:
3125 case FUNCTION_DECL:
3126 return true;
3127
3128 case VAR_DECL:
3129 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3130 || DECL_THREAD_LOCAL_P (op)
3131 || DECL_CONTEXT (op) == current_function_decl
3132 || decl_function_context (op) == current_function_decl)
3133 return true;
3134 break;
3135
3136 case CONST_DECL:
3137 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3138 || decl_function_context (op) == current_function_decl)
3139 return true;
3140 break;
3141
3142 default:
3143 break;
3144 }
3145
3146 return false;
3147 }
3148
3149 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3150
3151 bool
3152 decl_address_ip_invariant_p (const_tree op)
3153 {
3154 /* The conditions below are slightly less strict than the one in
3155 staticp. */
3156
3157 switch (TREE_CODE (op))
3158 {
3159 case LABEL_DECL:
3160 case FUNCTION_DECL:
3161 case STRING_CST:
3162 return true;
3163
3164 case VAR_DECL:
3165 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3166 && !DECL_DLLIMPORT_P (op))
3167 || DECL_THREAD_LOCAL_P (op))
3168 return true;
3169 break;
3170
3171 case CONST_DECL:
3172 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3173 return true;
3174 break;
3175
3176 default:
3177 break;
3178 }
3179
3180 return false;
3181 }
3182
3183
3184 /* Return true if T is function-invariant (internal function, does
3185 not handle arithmetic; that's handled in skip_simple_arithmetic and
3186 tree_invariant_p). */
3187
3188 static bool tree_invariant_p (tree t);
3189
3190 static bool
3191 tree_invariant_p_1 (tree t)
3192 {
3193 tree op;
3194
3195 if (TREE_CONSTANT (t)
3196 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3197 return true;
3198
3199 switch (TREE_CODE (t))
3200 {
3201 case SAVE_EXPR:
3202 return true;
3203
3204 case ADDR_EXPR:
3205 op = TREE_OPERAND (t, 0);
3206 while (handled_component_p (op))
3207 {
3208 switch (TREE_CODE (op))
3209 {
3210 case ARRAY_REF:
3211 case ARRAY_RANGE_REF:
3212 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3213 || TREE_OPERAND (op, 2) != NULL_TREE
3214 || TREE_OPERAND (op, 3) != NULL_TREE)
3215 return false;
3216 break;
3217
3218 case COMPONENT_REF:
3219 if (TREE_OPERAND (op, 2) != NULL_TREE)
3220 return false;
3221 break;
3222
3223 default:;
3224 }
3225 op = TREE_OPERAND (op, 0);
3226 }
3227
3228 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3229
3230 default:
3231 break;
3232 }
3233
3234 return false;
3235 }
3236
3237 /* Return true if T is function-invariant. */
3238
3239 static bool
3240 tree_invariant_p (tree t)
3241 {
3242 tree inner = skip_simple_arithmetic (t);
3243 return tree_invariant_p_1 (inner);
3244 }
3245
3246 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3247 Do this to any expression which may be used in more than one place,
3248 but must be evaluated only once.
3249
3250 Normally, expand_expr would reevaluate the expression each time.
3251 Calling save_expr produces something that is evaluated and recorded
3252 the first time expand_expr is called on it. Subsequent calls to
3253 expand_expr just reuse the recorded value.
3254
3255 The call to expand_expr that generates code that actually computes
3256 the value is the first call *at compile time*. Subsequent calls
3257 *at compile time* generate code to use the saved value.
3258 This produces correct result provided that *at run time* control
3259 always flows through the insns made by the first expand_expr
3260 before reaching the other places where the save_expr was evaluated.
3261 You, the caller of save_expr, must make sure this is so.
3262
3263 Constants, and certain read-only nodes, are returned with no
3264 SAVE_EXPR because that is safe. Expressions containing placeholders
3265 are not touched; see tree.def for an explanation of what these
3266 are used for. */
3267
3268 tree
3269 save_expr (tree expr)
3270 {
3271 tree t = fold (expr);
3272 tree inner;
3273
3274 /* If the tree evaluates to a constant, then we don't want to hide that
3275 fact (i.e. this allows further folding, and direct checks for constants).
3276 However, a read-only object that has side effects cannot be bypassed.
3277 Since it is no problem to reevaluate literals, we just return the
3278 literal node. */
3279 inner = skip_simple_arithmetic (t);
3280 if (TREE_CODE (inner) == ERROR_MARK)
3281 return inner;
3282
3283 if (tree_invariant_p_1 (inner))
3284 return t;
3285
3286 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3287 it means that the size or offset of some field of an object depends on
3288 the value within another field.
3289
3290 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3291 and some variable since it would then need to be both evaluated once and
3292 evaluated more than once. Front-ends must assure this case cannot
3293 happen by surrounding any such subexpressions in their own SAVE_EXPR
3294 and forcing evaluation at the proper time. */
3295 if (contains_placeholder_p (inner))
3296 return t;
3297
3298 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3299 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3300
3301 /* This expression might be placed ahead of a jump to ensure that the
3302 value was computed on both sides of the jump. So make sure it isn't
3303 eliminated as dead. */
3304 TREE_SIDE_EFFECTS (t) = 1;
3305 return t;
3306 }
3307
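/* Illustrative example (not part of this file): squaring the value of an
   expression EXP that may have side effects while evaluating it only once.
   EXP stands for some GENERIC expression built elsewhere.

     tree v = save_expr (exp);
     tree square = build2 (MULT_EXPR, TREE_TYPE (v), v, v);  */
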
3308 /* Look inside EXPR into any simple arithmetic operations. Return the
3309 outermost non-arithmetic or non-invariant node. */
3310
3311 tree
3312 skip_simple_arithmetic (tree expr)
3313 {
3314 /* We don't care about whether this can be used as an lvalue in this
3315 context. */
3316 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3317 expr = TREE_OPERAND (expr, 0);
3318
3319 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3320 a constant, it will be more efficient to not make another SAVE_EXPR since
3321 it will allow better simplification and GCSE will be able to merge the
3322 computations if they actually occur. */
3323 while (true)
3324 {
3325 if (UNARY_CLASS_P (expr))
3326 expr = TREE_OPERAND (expr, 0);
3327 else if (BINARY_CLASS_P (expr))
3328 {
3329 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3330 expr = TREE_OPERAND (expr, 0);
3331 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3332 expr = TREE_OPERAND (expr, 1);
3333 else
3334 break;
3335 }
3336 else
3337 break;
3338 }
3339
3340 return expr;
3341 }
3342
3343 /* Look inside EXPR into simple arithmetic operations involving constants.
3344 Return the outermost non-arithmetic or non-constant node. */
3345
3346 tree
3347 skip_simple_constant_arithmetic (tree expr)
3348 {
3349 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3350 expr = TREE_OPERAND (expr, 0);
3351
3352 while (true)
3353 {
3354 if (UNARY_CLASS_P (expr))
3355 expr = TREE_OPERAND (expr, 0);
3356 else if (BINARY_CLASS_P (expr))
3357 {
3358 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3359 expr = TREE_OPERAND (expr, 0);
3360 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3361 expr = TREE_OPERAND (expr, 1);
3362 else
3363 break;
3364 }
3365 else
3366 break;
3367 }
3368
3369 return expr;
3370 }
3371
3372 /* Return which tree structure is used by T. */
3373
3374 enum tree_node_structure_enum
3375 tree_node_structure (const_tree t)
3376 {
3377 const enum tree_code code = TREE_CODE (t);
3378 return tree_node_structure_for_code (code);
3379 }
3380
3381 /* Set various status flags when building a CALL_EXPR object T. */
3382
3383 static void
3384 process_call_operands (tree t)
3385 {
3386 bool side_effects = TREE_SIDE_EFFECTS (t);
3387 bool read_only = false;
3388 int i = call_expr_flags (t);
3389
3390 /* Calls have side-effects, except those to const or pure functions. */
3391 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3392 side_effects = true;
3393 /* Propagate TREE_READONLY of arguments for const functions. */
3394 if (i & ECF_CONST)
3395 read_only = true;
3396
3397 if (!side_effects || read_only)
3398 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3399 {
3400 tree op = TREE_OPERAND (t, i);
3401 if (op && TREE_SIDE_EFFECTS (op))
3402 side_effects = true;
3403 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3404 read_only = false;
3405 }
3406
3407 TREE_SIDE_EFFECTS (t) = side_effects;
3408 TREE_READONLY (t) = read_only;
3409 }
3410 \f
3411 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3412 size or offset that depends on a field within a record. */
3413
3414 bool
3415 contains_placeholder_p (const_tree exp)
3416 {
3417 enum tree_code code;
3418
3419 if (!exp)
3420 return 0;
3421
3422 code = TREE_CODE (exp);
3423 if (code == PLACEHOLDER_EXPR)
3424 return 1;
3425
3426 switch (TREE_CODE_CLASS (code))
3427 {
3428 case tcc_reference:
3429 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3430 position computations since they will be converted into a
3431 WITH_RECORD_EXPR involving the reference, which we assume
3432 here will be valid. */
3433 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3434
3435 case tcc_exceptional:
3436 if (code == TREE_LIST)
3437 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3438 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3439 break;
3440
3441 case tcc_unary:
3442 case tcc_binary:
3443 case tcc_comparison:
3444 case tcc_expression:
3445 switch (code)
3446 {
3447 case COMPOUND_EXPR:
3448 /* Ignoring the first operand isn't quite right, but works best. */
3449 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3450
3451 case COND_EXPR:
3452 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3453 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3454 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3455
3456 case SAVE_EXPR:
3457 /* The save_expr function never wraps anything containing
3458 a PLACEHOLDER_EXPR. */
3459 return 0;
3460
3461 default:
3462 break;
3463 }
3464
3465 switch (TREE_CODE_LENGTH (code))
3466 {
3467 case 1:
3468 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3469 case 2:
3470 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3471 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3472 default:
3473 return 0;
3474 }
3475
3476 case tcc_vl_exp:
3477 switch (code)
3478 {
3479 case CALL_EXPR:
3480 {
3481 const_tree arg;
3482 const_call_expr_arg_iterator iter;
3483 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3484 if (CONTAINS_PLACEHOLDER_P (arg))
3485 return 1;
3486 return 0;
3487 }
3488 default:
3489 return 0;
3490 }
3491
3492 default:
3493 return 0;
3494 }
3495 return 0;
3496 }
3497
3498 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3499 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3500 field positions. */
3501
3502 static bool
3503 type_contains_placeholder_1 (const_tree type)
3504 {
3505 /* If the size contains a placeholder or the parent type (component type in
3506 the case of arrays) type involves a placeholder, this type does. */
3507 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3508 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3509 || (!POINTER_TYPE_P (type)
3510 && TREE_TYPE (type)
3511 && type_contains_placeholder_p (TREE_TYPE (type))))
3512 return true;
3513
3514 /* Now do type-specific checks. Note that the last part of the check above
3515 greatly limits what we have to do below. */
3516 switch (TREE_CODE (type))
3517 {
3518 case VOID_TYPE:
3519 case POINTER_BOUNDS_TYPE:
3520 case COMPLEX_TYPE:
3521 case ENUMERAL_TYPE:
3522 case BOOLEAN_TYPE:
3523 case POINTER_TYPE:
3524 case OFFSET_TYPE:
3525 case REFERENCE_TYPE:
3526 case METHOD_TYPE:
3527 case FUNCTION_TYPE:
3528 case VECTOR_TYPE:
3529 case NULLPTR_TYPE:
3530 return false;
3531
3532 case INTEGER_TYPE:
3533 case REAL_TYPE:
3534 case FIXED_POINT_TYPE:
3535 /* Here we just check the bounds. */
3536 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3537 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3538
3539 case ARRAY_TYPE:
3540 /* We have already checked the component type above, so just check the
3541 domain type. */
3542 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3543
3544 case RECORD_TYPE:
3545 case UNION_TYPE:
3546 case QUAL_UNION_TYPE:
3547 {
3548 tree field;
3549
3550 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3551 if (TREE_CODE (field) == FIELD_DECL
3552 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3553 || (TREE_CODE (type) == QUAL_UNION_TYPE
3554 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3555 || type_contains_placeholder_p (TREE_TYPE (field))))
3556 return true;
3557
3558 return false;
3559 }
3560
3561 default:
3562 gcc_unreachable ();
3563 }
3564 }
3565
3566 /* Wrapper around above function used to cache its result. */
3567
3568 bool
3569 type_contains_placeholder_p (tree type)
3570 {
3571 bool result;
3572
3573 /* If the contains_placeholder_bits field has been initialized,
3574 then we know the answer. */
3575 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3576 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3577
3578 /* Indicate that we've seen this type node, and the answer is false.
3579 This is what we want to return if we run into recursion via fields. */
3580 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3581
3582 /* Compute the real value. */
3583 result = type_contains_placeholder_1 (type);
3584
3585 /* Store the real value. */
3586 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3587
3588 return result;
3589 }
3590 \f
3591 /* Push tree EXP onto vector QUEUE if it is not already present. */
3592
3593 static void
3594 push_without_duplicates (tree exp, vec<tree> *queue)
3595 {
3596 unsigned int i;
3597 tree iter;
3598
3599 FOR_EACH_VEC_ELT (*queue, i, iter)
3600 if (simple_cst_equal (iter, exp) == 1)
3601 break;
3602
3603 if (!iter)
3604 queue->safe_push (exp);
3605 }
3606
3607 /* Given a tree EXP, find all occurrences of references to fields
3608 in a PLACEHOLDER_EXPR and place them in vector REFS without
3609 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3610 we assume here that EXP contains only arithmetic expressions
3611 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3612 argument list. */
3613
3614 void
3615 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3616 {
3617 enum tree_code code = TREE_CODE (exp);
3618 tree inner;
3619 int i;
3620
3621 /* We handle TREE_LIST and COMPONENT_REF separately. */
3622 if (code == TREE_LIST)
3623 {
3624 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3625 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3626 }
3627 else if (code == COMPONENT_REF)
3628 {
3629 for (inner = TREE_OPERAND (exp, 0);
3630 REFERENCE_CLASS_P (inner);
3631 inner = TREE_OPERAND (inner, 0))
3632 ;
3633
3634 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3635 push_without_duplicates (exp, refs);
3636 else
3637 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3638 }
3639 else
3640 switch (TREE_CODE_CLASS (code))
3641 {
3642 case tcc_constant:
3643 break;
3644
3645 case tcc_declaration:
3646 /* Variables allocated to static storage can stay. */
3647 if (!TREE_STATIC (exp))
3648 push_without_duplicates (exp, refs);
3649 break;
3650
3651 case tcc_expression:
3652 /* This is the pattern built in ada/make_aligning_type. */
3653 if (code == ADDR_EXPR
3654 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3655 {
3656 push_without_duplicates (exp, refs);
3657 break;
3658 }
3659
3660 /* Fall through... */
3661
3662 case tcc_exceptional:
3663 case tcc_unary:
3664 case tcc_binary:
3665 case tcc_comparison:
3666 case tcc_reference:
3667 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3668 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3669 break;
3670
3671 case tcc_vl_exp:
3672 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3673 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3674 break;
3675
3676 default:
3677 gcc_unreachable ();
3678 }
3679 }
3680
3681 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3682 return a tree with all occurrences of references to F in a
3683 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3684 CONST_DECLs. Note that we assume here that EXP contains only
3685 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3686 occurring only in their argument list. */
3687
3688 tree
3689 substitute_in_expr (tree exp, tree f, tree r)
3690 {
3691 enum tree_code code = TREE_CODE (exp);
3692 tree op0, op1, op2, op3;
3693 tree new_tree;
3694
3695 /* We handle TREE_LIST and COMPONENT_REF separately. */
3696 if (code == TREE_LIST)
3697 {
3698 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3699 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3700 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3701 return exp;
3702
3703 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3704 }
3705 else if (code == COMPONENT_REF)
3706 {
3707 tree inner;
3708
3709 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3710 and it is the right field, replace it with R. */
3711 for (inner = TREE_OPERAND (exp, 0);
3712 REFERENCE_CLASS_P (inner);
3713 inner = TREE_OPERAND (inner, 0))
3714 ;
3715
3716 /* The field. */
3717 op1 = TREE_OPERAND (exp, 1);
3718
3719 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3720 return r;
3721
3722 /* If this expression hasn't been completed yet, leave it alone. */
3723 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3724 return exp;
3725
3726 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3727 if (op0 == TREE_OPERAND (exp, 0))
3728 return exp;
3729
3730 new_tree
3731 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3732 }
3733 else
3734 switch (TREE_CODE_CLASS (code))
3735 {
3736 case tcc_constant:
3737 return exp;
3738
3739 case tcc_declaration:
3740 if (exp == f)
3741 return r;
3742 else
3743 return exp;
3744
3745 case tcc_expression:
3746 if (exp == f)
3747 return r;
3748
3749 /* Fall through... */
3750
3751 case tcc_exceptional:
3752 case tcc_unary:
3753 case tcc_binary:
3754 case tcc_comparison:
3755 case tcc_reference:
3756 switch (TREE_CODE_LENGTH (code))
3757 {
3758 case 0:
3759 return exp;
3760
3761 case 1:
3762 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3763 if (op0 == TREE_OPERAND (exp, 0))
3764 return exp;
3765
3766 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3767 break;
3768
3769 case 2:
3770 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3771 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3772
3773 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3774 return exp;
3775
3776 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3777 break;
3778
3779 case 3:
3780 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3781 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3782 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3783
3784 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3785 && op2 == TREE_OPERAND (exp, 2))
3786 return exp;
3787
3788 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3789 break;
3790
3791 case 4:
3792 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3793 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3794 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3795 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3796
3797 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3798 && op2 == TREE_OPERAND (exp, 2)
3799 && op3 == TREE_OPERAND (exp, 3))
3800 return exp;
3801
3802 new_tree
3803 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3804 break;
3805
3806 default:
3807 gcc_unreachable ();
3808 }
3809 break;
3810
3811 case tcc_vl_exp:
3812 {
3813 int i;
3814
3815 new_tree = NULL_TREE;
3816
3817 /* If we are trying to replace F with a constant, inline back
3818 functions which do nothing other than compute a value from
3819 the arguments they are passed. This makes it possible to
3820 partially or entirely fold the replacement expression. */
3821 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3822 {
3823 tree t = maybe_inline_call_in_expr (exp);
3824 if (t)
3825 return SUBSTITUTE_IN_EXPR (t, f, r);
3826 }
3827
3828 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3829 {
3830 tree op = TREE_OPERAND (exp, i);
3831 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3832 if (new_op != op)
3833 {
3834 if (!new_tree)
3835 new_tree = copy_node (exp);
3836 TREE_OPERAND (new_tree, i) = new_op;
3837 }
3838 }
3839
3840 if (new_tree)
3841 {
3842 new_tree = fold (new_tree);
3843 if (TREE_CODE (new_tree) == CALL_EXPR)
3844 process_call_operands (new_tree);
3845 }
3846 else
3847 return exp;
3848 }
3849 break;
3850
3851 default:
3852 gcc_unreachable ();
3853 }
3854
3855 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3856
3857 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3858 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3859
3860 return new_tree;
3861 }
3862
3863 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3864 for it within OBJ, a tree that is an object or a chain of references. */
3865
3866 tree
3867 substitute_placeholder_in_expr (tree exp, tree obj)
3868 {
3869 enum tree_code code = TREE_CODE (exp);
3870 tree op0, op1, op2, op3;
3871 tree new_tree;
3872
3873 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3874 in the chain of OBJ. */
3875 if (code == PLACEHOLDER_EXPR)
3876 {
3877 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3878 tree elt;
3879
3880 for (elt = obj; elt != 0;
3881 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3882 || TREE_CODE (elt) == COND_EXPR)
3883 ? TREE_OPERAND (elt, 1)
3884 : (REFERENCE_CLASS_P (elt)
3885 || UNARY_CLASS_P (elt)
3886 || BINARY_CLASS_P (elt)
3887 || VL_EXP_CLASS_P (elt)
3888 || EXPRESSION_CLASS_P (elt))
3889 ? TREE_OPERAND (elt, 0) : 0))
3890 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3891 return elt;
3892
3893 for (elt = obj; elt != 0;
3894 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3895 || TREE_CODE (elt) == COND_EXPR)
3896 ? TREE_OPERAND (elt, 1)
3897 : (REFERENCE_CLASS_P (elt)
3898 || UNARY_CLASS_P (elt)
3899 || BINARY_CLASS_P (elt)
3900 || VL_EXP_CLASS_P (elt)
3901 || EXPRESSION_CLASS_P (elt))
3902 ? TREE_OPERAND (elt, 0) : 0))
3903 if (POINTER_TYPE_P (TREE_TYPE (elt))
3904 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3905 == need_type))
3906 return fold_build1 (INDIRECT_REF, need_type, elt);
3907
3908 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3909 survives until RTL generation, there will be an error. */
3910 return exp;
3911 }
3912
3913 /* TREE_LIST is special because we need to look at TREE_VALUE
3914 and TREE_CHAIN, not TREE_OPERANDS. */
3915 else if (code == TREE_LIST)
3916 {
3917 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3918 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3919 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3920 return exp;
3921
3922 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3923 }
3924 else
3925 switch (TREE_CODE_CLASS (code))
3926 {
3927 case tcc_constant:
3928 case tcc_declaration:
3929 return exp;
3930
3931 case tcc_exceptional:
3932 case tcc_unary:
3933 case tcc_binary:
3934 case tcc_comparison:
3935 case tcc_expression:
3936 case tcc_reference:
3937 case tcc_statement:
3938 switch (TREE_CODE_LENGTH (code))
3939 {
3940 case 0:
3941 return exp;
3942
3943 case 1:
3944 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3945 if (op0 == TREE_OPERAND (exp, 0))
3946 return exp;
3947
3948 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3949 break;
3950
3951 case 2:
3952 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3953 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3954
3955 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3956 return exp;
3957
3958 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3959 break;
3960
3961 case 3:
3962 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3963 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3964 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3965
3966 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3967 && op2 == TREE_OPERAND (exp, 2))
3968 return exp;
3969
3970 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3971 break;
3972
3973 case 4:
3974 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3975 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3976 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3977 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3978
3979 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3980 && op2 == TREE_OPERAND (exp, 2)
3981 && op3 == TREE_OPERAND (exp, 3))
3982 return exp;
3983
3984 new_tree
3985 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3986 break;
3987
3988 default:
3989 gcc_unreachable ();
3990 }
3991 break;
3992
3993 case tcc_vl_exp:
3994 {
3995 int i;
3996
3997 new_tree = NULL_TREE;
3998
3999 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4000 {
4001 tree op = TREE_OPERAND (exp, i);
4002 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4003 if (new_op != op)
4004 {
4005 if (!new_tree)
4006 new_tree = copy_node (exp);
4007 TREE_OPERAND (new_tree, i) = new_op;
4008 }
4009 }
4010
4011 if (new_tree)
4012 {
4013 new_tree = fold (new_tree);
4014 if (TREE_CODE (new_tree) == CALL_EXPR)
4015 process_call_operands (new_tree);
4016 }
4017 else
4018 return exp;
4019 }
4020 break;
4021
4022 default:
4023 gcc_unreachable ();
4024 }
4025
4026 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4027
4028 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4029 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4030
4031 return new_tree;
4032 }
4033 \f
4034
4035 /* Subroutine of stabilize_reference; this is called for subtrees of
4036 references. Any expression with side-effects must be put in a SAVE_EXPR
4037 to ensure that it is only evaluated once.
4038
4039 We don't put SAVE_EXPR nodes around everything, because assigning very
4040 simple expressions to temporaries causes us to miss good opportunities
4041 for optimizations. Among other things, the opportunity to fold in the
4042 addition of a constant into an addressing mode often gets lost, e.g.
4043 "y[i+1] += x;". In general, we take the approach that we should not make
4044 an assignment unless we are forced into it - i.e., that any non-side effect
4045 operator should be allowed, and that cse should take care of coalescing
4046 multiple utterances of the same expression should that prove fruitful. */
4047
4048 static tree
4049 stabilize_reference_1 (tree e)
4050 {
4051 tree result;
4052 enum tree_code code = TREE_CODE (e);
4053
4054 /* We cannot ignore const expressions because such an expression might be
4055 a reference to a const array whose index contains side-effects. But we
4056 can ignore things that are actual constants or that have already been
4057 handled by this function. */
4058
4059 if (tree_invariant_p (e))
4060 return e;
4061
4062 switch (TREE_CODE_CLASS (code))
4063 {
4064 case tcc_exceptional:
4065 case tcc_type:
4066 case tcc_declaration:
4067 case tcc_comparison:
4068 case tcc_statement:
4069 case tcc_expression:
4070 case tcc_reference:
4071 case tcc_vl_exp:
4072 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4073 so that it will only be evaluated once. */
4074 /* The reference (r) and comparison (<) classes could be handled as
4075 below, but it is generally faster to only evaluate them once. */
4076 if (TREE_SIDE_EFFECTS (e))
4077 return save_expr (e);
4078 return e;
4079
4080 case tcc_constant:
4081 /* Constants need no processing. In fact, we should never reach
4082 here. */
4083 return e;
4084
4085 case tcc_binary:
4086 /* Division is slow and tends to be compiled with jumps,
4087 especially the division by powers of 2 that is often
4088 found inside of an array reference. So do it just once. */
4089 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4090 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4091 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4092 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4093 return save_expr (e);
4094 /* Recursively stabilize each operand. */
4095 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4096 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4097 break;
4098
4099 case tcc_unary:
4100 /* Recursively stabilize each operand. */
4101 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4102 break;
4103
4104 default:
4105 gcc_unreachable ();
4106 }
4107
4108 TREE_TYPE (result) = TREE_TYPE (e);
4109 TREE_READONLY (result) = TREE_READONLY (e);
4110 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4111 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4112
4113 return result;
4114 }
4115
4116 /* Stabilize a reference so that we can use it any number of times
4117 without causing its operands to be evaluated more than once.
4118 Returns the stabilized reference. This works by means of save_expr,
4119 so see the caveats in the comments about save_expr.
4120
4121 Also allows conversion expressions whose operands are references.
4122 Any other kind of expression is returned unchanged. */
4123
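/* An illustrative sketch (not part of GCC): a front end that must use an
   lvalue twice, e.g. to expand "a[f ()] += 1", can stabilize it once and
   reuse the result:

     tree lhs = ...;                         ARRAY_REF whose index calls f
     tree stable = stabilize_reference (lhs);
     build both the read and the store from STABLE

   f is then evaluated only once because the index ends up wrapped in a
   SAVE_EXPR by stabilize_reference_1.  */
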
4124 tree
4125 stabilize_reference (tree ref)
4126 {
4127 tree result;
4128 enum tree_code code = TREE_CODE (ref);
4129
4130 switch (code)
4131 {
4132 case VAR_DECL:
4133 case PARM_DECL:
4134 case RESULT_DECL:
4135 /* No action is needed in this case. */
4136 return ref;
4137
4138 CASE_CONVERT:
4139 case FLOAT_EXPR:
4140 case FIX_TRUNC_EXPR:
4141 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4142 break;
4143
4144 case INDIRECT_REF:
4145 result = build_nt (INDIRECT_REF,
4146 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4147 break;
4148
4149 case COMPONENT_REF:
4150 result = build_nt (COMPONENT_REF,
4151 stabilize_reference (TREE_OPERAND (ref, 0)),
4152 TREE_OPERAND (ref, 1), NULL_TREE);
4153 break;
4154
4155 case BIT_FIELD_REF:
4156 result = build_nt (BIT_FIELD_REF,
4157 stabilize_reference (TREE_OPERAND (ref, 0)),
4158 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4159 break;
4160
4161 case ARRAY_REF:
4162 result = build_nt (ARRAY_REF,
4163 stabilize_reference (TREE_OPERAND (ref, 0)),
4164 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4165 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4166 break;
4167
4168 case ARRAY_RANGE_REF:
4169 result = build_nt (ARRAY_RANGE_REF,
4170 stabilize_reference (TREE_OPERAND (ref, 0)),
4171 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4172 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4173 break;
4174
4175 case COMPOUND_EXPR:
4176 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4177 it wouldn't be ignored. This matters when dealing with
4178 volatiles. */
4179 return stabilize_reference_1 (ref);
4180
4181 /* If arg isn't a kind of lvalue we recognize, make no change.
4182 Caller should recognize the error for an invalid lvalue. */
4183 default:
4184 return ref;
4185
4186 case ERROR_MARK:
4187 return error_mark_node;
4188 }
4189
4190 TREE_TYPE (result) = TREE_TYPE (ref);
4191 TREE_READONLY (result) = TREE_READONLY (ref);
4192 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4193 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4194
4195 return result;
4196 }
4197 \f
4198 /* Low-level constructors for expressions. */
4199
4200 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4201 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4202
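/* An illustrative sketch (not part of GCC): build1 already calls this for
   a fresh ADDR_EXPR, but code that rewrites the operand of an existing
   ADDR_EXPR in place must refresh the flags itself (NEW_BASE is a
   hypothetical replacement operand):

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   Afterwards TREE_CONSTANT (addr) is set only if NEW_BASE is a constant,
   a static decl, or an indirection through an invariant pointer.  */
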
4203 void
4204 recompute_tree_invariant_for_addr_expr (tree t)
4205 {
4206 tree node;
4207 bool tc = true, se = false;
4208
4209 /* We start out assuming this address is both invariant and constant
4210 and has no side effects. Now go down any handled components and see if
4211 any of them involve offsets that are either non-constant or non-invariant.
4212 Also check for side-effects.
4213
4214 ??? Note that this code makes no attempt to deal with the case where
4215 taking the address of something causes a copy due to misalignment. */
4216
4217 #define UPDATE_FLAGS(NODE) \
4218 do { tree _node = (NODE); \
4219 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4220 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4221
4222 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4223 node = TREE_OPERAND (node, 0))
4224 {
4225 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4226 array reference (probably made temporarily by the G++ front end),
4227 so ignore all the operands. */
4228 if ((TREE_CODE (node) == ARRAY_REF
4229 || TREE_CODE (node) == ARRAY_RANGE_REF)
4230 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4231 {
4232 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4233 if (TREE_OPERAND (node, 2))
4234 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4235 if (TREE_OPERAND (node, 3))
4236 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4237 }
4238 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4239 FIELD_DECL, apparently. The G++ front end can put something else
4240 there, at least temporarily. */
4241 else if (TREE_CODE (node) == COMPONENT_REF
4242 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4243 {
4244 if (TREE_OPERAND (node, 2))
4245 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4246 }
4247 }
4248
4249 node = lang_hooks.expr_to_decl (node, &tc, &se);
4250
4251 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4252 the address, since &(*a)->b is a form of addition. If it's a constant, the
4253 address is constant too. If it's a decl, its address is constant if the
4254 decl is static. Everything else is not constant and, furthermore,
4255 taking the address of a volatile variable is not volatile. */
4256 if (TREE_CODE (node) == INDIRECT_REF
4257 || TREE_CODE (node) == MEM_REF)
4258 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4259 else if (CONSTANT_CLASS_P (node))
4260 ;
4261 else if (DECL_P (node))
4262 tc &= (staticp (node) != NULL_TREE);
4263 else
4264 {
4265 tc = false;
4266 se |= TREE_SIDE_EFFECTS (node);
4267 }
4268
4269
4270 TREE_CONSTANT (t) = tc;
4271 TREE_SIDE_EFFECTS (t) = se;
4272 #undef UPDATE_FLAGS
4273 }
4274
4275 /* Build an expression of code CODE, data type TYPE, and operands as
4276 specified. Expressions and reference nodes can be created this way.
4277 Constants, decls, types and misc nodes cannot be.
4278
4279 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4280 enough for all extant tree codes. */
4281
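/* An illustrative sketch (not part of GCC): these builders are used like
   ordinary constructors, e.g. building "1 + 2" as a typed GENERIC node:

     tree one = build_int_cst (integer_type_node, 1);
     tree two = build_int_cst (integer_type_node, 2);
     tree sum = build2 (PLUS_EXPR, integer_type_node, one, two);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of SUM are derived
   from the operands by PROCESS_ARG below.  */
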
4282 tree
4283 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4284 {
4285 tree t;
4286
4287 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4288
4289 t = make_node_stat (code PASS_MEM_STAT);
4290 TREE_TYPE (t) = tt;
4291
4292 return t;
4293 }
4294
4295 tree
4296 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4297 {
4298 int length = sizeof (struct tree_exp);
4299 tree t;
4300
4301 record_node_allocation_statistics (code, length);
4302
4303 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4304
4305 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4306
4307 memset (t, 0, sizeof (struct tree_common));
4308
4309 TREE_SET_CODE (t, code);
4310
4311 TREE_TYPE (t) = type;
4312 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4313 TREE_OPERAND (t, 0) = node;
4314 if (node && !TYPE_P (node))
4315 {
4316 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4317 TREE_READONLY (t) = TREE_READONLY (node);
4318 }
4319
4320 if (TREE_CODE_CLASS (code) == tcc_statement)
4321 TREE_SIDE_EFFECTS (t) = 1;
4322 else switch (code)
4323 {
4324 case VA_ARG_EXPR:
4325 /* All of these have side-effects, no matter what their
4326 operands are. */
4327 TREE_SIDE_EFFECTS (t) = 1;
4328 TREE_READONLY (t) = 0;
4329 break;
4330
4331 case INDIRECT_REF:
4332 /* Whether a dereference is readonly has nothing to do with whether
4333 its operand is readonly. */
4334 TREE_READONLY (t) = 0;
4335 break;
4336
4337 case ADDR_EXPR:
4338 if (node)
4339 recompute_tree_invariant_for_addr_expr (t);
4340 break;
4341
4342 default:
4343 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4344 && node && !TYPE_P (node)
4345 && TREE_CONSTANT (node))
4346 TREE_CONSTANT (t) = 1;
4347 if (TREE_CODE_CLASS (code) == tcc_reference
4348 && node && TREE_THIS_VOLATILE (node))
4349 TREE_THIS_VOLATILE (t) = 1;
4350 break;
4351 }
4352
4353 return t;
4354 }
4355
4356 #define PROCESS_ARG(N) \
4357 do { \
4358 TREE_OPERAND (t, N) = arg##N; \
4359 if (arg##N && !TYPE_P (arg##N)) \
4360 { \
4361 if (TREE_SIDE_EFFECTS (arg##N)) \
4362 side_effects = 1; \
4363 if (!TREE_READONLY (arg##N) \
4364 && !CONSTANT_CLASS_P (arg##N)) \
4365 (void) (read_only = 0); \
4366 if (!TREE_CONSTANT (arg##N)) \
4367 (void) (constant = 0); \
4368 } \
4369 } while (0)
4370
4371 tree
4372 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4373 {
4374 bool constant, read_only, side_effects;
4375 tree t;
4376
4377 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4378
4379 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4380 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4381 /* When sizetype precision doesn't match that of pointers
4382 we need to be able to build explicit extensions or truncations
4383 of the offset argument. */
4384 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4385 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4386 && TREE_CODE (arg1) == INTEGER_CST);
4387
4388 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4389 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4390 && ptrofftype_p (TREE_TYPE (arg1)));
4391
4392 t = make_node_stat (code PASS_MEM_STAT);
4393 TREE_TYPE (t) = tt;
4394
4395 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4396 result based on those same flags for the arguments. But if the
4397 arguments aren't really even `tree' expressions, we shouldn't be trying
4398 to do this. */
4399
4400 /* Expressions without side effects may be constant if their
4401 arguments are as well. */
4402 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4403 || TREE_CODE_CLASS (code) == tcc_binary);
4404 read_only = 1;
4405 side_effects = TREE_SIDE_EFFECTS (t);
4406
4407 PROCESS_ARG (0);
4408 PROCESS_ARG (1);
4409
4410 TREE_SIDE_EFFECTS (t) = side_effects;
4411 if (code == MEM_REF)
4412 {
4413 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4414 {
4415 tree o = TREE_OPERAND (arg0, 0);
4416 TREE_READONLY (t) = TREE_READONLY (o);
4417 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4418 }
4419 }
4420 else
4421 {
4422 TREE_READONLY (t) = read_only;
4423 TREE_CONSTANT (t) = constant;
4424 TREE_THIS_VOLATILE (t)
4425 = (TREE_CODE_CLASS (code) == tcc_reference
4426 && arg0 && TREE_THIS_VOLATILE (arg0));
4427 }
4428
4429 return t;
4430 }
4431
4432
4433 tree
4434 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4435 tree arg2 MEM_STAT_DECL)
4436 {
4437 bool constant, read_only, side_effects;
4438 tree t;
4439
4440 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4441 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4442
4443 t = make_node_stat (code PASS_MEM_STAT);
4444 TREE_TYPE (t) = tt;
4445
4446 read_only = 1;
4447
4448 /* As a special exception, if COND_EXPR has NULL branches, we
4449 assume that it is a gimple statement and always consider
4450 it to have side effects. */
4451 if (code == COND_EXPR
4452 && tt == void_type_node
4453 && arg1 == NULL_TREE
4454 && arg2 == NULL_TREE)
4455 side_effects = true;
4456 else
4457 side_effects = TREE_SIDE_EFFECTS (t);
4458
4459 PROCESS_ARG (0);
4460 PROCESS_ARG (1);
4461 PROCESS_ARG (2);
4462
4463 if (code == COND_EXPR)
4464 TREE_READONLY (t) = read_only;
4465
4466 TREE_SIDE_EFFECTS (t) = side_effects;
4467 TREE_THIS_VOLATILE (t)
4468 = (TREE_CODE_CLASS (code) == tcc_reference
4469 && arg0 && TREE_THIS_VOLATILE (arg0));
4470
4471 return t;
4472 }
4473
4474 tree
4475 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4476 tree arg2, tree arg3 MEM_STAT_DECL)
4477 {
4478 bool constant, read_only, side_effects;
4479 tree t;
4480
4481 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4482
4483 t = make_node_stat (code PASS_MEM_STAT);
4484 TREE_TYPE (t) = tt;
4485
4486 side_effects = TREE_SIDE_EFFECTS (t);
4487
4488 PROCESS_ARG (0);
4489 PROCESS_ARG (1);
4490 PROCESS_ARG (2);
4491 PROCESS_ARG (3);
4492
4493 TREE_SIDE_EFFECTS (t) = side_effects;
4494 TREE_THIS_VOLATILE (t)
4495 = (TREE_CODE_CLASS (code) == tcc_reference
4496 && arg0 && TREE_THIS_VOLATILE (arg0));
4497
4498 return t;
4499 }
4500
4501 tree
4502 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4503 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4504 {
4505 bool constant, read_only, side_effects;
4506 tree t;
4507
4508 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4509
4510 t = make_node_stat (code PASS_MEM_STAT);
4511 TREE_TYPE (t) = tt;
4512
4513 side_effects = TREE_SIDE_EFFECTS (t);
4514
4515 PROCESS_ARG (0);
4516 PROCESS_ARG (1);
4517 PROCESS_ARG (2);
4518 PROCESS_ARG (3);
4519 PROCESS_ARG (4);
4520
4521 TREE_SIDE_EFFECTS (t) = side_effects;
4522 if (code == TARGET_MEM_REF)
4523 {
4524 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4525 {
4526 tree o = TREE_OPERAND (arg0, 0);
4527 TREE_READONLY (t) = TREE_READONLY (o);
4528 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4529 }
4530 }
4531 else
4532 TREE_THIS_VOLATILE (t)
4533 = (TREE_CODE_CLASS (code) == tcc_reference
4534 && arg0 && TREE_THIS_VOLATILE (arg0));
4535
4536 return t;
4537 }
4538
4539 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4540 on the pointer PTR. */
4541
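/* An illustrative sketch (not part of GCC), with P a hypothetical
   pointer-valued tree:

     tree deref = build_simple_mem_ref_loc (loc, p);

   is the GENERIC equivalent of "*p".  If P is "&s.f", the base and offset
   are collapsed so the result is "MEM_REF <&s, byte offset of f>".  */
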
4542 tree
4543 build_simple_mem_ref_loc (location_t loc, tree ptr)
4544 {
4545 HOST_WIDE_INT offset = 0;
4546 tree ptype = TREE_TYPE (ptr);
4547 tree tem;
4548 /* For convenience allow addresses that collapse to a simple base
4549 and offset. */
4550 if (TREE_CODE (ptr) == ADDR_EXPR
4551 && (handled_component_p (TREE_OPERAND (ptr, 0))
4552 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4553 {
4554 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4555 gcc_assert (ptr);
4556 ptr = build_fold_addr_expr (ptr);
4557 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4558 }
4559 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4560 ptr, build_int_cst (ptype, offset));
4561 SET_EXPR_LOCATION (tem, loc);
4562 return tem;
4563 }
4564
4565 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4566
4567 offset_int
4568 mem_ref_offset (const_tree t)
4569 {
4570 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4571 }
4572
4573 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4574 offsetted by OFFSET units. */
4575
4576 tree
4577 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4578 {
4579 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4580 build_fold_addr_expr (base),
4581 build_int_cst (ptr_type_node, offset));
4582 tree addr = build1 (ADDR_EXPR, type, ref);
4583 recompute_tree_invariant_for_addr_expr (addr);
4584 return addr;
4585 }
4586
4587 /* Similar except don't specify the TREE_TYPE
4588 and leave the TREE_SIDE_EFFECTS as 0.
4589 It is permissible for arguments to be null,
4590 or even garbage if their values do not matter. */
4591
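/* An illustrative sketch (not part of GCC): stabilize_reference above uses
   this to build untyped scaffolding nodes, e.g.

     tree t = build_nt (COMPONENT_REF, object, field, NULL_TREE);
     TREE_TYPE (t) = TREE_TYPE (field);

   where OBJECT and FIELD stand for whatever the caller has at hand; the
   caller is responsible for filling in the type and flags afterwards.  */
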
4592 tree
4593 build_nt (enum tree_code code, ...)
4594 {
4595 tree t;
4596 int length;
4597 int i;
4598 va_list p;
4599
4600 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4601
4602 va_start (p, code);
4603
4604 t = make_node (code);
4605 length = TREE_CODE_LENGTH (code);
4606
4607 for (i = 0; i < length; i++)
4608 TREE_OPERAND (t, i) = va_arg (p, tree);
4609
4610 va_end (p);
4611 return t;
4612 }
4613
4614 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4615 tree vec. */
4616
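/* An illustrative sketch (not part of GCC), with FN, ARG0 and ARG1
   standing for trees the caller already has:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (fn, args);

   The result is an unfolded CALL_EXPR; as with build_nt, the type and
   flag bits are left for the caller to set.  */
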
4617 tree
4618 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4619 {
4620 tree ret, t;
4621 unsigned int ix;
4622
4623 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4624 CALL_EXPR_FN (ret) = fn;
4625 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4626 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4627 CALL_EXPR_ARG (ret, ix) = t;
4628 return ret;
4629 }
4630 \f
4631 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4632 We do NOT enter this node in any sort of symbol table.
4633
4634 LOC is the location of the decl.
4635
4636 layout_decl is used to set up the decl's storage layout.
4637 Other slots are initialized to 0 or null pointers. */
4638
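/* An illustrative sketch (not part of GCC): creating an artificial local
   variable of type "int" (the name and the use of input_location are
   arbitrary choices here):

     tree var = build_decl (input_location, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     DECL_ARTIFICIAL (var) = 1;

   Because the code is VAR_DECL, layout_decl has already filled in
   DECL_SIZE, DECL_ALIGN and DECL_MODE from the type.  */
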
4639 tree
4640 build_decl_stat (location_t loc, enum tree_code code, tree name,
4641 tree type MEM_STAT_DECL)
4642 {
4643 tree t;
4644
4645 t = make_node_stat (code PASS_MEM_STAT);
4646 DECL_SOURCE_LOCATION (t) = loc;
4647
4648 /* if (type == error_mark_node)
4649 type = integer_type_node; */
4650 /* That is not done, deliberately, so that having error_mark_node
4651 as the type can suppress useless errors in the use of this variable. */
4652
4653 DECL_NAME (t) = name;
4654 TREE_TYPE (t) = type;
4655
4656 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4657 layout_decl (t, 0);
4658
4659 return t;
4660 }
4661
4662 /* Builds and returns function declaration with NAME and TYPE. */
4663
4664 tree
4665 build_fn_decl (const char *name, tree type)
4666 {
4667 tree id = get_identifier (name);
4668 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4669
4670 DECL_EXTERNAL (decl) = 1;
4671 TREE_PUBLIC (decl) = 1;
4672 DECL_ARTIFICIAL (decl) = 1;
4673 TREE_NOTHROW (decl) = 1;
4674
4675 return decl;
4676 }
4677
4678 vec<tree, va_gc> *all_translation_units;
4679
4680 /* Builds a new translation-unit decl with name NAME, queues it in the
4681 global list of translation-unit decls and returns it. */
4682
4683 tree
4684 build_translation_unit_decl (tree name)
4685 {
4686 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4687 name, NULL_TREE);
4688 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4689 vec_safe_push (all_translation_units, tu);
4690 return tu;
4691 }
4692
4693 \f
4694 /* BLOCK nodes are used to represent the structure of binding contours
4695 and declarations, once those contours have been exited and their contents
4696 compiled. This information is used for outputting debugging info. */
4697
4698 tree
4699 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4700 {
4701 tree block = make_node (BLOCK);
4702
4703 BLOCK_VARS (block) = vars;
4704 BLOCK_SUBBLOCKS (block) = subblocks;
4705 BLOCK_SUPERCONTEXT (block) = supercontext;
4706 BLOCK_CHAIN (block) = chain;
4707 return block;
4708 }
4709
4710 \f
4711 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4712
4713 LOC is the location to use in tree T. */
4714
4715 void
4716 protected_set_expr_location (tree t, location_t loc)
4717 {
4718 if (CAN_HAVE_LOCATION_P (t))
4719 SET_EXPR_LOCATION (t, loc);
4720 }
4721 \f
4722 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4723 is ATTRIBUTE. */
4724
4725 tree
4726 build_decl_attribute_variant (tree ddecl, tree attribute)
4727 {
4728 DECL_ATTRIBUTES (ddecl) = attribute;
4729 return ddecl;
4730 }
4731
4732 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4733 is ATTRIBUTE and its qualifiers are QUALS.
4734
4735 Record such modified types already made so we don't make duplicates. */
4736
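/* An illustrative sketch (not part of GCC): attaching a "may_alias"
   attribute (with no arguments) to a const-qualified variant of a
   hypothetical non-tagged type TYPE:

     tree attr = tree_cons (get_identifier ("may_alias"), NULL_TREE,
                            TYPE_ATTRIBUTES (type));
     tree variant = build_type_attribute_qual_variant (type, attr,
                                                       TYPE_QUAL_CONST);

   For a RECORD_TYPE or other tagged type the attribute would instead be
   dropped with a -Wattributes warning, as handled below.  */
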
4737 tree
4738 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4739 {
4740 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4741 {
4742 inchash::hash hstate;
4743 tree ntype;
4744 int i;
4745 tree t;
4746 enum tree_code code = TREE_CODE (ttype);
4747
4748 /* Building a distinct copy of a tagged type is inappropriate; it
4749 causes breakage in code that expects there to be a one-to-one
4750 relationship between a struct and its fields.
4751 build_duplicate_type is another solution (as used in
4752 handle_transparent_union_attribute), but that doesn't play well
4753 with the stronger C++ type identity model. */
4754 if (TREE_CODE (ttype) == RECORD_TYPE
4755 || TREE_CODE (ttype) == UNION_TYPE
4756 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4757 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4758 {
4759 warning (OPT_Wattributes,
4760 "ignoring attributes applied to %qT after definition",
4761 TYPE_MAIN_VARIANT (ttype));
4762 return build_qualified_type (ttype, quals);
4763 }
4764
4765 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4766 ntype = build_distinct_type_copy (ttype);
4767
4768 TYPE_ATTRIBUTES (ntype) = attribute;
4769
4770 hstate.add_int (code);
4771 if (TREE_TYPE (ntype))
4772 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4773 attribute_hash_list (attribute, hstate);
4774
4775 switch (TREE_CODE (ntype))
4776 {
4777 case FUNCTION_TYPE:
4778 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4779 break;
4780 case ARRAY_TYPE:
4781 if (TYPE_DOMAIN (ntype))
4782 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4783 break;
4784 case INTEGER_TYPE:
4785 t = TYPE_MAX_VALUE (ntype);
4786 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4787 hstate.add_object (TREE_INT_CST_ELT (t, i));
4788 break;
4789 case REAL_TYPE:
4790 case FIXED_POINT_TYPE:
4791 {
4792 unsigned int precision = TYPE_PRECISION (ntype);
4793 hstate.add_object (precision);
4794 }
4795 break;
4796 default:
4797 break;
4798 }
4799
4800 ntype = type_hash_canon (hstate.end(), ntype);
4801
4802 /* If the target-dependent attributes make NTYPE different from
4803 its canonical type, we will need to use structural equality
4804 checks for this type. */
4805 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4806 || !comp_type_attributes (ntype, ttype))
4807 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4808 else if (TYPE_CANONICAL (ntype) == ntype)
4809 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4810
4811 ttype = build_qualified_type (ntype, quals);
4812 }
4813 else if (TYPE_QUALS (ttype) != quals)
4814 ttype = build_qualified_type (ttype, quals);
4815
4816 return ttype;
4817 }
4818
4819 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4820 the same. */
4821
4822 static bool
4823 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4824 {
4825 tree cl1, cl2;
4826 for (cl1 = clauses1, cl2 = clauses2;
4827 cl1 && cl2;
4828 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4829 {
4830 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4831 return false;
4832 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4833 {
4834 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4835 OMP_CLAUSE_DECL (cl2)) != 1)
4836 return false;
4837 }
4838 switch (OMP_CLAUSE_CODE (cl1))
4839 {
4840 case OMP_CLAUSE_ALIGNED:
4841 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4842 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4843 return false;
4844 break;
4845 case OMP_CLAUSE_LINEAR:
4846 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4847 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4848 return false;
4849 break;
4850 case OMP_CLAUSE_SIMDLEN:
4851 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4852 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4853 return false;
4854 default:
4855 break;
4856 }
4857 }
4858 return true;
4859 }
4860
4861 /* Compare two constructor-element-type constants. Return true if the
4862 lists are known to be equal; otherwise return false. */
4863
4864 static bool
4865 simple_cst_list_equal (const_tree l1, const_tree l2)
4866 {
4867 while (l1 != NULL_TREE && l2 != NULL_TREE)
4868 {
4869 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4870 return false;
4871
4872 l1 = TREE_CHAIN (l1);
4873 l2 = TREE_CHAIN (l2);
4874 }
4875
4876 return l1 == l2;
4877 }
4878
4879 /* Compare two identifier nodes representing attributes. Either one may
4880 be in wrapped __ATTR__ form. Return true if they are the same, false
4881 otherwise. */
4882
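/* An illustrative sketch (not part of GCC): both spellings of an
   attribute name compare equal, so

     cmp_attrib_identifiers (get_identifier ("format"),
                             get_identifier ("__format__"))

   returns true, while "format" vs. "__format" (only one wrapper) does
   not match.  */
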
4883 static bool
4884 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4885 {
4886 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4887 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4888 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4889
4890 /* Identifiers can be compared directly for equality. */
4891 if (attr1 == attr2)
4892 return true;
4893
4894 /* If they are not equal, one may still be in the form
4895 'text' while the other one is in the form '__text__'. TODO:
4896 If we were storing attributes in normalized 'text' form, then
4897 this could all go away and we could take full advantage of
4898 the fact that we're comparing identifiers. :-) */
4899 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4900 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4901
4902 if (attr2_len == attr1_len + 4)
4903 {
4904 const char *p = IDENTIFIER_POINTER (attr2);
4905 const char *q = IDENTIFIER_POINTER (attr1);
4906 if (p[0] == '_' && p[1] == '_'
4907 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4908 && strncmp (q, p + 2, attr1_len) == 0)
4909 return true;
4910 }
4911 else if (attr2_len + 4 == attr1_len)
4912 {
4913 const char *p = IDENTIFIER_POINTER (attr2);
4914 const char *q = IDENTIFIER_POINTER (attr1);
4915 if (q[0] == '_' && q[1] == '_'
4916 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4917 && strncmp (q + 2, p, attr2_len) == 0)
4918 return true;
4919 }
4920
4921 return false;
4922 }
4923
4924 /* Compare two attributes for their value identity. Return true if the
4925 attribute values are known to be equal; otherwise return false. */
4926
4927 bool
4928 attribute_value_equal (const_tree attr1, const_tree attr2)
4929 {
4930 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4931 return true;
4932
4933 if (TREE_VALUE (attr1) != NULL_TREE
4934 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4935 && TREE_VALUE (attr2) != NULL_TREE
4936 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4937 {
4938 /* Handle attribute format. */
4939 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4940 {
4941 attr1 = TREE_VALUE (attr1);
4942 attr2 = TREE_VALUE (attr2);
4943 /* Compare the archetypes (printf/scanf/strftime/...). */
4944 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4945 TREE_VALUE (attr2)))
4946 return false;
4947 /* Archetypes are the same. Compare the rest. */
4948 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4949 TREE_CHAIN (attr2)) == 1);
4950 }
4951 return (simple_cst_list_equal (TREE_VALUE (attr1),
4952 TREE_VALUE (attr2)) == 1);
4953 }
4954
4955 if ((flag_openmp || flag_openmp_simd)
4956 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4957 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4958 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4959 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4960 TREE_VALUE (attr2));
4961
4962 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4963 }
4964
4965 /* Return 0 if the attributes for two types are incompatible, 1 if they
4966 are compatible, and 2 if they are nearly compatible (which causes a
4967 warning to be generated). */
4968 int
4969 comp_type_attributes (const_tree type1, const_tree type2)
4970 {
4971 const_tree a1 = TYPE_ATTRIBUTES (type1);
4972 const_tree a2 = TYPE_ATTRIBUTES (type2);
4973 const_tree a;
4974
4975 if (a1 == a2)
4976 return 1;
4977 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4978 {
4979 const struct attribute_spec *as;
4980 const_tree attr;
4981
4982 as = lookup_attribute_spec (get_attribute_name (a));
4983 if (!as || as->affects_type_identity == false)
4984 continue;
4985
4986 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4987 if (!attr || !attribute_value_equal (a, attr))
4988 break;
4989 }
4990 if (!a)
4991 {
4992 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4993 {
4994 const struct attribute_spec *as;
4995
4996 as = lookup_attribute_spec (get_attribute_name (a));
4997 if (!as || as->affects_type_identity == false)
4998 continue;
4999
5000 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5001 break;
5002 /* We don't need to compare trees again, as we did this
5003 already in first loop. */
5004 }
5005 /* All attributes affecting type identity are equal, so
5006 there is no need to call the target hook for comparison. */
5007 if (!a)
5008 return 1;
5009 }
5010 /* As some type combinations - like default calling-convention - might
5011 be compatible, we have to call the target hook to get the final result. */
5012 return targetm.comp_type_attributes (type1, type2);
5013 }
5014
5015 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5016 is ATTRIBUTE.
5017
5018 Record such modified types already made so we don't make duplicates. */
5019
5020 tree
5021 build_type_attribute_variant (tree ttype, tree attribute)
5022 {
5023 return build_type_attribute_qual_variant (ttype, attribute,
5024 TYPE_QUALS (ttype));
5025 }
5026
5027
5028 /* Reset the expression *EXPR_P, a size or position.
5029
5030 ??? We could reset all non-constant sizes or positions. But it's cheap
5031 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5032
5033 We need to reset self-referential sizes or positions because they cannot
5034 be gimplified and thus can contain a CALL_EXPR after the gimplification
5035 is finished, which will run afoul of LTO streaming. And they need to be
5036 reset to something essentially dummy but not constant, so as to preserve
5037 the properties of the object they are attached to. */
5038
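/* An illustrative sketch (not part of GCC): for an Ada-style record whose
   size depends on a discriminant, TYPE_SIZE contains a PLACEHOLDER_EXPR,
   e.g. "PLACEHOLDER_EXPR.n * 8 + 32"; after

     free_lang_data_in_one_sizepos (&TYPE_SIZE (type));

   it is just a bare PLACEHOLDER_EXPR of bitsizetype, which is still
   non-constant but safe to stream for LTO.  */
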
5039 static inline void
5040 free_lang_data_in_one_sizepos (tree *expr_p)
5041 {
5042 tree expr = *expr_p;
5043 if (CONTAINS_PLACEHOLDER_P (expr))
5044 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5045 }
5046
5047
5048 /* Reset all the fields in a binfo node BINFO. We only keep
5049 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5050
5051 static void
5052 free_lang_data_in_binfo (tree binfo)
5053 {
5054 unsigned i;
5055 tree t;
5056
5057 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5058
5059 BINFO_VIRTUALS (binfo) = NULL_TREE;
5060 BINFO_BASE_ACCESSES (binfo) = NULL;
5061 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5062 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5063
5064 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5065 free_lang_data_in_binfo (t);
5066 }
5067
5068
5069 /* Reset all language specific information still present in TYPE. */
5070
5071 static void
5072 free_lang_data_in_type (tree type)
5073 {
5074 gcc_assert (TYPE_P (type));
5075
5076 /* Give the FE a chance to remove its own data first. */
5077 lang_hooks.free_lang_data (type);
5078
5079 TREE_LANG_FLAG_0 (type) = 0;
5080 TREE_LANG_FLAG_1 (type) = 0;
5081 TREE_LANG_FLAG_2 (type) = 0;
5082 TREE_LANG_FLAG_3 (type) = 0;
5083 TREE_LANG_FLAG_4 (type) = 0;
5084 TREE_LANG_FLAG_5 (type) = 0;
5085 TREE_LANG_FLAG_6 (type) = 0;
5086
5087 if (TREE_CODE (type) == FUNCTION_TYPE)
5088 {
5089 /* Remove the const and volatile qualifiers from arguments. The
5090 C++ front end removes them, but the C front end does not,
5091 leading to false ODR violation errors when merging two
5092 instances of the same function signature compiled by
5093 different front ends. */
5094 tree p;
5095
5096 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5097 {
5098 tree arg_type = TREE_VALUE (p);
5099
5100 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5101 {
5102 int quals = TYPE_QUALS (arg_type)
5103 & ~TYPE_QUAL_CONST
5104 & ~TYPE_QUAL_VOLATILE;
5105 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5106 free_lang_data_in_type (TREE_VALUE (p));
5107 }
5108 /* C++ FE uses TREE_PURPOSE to store initial values. */
5109 TREE_PURPOSE (p) = NULL;
5110 }
5111 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5112 TYPE_MINVAL (type) = NULL;
5113 }
5114 if (TREE_CODE (type) == METHOD_TYPE)
5115 {
5116 tree p;
5117
5118 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5119 {
5120 /* C++ FE uses TREE_PURPOSE to store initial values. */
5121 TREE_PURPOSE (p) = NULL;
5122 }
5123 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5124 TYPE_MINVAL (type) = NULL;
5125 }
5126
5127 /* Remove members that are not actually FIELD_DECLs from the field
5128 list of an aggregate. These occur in C++. */
5129 if (RECORD_OR_UNION_TYPE_P (type))
5130 {
5131 tree prev, member;
5132
5133 /* Note that TYPE_FIELDS can be shared across distinct
5134 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5135 to be removed, we cannot set its TREE_CHAIN to NULL.
5136 Otherwise, we would not be able to find all the other fields
5137 in the other instances of this TREE_TYPE.
5138
5139 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5140 prev = NULL_TREE;
5141 member = TYPE_FIELDS (type);
5142 while (member)
5143 {
5144 if (TREE_CODE (member) == FIELD_DECL
5145 || TREE_CODE (member) == TYPE_DECL)
5146 {
5147 if (prev)
5148 TREE_CHAIN (prev) = member;
5149 else
5150 TYPE_FIELDS (type) = member;
5151 prev = member;
5152 }
5153
5154 member = TREE_CHAIN (member);
5155 }
5156
5157 if (prev)
5158 TREE_CHAIN (prev) = NULL_TREE;
5159 else
5160 TYPE_FIELDS (type) = NULL_TREE;
5161
5162 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5163 and dangle the pointer from time to time. */
5164 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5165 TYPE_VFIELD (type) = NULL_TREE;
5166
5167 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5168 to enable ODR warnings about different method lists, doing so
5169 seems to increase the size of the streamed LTO data impractically.
5170 Keep the information about whether TYPE_METHODS was non-NULL; it is
5171 used by function.c and the pretty printers. */
5172 if (TYPE_METHODS (type))
5173 TYPE_METHODS (type) = error_mark_node;
5174 if (TYPE_BINFO (type))
5175 {
5176 free_lang_data_in_binfo (TYPE_BINFO (type));
5177 /* We need to preserve the link to the bases and the virtual table
5178 for all polymorphic types to keep the devirtualization machinery
5179 working. Debug output cares only about bases, but we also output
5180 virtual table pointers so that merging -fdevirtualize and
5181 -fno-devirtualize units is easier.  */
5182 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5183 || !flag_devirtualize)
5184 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5185 && !BINFO_VTABLE (TYPE_BINFO (type)))
5186 || debug_info_level != DINFO_LEVEL_NONE))
5187 TYPE_BINFO (type) = NULL;
5188 }
5189 }
5190 else
5191 {
5192 /* For non-aggregate types, clear out the language slot (which
5193 overloads TYPE_BINFO). */
5194 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5195
5196 if (INTEGRAL_TYPE_P (type)
5197 || SCALAR_FLOAT_TYPE_P (type)
5198 || FIXED_POINT_TYPE_P (type))
5199 {
5200 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5201 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5202 }
5203 }
5204
5205 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5206 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5207
5208 if (TYPE_CONTEXT (type)
5209 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5210 {
5211 tree ctx = TYPE_CONTEXT (type);
5212 do
5213 {
5214 ctx = BLOCK_SUPERCONTEXT (ctx);
5215 }
5216 while (ctx && TREE_CODE (ctx) == BLOCK);
5217 TYPE_CONTEXT (type) = ctx;
5218 }
5219 }
5220
5221
5222 /* Return true if DECL may need an assembler name to be set. */
5223
5224 static inline bool
5225 need_assembler_name_p (tree decl)
5226 {
5227 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5228 Rule merging. This makes type_odr_p return true on those types during
5229 LTO and, by comparing the mangled names, we can tell which types are
5230 intended to be equivalent across compilation units.
5231
5232 We do not store names of type_in_anonymous_namespace_p.
5233
5234 Record, union and enumeration types have linkage that allows us
5235 to check type_in_anonymous_namespace_p. We do not mangle compound types
5236 that can always be compared structurally.
5237
5238 Similarly for builtin types, we compare properties of their main variant.
5239 A special case are integer types, where mangling does distinguish
5240 char/signed char/unsigned char etc. Storing names for these lets
5241 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5242 See cp/mangle.c:write_builtin_type for details. */
5243
5244 if (flag_lto_odr_type_mering
5245 && TREE_CODE (decl) == TYPE_DECL
5246 && DECL_NAME (decl)
5247 && decl == TYPE_NAME (TREE_TYPE (decl))
5248 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5249 && (type_with_linkage_p (TREE_TYPE (decl))
5250 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5251 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5252 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5253 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5254 if (TREE_CODE (decl) != FUNCTION_DECL
5255 && TREE_CODE (decl) != VAR_DECL)
5256 return false;
5257
5258 /* If DECL already has its assembler name set, it does not need a
5259 new one. */
5260 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5261 || DECL_ASSEMBLER_NAME_SET_P (decl))
5262 return false;
5263
5264 /* Abstract decls do not need an assembler name. */
5265 if (DECL_ABSTRACT_P (decl))
5266 return false;
5267
5268 /* For VAR_DECLs, only static, public and external symbols need an
5269 assembler name. */
5270 if (TREE_CODE (decl) == VAR_DECL
5271 && !TREE_STATIC (decl)
5272 && !TREE_PUBLIC (decl)
5273 && !DECL_EXTERNAL (decl))
5274 return false;
5275
5276 if (TREE_CODE (decl) == FUNCTION_DECL)
5277 {
5278 /* Do not set assembler name on builtins. Allow RTL expansion to
5279 decide whether to expand inline or via a regular call. */
5280 if (DECL_BUILT_IN (decl)
5281 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5282 return false;
5283
5284 /* Functions represented in the callgraph need an assembler name. */
5285 if (cgraph_node::get (decl) != NULL)
5286 return true;
5287
5288 /* Unused and not public functions don't need an assembler name. */
5289 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5290 return false;
5291 }
5292
5293 return true;
5294 }
5295
5296
5297 /* Reset all language specific information still present in symbol
5298 DECL. */
5299
5300 static void
5301 free_lang_data_in_decl (tree decl)
5302 {
5303 gcc_assert (DECL_P (decl));
5304
5305 /* Give the FE a chance to remove its own data first. */
5306 lang_hooks.free_lang_data (decl);
5307
5308 TREE_LANG_FLAG_0 (decl) = 0;
5309 TREE_LANG_FLAG_1 (decl) = 0;
5310 TREE_LANG_FLAG_2 (decl) = 0;
5311 TREE_LANG_FLAG_3 (decl) = 0;
5312 TREE_LANG_FLAG_4 (decl) = 0;
5313 TREE_LANG_FLAG_5 (decl) = 0;
5314 TREE_LANG_FLAG_6 (decl) = 0;
5315
5316 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5317 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5318 if (TREE_CODE (decl) == FIELD_DECL)
5319 {
5320 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5321 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5322 DECL_QUALIFIER (decl) = NULL_TREE;
5323 }
5324
5325 if (TREE_CODE (decl) == FUNCTION_DECL)
5326 {
5327 struct cgraph_node *node;
5328 if (!(node = cgraph_node::get (decl))
5329 || (!node->definition && !node->clones))
5330 {
5331 if (node)
5332 node->release_body ();
5333 else
5334 {
5335 release_function_body (decl);
5336 DECL_ARGUMENTS (decl) = NULL;
5337 DECL_RESULT (decl) = NULL;
5338 DECL_INITIAL (decl) = error_mark_node;
5339 }
5340 }
5341 if (gimple_has_body_p (decl))
5342 {
5343 tree t;
5344
5345 /* If DECL has a gimple body, then the context for its
5346 arguments must be DECL. Otherwise, it doesn't really
5347 matter, as we will not be emitting any code for DECL. In
5348 general, there may be other instances of DECL created by
5349 the front end and since PARM_DECLs are generally shared,
5350 their DECL_CONTEXT changes as the replicas of DECL are
5351 created. The only time where DECL_CONTEXT is important
5352 is for the FUNCTION_DECLs that have a gimple body (since
5353 the PARM_DECL will be used in the function's body). */
5354 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5355 DECL_CONTEXT (t) = decl;
5356 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5357 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5358 = target_option_default_node;
5359 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5360 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5361 = optimization_default_node;
5362 }
5363
5364 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5365 At this point, it is not needed anymore. */
5366 DECL_SAVED_TREE (decl) = NULL_TREE;
5367
5368 /* Clear the abstract origin if it refers to a method. Otherwise
5369 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5370 origin will not be output correctly. */
5371 if (DECL_ABSTRACT_ORIGIN (decl)
5372 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5373 && RECORD_OR_UNION_TYPE_P
5374 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5375 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5376
5377 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5378 DECL_VINDEX referring to itself into a vtable slot number as it
5379 should. Happens with functions that are copied and then forgotten
5380 about. Just clear it, it won't matter anymore. */
5381 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5382 DECL_VINDEX (decl) = NULL_TREE;
5383 }
5384 else if (TREE_CODE (decl) == VAR_DECL)
5385 {
5386 if ((DECL_EXTERNAL (decl)
5387 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5388 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5389 DECL_INITIAL (decl) = NULL_TREE;
5390 }
5391 else if (TREE_CODE (decl) == TYPE_DECL
5392 || TREE_CODE (decl) == FIELD_DECL)
5393 DECL_INITIAL (decl) = NULL_TREE;
5394 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5395 && DECL_INITIAL (decl)
5396 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5397 {
5398 /* Strip builtins from the translation-unit BLOCK. We still have targets
5399 without builtin_decl_explicit support and also builtins are shared
5400 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5401 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5402 while (*nextp)
5403 {
5404 tree var = *nextp;
5405 if (TREE_CODE (var) == FUNCTION_DECL
5406 && DECL_BUILT_IN (var))
5407 *nextp = TREE_CHAIN (var);
5408 else
5409 nextp = &TREE_CHAIN (var);
5410 }
5411 }
5412 }
5413
5414
5415 /* Data used when collecting DECLs and TYPEs for language data removal. */
5416
5417 struct free_lang_data_d
5418 {
5419 /* Worklist to avoid excessive recursion. */
5420 vec<tree> worklist;
5421
5422 /* Set of traversed objects. Used to avoid duplicate visits. */
5423 hash_set<tree> *pset;
5424
5425 /* Array of symbols to process with free_lang_data_in_decl. */
5426 vec<tree> decls;
5427
5428 /* Array of types to process with free_lang_data_in_type. */
5429 vec<tree> types;
5430 };
5431
5432
5433 /* Save all language fields needed to generate proper debug information
5434 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5435
5436 static void
5437 save_debug_info_for_decl (tree t)
5438 {
5439 /*struct saved_debug_info_d *sdi;*/
5440
5441 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5442
5443 /* FIXME. Partial implementation for saving debug info removed. */
5444 }
5445
5446
5447 /* Save all language fields needed to generate proper debug information
5448 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5449
5450 static void
5451 save_debug_info_for_type (tree t)
5452 {
5453 /*struct saved_debug_info_d *sdi;*/
5454
5455 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5456
5457 /* FIXME. Partial implementation for saving debug info removed. */
5458 }
5459
5460
5461 /* Add type or decl T to one of the list of tree nodes that need their
5462 language data removed. The lists are held inside FLD. */
5463
5464 static void
5465 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5466 {
5467 if (DECL_P (t))
5468 {
5469 fld->decls.safe_push (t);
5470 if (debug_info_level > DINFO_LEVEL_TERSE)
5471 save_debug_info_for_decl (t);
5472 }
5473 else if (TYPE_P (t))
5474 {
5475 fld->types.safe_push (t);
5476 if (debug_info_level > DINFO_LEVEL_TERSE)
5477 save_debug_info_for_type (t);
5478 }
5479 else
5480 gcc_unreachable ();
5481 }
5482
5483 /* Push tree node T into FLD->WORKLIST. */
5484
5485 static inline void
5486 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5487 {
5488 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5489 fld->worklist.safe_push ((t));
5490 }
5491
5492
5493 /* Operand callback helper for free_lang_data_in_node. *TP is the
5494 subtree operand being considered. */
5495
5496 static tree
5497 find_decls_types_r (tree *tp, int *ws, void *data)
5498 {
5499 tree t = *tp;
5500 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5501
5502 if (TREE_CODE (t) == TREE_LIST)
5503 return NULL_TREE;
5504
5505 /* Language specific nodes will be removed, so there is no need
5506 to gather anything under them. */
5507 if (is_lang_specific (t))
5508 {
5509 *ws = 0;
5510 return NULL_TREE;
5511 }
5512
5513 if (DECL_P (t))
5514 {
5515 /* Note that walk_tree does not traverse every possible field in
5516 decls, so we have to do our own traversals here. */
5517 add_tree_to_fld_list (t, fld);
5518
5519 fld_worklist_push (DECL_NAME (t), fld);
5520 fld_worklist_push (DECL_CONTEXT (t), fld);
5521 fld_worklist_push (DECL_SIZE (t), fld);
5522 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5523
5524 /* We are going to remove everything under DECL_INITIAL for
5525 TYPE_DECLs. No point walking them. */
5526 if (TREE_CODE (t) != TYPE_DECL)
5527 fld_worklist_push (DECL_INITIAL (t), fld);
5528
5529 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5530 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5531
5532 if (TREE_CODE (t) == FUNCTION_DECL)
5533 {
5534 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5535 fld_worklist_push (DECL_RESULT (t), fld);
5536 }
5537 else if (TREE_CODE (t) == TYPE_DECL)
5538 {
5539 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5540 }
5541 else if (TREE_CODE (t) == FIELD_DECL)
5542 {
5543 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5544 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5545 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5546 fld_worklist_push (DECL_FCONTEXT (t), fld);
5547 }
5548
5549 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5550 && DECL_HAS_VALUE_EXPR_P (t))
5551 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5552
5553 if (TREE_CODE (t) != FIELD_DECL
5554 && TREE_CODE (t) != TYPE_DECL)
5555 fld_worklist_push (TREE_CHAIN (t), fld);
5556 *ws = 0;
5557 }
5558 else if (TYPE_P (t))
5559 {
5560 /* Note that walk_tree does not traverse every possible field in
5561 types, so we have to do our own traversals here. */
5562 add_tree_to_fld_list (t, fld);
5563
5564 if (!RECORD_OR_UNION_TYPE_P (t))
5565 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5566 fld_worklist_push (TYPE_SIZE (t), fld);
5567 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5568 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5569 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5570 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5571 fld_worklist_push (TYPE_NAME (t), fld);
5572 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5573 them and thus do not want to reach unused pointer types
5574 this way. */
5575 if (!POINTER_TYPE_P (t))
5576 fld_worklist_push (TYPE_MINVAL (t), fld);
5577 if (!RECORD_OR_UNION_TYPE_P (t))
5578 fld_worklist_push (TYPE_MAXVAL (t), fld);
5579 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5580 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5581 do not want to reach unused variants this way. */
5582 if (TYPE_CONTEXT (t))
5583 {
5584 tree ctx = TYPE_CONTEXT (t);
5585 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5586 So push that instead. */
5587 while (ctx && TREE_CODE (ctx) == BLOCK)
5588 ctx = BLOCK_SUPERCONTEXT (ctx);
5589 fld_worklist_push (ctx, fld);
5590 }
5591 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5592 want to reach unused types this way. */
5593
5594 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5595 {
5596 unsigned i;
5597 tree tem;
5598 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5599 fld_worklist_push (TREE_TYPE (tem), fld);
5600 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5601 if (tem
5602 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5603 && TREE_CODE (tem) == TREE_LIST)
5604 do
5605 {
5606 fld_worklist_push (TREE_VALUE (tem), fld);
5607 tem = TREE_CHAIN (tem);
5608 }
5609 while (tem);
5610 }
5611 if (RECORD_OR_UNION_TYPE_P (t))
5612 {
5613 tree tem;
5614 /* Push all TYPE_FIELDS - interesting and non-interesting
5615 entries can be interleaved. */
5616 tem = TYPE_FIELDS (t);
5617 while (tem)
5618 {
5619 if (TREE_CODE (tem) == FIELD_DECL
5620 || TREE_CODE (tem) == TYPE_DECL)
5621 fld_worklist_push (tem, fld);
5622 tem = TREE_CHAIN (tem);
5623 }
5624 }
5625
5626 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5627 *ws = 0;
5628 }
5629 else if (TREE_CODE (t) == BLOCK)
5630 {
5631 tree tem;
5632 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5633 fld_worklist_push (tem, fld);
5634 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5635 fld_worklist_push (tem, fld);
5636 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5637 }
5638
5639 if (TREE_CODE (t) != IDENTIFIER_NODE
5640 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5641 fld_worklist_push (TREE_TYPE (t), fld);
5642
5643 return NULL_TREE;
5644 }
5645
5646
5647 /* Find decls and types in T. */
5648
5649 static void
5650 find_decls_types (tree t, struct free_lang_data_d *fld)
5651 {
5652 while (1)
5653 {
5654 if (!fld->pset->contains (t))
5655 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5656 if (fld->worklist.is_empty ())
5657 break;
5658 t = fld->worklist.pop ();
5659 }
5660 }
5661
5662 /* Translate all the types in LIST with the corresponding runtime
5663 types. */
5664
5665 static tree
5666 get_eh_types_for_runtime (tree list)
5667 {
5668 tree head, prev;
5669
5670 if (list == NULL_TREE)
5671 return NULL_TREE;
5672
5673 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5674 prev = head;
5675 list = TREE_CHAIN (list);
5676 while (list)
5677 {
5678 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5679 TREE_CHAIN (prev) = n;
5680 prev = TREE_CHAIN (prev);
5681 list = TREE_CHAIN (list);
5682 }
5683
5684 return head;
5685 }
5686
5687
5688 /* Find decls and types referenced in EH region R and store them in
5689 FLD->DECLS and FLD->TYPES. */
5690
5691 static void
5692 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5693 {
5694 switch (r->type)
5695 {
5696 case ERT_CLEANUP:
5697 break;
5698
5699 case ERT_TRY:
5700 {
5701 eh_catch c;
5702
5703 /* The types referenced in each catch must first be changed to the
5704 EH types used at runtime. This removes references to FE types
5705 in the region. */
5706 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5707 {
5708 c->type_list = get_eh_types_for_runtime (c->type_list);
5709 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5710 }
5711 }
5712 break;
5713
5714 case ERT_ALLOWED_EXCEPTIONS:
5715 r->u.allowed.type_list
5716 = get_eh_types_for_runtime (r->u.allowed.type_list);
5717 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5718 break;
5719
5720 case ERT_MUST_NOT_THROW:
5721 walk_tree (&r->u.must_not_throw.failure_decl,
5722 find_decls_types_r, fld, fld->pset);
5723 break;
5724 }
5725 }
5726
5727
5728 /* Find decls and types referenced in cgraph node N and store them in
5729 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5730 look for *every* kind of DECL and TYPE node reachable from N,
5731 including those embedded inside types and decls (i.e., TYPE_DECLs,
5732 NAMESPACE_DECLs, etc). */
5733
5734 static void
5735 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5736 {
5737 basic_block bb;
5738 struct function *fn;
5739 unsigned ix;
5740 tree t;
5741
5742 find_decls_types (n->decl, fld);
5743
5744 if (!gimple_has_body_p (n->decl))
5745 return;
5746
5747 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5748
5749 fn = DECL_STRUCT_FUNCTION (n->decl);
5750
5751 /* Traverse locals. */
5752 FOR_EACH_LOCAL_DECL (fn, ix, t)
5753 find_decls_types (t, fld);
5754
5755 /* Traverse EH regions in FN. */
5756 {
5757 eh_region r;
5758 FOR_ALL_EH_REGION_FN (r, fn)
5759 find_decls_types_in_eh_region (r, fld);
5760 }
5761
5762 /* Traverse every statement in FN. */
5763 FOR_EACH_BB_FN (bb, fn)
5764 {
5765 gphi_iterator psi;
5766 gimple_stmt_iterator si;
5767 unsigned i;
5768
5769 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5770 {
5771 gphi *phi = psi.phi ();
5772
5773 for (i = 0; i < gimple_phi_num_args (phi); i++)
5774 {
5775 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5776 find_decls_types (*arg_p, fld);
5777 }
5778 }
5779
5780 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5781 {
5782 gimple stmt = gsi_stmt (si);
5783
5784 if (is_gimple_call (stmt))
5785 find_decls_types (gimple_call_fntype (stmt), fld);
5786
5787 for (i = 0; i < gimple_num_ops (stmt); i++)
5788 {
5789 tree arg = gimple_op (stmt, i);
5790 find_decls_types (arg, fld);
5791 }
5792 }
5793 }
5794 }
5795
5796
5797 /* Find decls and types referenced in varpool node N and store them in
5798 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5799 look for *every* kind of DECL and TYPE node reachable from N,
5800 including those embedded inside types and decls (i.e., TYPE_DECLs,
5801 NAMESPACE_DECLs, etc). */
5802
5803 static void
5804 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5805 {
5806 find_decls_types (v->decl, fld);
5807 }
5808
5809 /* If T needs an assembler name, have one created for it. */
5810
5811 void
5812 assign_assembler_name_if_neeeded (tree t)
5813 {
5814 if (need_assembler_name_p (t))
5815 {
5816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5817 diagnostics that use input_location to show locus
5818 information. The problem here is that, at this point,
5819 input_location is generally anchored to the end of the file
5820 (since the parser is long gone), so we don't have a good
5821 position to pin it to.
5822
5823 To alleviate this problem, this uses the location of T's
5824 declaration. Examples of this are
5825 testsuite/g++.dg/template/cond2.C and
5826 testsuite/g++.dg/template/pr35240.C. */
5827 location_t saved_location = input_location;
5828 input_location = DECL_SOURCE_LOCATION (t);
5829
5830 decl_assembler_name (t);
5831
5832 input_location = saved_location;
5833 }
5834 }
5835
5836
5837 /* Free language specific information for every operand and expression
5838 in every node of the call graph. This process operates in three stages:
5839
5840 1- Every callgraph node and varpool node is traversed looking for
5841 decls and types embedded in them. This is a more exhaustive
5842 search than that done by find_referenced_vars, because it will
5843 also collect individual fields, decls embedded in types, etc.
5844
5845 2- All the decls found are sent to free_lang_data_in_decl.
5846
5847 3- All the types found are sent to free_lang_data_in_type.
5848
5849 The ordering between decls and types is important because
5850 free_lang_data_in_decl sets assembler names, which includes
5851 mangling. So types cannot be freed up until assembler names have
5852 been set up. */
5853
5854 static void
5855 free_lang_data_in_cgraph (void)
5856 {
5857 struct cgraph_node *n;
5858 varpool_node *v;
5859 struct free_lang_data_d fld;
5860 tree t;
5861 unsigned i;
5862 alias_pair *p;
5863
5864 /* Initialize sets and arrays to store referenced decls and types. */
5865 fld.pset = new hash_set<tree>;
5866 fld.worklist.create (0);
5867 fld.decls.create (100);
5868 fld.types.create (100);
5869
5870 /* Find decls and types in the body of every function in the callgraph. */
5871 FOR_EACH_FUNCTION (n)
5872 find_decls_types_in_node (n, &fld);
5873
5874 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5875 find_decls_types (p->decl, &fld);
5876
5877 /* Find decls and types in every varpool symbol. */
5878 FOR_EACH_VARIABLE (v)
5879 find_decls_types_in_var (v, &fld);
5880
5881 /* Set the assembler name on every decl found. We need to do this
5882 now because free_lang_data_in_decl will invalidate data needed
5883 for mangling; doing it later would break mangling of interdependent decls. */
5884 FOR_EACH_VEC_ELT (fld.decls, i, t)
5885 assign_assembler_name_if_neeeded (t);
5886
5887 /* Traverse every decl found freeing its language data. */
5888 FOR_EACH_VEC_ELT (fld.decls, i, t)
5889 free_lang_data_in_decl (t);
5890
5891 /* Traverse every type found freeing its language data. */
5892 FOR_EACH_VEC_ELT (fld.types, i, t)
5893 free_lang_data_in_type (t);
5894 #ifdef ENABLE_CHECKING
5895 FOR_EACH_VEC_ELT (fld.types, i, t)
5896 verify_type (t);
5897 #endif
5898
5899 delete fld.pset;
5900 fld.worklist.release ();
5901 fld.decls.release ();
5902 fld.types.release ();
5903 }
5904
5905
5906 /* Free resources used by the front end that are not needed once it is done. */
5907
5908 static unsigned
5909 free_lang_data (void)
5910 {
5911 unsigned i;
5912
5913 /* If we are the LTO frontend we have freed lang-specific data already. */
5914 if (in_lto_p
5915 || (!flag_generate_lto && !flag_generate_offload))
5916 return 0;
5917
5918 /* Allocate and assign alias sets to the standard integer types
5919 while the slots still hold the types the way the front ends generated them. */
5920 for (i = 0; i < itk_none; ++i)
5921 if (integer_types[i])
5922 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5923
5924 /* Traverse the IL resetting language specific information for
5925 operands, expressions, etc. */
5926 free_lang_data_in_cgraph ();
5927
5928 /* Create gimple variants for common types. */
5929 ptrdiff_type_node = integer_type_node;
5930 fileptr_type_node = ptr_type_node;
5931
5932 /* Reset some langhooks. Do not reset types_compatible_p, it may
5933 still be used indirectly via the get_alias_set langhook. */
5934 lang_hooks.dwarf_name = lhd_dwarf_name;
5935 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5936 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5937
5938 /* We do not want the default decl_assembler_name implementation,
5939 rather if we have fixed everything we want a wrapper around it
5940 asserting that all non-local symbols already got their assembler
5941 name and only produce assembler names for local symbols. Or rather
5942 make sure we never call decl_assembler_name on local symbols and
5943 devise a separate, middle-end private scheme for it. */
5944
5945 /* Reset diagnostic machinery. */
5946 tree_diagnostics_defaults (global_dc);
5947
5948 return 0;
5949 }
5950
5951
5952 namespace {
5953
5954 const pass_data pass_data_ipa_free_lang_data =
5955 {
5956 SIMPLE_IPA_PASS, /* type */
5957 "*free_lang_data", /* name */
5958 OPTGROUP_NONE, /* optinfo_flags */
5959 TV_IPA_FREE_LANG_DATA, /* tv_id */
5960 0, /* properties_required */
5961 0, /* properties_provided */
5962 0, /* properties_destroyed */
5963 0, /* todo_flags_start */
5964 0, /* todo_flags_finish */
5965 };
5966
5967 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5968 {
5969 public:
5970 pass_ipa_free_lang_data (gcc::context *ctxt)
5971 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5972 {}
5973
5974 /* opt_pass methods: */
5975 virtual unsigned int execute (function *) { return free_lang_data (); }
5976
5977 }; // class pass_ipa_free_lang_data
5978
5979 } // anon namespace
5980
5981 simple_ipa_opt_pass *
5982 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5983 {
5984 return new pass_ipa_free_lang_data (ctxt);
5985 }
5986
5987 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5988 ATTR_NAME. Also used internally by remove_attribute(). */
5989 bool
5990 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5991 {
5992 size_t ident_len = IDENTIFIER_LENGTH (ident);
5993
5994 if (ident_len == attr_len)
5995 {
5996 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5997 return true;
5998 }
5999 else if (ident_len == attr_len + 4)
6000 {
6001 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6002 '__text__'. */
6003 const char *p = IDENTIFIER_POINTER (ident);
6004 if (p[0] == '_' && p[1] == '_'
6005 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6006 && strncmp (attr_name, p + 2, attr_len) == 0)
6007 return true;
6008 }
6009
6010 return false;
6011 }
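
/* Example: the helper above is what lets attribute checks accept both the
   plain and the underscored spelling of an attribute name.  A minimal
   sketch (illustrative only, not compiled; it assumes just the
   is_attribute_p wrapper from tree.h):  */
#if 0
  tree plain = get_identifier ("packed");
  tree ugly = get_identifier ("__packed__");

  /* Both return true: 'packed' matches the identifier either exactly
     or with the surrounding '__'.  */
  gcc_assert (is_attribute_p ("packed", plain));
  gcc_assert (is_attribute_p ("packed", ugly));
#endif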
6012
6013 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6014 of ATTR_NAME, and LIST is not NULL_TREE. */
6015 tree
6016 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6017 {
6018 while (list)
6019 {
6020 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6021
6022 if (ident_len == attr_len)
6023 {
6024 if (!strcmp (attr_name,
6025 IDENTIFIER_POINTER (get_attribute_name (list))))
6026 break;
6027 }
6028 /* TODO: If we made sure that attributes were stored in the
6029 canonical form without '__...__' (i.e., as in 'text' as opposed
6030 to '__text__') then we could avoid the following case. */
6031 else if (ident_len == attr_len + 4)
6032 {
6033 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6034 if (p[0] == '_' && p[1] == '_'
6035 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6036 && strncmp (attr_name, p + 2, attr_len) == 0)
6037 break;
6038 }
6039 list = TREE_CHAIN (list);
6040 }
6041
6042 return list;
6043 }
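
/* Example: callers normally go through the lookup_attribute wrapper in
   tree.h, which hands the precomputed strlen to the routine above.  A
   minimal sketch (illustrative only, not compiled; 'decl' is a
   placeholder declaration):  */
#if 0
  tree a = lookup_attribute ("noinline", DECL_ATTRIBUTES (decl));
  /* A is the matching TREE_LIST node (whether the attribute was written
     'noinline' or '__noinline__'), or NULL_TREE.  TREE_VALUE (a) holds
     the attribute's arguments, and TREE_CHAIN (a) can be passed back in
     to find further occurrences.  */
#endif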
6044
6045 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6046 return the first element of LIST whose attribute name
6047 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6048 '__text__'). */
6049
6050 tree
6051 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6052 tree list)
6053 {
6054 while (list)
6055 {
6056 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6057
6058 if (attr_len > ident_len)
6059 {
6060 list = TREE_CHAIN (list);
6061 continue;
6062 }
6063
6064 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6065
6066 if (strncmp (attr_name, p, attr_len) == 0)
6067 break;
6068
6069 /* TODO: If we made sure that attributes were stored in the
6070 canonical form without '__...__' (i.e., as in 'text' as opposed
6071 to '__text__') then we could avoid the following case. */
6072 if (p[0] == '_' && p[1] == '_'
6073 && strncmp (attr_name, p + 2, attr_len) == 0)
6074 break;
6075
6076 list = TREE_CHAIN (list);
6077 }
6078
6079 return list;
6080 }
6081
6082
6083 /* A variant of lookup_attribute() that can be used with an identifier
6084 as the first argument, and where the identifier can be either
6085 'text' or '__text__'.
6086
6087 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6088 return a pointer to the attribute's list element if the attribute
6089 is part of the list, or NULL_TREE if not found. If the attribute
6090 appears more than once, this only returns the first occurrence; the
6091 TREE_CHAIN of the return value should be passed back in if further
6092 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6093 can be in the form 'text' or '__text__'. */
6094 static tree
6095 lookup_ident_attribute (tree attr_identifier, tree list)
6096 {
6097 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6098
6099 while (list)
6100 {
6101 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6102 == IDENTIFIER_NODE);
6103
6104 if (cmp_attrib_identifiers (attr_identifier,
6105 get_attribute_name (list)))
6106 /* Found it. */
6107 break;
6108 list = TREE_CHAIN (list);
6109 }
6110
6111 return list;
6112 }
6113
6114 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6115 modified list. */
6116
6117 tree
6118 remove_attribute (const char *attr_name, tree list)
6119 {
6120 tree *p;
6121 size_t attr_len = strlen (attr_name);
6122
6123 gcc_checking_assert (attr_name[0] != '_');
6124
6125 for (p = &list; *p; )
6126 {
6127 tree l = *p;
6128 /* TODO: If we were storing attributes in normalized form, here
6129 we could use a simple strcmp(). */
6130 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6131 *p = TREE_CHAIN (l);
6132 else
6133 p = &TREE_CHAIN (l);
6134 }
6135
6136 return list;
6137 }
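
/* Example: remove_attribute walks the list with a pointer-to-pointer so
   that matching nodes can be unlinked without a separate 'previous'
   variable.  Usage sketch (illustrative only, not compiled; 'decl' is a
   placeholder):  */
#if 0
  /* Drop any 'deprecated' attribute, in either spelling, and store the
     possibly shortened list back on the decl.  */
  DECL_ATTRIBUTES (decl)
    = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
#endif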
6138
6139 /* Return an attribute list that is the union of A1 and A2. */
6140
6141 tree
6142 merge_attributes (tree a1, tree a2)
6143 {
6144 tree attributes;
6145
6146 /* Either one unset? Take the set one. */
6147
6148 if ((attributes = a1) == 0)
6149 attributes = a2;
6150
6151 /* One that completely contains the other? Take it. */
6152
6153 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6154 {
6155 if (attribute_list_contained (a2, a1))
6156 attributes = a2;
6157 else
6158 {
6159 /* Pick the longest list, and hang on the other list. */
6160
6161 if (list_length (a1) < list_length (a2))
6162 attributes = a2, a2 = a1;
6163
6164 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6165 {
6166 tree a;
6167 for (a = lookup_ident_attribute (get_attribute_name (a2),
6168 attributes);
6169 a != NULL_TREE && !attribute_value_equal (a, a2);
6170 a = lookup_ident_attribute (get_attribute_name (a2),
6171 TREE_CHAIN (a)))
6172 ;
6173 if (a == NULL_TREE)
6174 {
6175 a1 = copy_node (a2);
6176 TREE_CHAIN (a1) = attributes;
6177 attributes = a1;
6178 }
6179 }
6180 }
6181 }
6182 return attributes;
6183 }
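
/* Example: the union keeps a single copy of attributes present in both
   lists and copies over the ones unique to the other list.  Sketch
   (illustrative only, not compiled; 'a1' and 'a2' are placeholder
   attribute lists):  */
#if 0
  /* a1: packed, aligned(8)    a2: packed, noreturn  */
  tree merged = merge_attributes (a1, a2);
  /* MERGED contains packed, aligned(8) and noreturn; the shared
     'packed' attribute appears only once.  */
  gcc_assert (lookup_attribute ("noreturn", merged) != NULL_TREE);
#endif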
6184
6185 /* Given types T1 and T2, merge their attributes and return
6186 the result. */
6187
6188 tree
6189 merge_type_attributes (tree t1, tree t2)
6190 {
6191 return merge_attributes (TYPE_ATTRIBUTES (t1),
6192 TYPE_ATTRIBUTES (t2));
6193 }
6194
6195 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6196 the result. */
6197
6198 tree
6199 merge_decl_attributes (tree olddecl, tree newdecl)
6200 {
6201 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6202 DECL_ATTRIBUTES (newdecl));
6203 }
6204
6205 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6206
6207 /* Specialization of merge_decl_attributes for various Windows targets.
6208
6209 This handles the following situation:
6210
6211 __declspec (dllimport) int foo;
6212 int foo;
6213
6214 The second instance of `foo' nullifies the dllimport. */
6215
6216 tree
6217 merge_dllimport_decl_attributes (tree old, tree new_tree)
6218 {
6219 tree a;
6220 int delete_dllimport_p = 1;
6221
6222 /* What we need to do here is remove from `old' dllimport if it doesn't
6223 appear in `new'. dllimport behaves like extern: if a declaration is
6224 marked dllimport and a definition appears later, then the object
6225 is not dllimport'd. We also remove a `new' dllimport if the old list
6226 contains dllexport: dllexport always overrides dllimport, regardless
6227 of the order of declaration. */
6228 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6229 delete_dllimport_p = 0;
6230 else if (DECL_DLLIMPORT_P (new_tree)
6231 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6232 {
6233 DECL_DLLIMPORT_P (new_tree) = 0;
6234 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6235 "dllimport ignored", new_tree);
6236 }
6237 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6238 {
6239 /* Warn about overriding a symbol that has already been used, e.g.:
6240 extern int __attribute__ ((dllimport)) foo;
6241 int* bar () {return &foo;}
6242 int foo;
6243 */
6244 if (TREE_USED (old))
6245 {
6246 warning (0, "%q+D redeclared without dllimport attribute "
6247 "after being referenced with dll linkage", new_tree);
6248 /* If we have used a variable's address with dllimport linkage,
6249 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6250 decl may already have had TREE_CONSTANT computed.
6251 We still remove the attribute so that assembler code refers
6252 to '&foo' rather than '_imp__foo'. */
6253 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6254 DECL_DLLIMPORT_P (new_tree) = 1;
6255 }
6256
6257 /* Let an inline definition silently override the external reference,
6258 but otherwise warn about attribute inconsistency. */
6259 else if (TREE_CODE (new_tree) == VAR_DECL
6260 || !DECL_DECLARED_INLINE_P (new_tree))
6261 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6262 "previous dllimport ignored", new_tree);
6263 }
6264 else
6265 delete_dllimport_p = 0;
6266
6267 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6268
6269 if (delete_dllimport_p)
6270 a = remove_attribute ("dllimport", a);
6271
6272 return a;
6273 }
6274
6275 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6276 struct attribute_spec.handler. */
6277
6278 tree
6279 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6280 bool *no_add_attrs)
6281 {
6282 tree node = *pnode;
6283 bool is_dllimport;
6284
6285 /* These attributes may apply to structure and union types being created,
6286 but otherwise should pass to the declaration involved. */
6287 if (!DECL_P (node))
6288 {
6289 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6290 | (int) ATTR_FLAG_ARRAY_NEXT))
6291 {
6292 *no_add_attrs = true;
6293 return tree_cons (name, args, NULL_TREE);
6294 }
6295 if (TREE_CODE (node) == RECORD_TYPE
6296 || TREE_CODE (node) == UNION_TYPE)
6297 {
6298 node = TYPE_NAME (node);
6299 if (!node)
6300 return NULL_TREE;
6301 }
6302 else
6303 {
6304 warning (OPT_Wattributes, "%qE attribute ignored",
6305 name);
6306 *no_add_attrs = true;
6307 return NULL_TREE;
6308 }
6309 }
6310
6311 if (TREE_CODE (node) != FUNCTION_DECL
6312 && TREE_CODE (node) != VAR_DECL
6313 && TREE_CODE (node) != TYPE_DECL)
6314 {
6315 *no_add_attrs = true;
6316 warning (OPT_Wattributes, "%qE attribute ignored",
6317 name);
6318 return NULL_TREE;
6319 }
6320
6321 if (TREE_CODE (node) == TYPE_DECL
6322 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6323 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6324 {
6325 *no_add_attrs = true;
6326 warning (OPT_Wattributes, "%qE attribute ignored",
6327 name);
6328 return NULL_TREE;
6329 }
6330
6331 is_dllimport = is_attribute_p ("dllimport", name);
6332
6333 /* Report error on dllimport ambiguities seen now before they cause
6334 any damage. */
6335 if (is_dllimport)
6336 {
6337 /* Honor any target-specific overrides. */
6338 if (!targetm.valid_dllimport_attribute_p (node))
6339 *no_add_attrs = true;
6340
6341 else if (TREE_CODE (node) == FUNCTION_DECL
6342 && DECL_DECLARED_INLINE_P (node))
6343 {
6344 warning (OPT_Wattributes, "inline function %q+D declared as "
6345 " dllimport: attribute ignored", node);
6346 *no_add_attrs = true;
6347 }
6348 /* Like MS, treat definition of dllimported variables and
6349 non-inlined functions on declaration as syntax errors. */
6350 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6351 {
6352 error ("function %q+D definition is marked dllimport", node);
6353 *no_add_attrs = true;
6354 }
6355
6356 else if (TREE_CODE (node) == VAR_DECL)
6357 {
6358 if (DECL_INITIAL (node))
6359 {
6360 error ("variable %q+D definition is marked dllimport",
6361 node);
6362 *no_add_attrs = true;
6363 }
6364
6365 /* `extern' needn't be specified with dllimport.
6366 Specify `extern' now and hope for the best. Sigh. */
6367 DECL_EXTERNAL (node) = 1;
6368 /* Also, implicitly give global scope to dllimport'd variables
6369 declared within a function, unless they are declared static. */
6370 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6371 TREE_PUBLIC (node) = 1;
6372 }
6373
6374 if (*no_add_attrs == false)
6375 DECL_DLLIMPORT_P (node) = 1;
6376 }
6377 else if (TREE_CODE (node) == FUNCTION_DECL
6378 && DECL_DECLARED_INLINE_P (node)
6379 && flag_keep_inline_dllexport)
6380 /* An exported function, even if inline, must be emitted. */
6381 DECL_EXTERNAL (node) = 0;
6382
6383 /* Report error if symbol is not accessible at global scope. */
6384 if (!TREE_PUBLIC (node)
6385 && (TREE_CODE (node) == VAR_DECL
6386 || TREE_CODE (node) == FUNCTION_DECL))
6387 {
6388 error ("external linkage required for symbol %q+D because of "
6389 "%qE attribute", node, name);
6390 *no_add_attrs = true;
6391 }
6392
6393 /* A dllexport'd entity must have default visibility so that other
6394 program units (shared libraries or the main executable) can see
6395 it. A dllimport'd entity must have default visibility so that
6396 the linker knows that undefined references within this program
6397 unit can be resolved by the dynamic linker. */
6398 if (!*no_add_attrs)
6399 {
6400 if (DECL_VISIBILITY_SPECIFIED (node)
6401 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6402 error ("%qE implies default visibility, but %qD has already "
6403 "been declared with a different visibility",
6404 name, node);
6405 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6406 DECL_VISIBILITY_SPECIFIED (node) = 1;
6407 }
6408
6409 return NULL_TREE;
6410 }
6411
6412 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6413 \f
6414 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6415 of the various TYPE_QUAL values. */
6416
6417 static void
6418 set_type_quals (tree type, int type_quals)
6419 {
6420 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6421 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6422 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6423 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6424 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6425 }
6426
6427 /* Returns true iff unqualified CAND and BASE are equivalent. */
6428
6429 bool
6430 check_base_type (const_tree cand, const_tree base)
6431 {
6432 return (TYPE_NAME (cand) == TYPE_NAME (base)
6433 /* Apparently this is needed for Objective-C. */
6434 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6435 /* Check alignment. */
6436 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6437 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6438 TYPE_ATTRIBUTES (base)));
6439 }
6440
6441 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6442
6443 bool
6444 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6445 {
6446 return (TYPE_QUALS (cand) == type_quals
6447 && check_base_type (cand, base));
6448 }
6449
6450 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6451
6452 static bool
6453 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6454 {
6455 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6456 && TYPE_NAME (cand) == TYPE_NAME (base)
6457 /* Apparently this is needed for Objective-C. */
6458 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6459 /* Check alignment. */
6460 && TYPE_ALIGN (cand) == align
6461 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6462 TYPE_ATTRIBUTES (base)));
6463 }
6464
6465 /* This function checks to see if TYPE matches the size of one of the
6466 built-in atomic types, and returns that core atomic type. */
6467
6468 static tree
6469 find_atomic_core_type (tree type)
6470 {
6471 tree base_atomic_type;
6472
6473 /* Only handle complete types. */
6474 if (TYPE_SIZE (type) == NULL_TREE)
6475 return NULL_TREE;
6476
6477 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6478 switch (type_size)
6479 {
6480 case 8:
6481 base_atomic_type = atomicQI_type_node;
6482 break;
6483
6484 case 16:
6485 base_atomic_type = atomicHI_type_node;
6486 break;
6487
6488 case 32:
6489 base_atomic_type = atomicSI_type_node;
6490 break;
6491
6492 case 64:
6493 base_atomic_type = atomicDI_type_node;
6494 break;
6495
6496 case 128:
6497 base_atomic_type = atomicTI_type_node;
6498 break;
6499
6500 default:
6501 base_atomic_type = NULL_TREE;
6502 }
6503
6504 return base_atomic_type;
6505 }
6506
6507 /* Return a version of the TYPE, qualified as indicated by the
6508 TYPE_QUALS, if one exists. If no qualified version exists yet,
6509 return NULL_TREE. */
6510
6511 tree
6512 get_qualified_type (tree type, int type_quals)
6513 {
6514 tree t;
6515
6516 if (TYPE_QUALS (type) == type_quals)
6517 return type;
6518
6519 /* Search the chain of variants to see if there is already one there just
6520 like the one we need to have. If so, use that existing one. We must
6521 preserve the TYPE_NAME, since there is code that depends on this. */
6522 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6523 if (check_qualified_type (t, type, type_quals))
6524 return t;
6525
6526 return NULL_TREE;
6527 }
6528
6529 /* Like get_qualified_type, but creates the type if it does not
6530 exist. This function never returns NULL_TREE. */
6531
6532 tree
6533 build_qualified_type (tree type, int type_quals)
6534 {
6535 tree t;
6536
6537 /* See if we already have the appropriate qualified variant. */
6538 t = get_qualified_type (type, type_quals);
6539
6540 /* If not, build it. */
6541 if (!t)
6542 {
6543 t = build_variant_type_copy (type);
6544 set_type_quals (t, type_quals);
6545
6546 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6547 {
6548 /* See if this object can map to a basic atomic type. */
6549 tree atomic_type = find_atomic_core_type (type);
6550 if (atomic_type)
6551 {
6552 /* Ensure the alignment of this type is compatible with
6553 the required alignment of the atomic type. */
6554 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6555 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6556 }
6557 }
6558
6559 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6560 /* Propagate structural equality. */
6561 SET_TYPE_STRUCTURAL_EQUALITY (t);
6562 else if (TYPE_CANONICAL (type) != type)
6563 /* Build the underlying canonical type, since it is different
6564 from TYPE. */
6565 {
6566 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6567 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6568 }
6569 else
6570 /* T is its own canonical type. */
6571 TYPE_CANONICAL (t) = t;
6572
6573 }
6574
6575 return t;
6576 }
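
/* Example: qualified variants are shared, so asking twice for the same
   qualifier set yields the same node.  Sketch (illustrative only, not
   compiled):  */
#if 0
  tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  gcc_assert (TYPE_READONLY (cint));
  /* The variant is cached on the main variant's chain, so a second
     request returns the identical tree.  */
  gcc_assert (build_qualified_type (integer_type_node, TYPE_QUAL_CONST)
	      == cint);
  /* The unqualified type is still the main variant.  */
  gcc_assert (TYPE_MAIN_VARIANT (cint) == integer_type_node);
#endif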
6577
6578 /* Create a variant of TYPE with alignment ALIGN. */
6579
6580 tree
6581 build_aligned_type (tree type, unsigned int align)
6582 {
6583 tree t;
6584
6585 if (TYPE_PACKED (type)
6586 || TYPE_ALIGN (type) == align)
6587 return type;
6588
6589 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6590 if (check_aligned_type (t, type, align))
6591 return t;
6592
6593 t = build_variant_type_copy (type);
6594 TYPE_ALIGN (t) = align;
6595
6596 return t;
6597 }
6598
6599 /* Create a new distinct copy of TYPE. The new type is made its own
6600 MAIN_VARIANT. If TYPE requires structural equality checks, the
6601 resulting type requires structural equality checks; otherwise, its
6602 TYPE_CANONICAL points to itself. */
6603
6604 tree
6605 build_distinct_type_copy (tree type)
6606 {
6607 tree t = copy_node (type);
6608
6609 TYPE_POINTER_TO (t) = 0;
6610 TYPE_REFERENCE_TO (t) = 0;
6611
6612 /* Set the canonical type either to a new equivalence class, or
6613 propagate the need for structural equality checks. */
6614 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6615 SET_TYPE_STRUCTURAL_EQUALITY (t);
6616 else
6617 TYPE_CANONICAL (t) = t;
6618
6619 /* Make it its own variant. */
6620 TYPE_MAIN_VARIANT (t) = t;
6621 TYPE_NEXT_VARIANT (t) = 0;
6622
6623 /* We do not record methods in type copies nor variants
6624 so we do not need to keep them up to date when new method
6625 is inserted. */
6626 if (RECORD_OR_UNION_TYPE_P (t))
6627 TYPE_METHODS (t) = NULL_TREE;
6628
6629 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6630 whose TREE_TYPE is not t. This can also happen in the Ada
6631 frontend when using subtypes. */
6632
6633 return t;
6634 }
6635
6636 /* Create a new variant of TYPE, equivalent but distinct. This is so
6637 the caller can modify it. TYPE_CANONICAL for the return type will
6638 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6639 are considered equal by the language itself (or that both types
6640 require structural equality checks). */
6641
6642 tree
6643 build_variant_type_copy (tree type)
6644 {
6645 tree t, m = TYPE_MAIN_VARIANT (type);
6646
6647 t = build_distinct_type_copy (type);
6648
6649 /* Since we're building a variant, assume that it is a non-semantic
6650 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6651 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6652
6653 /* Add the new type to the chain of variants of TYPE. */
6654 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6655 TYPE_NEXT_VARIANT (m) = t;
6656 TYPE_MAIN_VARIANT (t) = m;
6657
6658 return t;
6659 }
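
/* Example: the difference between the two copies above.  A distinct copy
   starts a new equivalence class, while a variant copy stays in the
   original one.  Sketch (illustrative only, not compiled):  */
#if 0
  tree d = build_distinct_type_copy (integer_type_node);
  tree v = build_variant_type_copy (integer_type_node);

  /* The distinct copy is its own main variant and its own canonical
     type; the variant copy shares both with integer_type_node.  */
  gcc_assert (TYPE_MAIN_VARIANT (d) == d);
  gcc_assert (TYPE_CANONICAL (d) == d);
  gcc_assert (TYPE_MAIN_VARIANT (v) == integer_type_node);
  gcc_assert (TYPE_CANONICAL (v) == TYPE_CANONICAL (integer_type_node));
#endif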
6660 \f
6661 /* Return true if the FROM trees in both tree maps are equal. */
6662
6663 int
6664 tree_map_base_eq (const void *va, const void *vb)
6665 {
6666 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6667 *const b = (const struct tree_map_base *) vb;
6668 return (a->from == b->from);
6669 }
6670
6671 /* Hash a from tree in a tree_map_base. */
6672
6673 unsigned int
6674 tree_map_base_hash (const void *item)
6675 {
6676 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6677 }
6678
6679 /* Return true if this tree map structure is marked for garbage collection
6680 purposes. We simply return true if the from tree is marked, so that this
6681 structure goes away when the from tree goes away. */
6682
6683 int
6684 tree_map_base_marked_p (const void *p)
6685 {
6686 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6687 }
6688
6689 /* Hash a from tree in a tree_map. */
6690
6691 unsigned int
6692 tree_map_hash (const void *item)
6693 {
6694 return (((const struct tree_map *) item)->hash);
6695 }
6696
6697 /* Hash a from tree in a tree_decl_map. */
6698
6699 unsigned int
6700 tree_decl_map_hash (const void *item)
6701 {
6702 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6703 }
6704
6705 /* Return the initialization priority for DECL. */
6706
6707 priority_type
6708 decl_init_priority_lookup (tree decl)
6709 {
6710 symtab_node *snode = symtab_node::get (decl);
6711
6712 if (!snode)
6713 return DEFAULT_INIT_PRIORITY;
6714 return
6715 snode->get_init_priority ();
6716 }
6717
6718 /* Return the finalization priority for DECL. */
6719
6720 priority_type
6721 decl_fini_priority_lookup (tree decl)
6722 {
6723 cgraph_node *node = cgraph_node::get (decl);
6724
6725 if (!node)
6726 return DEFAULT_INIT_PRIORITY;
6727 return
6728 node->get_fini_priority ();
6729 }
6730
6731 /* Set the initialization priority for DECL to PRIORITY. */
6732
6733 void
6734 decl_init_priority_insert (tree decl, priority_type priority)
6735 {
6736 struct symtab_node *snode;
6737
6738 if (priority == DEFAULT_INIT_PRIORITY)
6739 {
6740 snode = symtab_node::get (decl);
6741 if (!snode)
6742 return;
6743 }
6744 else if (TREE_CODE (decl) == VAR_DECL)
6745 snode = varpool_node::get_create (decl);
6746 else
6747 snode = cgraph_node::get_create (decl);
6748 snode->set_init_priority (priority);
6749 }
6750
6751 /* Set the finalization priority for DECL to PRIORITY. */
6752
6753 void
6754 decl_fini_priority_insert (tree decl, priority_type priority)
6755 {
6756 struct cgraph_node *node;
6757
6758 if (priority == DEFAULT_INIT_PRIORITY)
6759 {
6760 node = cgraph_node::get (decl);
6761 if (!node)
6762 return;
6763 }
6764 else
6765 node = cgraph_node::get_create (decl);
6766 node->set_fini_priority (priority);
6767 }
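
/* Example: the priorities live on the symbol table node, not on the DECL
   itself, so the insert routines above create the symtab node on demand.
   Sketch (illustrative only, not compiled; 'ctor_decl' is a placeholder
   FUNCTION_DECL for a static constructor and 200 is just an example
   priority value):  */
#if 0
  decl_init_priority_insert (ctor_decl, 200);
  gcc_assert (decl_init_priority_lookup (ctor_decl) == 200);
#endif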
6768
6769 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6770
6771 static void
6772 print_debug_expr_statistics (void)
6773 {
6774 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6775 (long) debug_expr_for_decl->size (),
6776 (long) debug_expr_for_decl->elements (),
6777 debug_expr_for_decl->collisions ());
6778 }
6779
6780 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6781
6782 static void
6783 print_value_expr_statistics (void)
6784 {
6785 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6786 (long) value_expr_for_decl->size (),
6787 (long) value_expr_for_decl->elements (),
6788 value_expr_for_decl->collisions ());
6789 }
6790
6791 /* Lookup a debug expression for FROM, and return it if we find one. */
6792
6793 tree
6794 decl_debug_expr_lookup (tree from)
6795 {
6796 struct tree_decl_map *h, in;
6797 in.base.from = from;
6798
6799 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6800 if (h)
6801 return h->to;
6802 return NULL_TREE;
6803 }
6804
6805 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6806
6807 void
6808 decl_debug_expr_insert (tree from, tree to)
6809 {
6810 struct tree_decl_map *h;
6811
6812 h = ggc_alloc<tree_decl_map> ();
6813 h->base.from = from;
6814 h->to = to;
6815 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6816 }
6817
6818 /* Lookup a value expression for FROM, and return it if we find one. */
6819
6820 tree
6821 decl_value_expr_lookup (tree from)
6822 {
6823 struct tree_decl_map *h, in;
6824 in.base.from = from;
6825
6826 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6827 if (h)
6828 return h->to;
6829 return NULL_TREE;
6830 }
6831
6832 /* Insert a mapping FROM->TO in the value expression hashtable. */
6833
6834 void
6835 decl_value_expr_insert (tree from, tree to)
6836 {
6837 struct tree_decl_map *h;
6838
6839 h = ggc_alloc<tree_decl_map> ();
6840 h->base.from = from;
6841 h->to = to;
6842 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6843 }
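
/* Example: both hash tables above map a DECL_UID to a replacement tree;
   callers set the corresponding flag on the decl so later code knows a
   mapping exists.  Sketch (illustrative only, not compiled; 'var' and
   'repl' are placeholders):  */
#if 0
  SET_DECL_VALUE_EXPR (var, repl);	/* wrapper around the insert above  */
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  gcc_assert (DECL_VALUE_EXPR (var) == repl);	/* wrapper around the lookup  */
#endif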
6844
6845 /* Lookup a vector of debug arguments for FROM, and return it if we
6846 find one. */
6847
6848 vec<tree, va_gc> **
6849 decl_debug_args_lookup (tree from)
6850 {
6851 struct tree_vec_map *h, in;
6852
6853 if (!DECL_HAS_DEBUG_ARGS_P (from))
6854 return NULL;
6855 gcc_checking_assert (debug_args_for_decl != NULL);
6856 in.base.from = from;
6857 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6858 if (h)
6859 return &h->to;
6860 return NULL;
6861 }
6862
6863 /* Insert a mapping FROM->empty vector of debug arguments in the
6864 debug arguments hashtable. */
6865
6866 vec<tree, va_gc> **
6867 decl_debug_args_insert (tree from)
6868 {
6869 struct tree_vec_map *h;
6870 tree_vec_map **loc;
6871
6872 if (DECL_HAS_DEBUG_ARGS_P (from))
6873 return decl_debug_args_lookup (from);
6874 if (debug_args_for_decl == NULL)
6875 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6876 h = ggc_alloc<tree_vec_map> ();
6877 h->base.from = from;
6878 h->to = NULL;
6879 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6880 *loc = h;
6881 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6882 return &h->to;
6883 }
6884
6885 /* Hashing of types so that we don't make duplicates.
6886 The entry point is `type_hash_canon'. */
6887
6888 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6889 with types in the TREE_VALUE slots), by adding the hash codes
6890 of the individual types. */
6891
6892 static void
6893 type_hash_list (const_tree list, inchash::hash &hstate)
6894 {
6895 const_tree tail;
6896
6897 for (tail = list; tail; tail = TREE_CHAIN (tail))
6898 if (TREE_VALUE (tail) != error_mark_node)
6899 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6900 }
6901
6902 /* These are the Hashtable callback functions. */
6903
6904 /* Returns true iff the types are equivalent. */
6905
6906 bool
6907 type_cache_hasher::equal (type_hash *a, type_hash *b)
6908 {
6909 /* First test the things that are the same for all types. */
6910 if (a->hash != b->hash
6911 || TREE_CODE (a->type) != TREE_CODE (b->type)
6912 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6913 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6914 TYPE_ATTRIBUTES (b->type))
6915 || (TREE_CODE (a->type) != COMPLEX_TYPE
6916 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6917 return 0;
6918
6919 /* Be careful about comparing arrays before and after the element type
6920 has been completed; don't compare TYPE_ALIGN unless both types are
6921 complete. */
6922 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6923 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6924 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6925 return 0;
6926
6927 switch (TREE_CODE (a->type))
6928 {
6929 case VOID_TYPE:
6930 case COMPLEX_TYPE:
6931 case POINTER_TYPE:
6932 case REFERENCE_TYPE:
6933 case NULLPTR_TYPE:
6934 return 1;
6935
6936 case VECTOR_TYPE:
6937 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6938
6939 case ENUMERAL_TYPE:
6940 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6941 && !(TYPE_VALUES (a->type)
6942 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6943 && TYPE_VALUES (b->type)
6944 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6945 && type_list_equal (TYPE_VALUES (a->type),
6946 TYPE_VALUES (b->type))))
6947 return 0;
6948
6949 /* ... fall through ... */
6950
6951 case INTEGER_TYPE:
6952 case REAL_TYPE:
6953 case BOOLEAN_TYPE:
6954 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6955 return false;
6956 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6957 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6958 TYPE_MAX_VALUE (b->type)))
6959 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6960 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6961 TYPE_MIN_VALUE (b->type))));
6962
6963 case FIXED_POINT_TYPE:
6964 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6965
6966 case OFFSET_TYPE:
6967 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6968
6969 case METHOD_TYPE:
6970 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6971 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6972 || (TYPE_ARG_TYPES (a->type)
6973 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6974 && TYPE_ARG_TYPES (b->type)
6975 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6976 && type_list_equal (TYPE_ARG_TYPES (a->type),
6977 TYPE_ARG_TYPES (b->type)))))
6978 break;
6979 return 0;
6980 case ARRAY_TYPE:
6981 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6982
6983 case RECORD_TYPE:
6984 case UNION_TYPE:
6985 case QUAL_UNION_TYPE:
6986 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6987 || (TYPE_FIELDS (a->type)
6988 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6989 && TYPE_FIELDS (b->type)
6990 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6991 && type_list_equal (TYPE_FIELDS (a->type),
6992 TYPE_FIELDS (b->type))));
6993
6994 case FUNCTION_TYPE:
6995 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6996 || (TYPE_ARG_TYPES (a->type)
6997 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6998 && TYPE_ARG_TYPES (b->type)
6999 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7000 && type_list_equal (TYPE_ARG_TYPES (a->type),
7001 TYPE_ARG_TYPES (b->type))))
7002 break;
7003 return 0;
7004
7005 default:
7006 return 0;
7007 }
7008
7009 if (lang_hooks.types.type_hash_eq != NULL)
7010 return lang_hooks.types.type_hash_eq (a->type, b->type);
7011
7012 return 1;
7013 }
7014
7015 /* Given TYPE, and HASHCODE its hash code, return the canonical
7016 object for an identical type if one already exists.
7017 Otherwise, return TYPE, and record it as the canonical object.
7018
7019 To use this function, first create a type of the sort you want.
7020 Then compute its hash code from the fields of the type that
7021 make it different from other similar types.
7022 Then call this function and use the value. */
7023
7024 tree
7025 type_hash_canon (unsigned int hashcode, tree type)
7026 {
7027 type_hash in;
7028 type_hash **loc;
7029
7030 /* The hash table only contains main variants, so ensure that's what we're
7031 being passed. */
7032 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7033
7034 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7035 must call that routine before comparing TYPE_ALIGNs. */
7036 layout_type (type);
7037
7038 in.hash = hashcode;
7039 in.type = type;
7040
7041 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7042 if (*loc)
7043 {
7044 tree t1 = ((type_hash *) *loc)->type;
7045 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7046 if (GATHER_STATISTICS)
7047 {
7048 tree_code_counts[(int) TREE_CODE (type)]--;
7049 tree_node_counts[(int) t_kind]--;
7050 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7051 }
7052 return t1;
7053 }
7054 else
7055 {
7056 struct type_hash *h;
7057
7058 h = ggc_alloc<type_hash> ();
7059 h->hash = hashcode;
7060 h->type = type;
7061 *loc = h;
7062
7063 return type;
7064 }
7065 }
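
/* Example of the workflow described above, modelled on how constructors
   such as build_function_type use the hash table.  Sketch (illustrative
   only, not compiled; 'value_type' and 'arg_types' are placeholders):  */
#if 0
  /* 1- Build a candidate FUNCTION_TYPE node.  */
  tree t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* 2- Hash the fields that distinguish it from similar types.  */
  inchash::hash hstate;
  hstate.add_object (TYPE_HASH (value_type));
  type_hash_list (arg_types, hstate);

  /* 3- Either get back an existing identical type or register T.  */
  t = type_hash_canon (hstate.end (), t);
#endif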
7066
7067 static void
7068 print_type_hash_statistics (void)
7069 {
7070 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7071 (long) type_hash_table->size (),
7072 (long) type_hash_table->elements (),
7073 type_hash_table->collisions ());
7074 }
7075
7076 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7077 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7078 by adding the hash codes of the individual attributes. */
7079
7080 static void
7081 attribute_hash_list (const_tree list, inchash::hash &hstate)
7082 {
7083 const_tree tail;
7084
7085 for (tail = list; tail; tail = TREE_CHAIN (tail))
7086 /* ??? Do we want to add in TREE_VALUE too? */
7087 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7088 }
7089
7090 /* Given two lists of attributes, return true if list l2 is
7091 equivalent to l1. */
7092
7093 int
7094 attribute_list_equal (const_tree l1, const_tree l2)
7095 {
7096 if (l1 == l2)
7097 return 1;
7098
7099 return attribute_list_contained (l1, l2)
7100 && attribute_list_contained (l2, l1);
7101 }
7102
7103 /* Given two lists of attributes, return true if list L2 is
7104 completely contained within L1. */
7105 /* ??? This would be faster if attribute names were stored in a canonicalized
7106 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7107 must be used to show these elements are equivalent (which they are). */
7108 /* ??? It's not clear that attributes with arguments will always be handled
7109 correctly. */
7110
7111 int
7112 attribute_list_contained (const_tree l1, const_tree l2)
7113 {
7114 const_tree t1, t2;
7115
7116 /* First check the obvious, maybe the lists are identical. */
7117 if (l1 == l2)
7118 return 1;
7119
7120 /* Maybe the lists are similar. */
7121 for (t1 = l1, t2 = l2;
7122 t1 != 0 && t2 != 0
7123 && get_attribute_name (t1) == get_attribute_name (t2)
7124 && TREE_VALUE (t1) == TREE_VALUE (t2);
7125 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7126 ;
7127
7128 /* Maybe the lists are equal. */
7129 if (t1 == 0 && t2 == 0)
7130 return 1;
7131
7132 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7133 {
7134 const_tree attr;
7135 /* This CONST_CAST is okay because lookup_attribute does not
7136 modify its argument and the return value is assigned to a
7137 const_tree. */
7138 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7139 CONST_CAST_TREE (l1));
7140 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7141 attr = lookup_ident_attribute (get_attribute_name (t2),
7142 TREE_CHAIN (attr)))
7143 ;
7144
7145 if (attr == NULL_TREE)
7146 return 0;
7147 }
7148
7149 return 1;
7150 }
7151
7152 /* Given two lists of types
7153 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7154 return 1 if the lists contain the same types in the same order.
7155 Also, the TREE_PURPOSEs must match. */
7156
7157 int
7158 type_list_equal (const_tree l1, const_tree l2)
7159 {
7160 const_tree t1, t2;
7161
7162 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7163 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7164 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7165 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7166 && (TREE_TYPE (TREE_PURPOSE (t1))
7167 == TREE_TYPE (TREE_PURPOSE (t2))))))
7168 return 0;
7169
7170 return t1 == t2;
7171 }
7172
7173 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7174 given by TYPE. If the argument list accepts variable arguments,
7175 then this function counts only the ordinary arguments. */
7176
7177 int
7178 type_num_arguments (const_tree type)
7179 {
7180 int i = 0;
7181 tree t;
7182
7183 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7184 /* If the function does not take a variable number of arguments,
7185 the last element in the list will have type `void'. */
7186 if (VOID_TYPE_P (TREE_VALUE (t)))
7187 break;
7188 else
7189 ++i;
7190
7191 return i;
7192 }
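
/* Example: only the fixed arguments are counted; the terminating
   void_type_node of a non-variadic type and any '...' arguments are not.
   Sketch (illustrative only, not compiled):  */
#if 0
  /* int f (int, double) - two ordinary arguments.  */
  tree fntype = build_function_type_list (integer_type_node,
					  integer_type_node,
					  double_type_node, NULL_TREE);
  gcc_assert (type_num_arguments (fntype) == 2);
#endif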
7193
7194 /* Nonzero if integer constants T1 and T2
7195 represent the same constant value. */
7196
7197 int
7198 tree_int_cst_equal (const_tree t1, const_tree t2)
7199 {
7200 if (t1 == t2)
7201 return 1;
7202
7203 if (t1 == 0 || t2 == 0)
7204 return 0;
7205
7206 if (TREE_CODE (t1) == INTEGER_CST
7207 && TREE_CODE (t2) == INTEGER_CST
7208 && wi::to_widest (t1) == wi::to_widest (t2))
7209 return 1;
7210
7211 return 0;
7212 }
7213
7214 /* Return true if T is an INTEGER_CST whose numerical value (extended
7215 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7216
7217 bool
7218 tree_fits_shwi_p (const_tree t)
7219 {
7220 return (t != NULL_TREE
7221 && TREE_CODE (t) == INTEGER_CST
7222 && wi::fits_shwi_p (wi::to_widest (t)));
7223 }
7224
7225 /* Return true if T is an INTEGER_CST whose numerical value (extended
7226 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7227
7228 bool
7229 tree_fits_uhwi_p (const_tree t)
7230 {
7231 return (t != NULL_TREE
7232 && TREE_CODE (t) == INTEGER_CST
7233 && wi::fits_uhwi_p (wi::to_widest (t)));
7234 }
7235
7236 /* T is an INTEGER_CST whose numerical value (extended according to
7237 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7238 HOST_WIDE_INT. */
7239
7240 HOST_WIDE_INT
7241 tree_to_shwi (const_tree t)
7242 {
7243 gcc_assert (tree_fits_shwi_p (t));
7244 return TREE_INT_CST_LOW (t);
7245 }
7246
7247 /* T is an INTEGER_CST whose numerical value (extended according to
7248 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7249 HOST_WIDE_INT. */
7250
7251 unsigned HOST_WIDE_INT
7252 tree_to_uhwi (const_tree t)
7253 {
7254 gcc_assert (tree_fits_uhwi_p (t));
7255 return TREE_INT_CST_LOW (t);
7256 }
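
/* Example: the usual pattern is to test with the predicate before
   extracting, since the extraction routines assert on failure.  Sketch
   (illustrative only, not compiled; 'type' is a placeholder complete
   type):  */
#if 0
  tree size = TYPE_SIZE_UNIT (type);
  if (size && tree_fits_uhwi_p (size))
    {
      unsigned HOST_WIDE_INT bytes = tree_to_uhwi (size);
      /* ... use BYTES ... */
    }
#endif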
7257
7258 /* Return the most significant (sign) bit of T. */
7259
7260 int
7261 tree_int_cst_sign_bit (const_tree t)
7262 {
7263 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7264
7265 return wi::extract_uhwi (t, bitno, 1);
7266 }
7267
7268 /* Return an indication of the sign of the integer constant T.
7269 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7270 Note that -1 will never be returned if T's type is unsigned. */
7271
7272 int
7273 tree_int_cst_sgn (const_tree t)
7274 {
7275 if (wi::eq_p (t, 0))
7276 return 0;
7277 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7278 return 1;
7279 else if (wi::neg_p (t))
7280 return -1;
7281 else
7282 return 1;
7283 }
7284
7285 /* Return the minimum number of bits needed to represent VALUE in a
7286 signed or unsigned type; SGN says which. */
7287
7288 unsigned int
7289 tree_int_cst_min_precision (tree value, signop sgn)
7290 {
7291 /* If the value is negative, compute its negative minus 1. The latter
7292 adjustment is because the absolute value of the largest negative value
7293 is one larger than the largest positive value. This is equivalent to
7294 a bit-wise negation, so use that operation instead. */
7295
7296 if (tree_int_cst_sgn (value) < 0)
7297 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7298
7299 /* Return the number of bits needed, taking into account the fact
7300 that we need one more bit for a signed than unsigned type.
7301 If value is 0 or -1, the minimum precision is 1 no matter
7302 whether SGN is SIGNED or UNSIGNED. */
7303
7304 if (integer_zerop (value))
7305 return 1;
7306 else
7307 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7308 }
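
/* Example: for VALUE 5 (binary 101) this returns 3 bits unsigned and
   4 bits signed; for VALUE -3 it first computes ~-3 == 2 and returns
   2 + 1 == 3 signed bits.  Sketch (illustrative only, not compiled):  */
#if 0
  tree five = build_int_cst (integer_type_node, 5);
  gcc_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
#endif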
7309
7310 /* Return truthvalue of whether T1 is the same tree structure as T2.
7311 Return 1 if they are the same.
7312 Return 0 if they are understandably different.
7313 Return -1 if either contains tree structure not understood by
7314 this function. */
7315
7316 int
7317 simple_cst_equal (const_tree t1, const_tree t2)
7318 {
7319 enum tree_code code1, code2;
7320 int cmp;
7321 int i;
7322
7323 if (t1 == t2)
7324 return 1;
7325 if (t1 == 0 || t2 == 0)
7326 return 0;
7327
7328 code1 = TREE_CODE (t1);
7329 code2 = TREE_CODE (t2);
7330
7331 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7332 {
7333 if (CONVERT_EXPR_CODE_P (code2)
7334 || code2 == NON_LVALUE_EXPR)
7335 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7336 else
7337 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7338 }
7339
7340 else if (CONVERT_EXPR_CODE_P (code2)
7341 || code2 == NON_LVALUE_EXPR)
7342 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7343
7344 if (code1 != code2)
7345 return 0;
7346
7347 switch (code1)
7348 {
7349 case INTEGER_CST:
7350 return wi::to_widest (t1) == wi::to_widest (t2);
7351
7352 case REAL_CST:
7353 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7354
7355 case FIXED_CST:
7356 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7357
7358 case STRING_CST:
7359 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7360 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7361 TREE_STRING_LENGTH (t1)));
7362
7363 case CONSTRUCTOR:
7364 {
7365 unsigned HOST_WIDE_INT idx;
7366 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7367 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7368
7369 if (vec_safe_length (v1) != vec_safe_length (v2))
7370 return false;
7371
7372 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7373 /* ??? Should we handle also fields here? */
7374 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7375 return false;
7376 return true;
7377 }
7378
7379 case SAVE_EXPR:
7380 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7381
7382 case CALL_EXPR:
7383 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7384 if (cmp <= 0)
7385 return cmp;
7386 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7387 return 0;
7388 {
7389 const_tree arg1, arg2;
7390 const_call_expr_arg_iterator iter1, iter2;
7391 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7392 arg2 = first_const_call_expr_arg (t2, &iter2);
7393 arg1 && arg2;
7394 arg1 = next_const_call_expr_arg (&iter1),
7395 arg2 = next_const_call_expr_arg (&iter2))
7396 {
7397 cmp = simple_cst_equal (arg1, arg2);
7398 if (cmp <= 0)
7399 return cmp;
7400 }
7401 return arg1 == arg2;
7402 }
7403
7404 case TARGET_EXPR:
7405 /* Special case: if either target is an unallocated VAR_DECL,
7406 it means that it's going to be unified with whatever the
7407 TARGET_EXPR is really supposed to initialize, so treat it
7408 as being equivalent to anything. */
7409 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7410 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7411 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7412 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7413 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7414 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7415 cmp = 1;
7416 else
7417 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7418
7419 if (cmp <= 0)
7420 return cmp;
7421
7422 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7423
7424 case WITH_CLEANUP_EXPR:
7425 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7426 if (cmp <= 0)
7427 return cmp;
7428
7429 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7430
7431 case COMPONENT_REF:
7432 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7433 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7434
7435 return 0;
7436
7437 case VAR_DECL:
7438 case PARM_DECL:
7439 case CONST_DECL:
7440 case FUNCTION_DECL:
7441 return 0;
7442
7443 default:
7444 break;
7445 }
7446
7447 /* This general rule works for most tree codes. All exceptions should be
7448 handled above. If this is a language-specific tree code, we can't
7449 trust what might be in the operand, so say we don't know
7450 the situation. */
7451 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7452 return -1;
7453
7454 switch (TREE_CODE_CLASS (code1))
7455 {
7456 case tcc_unary:
7457 case tcc_binary:
7458 case tcc_comparison:
7459 case tcc_expression:
7460 case tcc_reference:
7461 case tcc_statement:
7462 cmp = 1;
7463 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7464 {
7465 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7466 if (cmp <= 0)
7467 return cmp;
7468 }
7469
7470 return cmp;
7471
7472 default:
7473 return -1;
7474 }
7475 }
7476
7477 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7478 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7479 than U, respectively. */
7480
7481 int
7482 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7483 {
7484 if (tree_int_cst_sgn (t) < 0)
7485 return -1;
7486 else if (!tree_fits_uhwi_p (t))
7487 return 1;
7488 else if (TREE_INT_CST_LOW (t) == u)
7489 return 0;
7490 else if (TREE_INT_CST_LOW (t) < u)
7491 return -1;
7492 else
7493 return 1;
7494 }
7495
7496 /* Return true if SIZE represents a constant size that is in bounds of
7497 what the middle-end and the backend accept (covering not more than
7498 half of the address-space). */
7499
7500 bool
7501 valid_constant_size_p (const_tree size)
7502 {
7503 if (! tree_fits_uhwi_p (size)
7504 || TREE_OVERFLOW (size)
7505 || tree_int_cst_sign_bit (size) != 0)
7506 return false;
7507 return true;
7508 }
7509
7510 /* Return the precision of the type, or for a complex or vector type the
7511 precision of the type of its elements. */
7512
7513 unsigned int
7514 element_precision (const_tree type)
7515 {
7516 enum tree_code code = TREE_CODE (type);
7517 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7518 type = TREE_TYPE (type);
7519
7520 return TYPE_PRECISION (type);
7521 }
7522
7523 /* Return true if CODE represents an associative tree code. Otherwise
7524 return false. */
7525 bool
7526 associative_tree_code (enum tree_code code)
7527 {
7528 switch (code)
7529 {
7530 case BIT_IOR_EXPR:
7531 case BIT_AND_EXPR:
7532 case BIT_XOR_EXPR:
7533 case PLUS_EXPR:
7534 case MULT_EXPR:
7535 case MIN_EXPR:
7536 case MAX_EXPR:
7537 return true;
7538
7539 default:
7540 break;
7541 }
7542 return false;
7543 }
7544
7545 /* Return true if CODE represents a commutative tree code. Otherwise
7546 return false. */
7547 bool
7548 commutative_tree_code (enum tree_code code)
7549 {
7550 switch (code)
7551 {
7552 case PLUS_EXPR:
7553 case MULT_EXPR:
7554 case MULT_HIGHPART_EXPR:
7555 case MIN_EXPR:
7556 case MAX_EXPR:
7557 case BIT_IOR_EXPR:
7558 case BIT_XOR_EXPR:
7559 case BIT_AND_EXPR:
7560 case NE_EXPR:
7561 case EQ_EXPR:
7562 case UNORDERED_EXPR:
7563 case ORDERED_EXPR:
7564 case UNEQ_EXPR:
7565 case LTGT_EXPR:
7566 case TRUTH_AND_EXPR:
7567 case TRUTH_XOR_EXPR:
7568 case TRUTH_OR_EXPR:
7569 case WIDEN_MULT_EXPR:
7570 case VEC_WIDEN_MULT_HI_EXPR:
7571 case VEC_WIDEN_MULT_LO_EXPR:
7572 case VEC_WIDEN_MULT_EVEN_EXPR:
7573 case VEC_WIDEN_MULT_ODD_EXPR:
7574 return true;
7575
7576 default:
7577 break;
7578 }
7579 return false;
7580 }
7581
7582 /* Return true if CODE represents a ternary tree code for which the
7583 first two operands are commutative. Otherwise return false. */
7584 bool
7585 commutative_ternary_tree_code (enum tree_code code)
7586 {
7587 switch (code)
7588 {
7589 case WIDEN_MULT_PLUS_EXPR:
7590 case WIDEN_MULT_MINUS_EXPR:
7591 case DOT_PROD_EXPR:
7592 case FMA_EXPR:
7593 return true;
7594
7595 default:
7596 break;
7597 }
7598 return false;
7599 }
7600
7601 namespace inchash
7602 {
7603
7604 /* Generate a hash value for an expression. This can be used iteratively
7605 by passing a previous result as the HSTATE argument.
7606
7607 This function is intended to produce the same hash for expressions which
7608 would compare equal using operand_equal_p. */
7609 void
7610 add_expr (const_tree t, inchash::hash &hstate)
7611 {
7612 int i;
7613 enum tree_code code;
7614 enum tree_code_class tclass;
7615
7616 if (t == NULL_TREE)
7617 {
7618 hstate.merge_hash (0);
7619 return;
7620 }
7621
7622 code = TREE_CODE (t);
7623
7624 switch (code)
7625 {
7626 /* Alas, constants aren't shared, so we can't rely on pointer
7627 identity. */
7628 case VOID_CST:
7629 hstate.merge_hash (0);
7630 return;
7631 case INTEGER_CST:
7632 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7633 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7634 return;
7635 case REAL_CST:
7636 {
7637 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7638 hstate.merge_hash (val2);
7639 return;
7640 }
7641 case FIXED_CST:
7642 {
7643 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7644 hstate.merge_hash (val2);
7645 return;
7646 }
7647 case STRING_CST:
7648 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7649 return;
7650 case COMPLEX_CST:
7651 inchash::add_expr (TREE_REALPART (t), hstate);
7652 inchash::add_expr (TREE_IMAGPART (t), hstate);
7653 return;
7654 case VECTOR_CST:
7655 {
7656 unsigned i;
7657 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7658 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7659 return;
7660 }
7661 case SSA_NAME:
7662 /* We can just compare by pointer. */
7663 hstate.add_wide_int (SSA_NAME_VERSION (t));
7664 return;
7665 case PLACEHOLDER_EXPR:
7666 /* The node itself doesn't matter. */
7667 return;
7668 case TREE_LIST:
7669 /* A list of expressions, for a CALL_EXPR or as the elements of a
7670 VECTOR_CST. */
7671 for (; t; t = TREE_CHAIN (t))
7672 inchash::add_expr (TREE_VALUE (t), hstate);
7673 return;
7674 case CONSTRUCTOR:
7675 {
7676 unsigned HOST_WIDE_INT idx;
7677 tree field, value;
7678 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7679 {
7680 inchash::add_expr (field, hstate);
7681 inchash::add_expr (value, hstate);
7682 }
7683 return;
7684 }
7685 case FUNCTION_DECL:
7686 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7687 Otherwise nodes that compare equal according to operand_equal_p might
7688 get different hash codes. However, don't do this for machine specific
7689 or front end builtins, since the function code is overloaded in those
7690 cases. */
7691 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7692 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7693 {
7694 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7695 code = TREE_CODE (t);
7696 }
7697 /* FALL THROUGH */
7698 default:
7699 tclass = TREE_CODE_CLASS (code);
7700
7701 if (tclass == tcc_declaration)
7702 {
7703 /* DECLs have a unique ID. */
7704 hstate.add_wide_int (DECL_UID (t));
7705 }
7706 else
7707 {
7708 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7709
7710 hstate.add_object (code);
7711
7712 /* Don't hash the type, that can lead to having nodes which
7713 compare equal according to operand_equal_p, but which
7714 have different hash codes. */
7715 if (CONVERT_EXPR_CODE_P (code)
7716 || code == NON_LVALUE_EXPR)
7717 {
7718 /* Make sure to include signedness in the hash computation. */
7719 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7720 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7721 }
7722
7723 else if (commutative_tree_code (code))
7724 {
7725 /* It's a commutative expression. We want to hash it the same
7726 however it appears. We do this by first hashing both operands
7727 and then rehashing based on the order of their independent
7728 hashes. */
7729 inchash::hash one, two;
7730 inchash::add_expr (TREE_OPERAND (t, 0), one);
7731 inchash::add_expr (TREE_OPERAND (t, 1), two);
7732 hstate.add_commutative (one, two);
7733 }
7734 else
7735 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7736 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7737 }
7738 return;
7739 }
7740 }
7741
7742 }
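
/* Example: because commutative operands are hashed order-independently,
   'a + b' and 'b + a' get the same hash, matching what operand_equal_p
   considers equal.  Sketch (illustrative only, not compiled; 'a' and 'b'
   are placeholder operands of integer type):  */
#if 0
  inchash::hash h1, h2;
  inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, a, b), h1);
  inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, b, a), h2);
  gcc_assert (h1.end () == h2.end ());
#endif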
7743
7744 /* Constructors for pointer, array and function types.
7745 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7746 constructed by language-dependent code, not here.) */
7747
7748 /* Construct, lay out and return the type of pointers to TO_TYPE with
7749 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7750 reference all of memory. If such a type has already been
7751 constructed, reuse it. */
7752
7753 tree
7754 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7755 bool can_alias_all)
7756 {
7757 tree t;
7758 bool could_alias = can_alias_all;
7759
7760 if (to_type == error_mark_node)
7761 return error_mark_node;
7762
7763 /* If the pointed-to type has the may_alias attribute set, force
7764 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7765 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7766 can_alias_all = true;
7767
7768 /* In some cases, languages will have things that aren't a POINTER_TYPE
7769 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7770 In that case, return that type without regard to the rest of our
7771 operands.
7772
7773 ??? This is a kludge, but consistent with the way this function has
7774 always operated and there doesn't seem to be a good way to avoid this
7775 at the moment. */
7776 if (TYPE_POINTER_TO (to_type) != 0
7777 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7778 return TYPE_POINTER_TO (to_type);
7779
7780 /* First, if we already have a type for pointers to TO_TYPE and it's
7781 the proper mode, use it. */
7782 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7783 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7784 return t;
7785
7786 t = make_node (POINTER_TYPE);
7787
7788 TREE_TYPE (t) = to_type;
7789 SET_TYPE_MODE (t, mode);
7790 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7791 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7792 TYPE_POINTER_TO (to_type) = t;
7793
7794 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7795 SET_TYPE_STRUCTURAL_EQUALITY (t);
7796 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7797 TYPE_CANONICAL (t)
7798 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7799 mode, false);
7800
7801 /* Lay out the type. This function has many callers that are concerned
7802 with expression-construction, and this simplifies them all. */
7803 layout_type (t);
7804
7805 return t;
7806 }
7807
7808 /* By default build pointers in ptr_mode. */
7809
7810 tree
7811 build_pointer_type (tree to_type)
7812 {
7813 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7814 : TYPE_ADDR_SPACE (to_type);
7815 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7816 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7817 }
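
/* A minimal illustrative sketch (the helper name example_int_pointer_types
   is hypothetical) of how a caller might use build_pointer_type to obtain
   "int *" and "int **".  Repeated requests return the cached node found on
   the TYPE_POINTER_TO chain.  */

static tree ATTRIBUTE_UNUSED
example_int_pointer_types (void)
{
  tree int_ptr = build_pointer_type (integer_type_node);   /* int *   */
  tree int_ptr_ptr = build_pointer_type (int_ptr);         /* int **  */
  gcc_assert (build_pointer_type (integer_type_node) == int_ptr);
  return int_ptr_ptr;
}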
7818
7819 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7820
7821 tree
7822 build_reference_type_for_mode (tree to_type, machine_mode mode,
7823 bool can_alias_all)
7824 {
7825 tree t;
7826 bool could_alias = can_alias_all;
7827
7828 if (to_type == error_mark_node)
7829 return error_mark_node;
7830
7831 /* If the pointed-to type has the may_alias attribute set, force
7832 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7833 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7834 can_alias_all = true;
7835
7836 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7837 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7838 In that case, return that type without regard to the rest of our
7839 operands.
7840
7841 ??? This is a kludge, but consistent with the way this function has
7842 always operated and there doesn't seem to be a good way to avoid this
7843 at the moment. */
7844 if (TYPE_REFERENCE_TO (to_type) != 0
7845 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7846 return TYPE_REFERENCE_TO (to_type);
7847
7848 /* First, if we already have a type for pointers to TO_TYPE and it's
7849 the proper mode, use it. */
7850 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7851 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7852 return t;
7853
7854 t = make_node (REFERENCE_TYPE);
7855
7856 TREE_TYPE (t) = to_type;
7857 SET_TYPE_MODE (t, mode);
7858 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7859 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7860 TYPE_REFERENCE_TO (to_type) = t;
7861
7862 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7863 SET_TYPE_STRUCTURAL_EQUALITY (t);
7864 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7865 TYPE_CANONICAL (t)
7866 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7867 mode, false);
7868
7869 layout_type (t);
7870
7871 return t;
7872 }
7873
7874
7875 /* Build the node for the type of references-to-TO_TYPE by default
7876 in ptr_mode. */
7877
7878 tree
7879 build_reference_type (tree to_type)
7880 {
7881 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7882 : TYPE_ADDR_SPACE (to_type);
7883 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7884 return build_reference_type_for_mode (to_type, pointer_mode, false);
7885 }
7886
7887 #define MAX_INT_CACHED_PREC \
7888 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7889 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7890
7891 /* Builds a signed or unsigned integer type of precision PRECISION.
7892 Used for C bitfields whose precision does not match that of
7893 built-in target types. */
7894 tree
7895 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7896 int unsignedp)
7897 {
7898 tree itype, ret;
7899
7900 if (unsignedp)
7901 unsignedp = MAX_INT_CACHED_PREC + 1;
7902
7903 if (precision <= MAX_INT_CACHED_PREC)
7904 {
7905 itype = nonstandard_integer_type_cache[precision + unsignedp];
7906 if (itype)
7907 return itype;
7908 }
7909
7910 itype = make_node (INTEGER_TYPE);
7911 TYPE_PRECISION (itype) = precision;
7912
7913 if (unsignedp)
7914 fixup_unsigned_type (itype);
7915 else
7916 fixup_signed_type (itype);
7917
7918 ret = itype;
7919 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7920 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7921 if (precision <= MAX_INT_CACHED_PREC)
7922 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7923
7924 return ret;
7925 }
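
/* A minimal illustrative sketch (the helper name is hypothetical) of building
   a 24-bit unsigned integer type, as a front end might for a 24-bit
   bit-field.  Precisions up to MAX_INT_CACHED_PREC are cached, so a second
   request yields the same node.  */

static tree ATTRIBUTE_UNUSED
example_uint24_type (void)
{
  tree uint24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_assert (TYPE_PRECISION (uint24) == 24 && TYPE_UNSIGNED (uint24));
  return uint24;
}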
7926
7927 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7928 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7929 is true, reuse such a type that has already been constructed. */
7930
7931 static tree
7932 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7933 {
7934 tree itype = make_node (INTEGER_TYPE);
7935 inchash::hash hstate;
7936
7937 TREE_TYPE (itype) = type;
7938
7939 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7940 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7941
7942 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7943 SET_TYPE_MODE (itype, TYPE_MODE (type));
7944 TYPE_SIZE (itype) = TYPE_SIZE (type);
7945 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7946 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7947 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7948
7949 if (!shared)
7950 return itype;
7951
7952 if ((TYPE_MIN_VALUE (itype)
7953 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7954 || (TYPE_MAX_VALUE (itype)
7955 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7956 {
7957 /* Since we cannot reliably merge this type, we need to compare it using
7958 structural equality checks. */
7959 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7960 return itype;
7961 }
7962
7963 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7964 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7965 hstate.merge_hash (TYPE_HASH (type));
7966 itype = type_hash_canon (hstate.end (), itype);
7967
7968 return itype;
7969 }
7970
7971 /* Wrapper around build_range_type_1 with SHARED set to true. */
7972
7973 tree
7974 build_range_type (tree type, tree lowval, tree highval)
7975 {
7976 return build_range_type_1 (type, lowval, highval, true);
7977 }
7978
7979 /* Wrapper around build_range_type_1 with SHARED set to false. */
7980
7981 tree
7982 build_nonshared_range_type (tree type, tree lowval, tree highval)
7983 {
7984 return build_range_type_1 (type, lowval, highval, false);
7985 }
7986
7987 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7988 MAXVAL should be the maximum value in the domain
7989 (one less than the length of the array).
7990
7991 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7992 We don't enforce this limit; that is up to the caller (e.g. a language front end).
7993 The limit exists because the result is a signed type and we don't handle
7994 sizes that use more than one HOST_WIDE_INT. */
7995
7996 tree
7997 build_index_type (tree maxval)
7998 {
7999 return build_range_type (sizetype, size_zero_node, maxval);
8000 }
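
/* A minimal illustrative sketch (the helper name is hypothetical): the
   TYPE_DOMAIN suitable for a ten-element array is the sizetype range
   [0, 9].  */

static tree ATTRIBUTE_UNUSED
example_index_type_for_ten_elements (void)
{
  tree domain = build_index_type (size_int (9));
  gcc_assert (integer_zerop (TYPE_MIN_VALUE (domain)));
  return domain;
}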
8001
8002 /* Return true if the debug information for TYPE, a subtype, should be emitted
8003 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8004 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8005 debug info and doesn't reflect the source code. */
8006
8007 bool
8008 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8009 {
8010 tree base_type = TREE_TYPE (type), low, high;
8011
8012 /* Subrange types have a base type which is an integral type. */
8013 if (!INTEGRAL_TYPE_P (base_type))
8014 return false;
8015
8016 /* Get the real bounds of the subtype. */
8017 if (lang_hooks.types.get_subrange_bounds)
8018 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8019 else
8020 {
8021 low = TYPE_MIN_VALUE (type);
8022 high = TYPE_MAX_VALUE (type);
8023 }
8024
8025 /* If the type and its base type have the same representation and the same
8026 name, then the type is not a subrange but a copy of the base type. */
8027 if ((TREE_CODE (base_type) == INTEGER_TYPE
8028 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8029 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8030 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8031 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8032 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8033 return false;
8034
8035 if (lowval)
8036 *lowval = low;
8037 if (highval)
8038 *highval = high;
8039 return true;
8040 }
8041
8042 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8043 and number of elements specified by the range of values of INDEX_TYPE.
8044 If SHARED is true, reuse such a type that has already been constructed. */
8045
8046 static tree
8047 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8048 {
8049 tree t;
8050
8051 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8052 {
8053 error ("arrays of functions are not meaningful");
8054 elt_type = integer_type_node;
8055 }
8056
8057 t = make_node (ARRAY_TYPE);
8058 TREE_TYPE (t) = elt_type;
8059 TYPE_DOMAIN (t) = index_type;
8060 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8061 layout_type (t);
8062
8063 /* If the element type is incomplete at this point we get marked for
8064 structural equality. Do not record these types in the canonical
8065 type hashtable. */
8066 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8067 return t;
8068
8069 if (shared)
8070 {
8071 inchash::hash hstate;
8072 hstate.add_object (TYPE_HASH (elt_type));
8073 if (index_type)
8074 hstate.add_object (TYPE_HASH (index_type));
8075 t = type_hash_canon (hstate.end (), t);
8076 }
8077
8078 if (TYPE_CANONICAL (t) == t)
8079 {
8080 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8081 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8082 SET_TYPE_STRUCTURAL_EQUALITY (t);
8083 else if (TYPE_CANONICAL (elt_type) != elt_type
8084 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8085 TYPE_CANONICAL (t)
8086 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8087 index_type
8088 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8089 shared);
8090 }
8091
8092 return t;
8093 }
8094
8095 /* Wrapper around build_array_type_1 with SHARED set to true. */
8096
8097 tree
8098 build_array_type (tree elt_type, tree index_type)
8099 {
8100 return build_array_type_1 (elt_type, index_type, true);
8101 }
8102
8103 /* Wrapper around build_array_type_1 with SHARED set to false. */
8104
8105 tree
8106 build_nonshared_array_type (tree elt_type, tree index_type)
8107 {
8108 return build_array_type_1 (elt_type, index_type, false);
8109 }
8110
8111 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8112 sizetype. */
8113
8114 tree
8115 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8116 {
8117 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8118 }
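
/* A minimal illustrative sketch (the helper name is hypothetical): "int[10]"
   built either from an explicit index type or via the nelts wrapper; both
   routes should yield the same shared node.  */

static tree ATTRIBUTE_UNUSED
example_int_array_10 (void)
{
  tree a1 = build_array_type (integer_type_node,
                              build_index_type (size_int (9)));
  tree a2 = build_array_type_nelts (integer_type_node, 10);
  gcc_assert (TREE_CODE (a1) == ARRAY_TYPE && TREE_CODE (a2) == ARRAY_TYPE);
  return a2;
}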
8119
8120 /* Strip away all ARRAY_TYPE wrappers from TYPE and return the innermost
8121 non-array element type. */
8122
8123 tree
8124 strip_array_types (tree type)
8125 {
8126 while (TREE_CODE (type) == ARRAY_TYPE)
8127 type = TREE_TYPE (type);
8128
8129 return type;
8130 }
8131
8132 /* Computes the canonical argument types from the argument type list
8133 ARGTYPES.
8134
8135 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8136 on entry to this function, or if any of the ARGTYPES are
8137 structural.
8138
8139 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8140 true on entry to this function, or if any of the ARGTYPES are
8141 non-canonical.
8142
8143 Returns a canonical argument list, which may be ARGTYPES when the
8144 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8145 true) or would not differ from ARGTYPES. */
8146
8147 static tree
8148 maybe_canonicalize_argtypes (tree argtypes,
8149 bool *any_structural_p,
8150 bool *any_noncanonical_p)
8151 {
8152 tree arg;
8153 bool any_noncanonical_argtypes_p = false;
8154
8155 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8156 {
8157 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8158 /* Fail gracefully by stating that the type is structural. */
8159 *any_structural_p = true;
8160 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8161 *any_structural_p = true;
8162 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8163 || TREE_PURPOSE (arg))
8164 /* If the argument has a default argument, we consider it
8165 non-canonical even though the type itself is canonical.
8166 That way, different variants of function and method types
8167 with default arguments will all point to the variant with
8168 no defaults as their canonical type. */
8169 any_noncanonical_argtypes_p = true;
8170 }
8171
8172 if (*any_structural_p)
8173 return argtypes;
8174
8175 if (any_noncanonical_argtypes_p)
8176 {
8177 /* Build the canonical list of argument types. */
8178 tree canon_argtypes = NULL_TREE;
8179 bool is_void = false;
8180
8181 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8182 {
8183 if (arg == void_list_node)
8184 is_void = true;
8185 else
8186 canon_argtypes = tree_cons (NULL_TREE,
8187 TYPE_CANONICAL (TREE_VALUE (arg)),
8188 canon_argtypes);
8189 }
8190
8191 canon_argtypes = nreverse (canon_argtypes);
8192 if (is_void)
8193 canon_argtypes = chainon (canon_argtypes, void_list_node);
8194
8195 /* There is a non-canonical type. */
8196 *any_noncanonical_p = true;
8197 return canon_argtypes;
8198 }
8199
8200 /* The canonical argument types are the same as ARGTYPES. */
8201 return argtypes;
8202 }
8203
8204 /* Construct, lay out and return
8205 the type of functions returning type VALUE_TYPE
8206 given arguments of types ARG_TYPES.
8207 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8208 are data type nodes for the arguments of the function.
8209 If such a type has already been constructed, reuse it. */
8210
8211 tree
8212 build_function_type (tree value_type, tree arg_types)
8213 {
8214 tree t;
8215 inchash::hash hstate;
8216 bool any_structural_p, any_noncanonical_p;
8217 tree canon_argtypes;
8218
8219 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8220 {
8221 error ("function return type cannot be function");
8222 value_type = integer_type_node;
8223 }
8224
8225 /* Make a node of the sort we want. */
8226 t = make_node (FUNCTION_TYPE);
8227 TREE_TYPE (t) = value_type;
8228 TYPE_ARG_TYPES (t) = arg_types;
8229
8230 /* If we already have such a type, use the old one. */
8231 hstate.add_object (TYPE_HASH (value_type));
8232 type_hash_list (arg_types, hstate);
8233 t = type_hash_canon (hstate.end (), t);
8234
8235 /* Set up the canonical type. */
8236 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8237 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8238 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8239 &any_structural_p,
8240 &any_noncanonical_p);
8241 if (any_structural_p)
8242 SET_TYPE_STRUCTURAL_EQUALITY (t);
8243 else if (any_noncanonical_p)
8244 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8245 canon_argtypes);
8246
8247 if (!COMPLETE_TYPE_P (t))
8248 layout_type (t);
8249 return t;
8250 }
8251
8252 /* Build a function type. The RETURN_TYPE is the type returned by the
8253 function. If VAARGS is set, no void_type_node is appended to the
8254 list. ARGP must always be terminated by a NULL_TREE. */
8255
8256 static tree
8257 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8258 {
8259 tree t, args, last;
8260
8261 t = va_arg (argp, tree);
8262 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8263 args = tree_cons (NULL_TREE, t, args);
8264
8265 if (vaargs)
8266 {
8267 last = args;
8268 if (args != NULL_TREE)
8269 args = nreverse (args);
8270 gcc_assert (last != void_list_node);
8271 }
8272 else if (args == NULL_TREE)
8273 args = void_list_node;
8274 else
8275 {
8276 last = args;
8277 args = nreverse (args);
8278 TREE_CHAIN (last) = void_list_node;
8279 }
8280 args = build_function_type (return_type, args);
8281
8282 return args;
8283 }
8284
8285 /* Build a function type. The RETURN_TYPE is the type returned by the
8286 function. If additional arguments are provided, they are
8287 additional argument types. The list of argument types must always
8288 be terminated by NULL_TREE. */
8289
8290 tree
8291 build_function_type_list (tree return_type, ...)
8292 {
8293 tree args;
8294 va_list p;
8295
8296 va_start (p, return_type);
8297 args = build_function_type_list_1 (false, return_type, p);
8298 va_end (p);
8299 return args;
8300 }
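
/* A minimal illustrative sketch (the helper name is hypothetical): the type
   of a function "int f (double, char *)" built with the NULL_TREE-terminated
   helper above.  */

static tree ATTRIBUTE_UNUSED
example_int_of_double_charptr (void)
{
  tree char_ptr = build_pointer_type (char_type_node);
  return build_function_type_list (integer_type_node,
                                   double_type_node, char_ptr,
                                   NULL_TREE);
}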
8301
8302 /* Build a variable argument function type. The RETURN_TYPE is the
8303 type returned by the function. If additional arguments are provided,
8304 they are additional argument types. The list of argument types must
8305 always be terminated by NULL_TREE. */
8306
8307 tree
8308 build_varargs_function_type_list (tree return_type, ...)
8309 {
8310 tree args;
8311 va_list p;
8312
8313 va_start (p, return_type);
8314 args = build_function_type_list_1 (true, return_type, p);
8315 va_end (p);
8316
8317 return args;
8318 }
8319
8320 /* Build a function type. RETURN_TYPE is the type returned by the
8321 function; VAARGS indicates whether the function takes varargs. The
8322 function takes N named arguments, the types of which are provided in
8323 ARG_TYPES. */
8324
8325 static tree
8326 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8327 tree *arg_types)
8328 {
8329 int i;
8330 tree t = vaargs ? NULL_TREE : void_list_node;
8331
8332 for (i = n - 1; i >= 0; i--)
8333 t = tree_cons (NULL_TREE, arg_types[i], t);
8334
8335 return build_function_type (return_type, t);
8336 }
8337
8338 /* Build a function type. RETURN_TYPE is the type returned by the
8339 function. The function takes N named arguments, the types of which
8340 are provided in ARG_TYPES. */
8341
8342 tree
8343 build_function_type_array (tree return_type, int n, tree *arg_types)
8344 {
8345 return build_function_type_array_1 (false, return_type, n, arg_types);
8346 }
8347
8348 /* Build a variable argument function type. RETURN_TYPE is the type
8349 returned by the function. The function takes N named arguments, the
8350 types of which are provided in ARG_TYPES. */
8351
8352 tree
8353 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8354 {
8355 return build_function_type_array_1 (true, return_type, n, arg_types);
8356 }
8357
8358 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8359 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8360 for the method. An implicit additional parameter (of type
8361 pointer-to-BASETYPE) is added to the ARGTYPES. */
8362
8363 tree
8364 build_method_type_directly (tree basetype,
8365 tree rettype,
8366 tree argtypes)
8367 {
8368 tree t;
8369 tree ptype;
8370 inchash::hash hstate;
8371 bool any_structural_p, any_noncanonical_p;
8372 tree canon_argtypes;
8373
8374 /* Make a node of the sort we want. */
8375 t = make_node (METHOD_TYPE);
8376
8377 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8378 TREE_TYPE (t) = rettype;
8379 ptype = build_pointer_type (basetype);
8380
8381 /* The actual arglist for this function includes a "hidden" argument
8382 which is "this". Put it into the list of argument types. */
8383 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8384 TYPE_ARG_TYPES (t) = argtypes;
8385
8386 /* If we already have such a type, use the old one. */
8387 hstate.add_object (TYPE_HASH (basetype));
8388 hstate.add_object (TYPE_HASH (rettype));
8389 type_hash_list (argtypes, hstate);
8390 t = type_hash_canon (hstate.end (), t);
8391
8392 /* Set up the canonical type. */
8393 any_structural_p
8394 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8395 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8396 any_noncanonical_p
8397 = (TYPE_CANONICAL (basetype) != basetype
8398 || TYPE_CANONICAL (rettype) != rettype);
8399 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8400 &any_structural_p,
8401 &any_noncanonical_p);
8402 if (any_structural_p)
8403 SET_TYPE_STRUCTURAL_EQUALITY (t);
8404 else if (any_noncanonical_p)
8405 TYPE_CANONICAL (t)
8406 = build_method_type_directly (TYPE_CANONICAL (basetype),
8407 TYPE_CANONICAL (rettype),
8408 canon_argtypes);
8409 if (!COMPLETE_TYPE_P (t))
8410 layout_type (t);
8411
8412 return t;
8413 }
8414
8415 /* Construct, lay out and return the type of methods belonging to class
8416 BASETYPE and whose arguments and values are described by TYPE.
8417 If that type exists already, reuse it.
8418 TYPE must be a FUNCTION_TYPE node. */
8419
8420 tree
8421 build_method_type (tree basetype, tree type)
8422 {
8423 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8424
8425 return build_method_type_directly (basetype,
8426 TREE_TYPE (type),
8427 TYPE_ARG_TYPES (type));
8428 }
8429
8430 /* Construct, lay out and return the type of offsets to a value
8431 of type TYPE, within an object of type BASETYPE.
8432 If a suitable offset type exists already, reuse it. */
8433
8434 tree
8435 build_offset_type (tree basetype, tree type)
8436 {
8437 tree t;
8438 inchash::hash hstate;
8439
8440 /* Make a node of the sort we want. */
8441 t = make_node (OFFSET_TYPE);
8442
8443 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8444 TREE_TYPE (t) = type;
8445
8446 /* If we already have such a type, use the old one. */
8447 hstate.add_object (TYPE_HASH (basetype));
8448 hstate.add_object (TYPE_HASH (type));
8449 t = type_hash_canon (hstate.end (), t);
8450
8451 if (!COMPLETE_TYPE_P (t))
8452 layout_type (t);
8453
8454 if (TYPE_CANONICAL (t) == t)
8455 {
8456 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8457 || TYPE_STRUCTURAL_EQUALITY_P (type))
8458 SET_TYPE_STRUCTURAL_EQUALITY (t);
8459 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8460 || TYPE_CANONICAL (type) != type)
8461 TYPE_CANONICAL (t)
8462 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8463 TYPE_CANONICAL (type));
8464 }
8465
8466 return t;
8467 }
8468
8469 /* Create a complex type whose components are COMPONENT_TYPE. */
8470
8471 tree
8472 build_complex_type (tree component_type)
8473 {
8474 tree t;
8475 inchash::hash hstate;
8476
8477 gcc_assert (INTEGRAL_TYPE_P (component_type)
8478 || SCALAR_FLOAT_TYPE_P (component_type)
8479 || FIXED_POINT_TYPE_P (component_type));
8480
8481 /* Make a node of the sort we want. */
8482 t = make_node (COMPLEX_TYPE);
8483
8484 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8485
8486 /* If we already have such a type, use the old one. */
8487 hstate.add_object (TYPE_HASH (component_type));
8488 t = type_hash_canon (hstate.end (), t);
8489
8490 if (!COMPLETE_TYPE_P (t))
8491 layout_type (t);
8492
8493 if (TYPE_CANONICAL (t) == t)
8494 {
8495 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8496 SET_TYPE_STRUCTURAL_EQUALITY (t);
8497 else if (TYPE_CANONICAL (component_type) != component_type)
8498 TYPE_CANONICAL (t)
8499 = build_complex_type (TYPE_CANONICAL (component_type));
8500 }
8501
8502 /* We need to create a name, since complex is a fundamental type. */
8503 if (! TYPE_NAME (t))
8504 {
8505 const char *name;
8506 if (component_type == char_type_node)
8507 name = "complex char";
8508 else if (component_type == signed_char_type_node)
8509 name = "complex signed char";
8510 else if (component_type == unsigned_char_type_node)
8511 name = "complex unsigned char";
8512 else if (component_type == short_integer_type_node)
8513 name = "complex short int";
8514 else if (component_type == short_unsigned_type_node)
8515 name = "complex short unsigned int";
8516 else if (component_type == integer_type_node)
8517 name = "complex int";
8518 else if (component_type == unsigned_type_node)
8519 name = "complex unsigned int";
8520 else if (component_type == long_integer_type_node)
8521 name = "complex long int";
8522 else if (component_type == long_unsigned_type_node)
8523 name = "complex long unsigned int";
8524 else if (component_type == long_long_integer_type_node)
8525 name = "complex long long int";
8526 else if (component_type == long_long_unsigned_type_node)
8527 name = "complex long long unsigned int";
8528 else
8529 name = 0;
8530
8531 if (name != 0)
8532 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8533 get_identifier (name), t);
8534 }
8535
8536 return build_qualified_type (t, TYPE_QUALS (component_type));
8537 }
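
/* A minimal illustrative sketch (the helper name is hypothetical): "complex
   double" as built by the constructor above; the component type is recorded
   as the TREE_TYPE of the COMPLEX_TYPE node.  */

static tree ATTRIBUTE_UNUSED
example_complex_double (void)
{
  tree cd = build_complex_type (double_type_node);
  gcc_assert (TREE_CODE (cd) == COMPLEX_TYPE
              && TREE_TYPE (cd) == double_type_node);
  return cd;
}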
8538
8539 /* If TYPE is a real or complex floating-point type and the target
8540 does not directly support arithmetic on TYPE then return the wider
8541 type to be used for arithmetic on TYPE. Otherwise, return
8542 NULL_TREE. */
8543
8544 tree
8545 excess_precision_type (tree type)
8546 {
8547 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8548 {
8549 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8550 switch (TREE_CODE (type))
8551 {
8552 case REAL_TYPE:
8553 switch (flt_eval_method)
8554 {
8555 case 1:
8556 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8557 return double_type_node;
8558 break;
8559 case 2:
8560 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8561 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8562 return long_double_type_node;
8563 break;
8564 default:
8565 gcc_unreachable ();
8566 }
8567 break;
8568 case COMPLEX_TYPE:
8569 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8570 return NULL_TREE;
8571 switch (flt_eval_method)
8572 {
8573 case 1:
8574 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8575 return complex_double_type_node;
8576 break;
8577 case 2:
8578 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8579 || (TYPE_MODE (TREE_TYPE (type))
8580 == TYPE_MODE (double_type_node)))
8581 return complex_long_double_type_node;
8582 break;
8583 default:
8584 gcc_unreachable ();
8585 }
8586 break;
8587 default:
8588 break;
8589 }
8590 }
8591 return NULL_TREE;
8592 }
8593 \f
8594 /* Return OP, stripped of any conversions to wider types as much as is safe.
8595 Converting the value back to OP's type makes a value equivalent to OP.
8596
8597 If FOR_TYPE is nonzero, we return a value which, if converted to
8598 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8599
8600 OP must have integer, real or enumeral type. Pointers are not allowed!
8601
8602 There are some cases where the obvious value we could return
8603 would regenerate to OP if converted to OP's type,
8604 but would not extend like OP to wider types.
8605 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8606 For example, if OP is (unsigned short)(signed char)-1,
8607 we avoid returning (signed char)-1 if FOR_TYPE is int,
8608 even though extending that to an unsigned short would regenerate OP,
8609 since the result of extending (signed char)-1 to (int)
8610 is different from (int) OP. */
8611
8612 tree
8613 get_unwidened (tree op, tree for_type)
8614 {
8615 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8616 tree type = TREE_TYPE (op);
8617 unsigned final_prec
8618 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8619 int uns
8620 = (for_type != 0 && for_type != type
8621 && final_prec > TYPE_PRECISION (type)
8622 && TYPE_UNSIGNED (type));
8623 tree win = op;
8624
8625 while (CONVERT_EXPR_P (op))
8626 {
8627 int bitschange;
8628
8629 /* TYPE_PRECISION on vector types has different meaning
8630 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8631 so avoid them here. */
8632 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8633 break;
8634
8635 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8636 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8637
8638 /* Truncations are many-one so cannot be removed.
8639 Unless we are later going to truncate down even farther. */
8640 if (bitschange < 0
8641 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8642 break;
8643
8644 /* See what's inside this conversion. If we decide to strip it,
8645 we will set WIN. */
8646 op = TREE_OPERAND (op, 0);
8647
8648 /* If we have not stripped any zero-extensions (uns is 0),
8649 we can strip any kind of extension.
8650 If we have previously stripped a zero-extension,
8651 only zero-extensions can safely be stripped.
8652 Any extension can be stripped if the bits it would produce
8653 are all going to be discarded later by truncating to FOR_TYPE. */
8654
8655 if (bitschange > 0)
8656 {
8657 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8658 win = op;
8659 /* TYPE_UNSIGNED says whether this is a zero-extension.
8660 Let's avoid computing it if it does not affect WIN
8661 and if UNS will not be needed again. */
8662 if ((uns
8663 || CONVERT_EXPR_P (op))
8664 && TYPE_UNSIGNED (TREE_TYPE (op)))
8665 {
8666 uns = 1;
8667 win = op;
8668 }
8669 }
8670 }
8671
8672 /* If we finally reach a constant see if it fits in for_type and
8673 in that case convert it. */
8674 if (for_type
8675 && TREE_CODE (win) == INTEGER_CST
8676 && TREE_TYPE (win) != for_type
8677 && int_fits_type_p (win, for_type))
8678 win = fold_convert (for_type, win);
8679
8680 return win;
8681 }
8682 \f
8683 /* Return OP or a simpler expression for a narrower value
8684 which can be sign-extended or zero-extended to give back OP.
8685 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8686 or 0 if the value should be sign-extended. */
8687
8688 tree
8689 get_narrower (tree op, int *unsignedp_ptr)
8690 {
8691 int uns = 0;
8692 int first = 1;
8693 tree win = op;
8694 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8695
8696 while (TREE_CODE (op) == NOP_EXPR)
8697 {
8698 int bitschange
8699 = (TYPE_PRECISION (TREE_TYPE (op))
8700 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8701
8702 /* Truncations are many-one so cannot be removed. */
8703 if (bitschange < 0)
8704 break;
8705
8706 /* See what's inside this conversion. If we decide to strip it,
8707 we will set WIN. */
8708
8709 if (bitschange > 0)
8710 {
8711 op = TREE_OPERAND (op, 0);
8712 /* An extension: the outermost one can be stripped,
8713 but remember whether it is zero or sign extension. */
8714 if (first)
8715 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8716 /* Otherwise, if a sign extension has been stripped,
8717 only sign extensions can now be stripped;
8718 if a zero extension has been stripped, only zero-extensions. */
8719 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8720 break;
8721 first = 0;
8722 }
8723 else /* bitschange == 0 */
8724 {
8725 /* A change in nominal type can always be stripped, but we must
8726 preserve the unsignedness. */
8727 if (first)
8728 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8729 first = 0;
8730 op = TREE_OPERAND (op, 0);
8731 /* Keep trying to narrow, but don't assign op to win if it
8732 would turn an integral type into something else. */
8733 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8734 continue;
8735 }
8736
8737 win = op;
8738 }
8739
8740 if (TREE_CODE (op) == COMPONENT_REF
8741 /* Since type_for_size always gives an integer type. */
8742 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8743 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8744 /* Ensure field is laid out already. */
8745 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8746 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8747 {
8748 unsigned HOST_WIDE_INT innerprec
8749 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8750 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8751 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8752 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8753
8754 /* We can get this structure field in a narrower type that fits it,
8755 but the resulting extension to its nominal type (a fullword type)
8756 must satisfy the same conditions as for other extensions.
8757
8758 Do this only for fields that are aligned (not bit-fields),
8759 because when bit-field insns would be needed there is no
8760 advantage in doing this. */
8761
8762 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8763 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8764 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8765 && type != 0)
8766 {
8767 if (first)
8768 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8769 win = fold_convert (type, op);
8770 }
8771 }
8772
8773 *unsignedp_ptr = uns;
8774 return win;
8775 }
8776 \f
8777 /* Returns true if integer constant C has a value that is permissible
8778 for type TYPE (an INTEGER_TYPE). */
8779
8780 bool
8781 int_fits_type_p (const_tree c, const_tree type)
8782 {
8783 tree type_low_bound, type_high_bound;
8784 bool ok_for_low_bound, ok_for_high_bound;
8785 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8786
8787 retry:
8788 type_low_bound = TYPE_MIN_VALUE (type);
8789 type_high_bound = TYPE_MAX_VALUE (type);
8790
8791 /* If at least one bound of the type is a constant integer, we can check
8792 ourselves and maybe make a decision. If no such decision is possible, but
8793 this type is a subtype, try checking against that. Otherwise, use
8794 fits_to_tree_p, which checks against the precision.
8795
8796 Compute the status for each possibly constant bound, and return
8797 immediately if we see that the constant does not fit one of them. Use
8798 the ok_for_xxx_bound flags to record whether the constant is known to
8799 fit the corresponding constant bound; otherwise the flag stays false. */
8800
8801 /* Check if c >= type_low_bound. */
8802 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8803 {
8804 if (tree_int_cst_lt (c, type_low_bound))
8805 return false;
8806 ok_for_low_bound = true;
8807 }
8808 else
8809 ok_for_low_bound = false;
8810
8811 /* Check if c <= type_high_bound. */
8812 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8813 {
8814 if (tree_int_cst_lt (type_high_bound, c))
8815 return false;
8816 ok_for_high_bound = true;
8817 }
8818 else
8819 ok_for_high_bound = false;
8820
8821 /* If the constant fits both bounds, the result is known. */
8822 if (ok_for_low_bound && ok_for_high_bound)
8823 return true;
8824
8825 /* Perform some generic filtering which may allow making a decision
8826 even if the bounds are not constant. First, negative integers
8827 never fit in unsigned types. */
8828 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8829 return false;
8830
8831 /* Second, narrower types always fit in wider ones. */
8832 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8833 return true;
8834
8835 /* Third, unsigned integers with top bit set never fit signed types. */
8836 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8837 {
8838 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8839 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8840 {
8841 /* When a tree_cst is converted to a wide-int, the precision
8842 is taken from the type. However, if the precision of the
8843 mode underneath the type is smaller than that, it is
8844 possible that the value will not fit. The test below
8845 fails if any bit is set between the sign bit of the
8846 underlying mode and the top bit of the type. */
8847 if (wi::ne_p (wi::zext (c, prec - 1), c))
8848 return false;
8849 }
8850 else if (wi::neg_p (c))
8851 return false;
8852 }
8853
8854 /* If we haven't been able to decide at this point, there is nothing more we
8855 can check ourselves here. Look at the base type if we have one and it
8856 has the same precision. */
8857 if (TREE_CODE (type) == INTEGER_TYPE
8858 && TREE_TYPE (type) != 0
8859 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8860 {
8861 type = TREE_TYPE (type);
8862 goto retry;
8863 }
8864
8865 /* Or to fits_to_tree_p, if nothing else. */
8866 return wi::fits_to_tree_p (c, type);
8867 }
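
/* A minimal illustrative sketch (the helper name is hypothetical): 255 fits
   in "unsigned char", while 256 and -1 do not.  */

static void ATTRIBUTE_UNUSED
example_int_fits_type_p (void)
{
  tree c255 = build_int_cst (integer_type_node, 255);
  tree c256 = build_int_cst (integer_type_node, 256);
  tree cm1 = build_int_cst (integer_type_node, -1);

  gcc_assert (int_fits_type_p (c255, unsigned_char_type_node));
  gcc_assert (!int_fits_type_p (c256, unsigned_char_type_node));
  gcc_assert (!int_fits_type_p (cm1, unsigned_char_type_node));
}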
8868
8869 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8870 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8871 represented (assuming two's-complement arithmetic) within the bit
8872 precision of the type are returned instead. */
8873
8874 void
8875 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8876 {
8877 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8878 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8879 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8880 else
8881 {
8882 if (TYPE_UNSIGNED (type))
8883 mpz_set_ui (min, 0);
8884 else
8885 {
8886 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8887 wi::to_mpz (mn, min, SIGNED);
8888 }
8889 }
8890
8891 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8892 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8893 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8894 else
8895 {
8896 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8897 wi::to_mpz (mn, max, TYPE_SIGN (type));
8898 }
8899 }
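
/* A minimal illustrative sketch (the helper name is hypothetical, and an
   8-bit signed char is assumed): reading the static bounds of "signed char"
   into GMP integers with plain mpz calls.  */

static void ATTRIBUTE_UNUSED
example_signed_char_bounds (void)
{
  mpz_t min, max;
  mpz_init (min);
  mpz_init (max);
  get_type_static_bounds (signed_char_type_node, min, max);
  /* With an 8-bit signed char this yields [-128, 127].  */
  gcc_assert (mpz_cmp_si (min, -128) == 0 && mpz_cmp_si (max, 127) == 0);
  mpz_clear (min);
  mpz_clear (max);
}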
8900
8901 /* Return true if VAR is an automatic variable defined in function FN. */
8902
8903 bool
8904 auto_var_in_fn_p (const_tree var, const_tree fn)
8905 {
8906 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8907 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8908 || TREE_CODE (var) == PARM_DECL)
8909 && ! TREE_STATIC (var))
8910 || TREE_CODE (var) == LABEL_DECL
8911 || TREE_CODE (var) == RESULT_DECL));
8912 }
8913
8914 /* Subprogram of following function. Called by walk_tree.
8915
8916 Return *TP if it is an automatic variable or parameter of the
8917 function passed in as DATA. */
8918
8919 static tree
8920 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8921 {
8922 tree fn = (tree) data;
8923
8924 if (TYPE_P (*tp))
8925 *walk_subtrees = 0;
8926
8927 else if (DECL_P (*tp)
8928 && auto_var_in_fn_p (*tp, fn))
8929 return *tp;
8930
8931 return NULL_TREE;
8932 }
8933
8934 /* Returns true if T is, contains, or refers to a type with variable
8935 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8936 arguments, but not the return type. If FN is nonzero, only return
8937 true if a modifier of the type or position of FN is a variable or
8938 parameter inside FN.
8939
8940 This concept is more general than that of C99 'variably modified types':
8941 in C99, a struct type is never variably modified because a VLA may not
8942 appear as a structure member. However, in GNU C, code like:
8943
8944 struct S { int i[f()]; };
8945
8946 is valid, and other languages may define similar constructs. */
8947
8948 bool
8949 variably_modified_type_p (tree type, tree fn)
8950 {
8951 tree t;
8952
8953 /* Test if T is either variable (if FN is zero) or an expression containing
8954 a variable in FN. If TYPE isn't gimplified, return true also if
8955 gimplify_one_sizepos would gimplify the expression into a local
8956 variable. */
8957 #define RETURN_TRUE_IF_VAR(T) \
8958 do { tree _t = (T); \
8959 if (_t != NULL_TREE \
8960 && _t != error_mark_node \
8961 && TREE_CODE (_t) != INTEGER_CST \
8962 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8963 && (!fn \
8964 || (!TYPE_SIZES_GIMPLIFIED (type) \
8965 && !is_gimple_sizepos (_t)) \
8966 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8967 return true; } while (0)
8968
8969 if (type == error_mark_node)
8970 return false;
8971
8972 /* If TYPE itself has variable size, it is variably modified. */
8973 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8974 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8975
8976 switch (TREE_CODE (type))
8977 {
8978 case POINTER_TYPE:
8979 case REFERENCE_TYPE:
8980 case VECTOR_TYPE:
8981 if (variably_modified_type_p (TREE_TYPE (type), fn))
8982 return true;
8983 break;
8984
8985 case FUNCTION_TYPE:
8986 case METHOD_TYPE:
8987 /* If TYPE is a function type, it is variably modified if the
8988 return type is variably modified. */
8989 if (variably_modified_type_p (TREE_TYPE (type), fn))
8990 return true;
8991 break;
8992
8993 case INTEGER_TYPE:
8994 case REAL_TYPE:
8995 case FIXED_POINT_TYPE:
8996 case ENUMERAL_TYPE:
8997 case BOOLEAN_TYPE:
8998 /* Scalar types are variably modified if their end points
8999 aren't constant. */
9000 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9001 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9002 break;
9003
9004 case RECORD_TYPE:
9005 case UNION_TYPE:
9006 case QUAL_UNION_TYPE:
9007 /* We can't see if any of the fields are variably-modified by the
9008 definition we normally use, since that would produce infinite
9009 recursion via pointers. */
9010 /* This is variably modified if some field's type is. */
9011 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9012 if (TREE_CODE (t) == FIELD_DECL)
9013 {
9014 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9015 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9016 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9017
9018 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9019 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9020 }
9021 break;
9022
9023 case ARRAY_TYPE:
9024 /* Do not call ourselves to avoid infinite recursion. This is
9025 variably modified if the element type is. */
9026 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9027 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9028 break;
9029
9030 default:
9031 break;
9032 }
9033
9034 /* The current language may have other cases to check, but in general,
9035 all other types are not variably modified. */
9036 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9037
9038 #undef RETURN_TRUE_IF_VAR
9039 }
9040
9041 /* Given a DECL or TYPE, return the scope in which it was declared, or
9042 NULL_TREE if there is no containing scope. */
9043
9044 tree
9045 get_containing_scope (const_tree t)
9046 {
9047 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9048 }
9049
9050 /* Return the innermost context enclosing DECL that is
9051 a FUNCTION_DECL, or zero if none. */
9052
9053 tree
9054 decl_function_context (const_tree decl)
9055 {
9056 tree context;
9057
9058 if (TREE_CODE (decl) == ERROR_MARK)
9059 return 0;
9060
9061 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9062 where we look up the function at runtime. Such functions always take
9063 a first argument of type 'pointer to real context'.
9064
9065 C++ should really be fixed to use DECL_CONTEXT for the real context,
9066 and use something else for the "virtual context". */
9067 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9068 context
9069 = TYPE_MAIN_VARIANT
9070 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9071 else
9072 context = DECL_CONTEXT (decl);
9073
9074 while (context && TREE_CODE (context) != FUNCTION_DECL)
9075 {
9076 if (TREE_CODE (context) == BLOCK)
9077 context = BLOCK_SUPERCONTEXT (context);
9078 else
9079 context = get_containing_scope (context);
9080 }
9081
9082 return context;
9083 }
9084
9085 /* Return the innermost context enclosing DECL that is
9086 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9087 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9088
9089 tree
9090 decl_type_context (const_tree decl)
9091 {
9092 tree context = DECL_CONTEXT (decl);
9093
9094 while (context)
9095 switch (TREE_CODE (context))
9096 {
9097 case NAMESPACE_DECL:
9098 case TRANSLATION_UNIT_DECL:
9099 return NULL_TREE;
9100
9101 case RECORD_TYPE:
9102 case UNION_TYPE:
9103 case QUAL_UNION_TYPE:
9104 return context;
9105
9106 case TYPE_DECL:
9107 case FUNCTION_DECL:
9108 context = DECL_CONTEXT (context);
9109 break;
9110
9111 case BLOCK:
9112 context = BLOCK_SUPERCONTEXT (context);
9113 break;
9114
9115 default:
9116 gcc_unreachable ();
9117 }
9118
9119 return NULL_TREE;
9120 }
9121
9122 /* CALL is a CALL_EXPR. Return the declaration for the function
9123 called, or NULL_TREE if the called function cannot be
9124 determined. */
9125
9126 tree
9127 get_callee_fndecl (const_tree call)
9128 {
9129 tree addr;
9130
9131 if (call == error_mark_node)
9132 return error_mark_node;
9133
9134 /* It's invalid to call this function with anything but a
9135 CALL_EXPR. */
9136 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9137
9138 /* The first operand to the CALL is the address of the function
9139 called. */
9140 addr = CALL_EXPR_FN (call);
9141
9142 /* If there is no function, return early. */
9143 if (addr == NULL_TREE)
9144 return NULL_TREE;
9145
9146 STRIP_NOPS (addr);
9147
9148 /* If this is a readonly function pointer, extract its initial value. */
9149 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9150 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9151 && DECL_INITIAL (addr))
9152 addr = DECL_INITIAL (addr);
9153
9154 /* If the address is just `&f' for some function `f', then we know
9155 that `f' is being called. */
9156 if (TREE_CODE (addr) == ADDR_EXPR
9157 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9158 return TREE_OPERAND (addr, 0);
9159
9160 /* We couldn't figure out what was being called. */
9161 return NULL_TREE;
9162 }
9163
9164 #define TREE_MEM_USAGE_SPACES 40
9165
9166 /* Print debugging information about tree nodes generated during the compile,
9167 and any language-specific information. */
9168
9169 void
9170 dump_tree_statistics (void)
9171 {
9172 if (GATHER_STATISTICS)
9173 {
9174 int i;
9175 int total_nodes, total_bytes;
9176 fprintf (stderr, "\nKind Nodes Bytes\n");
9177 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9178 total_nodes = total_bytes = 0;
9179 for (i = 0; i < (int) all_kinds; i++)
9180 {
9181 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9182 tree_node_counts[i], tree_node_sizes[i]);
9183 total_nodes += tree_node_counts[i];
9184 total_bytes += tree_node_sizes[i];
9185 }
9186 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9187 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9188 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9189 fprintf (stderr, "Code Nodes\n");
9190 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9191 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9192 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9193 tree_code_counts[i]);
9194 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9195 fprintf (stderr, "\n");
9196 ssanames_print_statistics ();
9197 fprintf (stderr, "\n");
9198 phinodes_print_statistics ();
9199 fprintf (stderr, "\n");
9200 }
9201 else
9202 fprintf (stderr, "(No per-node statistics)\n");
9203
9204 print_type_hash_statistics ();
9205 print_debug_expr_statistics ();
9206 print_value_expr_statistics ();
9207 lang_hooks.print_statistics ();
9208 }
9209 \f
9210 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9211
9212 /* Generate a crc32 of a byte. */
9213
9214 static unsigned
9215 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9216 {
9217 unsigned ix;
9218
9219 for (ix = bits; ix--; value <<= 1)
9220 {
9221 unsigned feedback;
9222
9223 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9224 chksum <<= 1;
9225 chksum ^= feedback;
9226 }
9227 return chksum;
9228 }
9229
9230 /* Generate a crc32 of a 32-bit unsigned. */
9231
9232 unsigned
9233 crc32_unsigned (unsigned chksum, unsigned value)
9234 {
9235 return crc32_unsigned_bits (chksum, value, 32);
9236 }
9237
9238 /* Generate a crc32 of a byte. */
9239
9240 unsigned
9241 crc32_byte (unsigned chksum, char byte)
9242 {
9243 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9244 }
9245
9246 /* Generate a crc32 of a string. */
9247
9248 unsigned
9249 crc32_string (unsigned chksum, const char *string)
9250 {
9251 do
9252 {
9253 chksum = crc32_byte (chksum, *string);
9254 }
9255 while (*string++);
9256 return chksum;
9257 }
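
/* A minimal illustrative sketch (the helper name and the values are
   hypothetical): seed with 0, mix in a string with crc32_string, then fold
   in an extra 32-bit value with crc32_unsigned.  get_file_function_name
   below uses crc32_string in this way.  */

static unsigned ATTRIBUTE_UNUSED
example_crc32_of_symbol (void)
{
  unsigned chksum = crc32_string (0, "main");
  chksum = crc32_unsigned (chksum, 0xdeadbeef);
  return chksum;
}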
9258
9259 /* P is a string that will be used in a symbol. Mask out any characters
9260 that are not valid in that context. */
9261
9262 void
9263 clean_symbol_name (char *p)
9264 {
9265 for (; *p; p++)
9266 if (! (ISALNUM (*p)
9267 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9268 || *p == '$'
9269 #endif
9270 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9271 || *p == '.'
9272 #endif
9273 ))
9274 *p = '_';
9275 }
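
/* A minimal illustrative sketch (the helper and buffer names are
   hypothetical): clean_symbol_name rewrites its argument in place, so
   "foo-bar.c" becomes "foo_bar_c" on targets that allow neither '$' nor '.'
   in labels (the '.' survives where NO_DOT_IN_LABEL is not defined).  */

static void ATTRIBUTE_UNUSED
example_clean_symbol_name (void)
{
  char buf[] = "foo-bar.c";
  clean_symbol_name (buf);
  /* buf is now "foo_bar_c", or "foo_bar.c" if '.' is valid in labels.  */
}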
9276
9277 /* For anonymous aggregate types, we need some sort of name to
9278 hold on to. In practice, this should not appear, but it should
9279 not be harmful if it does. */
9280 bool
9281 anon_aggrname_p(const_tree id_node)
9282 {
9283 #ifndef NO_DOT_IN_LABEL
9284 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9285 && IDENTIFIER_POINTER (id_node)[1] == '_');
9286 #else /* NO_DOT_IN_LABEL */
9287 #ifndef NO_DOLLAR_IN_LABEL
9288 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9289 && IDENTIFIER_POINTER (id_node)[1] == '_');
9290 #else /* NO_DOLLAR_IN_LABEL */
9291 #define ANON_AGGRNAME_PREFIX "__anon_"
9292 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9293 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9294 #endif /* NO_DOLLAR_IN_LABEL */
9295 #endif /* NO_DOT_IN_LABEL */
9296 }
9297
9298 /* Return a format for an anonymous aggregate name. */
9299 const char *
9300 anon_aggrname_format()
9301 {
9302 #ifndef NO_DOT_IN_LABEL
9303 return "._%d";
9304 #else /* NO_DOT_IN_LABEL */
9305 #ifndef NO_DOLLAR_IN_LABEL
9306 return "$_%d";
9307 #else /* NO_DOLLAR_IN_LABEL */
9308 return "__anon_%d";
9309 #endif /* NO_DOLLAR_IN_LABEL */
9310 #endif /* NO_DOT_IN_LABEL */
9311 }
9312
9313 /* Generate a name for a special-purpose function.
9314 The generated name may need to be unique across the whole link.
9315 Changes to this function may also require corresponding changes to
9316 xstrdup_mask_random.
9317 TYPE is some string to identify the purpose of this function to the
9318 linker or collect2; it must start with an uppercase letter,
9319 one of:
9320 I - for constructors
9321 D - for destructors
9322 N - for C++ anonymous namespaces
9323 F - for DWARF unwind frame information. */
9324
9325 tree
9326 get_file_function_name (const char *type)
9327 {
9328 char *buf;
9329 const char *p;
9330 char *q;
9331
9332 /* If we already have a name we know to be unique, just use that. */
9333 if (first_global_object_name)
9334 p = q = ASTRDUP (first_global_object_name);
9335 /* If the target is handling the constructors/destructors, they
9336 will be local to this file and the name is only necessary for
9337 debugging purposes.
9338 We also assign sub_I and sub_D suffixes to constructors called from
9339 the global static constructors. These are always local. */
9340 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9341 || (strncmp (type, "sub_", 4) == 0
9342 && (type[4] == 'I' || type[4] == 'D')))
9343 {
9344 const char *file = main_input_filename;
9345 if (! file)
9346 file = LOCATION_FILE (input_location);
9347 /* Just use the file's basename, because the full pathname
9348 might be quite long. */
9349 p = q = ASTRDUP (lbasename (file));
9350 }
9351 else
9352 {
9353 /* Otherwise, the name must be unique across the entire link.
9354 We don't have anything that we know to be unique to this translation
9355 unit, so use what we do have and throw in some randomness. */
9356 unsigned len;
9357 const char *name = weak_global_object_name;
9358 const char *file = main_input_filename;
9359
9360 if (! name)
9361 name = "";
9362 if (! file)
9363 file = LOCATION_FILE (input_location);
9364
9365 len = strlen (file);
9366 q = (char *) alloca (9 + 17 + len + 1);
9367 memcpy (q, file, len + 1);
9368
9369 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9370 crc32_string (0, name), get_random_seed (false));
9371
9372 p = q;
9373 }
9374
9375 clean_symbol_name (q);
9376 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9377 + strlen (type));
9378
9379 /* Set up the name of the file-level functions we may need.
9380 Use a global object (which is already required to be unique over
9381 the program) rather than the file name (which imposes extra
9382 constraints). */
9383 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9384
9385 return get_identifier (buf);
9386 }
9387 \f
9388 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9389
9390 /* Complain that the tree code of NODE does not match the expected 0
9391 terminated list of trailing codes. The trailing code list can be
9392 empty, for a more vague error message. FILE, LINE, and FUNCTION
9393 are of the caller. */
9394
9395 void
9396 tree_check_failed (const_tree node, const char *file,
9397 int line, const char *function, ...)
9398 {
9399 va_list args;
9400 const char *buffer;
9401 unsigned length = 0;
9402 enum tree_code code;
9403
9404 va_start (args, function);
9405 while ((code = (enum tree_code) va_arg (args, int)))
9406 length += 4 + strlen (get_tree_code_name (code));
9407 va_end (args);
9408 if (length)
9409 {
9410 char *tmp;
9411 va_start (args, function);
9412 length += strlen ("expected ");
9413 buffer = tmp = (char *) alloca (length);
9414 length = 0;
9415 while ((code = (enum tree_code) va_arg (args, int)))
9416 {
9417 const char *prefix = length ? " or " : "expected ";
9418
9419 strcpy (tmp + length, prefix);
9420 length += strlen (prefix);
9421 strcpy (tmp + length, get_tree_code_name (code));
9422 length += strlen (get_tree_code_name (code));
9423 }
9424 va_end (args);
9425 }
9426 else
9427 buffer = "unexpected node";
9428
9429 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9430 buffer, get_tree_code_name (TREE_CODE (node)),
9431 function, trim_filename (file), line);
9432 }
9433
9434 /* Complain that the tree code of NODE matches one of the codes in the
9435 0-terminated list of trailing codes, which it should not. FILE, LINE,
9436 and FUNCTION are of the caller. */
9437
9438 void
9439 tree_not_check_failed (const_tree node, const char *file,
9440 int line, const char *function, ...)
9441 {
9442 va_list args;
9443 char *buffer;
9444 unsigned length = 0;
9445 enum tree_code code;
9446
9447 va_start (args, function);
9448 while ((code = (enum tree_code) va_arg (args, int)))
9449 length += 4 + strlen (get_tree_code_name (code));
9450 va_end (args);
9451 va_start (args, function);
9452 buffer = (char *) alloca (length);
9453 length = 0;
9454 while ((code = (enum tree_code) va_arg (args, int)))
9455 {
9456 if (length)
9457 {
9458 strcpy (buffer + length, " or ");
9459 length += 4;
9460 }
9461 strcpy (buffer + length, get_tree_code_name (code));
9462 length += strlen (get_tree_code_name (code));
9463 }
9464 va_end (args);
9465
9466 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9467 buffer, get_tree_code_name (TREE_CODE (node)),
9468 function, trim_filename (file), line);
9469 }
9470
9471 /* Similar to tree_check_failed, except that we check for a class of tree
9472 code, given in CL. */
9473
9474 void
9475 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9476 const char *file, int line, const char *function)
9477 {
9478 internal_error
9479 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9480 TREE_CODE_CLASS_STRING (cl),
9481 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9482 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9483 }
9484
9485 /* Similar to tree_check_failed, except that instead of specifying a
9486 dozen codes, use the knowledge that they're all sequential. */
9487
9488 void
9489 tree_range_check_failed (const_tree node, const char *file, int line,
9490 const char *function, enum tree_code c1,
9491 enum tree_code c2)
9492 {
9493 char *buffer;
9494 unsigned length = 0;
9495 unsigned int c;
9496
9497 for (c = c1; c <= c2; ++c)
9498 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9499
9500 length += strlen ("expected ");
9501 buffer = (char *) alloca (length);
9502 length = 0;
9503
9504 for (c = c1; c <= c2; ++c)
9505 {
9506 const char *prefix = length ? " or " : "expected ";
9507
9508 strcpy (buffer + length, prefix);
9509 length += strlen (prefix);
9510 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9511 length += strlen (get_tree_code_name ((enum tree_code) c));
9512 }
9513
9514 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9515 buffer, get_tree_code_name (TREE_CODE (node)),
9516 function, trim_filename (file), line);
9517 }
9518
9519
9520 /* Similar to tree_check_failed, except that we check that a tree does
9521 not belong to the specified class, given in CL. */
9522
9523 void
9524 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9525 const char *file, int line, const char *function)
9526 {
9527 internal_error
9528 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9529 TREE_CODE_CLASS_STRING (cl),
9530 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9531 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9532 }
9533
9534
9535 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9536
9537 void
9538 omp_clause_check_failed (const_tree node, const char *file, int line,
9539 const char *function, enum omp_clause_code code)
9540 {
9541 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9542 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9543 function, trim_filename (file), line);
9544 }
9545
9546
9547 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9548
9549 void
9550 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9551 const char *function, enum omp_clause_code c1,
9552 enum omp_clause_code c2)
9553 {
9554 char *buffer;
9555 unsigned length = 0;
9556 unsigned int c;
9557
9558 for (c = c1; c <= c2; ++c)
9559 length += 4 + strlen (omp_clause_code_name[c]);
9560
9561 length += strlen ("expected ");
9562 buffer = (char *) alloca (length);
9563 length = 0;
9564
9565 for (c = c1; c <= c2; ++c)
9566 {
9567 const char *prefix = length ? " or " : "expected ";
9568
9569 strcpy (buffer + length, prefix);
9570 length += strlen (prefix);
9571 strcpy (buffer + length, omp_clause_code_name[c]);
9572 length += strlen (omp_clause_code_name[c]);
9573 }
9574
9575 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9576 buffer, omp_clause_code_name[TREE_CODE (node)],
9577 function, trim_filename (file), line);
9578 }
9579
9580
9581 #undef DEFTREESTRUCT
9582 #define DEFTREESTRUCT(VAL, NAME) NAME,
9583
9584 static const char *ts_enum_names[] = {
9585 #include "treestruct.def"
9586 };
9587 #undef DEFTREESTRUCT
9588
9589 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9590
9591 /* Similar to tree_class_check_failed, except that we check whether
9592 the code of NODE contains the tree structure identified by EN. */
9593
9594 void
9595 tree_contains_struct_check_failed (const_tree node,
9596 const enum tree_node_structure_enum en,
9597 const char *file, int line,
9598 const char *function)
9599 {
9600 internal_error
9601 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9602 TS_ENUM_NAME (en),
9603 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9604 }
9605
9606
9607 /* Similar to above, except that the check is for the bounds of a
9608 tree_int_cst's (dynamically sized) element vector. */
9609
9610 void
9611 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9612 const char *function)
9613 {
9614 internal_error
9615 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9616 idx + 1, len, function, trim_filename (file), line);
9617 }
9618
9619 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9620 (dynamically sized) vector. */
9621
9622 void
9623 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9624 const char *function)
9625 {
9626 internal_error
9627 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9628 idx + 1, len, function, trim_filename (file), line);
9629 }
9630
9631 /* Similar to above, except that the check is for the bounds of the operand
9632 vector of an expression node EXP. */
9633
9634 void
9635 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9636 int line, const char *function)
9637 {
9638 enum tree_code code = TREE_CODE (exp);
9639 internal_error
9640 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9641 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9642 function, trim_filename (file), line);
9643 }
9644
9645 /* Similar to above, except that the check is for the number of
9646 operands of an OMP_CLAUSE node. */
9647
9648 void
9649 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9650 int line, const char *function)
9651 {
9652 internal_error
9653 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9654 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9655 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9656 trim_filename (file), line);
9657 }
9658 #endif /* ENABLE_TREE_CHECKING */
9659 \f
9660 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9661 and mapped to the machine mode MODE. Initialize its fields and build
9662 the information necessary for debugging output. */
9663
9664 static tree
9665 make_vector_type (tree innertype, int nunits, machine_mode mode)
9666 {
9667 tree t;
9668 inchash::hash hstate;
9669
9670 t = make_node (VECTOR_TYPE);
9671 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9672 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9673 SET_TYPE_MODE (t, mode);
9674
9675 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9676 SET_TYPE_STRUCTURAL_EQUALITY (t);
9677 else if (TYPE_CANONICAL (innertype) != innertype
9678 || mode != VOIDmode)
9679 TYPE_CANONICAL (t)
9680 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9681
9682 layout_type (t);
9683
9684 hstate.add_wide_int (VECTOR_TYPE);
9685 hstate.add_wide_int (nunits);
9686 hstate.add_wide_int (mode);
9687 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9688 t = type_hash_canon (hstate.end (), t);
9689
9690 /* We have built a main variant, based on the main variant of the
9691 inner type. Use it to build the variant we return. */
9692 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9693 && TREE_TYPE (t) != innertype)
9694 return build_type_attribute_qual_variant (t,
9695 TYPE_ATTRIBUTES (innertype),
9696 TYPE_QUALS (innertype));
9697
9698 return t;
9699 }
9700
9701 static tree
9702 make_or_reuse_type (unsigned size, int unsignedp)
9703 {
9704 int i;
9705
9706 if (size == INT_TYPE_SIZE)
9707 return unsignedp ? unsigned_type_node : integer_type_node;
9708 if (size == CHAR_TYPE_SIZE)
9709 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9710 if (size == SHORT_TYPE_SIZE)
9711 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9712 if (size == LONG_TYPE_SIZE)
9713 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9714 if (size == LONG_LONG_TYPE_SIZE)
9715 return (unsignedp ? long_long_unsigned_type_node
9716 : long_long_integer_type_node);
9717
9718 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9719 if (size == int_n_data[i].bitsize
9720 && int_n_enabled_p[i])
9721 return (unsignedp ? int_n_trees[i].unsigned_type
9722 : int_n_trees[i].signed_type);
9723
9724 if (unsignedp)
9725 return make_unsigned_type (size);
9726 else
9727 return make_signed_type (size);
9728 }
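
/* As an illustration of make_or_reuse_type: on a target where INT_TYPE_SIZE
   is 32, make_or_reuse_type (32, 1) simply returns unsigned_type_node instead
   of building a fresh node; only a SIZE matching none of the standard C type
   sizes (and none of the enabled __intN sizes) falls through to
   make_signed_type/make_unsigned_type.  */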
9729
9730 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9731
9732 static tree
9733 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9734 {
9735 if (satp)
9736 {
9737 if (size == SHORT_FRACT_TYPE_SIZE)
9738 return unsignedp ? sat_unsigned_short_fract_type_node
9739 : sat_short_fract_type_node;
9740 if (size == FRACT_TYPE_SIZE)
9741 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9742 if (size == LONG_FRACT_TYPE_SIZE)
9743 return unsignedp ? sat_unsigned_long_fract_type_node
9744 : sat_long_fract_type_node;
9745 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9746 return unsignedp ? sat_unsigned_long_long_fract_type_node
9747 : sat_long_long_fract_type_node;
9748 }
9749 else
9750 {
9751 if (size == SHORT_FRACT_TYPE_SIZE)
9752 return unsignedp ? unsigned_short_fract_type_node
9753 : short_fract_type_node;
9754 if (size == FRACT_TYPE_SIZE)
9755 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9756 if (size == LONG_FRACT_TYPE_SIZE)
9757 return unsignedp ? unsigned_long_fract_type_node
9758 : long_fract_type_node;
9759 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9760 return unsignedp ? unsigned_long_long_fract_type_node
9761 : long_long_fract_type_node;
9762 }
9763
9764 return make_fract_type (size, unsignedp, satp);
9765 }
9766
9767 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9768
9769 static tree
9770 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9771 {
9772 if (satp)
9773 {
9774 if (size == SHORT_ACCUM_TYPE_SIZE)
9775 return unsignedp ? sat_unsigned_short_accum_type_node
9776 : sat_short_accum_type_node;
9777 if (size == ACCUM_TYPE_SIZE)
9778 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9779 if (size == LONG_ACCUM_TYPE_SIZE)
9780 return unsignedp ? sat_unsigned_long_accum_type_node
9781 : sat_long_accum_type_node;
9782 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9783 return unsignedp ? sat_unsigned_long_long_accum_type_node
9784 : sat_long_long_accum_type_node;
9785 }
9786 else
9787 {
9788 if (size == SHORT_ACCUM_TYPE_SIZE)
9789 return unsignedp ? unsigned_short_accum_type_node
9790 : short_accum_type_node;
9791 if (size == ACCUM_TYPE_SIZE)
9792 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9793 if (size == LONG_ACCUM_TYPE_SIZE)
9794 return unsignedp ? unsigned_long_accum_type_node
9795 : long_accum_type_node;
9796 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9797 return unsignedp ? unsigned_long_long_accum_type_node
9798 : long_long_accum_type_node;
9799 }
9800
9801 return make_accum_type (size, unsignedp, satp);
9802 }
9803
9804
9805 /* Create an atomic variant node for TYPE. This routine is called
9806 during initialization of data types to create the 5 basic atomic
9807 types. The generic build_variant_type function requires these to
9808 already be set up in order to function properly, so cannot be
9809 called from there. If ALIGN is non-zero, then ensure alignment is
9810 overridden to this value. */
9811
9812 static tree
9813 build_atomic_base (tree type, unsigned int align)
9814 {
9815 tree t;
9816
9817 /* Make sure it's not already registered. */
9818 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9819 return t;
9820
9821 t = build_variant_type_copy (type);
9822 set_type_quals (t, TYPE_QUAL_ATOMIC);
9823
9824 if (align)
9825 TYPE_ALIGN (t) = align;
9826
9827 return t;
9828 }
9829
9830 /* Create nodes for all integer types (and error_mark_node) using the sizes
9831 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9832 SHORT_DOUBLE specifies whether double should be of the same precision
9833 as float. */
9834
9835 void
9836 build_common_tree_nodes (bool signed_char, bool short_double)
9837 {
9838 int i;
9839
9840 error_mark_node = make_node (ERROR_MARK);
9841 TREE_TYPE (error_mark_node) = error_mark_node;
9842
9843 initialize_sizetypes ();
9844
9845 /* Define both `signed char' and `unsigned char'. */
9846 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9847 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9848 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9849 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9850
9851 /* Define `char', which is like either `signed char' or `unsigned char'
9852 but not the same as either. */
9853 char_type_node
9854 = (signed_char
9855 ? make_signed_type (CHAR_TYPE_SIZE)
9856 : make_unsigned_type (CHAR_TYPE_SIZE));
9857 TYPE_STRING_FLAG (char_type_node) = 1;
9858
9859 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9860 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9861 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9862 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9863 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9864 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9865 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9866 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9867
9868 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9869 {
9870 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9871 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9872 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9873 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9874
9875 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9876 && int_n_enabled_p[i])
9877 {
9878 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9879 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9880 }
9881 }
9882
9883 /* Define a boolean type. This type only represents boolean values but
9884 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9885 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9886 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9887 TYPE_PRECISION (boolean_type_node) = 1;
9888 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9889
9890 /* Define what type to use for size_t. */
9891 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9892 size_type_node = unsigned_type_node;
9893 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9894 size_type_node = long_unsigned_type_node;
9895 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9896 size_type_node = long_long_unsigned_type_node;
9897 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9898 size_type_node = short_unsigned_type_node;
9899 else
9900 {
9901 int i;
9902
9903 size_type_node = NULL_TREE;
9904 for (i = 0; i < NUM_INT_N_ENTS; i++)
9905 if (int_n_enabled_p[i])
9906 {
9907 char name[50];
9908 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9909
9910 if (strcmp (name, SIZE_TYPE) == 0)
9911 {
9912 size_type_node = int_n_trees[i].unsigned_type;
9913 }
9914 }
9915 if (size_type_node == NULL_TREE)
9916 gcc_unreachable ();
9917 }
9918
9919 /* Fill in the rest of the sized types. Reuse existing type nodes
9920 when possible. */
9921 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9922 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9923 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9924 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9925 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9926
9927 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9928 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9929 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9930 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9931 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9932
9933 /* Don't call build_qualified_type for atomics. That routine does
9934 special processing for atomics, and until they are initialized
9935 it's better not to make that call.
9936
9937 Check to see if there is a target override for atomic types. */
9938
9939 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9940 targetm.atomic_align_for_mode (QImode));
9941 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9942 targetm.atomic_align_for_mode (HImode));
9943 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9944 targetm.atomic_align_for_mode (SImode));
9945 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9946 targetm.atomic_align_for_mode (DImode));
9947 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9948 targetm.atomic_align_for_mode (TImode));
9949
9950 access_public_node = get_identifier ("public");
9951 access_protected_node = get_identifier ("protected");
9952 access_private_node = get_identifier ("private");
9953
9954 /* Define these next since types below may use them. */
9955 integer_zero_node = build_int_cst (integer_type_node, 0);
9956 integer_one_node = build_int_cst (integer_type_node, 1);
9957 integer_three_node = build_int_cst (integer_type_node, 3);
9958 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9959
9960 size_zero_node = size_int (0);
9961 size_one_node = size_int (1);
9962 bitsize_zero_node = bitsize_int (0);
9963 bitsize_one_node = bitsize_int (1);
9964 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9965
9966 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9967 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9968
9969 void_type_node = make_node (VOID_TYPE);
9970 layout_type (void_type_node);
9971
9972 pointer_bounds_type_node = targetm.chkp_bound_type ();
9973
9974 /* We are not going to have real types in C with less than byte alignment,
9975 so we might as well not have any types that claim to have it. */
9976 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9977 TYPE_USER_ALIGN (void_type_node) = 0;
9978
9979 void_node = make_node (VOID_CST);
9980 TREE_TYPE (void_node) = void_type_node;
9981
9982 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9983 layout_type (TREE_TYPE (null_pointer_node));
9984
9985 ptr_type_node = build_pointer_type (void_type_node);
9986 const_ptr_type_node
9987 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9988 fileptr_type_node = ptr_type_node;
9989
9990 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9991
9992 float_type_node = make_node (REAL_TYPE);
9993 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9994 layout_type (float_type_node);
9995
9996 double_type_node = make_node (REAL_TYPE);
9997 if (short_double)
9998 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9999 else
10000 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10001 layout_type (double_type_node);
10002
10003 long_double_type_node = make_node (REAL_TYPE);
10004 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10005 layout_type (long_double_type_node);
10006
10007 float_ptr_type_node = build_pointer_type (float_type_node);
10008 double_ptr_type_node = build_pointer_type (double_type_node);
10009 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10010 integer_ptr_type_node = build_pointer_type (integer_type_node);
10011
10012 /* Fixed size integer types. */
10013 uint16_type_node = make_or_reuse_type (16, 1);
10014 uint32_type_node = make_or_reuse_type (32, 1);
10015 uint64_type_node = make_or_reuse_type (64, 1);
10016
10017 /* Decimal float types. */
10018 dfloat32_type_node = make_node (REAL_TYPE);
10019 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10020 layout_type (dfloat32_type_node);
10021 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10022 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10023
10024 dfloat64_type_node = make_node (REAL_TYPE);
10025 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10026 layout_type (dfloat64_type_node);
10027 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10028 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10029
10030 dfloat128_type_node = make_node (REAL_TYPE);
10031 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10032 layout_type (dfloat128_type_node);
10033 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10034 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10035
10036 complex_integer_type_node = build_complex_type (integer_type_node);
10037 complex_float_type_node = build_complex_type (float_type_node);
10038 complex_double_type_node = build_complex_type (double_type_node);
10039 complex_long_double_type_node = build_complex_type (long_double_type_node);
10040
10041 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10042 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10043 sat_ ## KIND ## _type_node = \
10044 make_sat_signed_ ## KIND ## _type (SIZE); \
10045 sat_unsigned_ ## KIND ## _type_node = \
10046 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10047 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10048 unsigned_ ## KIND ## _type_node = \
10049 make_unsigned_ ## KIND ## _type (SIZE);
10050
10051 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10052 sat_ ## WIDTH ## KIND ## _type_node = \
10053 make_sat_signed_ ## KIND ## _type (SIZE); \
10054 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10055 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10056 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10057 unsigned_ ## WIDTH ## KIND ## _type_node = \
10058 make_unsigned_ ## KIND ## _type (SIZE);
10059
10060 /* Make fixed-point type nodes based on four different widths. */
10061 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10062 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10063 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10064 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10065 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10066
10067 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10068 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10069 NAME ## _type_node = \
10070 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10071 u ## NAME ## _type_node = \
10072 make_or_reuse_unsigned_ ## KIND ## _type \
10073 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10074 sat_ ## NAME ## _type_node = \
10075 make_or_reuse_sat_signed_ ## KIND ## _type \
10076 (GET_MODE_BITSIZE (MODE ## mode)); \
10077 sat_u ## NAME ## _type_node = \
10078 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10079 (GET_MODE_BITSIZE (U ## MODE ## mode));
10080
10081 /* Fixed-point type and mode nodes. */
10082 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10083 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10084 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10085 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10086 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10087 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10088 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10089 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10090 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10091 MAKE_FIXED_MODE_NODE (accum, da, DA)
10092 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10093
10094 {
10095 tree t = targetm.build_builtin_va_list ();
10096
10097 /* Many back-ends define record types without setting TYPE_NAME.
10098 If we copied the record type here, we'd keep the original
10099 record type without a name. This breaks name mangling. So,
10100 don't copy record types and let c_common_nodes_and_builtins()
10101 declare the type to be __builtin_va_list. */
10102 if (TREE_CODE (t) != RECORD_TYPE)
10103 t = build_variant_type_copy (t);
10104
10105 va_list_type_node = t;
10106 }
10107 }
10108
10109 /* Modify DECL for given flags.
10110 TM_PURE attribute is set only on types, so the function will modify
10111 DECL's type when ECF_TM_PURE is used. */
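/* For example, set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF)
   marks DECL as TREE_READONLY and TREE_NOTHROW and attaches the "leaf"
   attribute to its DECL_ATTRIBUTES.  */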
10112
10113 void
10114 set_call_expr_flags (tree decl, int flags)
10115 {
10116 if (flags & ECF_NOTHROW)
10117 TREE_NOTHROW (decl) = 1;
10118 if (flags & ECF_CONST)
10119 TREE_READONLY (decl) = 1;
10120 if (flags & ECF_PURE)
10121 DECL_PURE_P (decl) = 1;
10122 if (flags & ECF_LOOPING_CONST_OR_PURE)
10123 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10124 if (flags & ECF_NOVOPS)
10125 DECL_IS_NOVOPS (decl) = 1;
10126 if (flags & ECF_NORETURN)
10127 TREE_THIS_VOLATILE (decl) = 1;
10128 if (flags & ECF_MALLOC)
10129 DECL_IS_MALLOC (decl) = 1;
10130 if (flags & ECF_RETURNS_TWICE)
10131 DECL_IS_RETURNS_TWICE (decl) = 1;
10132 if (flags & ECF_LEAF)
10133 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10134 NULL, DECL_ATTRIBUTES (decl));
10135 if ((flags & ECF_TM_PURE) && flag_tm)
10136 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10137 /* Looping const or pure is implied by noreturn.
10138 There is currently no way to declare looping const or looping pure alone. */
10139 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10140 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10141 }
10142
10143
10144 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10145
10146 static void
10147 local_define_builtin (const char *name, tree type, enum built_in_function code,
10148 const char *library_name, int ecf_flags)
10149 {
10150 tree decl;
10151
10152 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10153 library_name, NULL_TREE);
10154 set_call_expr_flags (decl, ecf_flags);
10155
10156 set_builtin_decl (code, decl, true);
10157 }
10158
10159 /* Call this function after instantiating all builtins that the language
10160 front end cares about. This will build the rest of the builtins
10161 and internal functions that are relied upon by the tree optimizers and
10162 the middle-end. */
10163
10164 void
10165 build_common_builtin_nodes (void)
10166 {
10167 tree tmp, ftype;
10168 int ecf_flags;
10169
10170 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10171 {
10172 ftype = build_function_type (void_type_node, void_list_node);
10173 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10174 "__builtin_unreachable",
10175 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10176 | ECF_CONST);
10177 }
10178
10179 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10180 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10181 {
10182 ftype = build_function_type_list (ptr_type_node,
10183 ptr_type_node, const_ptr_type_node,
10184 size_type_node, NULL_TREE);
10185
10186 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10187 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10188 "memcpy", ECF_NOTHROW | ECF_LEAF);
10189 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10190 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10191 "memmove", ECF_NOTHROW | ECF_LEAF);
10192 }
10193
10194 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10195 {
10196 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10197 const_ptr_type_node, size_type_node,
10198 NULL_TREE);
10199 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10200 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10201 }
10202
10203 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10204 {
10205 ftype = build_function_type_list (ptr_type_node,
10206 ptr_type_node, integer_type_node,
10207 size_type_node, NULL_TREE);
10208 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10209 "memset", ECF_NOTHROW | ECF_LEAF);
10210 }
10211
10212 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10213 {
10214 ftype = build_function_type_list (ptr_type_node,
10215 size_type_node, NULL_TREE);
10216 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10217 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10218 }
10219
10220 ftype = build_function_type_list (ptr_type_node, size_type_node,
10221 size_type_node, NULL_TREE);
10222 local_define_builtin ("__builtin_alloca_with_align", ftype,
10223 BUILT_IN_ALLOCA_WITH_ALIGN,
10224 "__builtin_alloca_with_align",
10225 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10226
10227 /* If we're checking the stack, `alloca' can throw. */
10228 if (flag_stack_check)
10229 {
10230 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10231 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10232 }
10233
10234 ftype = build_function_type_list (void_type_node,
10235 ptr_type_node, ptr_type_node,
10236 ptr_type_node, NULL_TREE);
10237 local_define_builtin ("__builtin_init_trampoline", ftype,
10238 BUILT_IN_INIT_TRAMPOLINE,
10239 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10240 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10241 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10242 "__builtin_init_heap_trampoline",
10243 ECF_NOTHROW | ECF_LEAF);
10244
10245 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10246 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10247 BUILT_IN_ADJUST_TRAMPOLINE,
10248 "__builtin_adjust_trampoline",
10249 ECF_CONST | ECF_NOTHROW);
10250
10251 ftype = build_function_type_list (void_type_node,
10252 ptr_type_node, ptr_type_node, NULL_TREE);
10253 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10254 BUILT_IN_NONLOCAL_GOTO,
10255 "__builtin_nonlocal_goto",
10256 ECF_NORETURN | ECF_NOTHROW);
10257
10258 ftype = build_function_type_list (void_type_node,
10259 ptr_type_node, ptr_type_node, NULL_TREE);
10260 local_define_builtin ("__builtin_setjmp_setup", ftype,
10261 BUILT_IN_SETJMP_SETUP,
10262 "__builtin_setjmp_setup", ECF_NOTHROW);
10263
10264 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10265 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10266 BUILT_IN_SETJMP_RECEIVER,
10267 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10268
10269 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10270 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10271 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10272
10273 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10274 local_define_builtin ("__builtin_stack_restore", ftype,
10275 BUILT_IN_STACK_RESTORE,
10276 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10277
10278 /* If there's a possibility that we might use the ARM EABI, build the
10279 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10280 if (targetm.arm_eabi_unwinder)
10281 {
10282 ftype = build_function_type_list (void_type_node, NULL_TREE);
10283 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10284 BUILT_IN_CXA_END_CLEANUP,
10285 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10286 }
10287
10288 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10289 local_define_builtin ("__builtin_unwind_resume", ftype,
10290 BUILT_IN_UNWIND_RESUME,
10291 ((targetm_common.except_unwind_info (&global_options)
10292 == UI_SJLJ)
10293 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10294 ECF_NORETURN);
10295
10296 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10297 {
10298 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10299 NULL_TREE);
10300 local_define_builtin ("__builtin_return_address", ftype,
10301 BUILT_IN_RETURN_ADDRESS,
10302 "__builtin_return_address",
10303 ECF_NOTHROW);
10304 }
10305
10306 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10307 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10308 {
10309 ftype = build_function_type_list (void_type_node, ptr_type_node,
10310 ptr_type_node, NULL_TREE);
10311 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10312 local_define_builtin ("__cyg_profile_func_enter", ftype,
10313 BUILT_IN_PROFILE_FUNC_ENTER,
10314 "__cyg_profile_func_enter", 0);
10315 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10316 local_define_builtin ("__cyg_profile_func_exit", ftype,
10317 BUILT_IN_PROFILE_FUNC_EXIT,
10318 "__cyg_profile_func_exit", 0);
10319 }
10320
10321 /* The exception object and filter values from the runtime. The argument
10322 must be zero before exception lowering, i.e. from the front end. After
10323 exception lowering, it will be the region number for the exception
10324 landing pad. These functions are PURE instead of CONST to prevent
10325 them from being hoisted past the exception edge that will initialize
10326 its value in the landing pad. */
10327 ftype = build_function_type_list (ptr_type_node,
10328 integer_type_node, NULL_TREE);
10329 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10330 /* Only use TM_PURE if we have TM language support. */
10331 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10332 ecf_flags |= ECF_TM_PURE;
10333 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10334 "__builtin_eh_pointer", ecf_flags);
10335
10336 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10337 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10338 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10339 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10340
10341 ftype = build_function_type_list (void_type_node,
10342 integer_type_node, integer_type_node,
10343 NULL_TREE);
10344 local_define_builtin ("__builtin_eh_copy_values", ftype,
10345 BUILT_IN_EH_COPY_VALUES,
10346 "__builtin_eh_copy_values", ECF_NOTHROW);
10347
10348 /* Complex multiplication and division. These are handled as builtins
10349 rather than optabs because emit_library_call_value doesn't support
10350 complex. Further, we can do slightly better with folding these
10351 beasties if the real and imaginary parts of the arguments are separate. */
10352 {
10353 int mode;
10354
10355 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10356 {
10357 char mode_name_buf[4], *q;
10358 const char *p;
10359 enum built_in_function mcode, dcode;
10360 tree type, inner_type;
10361 const char *prefix = "__";
10362
10363 if (targetm.libfunc_gnu_prefix)
10364 prefix = "__gnu_";
10365
10366 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10367 if (type == NULL)
10368 continue;
10369 inner_type = TREE_TYPE (type);
10370
10371 ftype = build_function_type_list (type, inner_type, inner_type,
10372 inner_type, inner_type, NULL_TREE);
10373
10374 mcode = ((enum built_in_function)
10375 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10376 dcode = ((enum built_in_function)
10377 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10378
10379 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10380 *q = TOLOWER (*p);
10381 *q = '\0';
10382
10383 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10384 NULL);
10385 local_define_builtin (built_in_names[mcode], ftype, mcode,
10386 built_in_names[mcode],
10387 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10388
10389 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10390 NULL);
10391 local_define_builtin (built_in_names[dcode], ftype, dcode,
10392 built_in_names[dcode],
10393 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10394 }
10395 }
10396
10397 init_internal_fns ();
10398 }
10399
10400 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10401 better way.
10402
10403 If we requested a pointer to a vector, build up the pointers that
10404 we stripped off while looking for the inner type. Similarly for
10405 return values from functions.
10406
10407 The argument TYPE is the top of the chain, and BOTTOM is the
10408 new type which we will point to. */
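/* For instance, if TYPE is "float *" and BOTTOM is a V4SF vector type, the
   result is a pointer to that vector type: the POINTER_TYPE wrapper that was
   stripped while looking for the inner type is rebuilt around BOTTOM.  */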
10409
10410 tree
10411 reconstruct_complex_type (tree type, tree bottom)
10412 {
10413 tree inner, outer;
10414
10415 if (TREE_CODE (type) == POINTER_TYPE)
10416 {
10417 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10418 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10419 TYPE_REF_CAN_ALIAS_ALL (type));
10420 }
10421 else if (TREE_CODE (type) == REFERENCE_TYPE)
10422 {
10423 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10424 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10425 TYPE_REF_CAN_ALIAS_ALL (type));
10426 }
10427 else if (TREE_CODE (type) == ARRAY_TYPE)
10428 {
10429 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10430 outer = build_array_type (inner, TYPE_DOMAIN (type));
10431 }
10432 else if (TREE_CODE (type) == FUNCTION_TYPE)
10433 {
10434 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10435 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10436 }
10437 else if (TREE_CODE (type) == METHOD_TYPE)
10438 {
10439 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10440 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10441 so we must compensate by getting rid of it. */
10442 outer
10443 = build_method_type_directly
10444 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10445 inner,
10446 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10447 }
10448 else if (TREE_CODE (type) == OFFSET_TYPE)
10449 {
10450 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10451 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10452 }
10453 else
10454 return bottom;
10455
10456 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10457 TYPE_QUALS (type));
10458 }
10459
10460 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10461 the inner type. */
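/* For a vector mode such as V4SImode the number of units comes directly from
   GET_MODE_NUNITS; for an integer mode such as TImode with a 32-bit
   INNERTYPE the mode is split evenly into 128 / 32 = 4 units.  */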
10462 tree
10463 build_vector_type_for_mode (tree innertype, machine_mode mode)
10464 {
10465 int nunits;
10466
10467 switch (GET_MODE_CLASS (mode))
10468 {
10469 case MODE_VECTOR_INT:
10470 case MODE_VECTOR_FLOAT:
10471 case MODE_VECTOR_FRACT:
10472 case MODE_VECTOR_UFRACT:
10473 case MODE_VECTOR_ACCUM:
10474 case MODE_VECTOR_UACCUM:
10475 nunits = GET_MODE_NUNITS (mode);
10476 break;
10477
10478 case MODE_INT:
10479 /* Check that there are no leftover bits. */
10480 gcc_assert (GET_MODE_BITSIZE (mode)
10481 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10482
10483 nunits = GET_MODE_BITSIZE (mode)
10484 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10485 break;
10486
10487 default:
10488 gcc_unreachable ();
10489 }
10490
10491 return make_vector_type (innertype, nunits, mode);
10492 }
10493
10494 /* Similarly, but takes the inner type and number of units, which must be
10495 a power of two. */
10496
10497 tree
10498 build_vector_type (tree innertype, int nunits)
10499 {
10500 return make_vector_type (innertype, nunits, VOIDmode);
10501 }
10502
10503 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10504
10505 tree
10506 build_opaque_vector_type (tree innertype, int nunits)
10507 {
10508 tree t = make_vector_type (innertype, nunits, VOIDmode);
10509 tree cand;
10510 /* We always build the non-opaque variant before the opaque one,
10511 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10512 cand = TYPE_NEXT_VARIANT (t);
10513 if (cand
10514 && TYPE_VECTOR_OPAQUE (cand)
10515 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10516 return cand;
10517 /* Otherwise build a variant type and make sure to queue it after
10518 the non-opaque type. */
10519 cand = build_distinct_type_copy (t);
10520 TYPE_VECTOR_OPAQUE (cand) = true;
10521 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10522 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10523 TYPE_NEXT_VARIANT (t) = cand;
10524 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10525 return cand;
10526 }
10527
10528
10529 /* Given an initializer INIT, return TRUE if INIT is zero or some
10530 aggregate of zeros. Otherwise return FALSE. */
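/* For example, the CONSTRUCTOR for "struct { int i; double d; } x = { 0, 0.0 }"
   counts as zero, whereas "{ 0, -0.0 }" does not, because the REAL_CST -0.0
   has its sign bit set.  */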
10531 bool
10532 initializer_zerop (const_tree init)
10533 {
10534 tree elt;
10535
10536 STRIP_NOPS (init);
10537
10538 switch (TREE_CODE (init))
10539 {
10540 case INTEGER_CST:
10541 return integer_zerop (init);
10542
10543 case REAL_CST:
10544 /* ??? Note that this is not correct for C4X float formats. There,
10545 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10546 negative exponent. */
10547 return real_zerop (init)
10548 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10549
10550 case FIXED_CST:
10551 return fixed_zerop (init);
10552
10553 case COMPLEX_CST:
10554 return integer_zerop (init)
10555 || (real_zerop (init)
10556 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10557 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10558
10559 case VECTOR_CST:
10560 {
10561 unsigned i;
10562 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10563 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10564 return false;
10565 return true;
10566 }
10567
10568 case CONSTRUCTOR:
10569 {
10570 unsigned HOST_WIDE_INT idx;
10571
10572 if (TREE_CLOBBER_P (init))
10573 return false;
10574 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10575 if (!initializer_zerop (elt))
10576 return false;
10577 return true;
10578 }
10579
10580 case STRING_CST:
10581 {
10582 int i;
10583
10584 /* We need to loop through all elements to handle cases like
10585 "\0" and "\0foobar". */
10586 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10587 if (TREE_STRING_POINTER (init)[i] != '\0')
10588 return false;
10589
10590 return true;
10591 }
10592
10593 default:
10594 return false;
10595 }
10596 }
10597
10598 /* Check if vector VEC consists entirely of equal elements and that
10599 the number of elements corresponds to the type of VEC.
10600 The function returns the first element of the vector,
10601 or NULL_TREE if the vector is not uniform. */
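/* For example, the VECTOR_CST { 7, 7, 7, 7 } yields the INTEGER_CST 7, while
   { 7, 7, 7, 0 } yields NULL_TREE.  */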
10602 tree
10603 uniform_vector_p (const_tree vec)
10604 {
10605 tree first, t;
10606 unsigned i;
10607
10608 if (vec == NULL_TREE)
10609 return NULL_TREE;
10610
10611 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10612
10613 if (TREE_CODE (vec) == VECTOR_CST)
10614 {
10615 first = VECTOR_CST_ELT (vec, 0);
10616 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10617 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10618 return NULL_TREE;
10619
10620 return first;
10621 }
10622
10623 else if (TREE_CODE (vec) == CONSTRUCTOR)
10624 {
10625 first = error_mark_node;
10626
10627 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10628 {
10629 if (i == 0)
10630 {
10631 first = t;
10632 continue;
10633 }
10634 if (!operand_equal_p (first, t, 0))
10635 return NULL_TREE;
10636 }
10637 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10638 return NULL_TREE;
10639
10640 return first;
10641 }
10642
10643 return NULL_TREE;
10644 }
10645
10646 /* Build an empty statement at location LOC. */
10647
10648 tree
10649 build_empty_stmt (location_t loc)
10650 {
10651 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10652 SET_EXPR_LOCATION (t, loc);
10653 return t;
10654 }
10655
10656
10657 /* Build an OpenMP clause with code CODE. LOC is the location of the
10658 clause. */
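/* For example, a private clause for a hypothetical decl VAR could be built as

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;

   and then chained onto a clause list through OMP_CLAUSE_CHAIN (c).  */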
10659
10660 tree
10661 build_omp_clause (location_t loc, enum omp_clause_code code)
10662 {
10663 tree t;
10664 int size, length;
10665
10666 length = omp_clause_num_ops[code];
10667 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10668
10669 record_node_allocation_statistics (OMP_CLAUSE, size);
10670
10671 t = (tree) ggc_internal_alloc (size);
10672 memset (t, 0, size);
10673 TREE_SET_CODE (t, OMP_CLAUSE);
10674 OMP_CLAUSE_SET_CODE (t, code);
10675 OMP_CLAUSE_LOCATION (t) = loc;
10676
10677 return t;
10678 }
10679
10680 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10681 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10682 Except for the CODE and operand count field, other storage for the
10683 object is initialized to zeros. */
10684
10685 tree
10686 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10687 {
10688 tree t;
10689 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10690
10691 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10692 gcc_assert (len >= 1);
10693
10694 record_node_allocation_statistics (code, length);
10695
10696 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10697
10698 TREE_SET_CODE (t, code);
10699
10700 /* Can't use TREE_OPERAND to store the length because if checking is
10701 enabled, it will try to check the length before we store it. :-P */
10702 t->exp.operands[0] = build_int_cst (sizetype, len);
10703
10704 return t;
10705 }
10706
10707 /* Helper function for build_call_* functions; build a CALL_EXPR with
10708 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10709 the argument slots. */
10710
10711 static tree
10712 build_call_1 (tree return_type, tree fn, int nargs)
10713 {
10714 tree t;
10715
10716 t = build_vl_exp (CALL_EXPR, nargs + 3);
10717 TREE_TYPE (t) = return_type;
10718 CALL_EXPR_FN (t) = fn;
10719 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10720
10721 return t;
10722 }
10723
10724 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10725 FN and a null static chain slot. NARGS is the number of call arguments
10726 which are specified as "..." arguments. */
10727
10728 tree
10729 build_call_nary (tree return_type, tree fn, int nargs, ...)
10730 {
10731 tree ret;
10732 va_list args;
10733 va_start (args, nargs);
10734 ret = build_call_valist (return_type, fn, nargs, args);
10735 va_end (args);
10736 return ret;
10737 }
10738
10739 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10740 FN and a null static chain slot. NARGS is the number of call arguments
10741 which are specified as a va_list ARGS. */
10742
10743 tree
10744 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10745 {
10746 tree t;
10747 int i;
10748
10749 t = build_call_1 (return_type, fn, nargs);
10750 for (i = 0; i < nargs; i++)
10751 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10752 process_call_operands (t);
10753 return t;
10754 }
10755
10756 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10757 FN and a null static chain slot. NARGS is the number of call arguments
10758 which are specified as a tree array ARGS. */
10759
10760 tree
10761 build_call_array_loc (location_t loc, tree return_type, tree fn,
10762 int nargs, const tree *args)
10763 {
10764 tree t;
10765 int i;
10766
10767 t = build_call_1 (return_type, fn, nargs);
10768 for (i = 0; i < nargs; i++)
10769 CALL_EXPR_ARG (t, i) = args[i];
10770 process_call_operands (t);
10771 SET_EXPR_LOCATION (t, loc);
10772 return t;
10773 }
10774
10775 /* Like build_call_array, but takes a vec. */
10776
10777 tree
10778 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10779 {
10780 tree ret, t;
10781 unsigned int ix;
10782
10783 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10784 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10785 CALL_EXPR_ARG (ret, ix) = t;
10786 process_call_operands (ret);
10787 return ret;
10788 }
10789
10790 /* Conveniently construct a function call expression. FNDECL names the
10791 function to be called and N arguments are passed in the array
10792 ARGARRAY. */
10793
10794 tree
10795 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10796 {
10797 tree fntype = TREE_TYPE (fndecl);
10798 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10799
10800 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10801 }
10802
10803 /* Conveniently construct a function call expression. FNDECL names the
10804 function to be called and the arguments are passed in the vector
10805 VEC. */
10806
10807 tree
10808 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10809 {
10810 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10811 vec_safe_address (vec));
10812 }
10813
10814
10815 /* Conveniently construct a function call expression. FNDECL names the
10816 function to be called, N is the number of arguments, and the "..."
10817 parameters are the argument expressions. */
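/* For example, given trees dst and len (hypothetical caller-provided
   operands) of pointer and size type, a call to memset could be built as

     build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_MEMSET), 3,
                          dst, integer_zero_node, len);  */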
10818
10819 tree
10820 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10821 {
10822 va_list ap;
10823 tree *argarray = XALLOCAVEC (tree, n);
10824 int i;
10825
10826 va_start (ap, n);
10827 for (i = 0; i < n; i++)
10828 argarray[i] = va_arg (ap, tree);
10829 va_end (ap);
10830 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10831 }
10832
10833 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10834 varargs macros aren't supported by all bootstrap compilers. */
10835
10836 tree
10837 build_call_expr (tree fndecl, int n, ...)
10838 {
10839 va_list ap;
10840 tree *argarray = XALLOCAVEC (tree, n);
10841 int i;
10842
10843 va_start (ap, n);
10844 for (i = 0; i < n; i++)
10845 argarray[i] = va_arg (ap, tree);
10846 va_end (ap);
10847 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10848 }
10849
10850 /* Build an internal call expression. This is just like a CALL_EXPR, except
10851 its CALL_EXPR_FN is NULL. It will get gimplified later into a call to the
10852 internal function IFN. */
10853
10854 tree
10855 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10856 tree type, int n, ...)
10857 {
10858 va_list ap;
10859 int i;
10860
10861 tree fn = build_call_1 (type, NULL_TREE, n);
10862 va_start (ap, n);
10863 for (i = 0; i < n; i++)
10864 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10865 va_end (ap);
10866 SET_EXPR_LOCATION (fn, loc);
10867 CALL_EXPR_IFN (fn) = ifn;
10868 return fn;
10869 }
10870
10871 /* Create a new constant string literal and return a char* pointer to it.
10872 The STRING_CST value is the LEN characters at STR. */
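/* For example, build_string_literal (sizeof ("%d\n"), "%d\n") yields an
   ADDR_EXPR of the form &"%d\n"[0], suitable for use as the format argument
   of a generated call to printf.  */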
10873 tree
10874 build_string_literal (int len, const char *str)
10875 {
10876 tree t, elem, index, type;
10877
10878 t = build_string (len, str);
10879 elem = build_type_variant (char_type_node, 1, 0);
10880 index = build_index_type (size_int (len - 1));
10881 type = build_array_type (elem, index);
10882 TREE_TYPE (t) = type;
10883 TREE_CONSTANT (t) = 1;
10884 TREE_READONLY (t) = 1;
10885 TREE_STATIC (t) = 1;
10886
10887 type = build_pointer_type (elem);
10888 t = build1 (ADDR_EXPR, type,
10889 build4 (ARRAY_REF, elem,
10890 t, integer_zero_node, NULL_TREE, NULL_TREE));
10891 return t;
10892 }
10893
10894
10895
10896 /* Return true if T (assumed to be a DECL) must be assigned a memory
10897 location. */
10898
10899 bool
10900 needs_to_live_in_memory (const_tree t)
10901 {
10902 return (TREE_ADDRESSABLE (t)
10903 || is_global_var (t)
10904 || (TREE_CODE (t) == RESULT_DECL
10905 && !DECL_BY_REFERENCE (t)
10906 && aggregate_value_p (t, current_function_decl)));
10907 }
10908
10909 /* Return the value of the constant X, sign-extended. */
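/* For instance, for a type of 8-bit precision the constant 0xff has its top
   bit (bit 7) set, so the low word is extended with ones and -1 is
   returned.  */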
10910
10911 HOST_WIDE_INT
10912 int_cst_value (const_tree x)
10913 {
10914 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10915 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10916
10917 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10918 gcc_assert (cst_and_fits_in_hwi (x));
10919
10920 if (bits < HOST_BITS_PER_WIDE_INT)
10921 {
10922 bool negative = ((val >> (bits - 1)) & 1) != 0;
10923 if (negative)
10924 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10925 else
10926 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10927 }
10928
10929 return val;
10930 }
10931
10932 /* If TYPE is an integral or pointer type, return an integer type with
10933 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10934 if TYPE is already an integer type of signedness UNSIGNEDP. */
10935
10936 tree
10937 signed_or_unsigned_type_for (int unsignedp, tree type)
10938 {
10939 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10940 return type;
10941
10942 if (TREE_CODE (type) == VECTOR_TYPE)
10943 {
10944 tree inner = TREE_TYPE (type);
10945 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10946 if (!inner2)
10947 return NULL_TREE;
10948 if (inner == inner2)
10949 return type;
10950 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10951 }
10952
10953 if (!INTEGRAL_TYPE_P (type)
10954 && !POINTER_TYPE_P (type)
10955 && TREE_CODE (type) != OFFSET_TYPE)
10956 return NULL_TREE;
10957
10958 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10959 }
10960
10961 /* If TYPE is an integral or pointer type, return an integer type with
10962 the same precision which is unsigned, or itself if TYPE is already an
10963 unsigned integer type. */
10964
10965 tree
10966 unsigned_type_for (tree type)
10967 {
10968 return signed_or_unsigned_type_for (1, type);
10969 }
10970
10971 /* If TYPE is an integral or pointer type, return an integer type with
10972 the same precision which is signed, or itself if TYPE is already a
10973 signed integer type. */
10974
10975 tree
10976 signed_type_for (tree type)
10977 {
10978 return signed_or_unsigned_type_for (0, type);
10979 }
10980
10981 /* If TYPE is a vector type, return a signed integer vector type with the
10982 same width and number of subparts. Otherwise return boolean_type_node. */
10983
10984 tree
10985 truth_type_for (tree type)
10986 {
10987 if (TREE_CODE (type) == VECTOR_TYPE)
10988 {
10989 tree elem = lang_hooks.types.type_for_size
10990 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10991 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10992 }
10993 else
10994 return boolean_type_node;
10995 }
10996
10997 /* Returns the largest value obtainable by casting something in INNER type to
10998 OUTER type. */
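/* For example, casting from a 32-bit signed int to a 16-bit unsigned short
   gives oprec = 16 <= iprec = 32 with OUTER unsigned, so prec = oprec and
   the bound is 0xffff; casting from unsigned char to a 32-bit signed int
   gives oprec > iprec with OUTER signed and INNER unsigned, so prec = iprec
   and the bound is 0xff.  */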
10999
11000 tree
11001 upper_bound_in_type (tree outer, tree inner)
11002 {
11003 unsigned int det = 0;
11004 unsigned oprec = TYPE_PRECISION (outer);
11005 unsigned iprec = TYPE_PRECISION (inner);
11006 unsigned prec;
11007
11008 /* Compute a unique number for every combination. */
11009 det |= (oprec > iprec) ? 4 : 0;
11010 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11011 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11012
11013 /* Determine the exponent to use. */
11014 switch (det)
11015 {
11016 case 0:
11017 case 1:
11018 /* oprec <= iprec, outer: signed, inner: don't care. */
11019 prec = oprec - 1;
11020 break;
11021 case 2:
11022 case 3:
11023 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11024 prec = oprec;
11025 break;
11026 case 4:
11027 /* oprec > iprec, outer: signed, inner: signed. */
11028 prec = iprec - 1;
11029 break;
11030 case 5:
11031 /* oprec > iprec, outer: signed, inner: unsigned. */
11032 prec = iprec;
11033 break;
11034 case 6:
11035 /* oprec > iprec, outer: unsigned, inner: signed. */
11036 prec = oprec;
11037 break;
11038 case 7:
11039 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11040 prec = iprec;
11041 break;
11042 default:
11043 gcc_unreachable ();
11044 }
11045
11046 return wide_int_to_tree (outer,
11047 wi::mask (prec, false, TYPE_PRECISION (outer)));
11048 }
11049
11050 /* Returns the smallest value obtainable by casting something in INNER type to
11051 OUTER type. */
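/* For example, widening a 16-bit signed short to a 32-bit signed int gives
   prec = iprec = 16 and the result is -32768, while an unsigned OUTER type
   always yields 0.  */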
11052
11053 tree
11054 lower_bound_in_type (tree outer, tree inner)
11055 {
11056 unsigned oprec = TYPE_PRECISION (outer);
11057 unsigned iprec = TYPE_PRECISION (inner);
11058
11059 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11060 and obtain 0. */
11061 if (TYPE_UNSIGNED (outer)
11062 /* If we are widening something of an unsigned type, OUTER type
11063 contains all values of INNER type. In particular, both INNER
11064 and OUTER types have zero in common. */
11065 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11066 return build_int_cst (outer, 0);
11067 else
11068 {
11069 /* If we are widening a signed type to another signed type, we
11070 want to obtain -2^(iprec-1). If we are keeping the
11071 precision or narrowing to a signed type, we want to obtain
11072 -2^(oprec-1). */
11073 unsigned prec = oprec > iprec ? iprec : oprec;
11074 return wide_int_to_tree (outer,
11075 wi::mask (prec - 1, true,
11076 TYPE_PRECISION (outer)));
11077 }
11078 }
11079
11080 /* Return nonzero if two operands that are suitable for PHI nodes are
11081 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11082 SSA_NAME or invariant. Note that this is strictly an optimization.
11083 That is, callers of this function can directly call operand_equal_p
11084 and get the same result, only slower. */
11085
11086 int
11087 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11088 {
11089 if (arg0 == arg1)
11090 return 1;
11091 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11092 return 0;
11093 return operand_equal_p (arg0, arg1, 0);
11094 }
11095
11096 /* Returns the number of zeros at the end of the binary representation of X. */
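/* For X equal to 24 (binary 11000) the result is the INTEGER_CST 3.  */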
11097
11098 tree
11099 num_ending_zeros (const_tree x)
11100 {
11101 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11102 }
11103
11104
11105 #define WALK_SUBTREE(NODE) \
11106 do \
11107 { \
11108 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11109 if (result) \
11110 return result; \
11111 } \
11112 while (0)
11113
11114 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11115 to be walked whenever a type is seen in the tree. The rest of the operands
11116 and the return value are as for walk_tree. */
11117
11118 static tree
11119 walk_type_fields (tree type, walk_tree_fn func, void *data,
11120 hash_set<tree> *pset, walk_tree_lh lh)
11121 {
11122 tree result = NULL_TREE;
11123
11124 switch (TREE_CODE (type))
11125 {
11126 case POINTER_TYPE:
11127 case REFERENCE_TYPE:
11128 case VECTOR_TYPE:
11129 /* We have to worry about mutually recursive pointers. These can't
11130 be written in C. They can in Ada. It's pathological, but
11131 there's an ACATS test (c38102a) that checks it. Deal with this
11132 by checking if we're pointing to another pointer, that one
11133 points to another pointer, that one does too, and we have no htab.
11134 If so, get a hash table. We check three levels deep to avoid
11135 the cost of the hash table if we don't need one. */
11136 if (POINTER_TYPE_P (TREE_TYPE (type))
11137 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11138 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11139 && !pset)
11140 {
11141 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11142 func, data);
11143 if (result)
11144 return result;
11145
11146 break;
11147 }
11148
11149 /* ... fall through ... */
11150
11151 case COMPLEX_TYPE:
11152 WALK_SUBTREE (TREE_TYPE (type));
11153 break;
11154
11155 case METHOD_TYPE:
11156 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11157
11158 /* Fall through. */
11159
11160 case FUNCTION_TYPE:
11161 WALK_SUBTREE (TREE_TYPE (type));
11162 {
11163 tree arg;
11164
11165 /* We never want to walk into default arguments. */
11166 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11167 WALK_SUBTREE (TREE_VALUE (arg));
11168 }
11169 break;
11170
11171 case ARRAY_TYPE:
11172 /* Don't follow this node's type if it is a pointer, for fear that
11173 we'll have infinite recursion. If we have a PSET, then we
11174 need not fear. */
11175 if (pset
11176 || (!POINTER_TYPE_P (TREE_TYPE (type))
11177 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11178 WALK_SUBTREE (TREE_TYPE (type));
11179 WALK_SUBTREE (TYPE_DOMAIN (type));
11180 break;
11181
11182 case OFFSET_TYPE:
11183 WALK_SUBTREE (TREE_TYPE (type));
11184 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11185 break;
11186
11187 default:
11188 break;
11189 }
11190
11191 return NULL_TREE;
11192 }
11193
11194 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11195 called with the DATA and the address of each sub-tree. If FUNC returns a
11196 non-NULL value, the traversal is stopped, and the value returned by FUNC
11197 is returned. If PSET is non-NULL it is used to record the nodes visited,
11198 and to avoid visiting a node more than once. */
11199
11200 tree
11201 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11202 hash_set<tree> *pset, walk_tree_lh lh)
11203 {
11204 enum tree_code code;
11205 int walk_subtrees;
11206 tree result;
11207
11208 #define WALK_SUBTREE_TAIL(NODE) \
11209 do \
11210 { \
11211 tp = & (NODE); \
11212 goto tail_recurse; \
11213 } \
11214 while (0)
11215
11216 tail_recurse:
11217 /* Skip empty subtrees. */
11218 if (!*tp)
11219 return NULL_TREE;
11220
11221 /* Don't walk the same tree twice, if the user has requested
11222 that we avoid doing so. */
11223 if (pset && pset->add (*tp))
11224 return NULL_TREE;
11225
11226 /* Call the function. */
11227 walk_subtrees = 1;
11228 result = (*func) (tp, &walk_subtrees, data);
11229
11230 /* If we found something, return it. */
11231 if (result)
11232 return result;
11233
11234 code = TREE_CODE (*tp);
11235
11236 /* Even if we didn't, FUNC may have decided that there was nothing
11237 interesting below this point in the tree. */
11238 if (!walk_subtrees)
11239 {
11240 /* But we still need to check our siblings. */
11241 if (code == TREE_LIST)
11242 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11243 else if (code == OMP_CLAUSE)
11244 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11245 else
11246 return NULL_TREE;
11247 }
11248
11249 if (lh)
11250 {
11251 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11252 if (result || !walk_subtrees)
11253 return result;
11254 }
11255
11256 switch (code)
11257 {
11258 case ERROR_MARK:
11259 case IDENTIFIER_NODE:
11260 case INTEGER_CST:
11261 case REAL_CST:
11262 case FIXED_CST:
11263 case VECTOR_CST:
11264 case STRING_CST:
11265 case BLOCK:
11266 case PLACEHOLDER_EXPR:
11267 case SSA_NAME:
11268 case FIELD_DECL:
11269 case RESULT_DECL:
11270 /* None of these have subtrees other than those already walked
11271 above. */
11272 break;
11273
11274 case TREE_LIST:
11275 WALK_SUBTREE (TREE_VALUE (*tp));
11276 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11277 break;
11278
11279 case TREE_VEC:
11280 {
11281 int len = TREE_VEC_LENGTH (*tp);
11282
11283 if (len == 0)
11284 break;
11285
11286 /* Walk all elements but the first. */
11287 while (--len)
11288 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11289
11290 /* Now walk the first one as a tail call. */
11291 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11292 }
11293
11294 case COMPLEX_CST:
11295 WALK_SUBTREE (TREE_REALPART (*tp));
11296 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11297
11298 case CONSTRUCTOR:
11299 {
11300 unsigned HOST_WIDE_INT idx;
11301 constructor_elt *ce;
11302
11303 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11304 idx++)
11305 WALK_SUBTREE (ce->value);
11306 }
11307 break;
11308
11309 case SAVE_EXPR:
11310 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11311
11312 case BIND_EXPR:
11313 {
11314 tree decl;
11315 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11316 {
11317 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11318 into declarations that are just mentioned, rather than
11319 declared; they don't really belong to this part of the tree.
11320 And, we can see cycles: the initializer for a declaration
11321 can refer to the declaration itself. */
11322 WALK_SUBTREE (DECL_INITIAL (decl));
11323 WALK_SUBTREE (DECL_SIZE (decl));
11324 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11325 }
11326 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11327 }
11328
11329 case STATEMENT_LIST:
11330 {
11331 tree_stmt_iterator i;
11332 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11333 WALK_SUBTREE (*tsi_stmt_ptr (i));
11334 }
11335 break;
11336
11337 case OMP_CLAUSE:
11338 switch (OMP_CLAUSE_CODE (*tp))
11339 {
11340 case OMP_CLAUSE_GANG:
11341 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11342 /* FALLTHRU */
11343
11344 case OMP_CLAUSE_DEVICE_RESIDENT:
11345 case OMP_CLAUSE_USE_DEVICE:
11346 case OMP_CLAUSE_ASYNC:
11347 case OMP_CLAUSE_WAIT:
11348 case OMP_CLAUSE_WORKER:
11349 case OMP_CLAUSE_VECTOR:
11350 case OMP_CLAUSE_NUM_GANGS:
11351 case OMP_CLAUSE_NUM_WORKERS:
11352 case OMP_CLAUSE_VECTOR_LENGTH:
11353 case OMP_CLAUSE_PRIVATE:
11354 case OMP_CLAUSE_SHARED:
11355 case OMP_CLAUSE_FIRSTPRIVATE:
11356 case OMP_CLAUSE_COPYIN:
11357 case OMP_CLAUSE_COPYPRIVATE:
11358 case OMP_CLAUSE_FINAL:
11359 case OMP_CLAUSE_IF:
11360 case OMP_CLAUSE_NUM_THREADS:
11361 case OMP_CLAUSE_SCHEDULE:
11362 case OMP_CLAUSE_UNIFORM:
11363 case OMP_CLAUSE_DEPEND:
11364 case OMP_CLAUSE_NUM_TEAMS:
11365 case OMP_CLAUSE_THREAD_LIMIT:
11366 case OMP_CLAUSE_DEVICE:
11367 case OMP_CLAUSE_DIST_SCHEDULE:
11368 case OMP_CLAUSE_SAFELEN:
11369 case OMP_CLAUSE_SIMDLEN:
11370 case OMP_CLAUSE__LOOPTEMP_:
11371 case OMP_CLAUSE__SIMDUID_:
11372 case OMP_CLAUSE__CILK_FOR_COUNT_:
11373 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11374 /* FALLTHRU */
11375
11376 case OMP_CLAUSE_INDEPENDENT:
11377 case OMP_CLAUSE_NOWAIT:
11378 case OMP_CLAUSE_ORDERED:
11379 case OMP_CLAUSE_DEFAULT:
11380 case OMP_CLAUSE_UNTIED:
11381 case OMP_CLAUSE_MERGEABLE:
11382 case OMP_CLAUSE_PROC_BIND:
11383 case OMP_CLAUSE_INBRANCH:
11384 case OMP_CLAUSE_NOTINBRANCH:
11385 case OMP_CLAUSE_FOR:
11386 case OMP_CLAUSE_PARALLEL:
11387 case OMP_CLAUSE_SECTIONS:
11388 case OMP_CLAUSE_TASKGROUP:
11389 case OMP_CLAUSE_AUTO:
11390 case OMP_CLAUSE_SEQ:
11391 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11392
11393 case OMP_CLAUSE_LASTPRIVATE:
11394 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11395 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11396 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11397
11398 case OMP_CLAUSE_COLLAPSE:
11399 {
11400 int i;
11401 for (i = 0; i < 3; i++)
11402 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11403 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11404 }
11405
11406 case OMP_CLAUSE_LINEAR:
11407 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11408 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11409 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11410 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11411
11412 case OMP_CLAUSE_ALIGNED:
11413 case OMP_CLAUSE_FROM:
11414 case OMP_CLAUSE_TO:
11415 case OMP_CLAUSE_MAP:
11416 case OMP_CLAUSE__CACHE_:
11417 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11418 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11419 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11420
11421 case OMP_CLAUSE_REDUCTION:
11422 {
11423 int i;
11424 for (i = 0; i < 4; i++)
11425 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11426 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11427 }
11428
11429 default:
11430 gcc_unreachable ();
11431 }
11432 break;
11433
11434 case TARGET_EXPR:
11435 {
11436 int i, len;
11437
11438 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11439 But we only want to walk them once. */
11440 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11441 for (i = 0; i < len; ++i)
11442 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11443 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11444 }
11445
11446 case DECL_EXPR:
11447 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11448 defining. We only want to walk into these fields of a type in this
11449 case and not in the general case of a mere reference to the type.
11450
11451 The criterion is as follows: if the field can be an expression, it
11452 must be walked only here. This should be in keeping with the fields
11453 that are directly gimplified in gimplify_type_sizes in order for the
11454 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11455 variable-sized types.
11456
11457 Note that DECLs get walked as part of processing the BIND_EXPR. */
11458 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11459 {
11460 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11461 if (TREE_CODE (*type_p) == ERROR_MARK)
11462 return NULL_TREE;
11463
11464 /* Call the function for the type. See if it returns anything or
11465 doesn't want us to continue. If we are to continue, walk both
11466 the normal fields and those for the declaration case. */
11467 result = (*func) (type_p, &walk_subtrees, data);
11468 if (result || !walk_subtrees)
11469 return result;
11470
11471 /* But do not walk a pointed-to type since it may itself need to
11472 be walked in the declaration case if it isn't anonymous. */
11473 if (!POINTER_TYPE_P (*type_p))
11474 {
11475 result = walk_type_fields (*type_p, func, data, pset, lh);
11476 if (result)
11477 return result;
11478 }
11479
11480 /* If this is a record type, also walk the fields. */
11481 if (RECORD_OR_UNION_TYPE_P (*type_p))
11482 {
11483 tree field;
11484
11485 for (field = TYPE_FIELDS (*type_p); field;
11486 field = DECL_CHAIN (field))
11487 {
11488 /* We'd like to look at the type of the field, but we can
11489 easily get infinite recursion. So assume it's pointed
11490 to elsewhere in the tree. Also, ignore things that
11491 aren't fields. */
11492 if (TREE_CODE (field) != FIELD_DECL)
11493 continue;
11494
11495 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11496 WALK_SUBTREE (DECL_SIZE (field));
11497 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11498 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11499 WALK_SUBTREE (DECL_QUALIFIER (field));
11500 }
11501 }
11502
11503 /* Same for scalar types. */
11504 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11505 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11506 || TREE_CODE (*type_p) == INTEGER_TYPE
11507 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11508 || TREE_CODE (*type_p) == REAL_TYPE)
11509 {
11510 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11511 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11512 }
11513
11514 WALK_SUBTREE (TYPE_SIZE (*type_p));
11515 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11516 }
11517 /* FALLTHRU */
11518
11519 default:
11520 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11521 {
11522 int i, len;
11523
11524 /* Walk over all the sub-trees of this operand. */
11525 len = TREE_OPERAND_LENGTH (*tp);
11526
11527 /* Go through the subtrees. We need to do this in forward order so
11528 that the scope of a FOR_EXPR is handled properly. */
11529 if (len)
11530 {
11531 for (i = 0; i < len - 1; ++i)
11532 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11533 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11534 }
11535 }
11536 /* If this is a type, walk the needed fields in the type. */
11537 else if (TYPE_P (*tp))
11538 return walk_type_fields (*tp, func, data, pset, lh);
11539 break;
11540 }
11541
11542 /* We didn't find what we were looking for. */
11543 return NULL_TREE;
11544
11545 #undef WALK_SUBTREE_TAIL
11546 }
11547 #undef WALK_SUBTREE
11548
11549 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11550
11551 tree
11552 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11553 walk_tree_lh lh)
11554 {
11555 tree result;
11556
11557 hash_set<tree> pset;
11558 result = walk_tree_1 (tp, func, data, &pset, lh);
11559 return result;
11560 }
11561
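/* Illustrative sketch, not part of the original source: a typical client of
   these walkers goes through the walk_tree/walk_tree_without_duplicates
   wrappers in tree.h.  The callback below is hypothetical; it stops at the
   first CALL_EXPR and skips type nodes.

     static tree
     find_call_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         return *tp;            returning non-NULL stops the walk
       if (TYPE_P (*tp))
         *walk_subtrees = 0;    do not descend into types
       return NULL_TREE;        keep walking
     }

     tree call = walk_tree_without_duplicates (&body, find_call_expr, NULL);  */
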
11562
11563 tree
11564 tree_block (tree t)
11565 {
11566 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11567
11568 if (IS_EXPR_CODE_CLASS (c))
11569 return LOCATION_BLOCK (t->exp.locus);
11570 gcc_unreachable ();
11571 return NULL;
11572 }
11573
11574 void
11575 tree_set_block (tree t, tree b)
11576 {
11577 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11578
11579 if (IS_EXPR_CODE_CLASS (c))
11580 {
11581 if (b)
11582 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11583 else
11584 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11585 }
11586 else
11587 gcc_unreachable ();
11588 }
11589
11590 /* Create a nameless artificial label and put it in the current
11591 function context. The label has a location of LOC. Returns the
11592 newly created label. */
11593
11594 tree
11595 create_artificial_label (location_t loc)
11596 {
11597 tree lab = build_decl (loc,
11598 LABEL_DECL, NULL_TREE, void_type_node);
11599
11600 DECL_ARTIFICIAL (lab) = 1;
11601 DECL_IGNORED_P (lab) = 1;
11602 DECL_CONTEXT (lab) = current_function_decl;
11603 return lab;
11604 }
11605
11606 /* Given a tree, try to return a useful variable name that we can use
11607 to prefix a temporary that is being assigned the value of the tree.
11608 I.e. given <temp> = &A, return A. */
11609
11610 const char *
11611 get_name (tree t)
11612 {
11613 tree stripped_decl;
11614
11615 stripped_decl = t;
11616 STRIP_NOPS (stripped_decl);
11617 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11618 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11619 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11620 {
11621 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11622 if (!name)
11623 return NULL;
11624 return IDENTIFIER_POINTER (name);
11625 }
11626 else
11627 {
11628 switch (TREE_CODE (stripped_decl))
11629 {
11630 case ADDR_EXPR:
11631 return get_name (TREE_OPERAND (stripped_decl, 0));
11632 default:
11633 return NULL;
11634 }
11635 }
11636 }
11637
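/* Illustrative sketch, not part of the original source: get_name is
   typically used to pick a readable prefix for a new temporary; the
   variable names below are hypothetical.

     const char *prefix = get_name (rhs);                   may be NULL
     tree tmp = create_tmp_var (TREE_TYPE (rhs), prefix);   NULL prefix is OK  */
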
11638 /* Return true if TYPE has a variable argument list. */
11639
11640 bool
11641 stdarg_p (const_tree fntype)
11642 {
11643 function_args_iterator args_iter;
11644 tree n = NULL_TREE, t;
11645
11646 if (!fntype)
11647 return false;
11648
11649 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11650 {
11651 n = t;
11652 }
11653
11654 return n != NULL_TREE && n != void_type_node;
11655 }
11656
11657 /* Return true if TYPE has a prototype. */
11658
11659 bool
11660 prototype_p (const_tree fntype)
11661 {
11662 tree t;
11663
11664 gcc_assert (fntype != NULL_TREE);
11665
11666 t = TYPE_ARG_TYPES (fntype);
11667 return (t != NULL_TREE);
11668 }
11669
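/* Illustrative sketch, not part of the original source: how the two
   predicates above classify the FUNCTION_TYPEs of some C declarations.

     int f (int, ...);    stdarg_p => true    prototype_p => true
     int g (void);        stdarg_p => false   prototype_p => true
     int h ();            stdarg_p => false   prototype_p => false
                          (unprototyped: TYPE_ARG_TYPES is NULL_TREE)  */
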
11670 /* If BLOCK is inlined from an __attribute__((__artificial__))
11671 routine, return pointer to location from where it has been
11672 called. */
11673 location_t *
11674 block_nonartificial_location (tree block)
11675 {
11676 location_t *ret = NULL;
11677
11678 while (block && TREE_CODE (block) == BLOCK
11679 && BLOCK_ABSTRACT_ORIGIN (block))
11680 {
11681 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11682
11683 while (TREE_CODE (ao) == BLOCK
11684 && BLOCK_ABSTRACT_ORIGIN (ao)
11685 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11686 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11687
11688 if (TREE_CODE (ao) == FUNCTION_DECL)
11689 {
11690 /* If AO is an artificial inline, point RET to the
11691 call site locus at which it has been inlined and continue
11692 the loop, in case AO's caller is also an artificial
11693 inline. */
11694 if (DECL_DECLARED_INLINE_P (ao)
11695 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11696 ret = &BLOCK_SOURCE_LOCATION (block);
11697 else
11698 break;
11699 }
11700 else if (TREE_CODE (ao) != BLOCK)
11701 break;
11702
11703 block = BLOCK_SUPERCONTEXT (block);
11704 }
11705 return ret;
11706 }
11707
11708
11709 /* If EXP is inlined from an __attribute__((__artificial__))
11710 function, return the location of the original call expression. */
11711
11712 location_t
11713 tree_nonartificial_location (tree exp)
11714 {
11715 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11716
11717 if (loc)
11718 return *loc;
11719 else
11720 return EXPR_LOCATION (exp);
11721 }
11722
11723
11724 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11725 nodes. */
11726
11727 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11728
11729 hashval_t
11730 cl_option_hasher::hash (tree x)
11731 {
11732 const_tree const t = x;
11733 const char *p;
11734 size_t i;
11735 size_t len = 0;
11736 hashval_t hash = 0;
11737
11738 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11739 {
11740 p = (const char *)TREE_OPTIMIZATION (t);
11741 len = sizeof (struct cl_optimization);
11742 }
11743
11744 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11745 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11746
11747 else
11748 gcc_unreachable ();
11749
11750 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11751 something else. */
11752 for (i = 0; i < len; i++)
11753 if (p[i])
11754 hash = (hash << 4) ^ ((i << 2) | p[i]);
11755
11756 return hash;
11757 }
11758
11759 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11760 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11761 node of the same kind. */
11762
11763 bool
11764 cl_option_hasher::equal (tree x, tree y)
11765 {
11766 const_tree const xt = x;
11767 const_tree const yt = y;
11768 const char *xp;
11769 const char *yp;
11770 size_t len;
11771
11772 if (TREE_CODE (xt) != TREE_CODE (yt))
11773 return 0;
11774
11775 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11776 {
11777 xp = (const char *)TREE_OPTIMIZATION (xt);
11778 yp = (const char *)TREE_OPTIMIZATION (yt);
11779 len = sizeof (struct cl_optimization);
11780 }
11781
11782 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11783 {
11784 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11785 TREE_TARGET_OPTION (yt));
11786 }
11787
11788 else
11789 gcc_unreachable ();
11790
11791 return (memcmp (xp, yp, len) == 0);
11792 }
11793
11794 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11795
11796 tree
11797 build_optimization_node (struct gcc_options *opts)
11798 {
11799 tree t;
11800
11801 /* Use the cache of optimization nodes. */
11802
11803 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11804 opts);
11805
11806 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11807 t = *slot;
11808 if (!t)
11809 {
11810 /* Insert this one into the hash table. */
11811 t = cl_optimization_node;
11812 *slot = t;
11813
11814 /* Make a new node for next time round. */
11815 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11816 }
11817
11818 return t;
11819 }
11820
11821 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11822
11823 tree
11824 build_target_option_node (struct gcc_options *opts)
11825 {
11826 tree t;
11827
11828 /* Use the cache of optimization nodes. */
11829
11830 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11831 opts);
11832
11833 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11834 t = *slot;
11835 if (!t)
11836 {
11837 /* Insert this one into the hash table. */
11838 t = cl_target_option_node;
11839 *slot = t;
11840
11841 /* Make a new node for next time round. */
11842 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11843 }
11844
11845 return t;
11846 }
11847
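/* Illustrative sketch, not part of the original source: front ends
   typically record the option state in effect for a function like this,
   where global_options is the live option set.

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
       = build_target_option_node (&global_options);

   Because the nodes are hashed above, identical option sets share a single
   node, so pointer comparison suffices to detect a change of options.  */
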
11848 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11849 so that they aren't saved during PCH writing. */
11850
11851 void
11852 prepare_target_option_nodes_for_pch (void)
11853 {
11854 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11855 for (; iter != cl_option_hash_table->end (); ++iter)
11856 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11857 TREE_TARGET_GLOBALS (*iter) = NULL;
11858 }
11859
11860 /* Determine the "ultimate origin" of a block. The block may be an inlined
11861 instance of an inlined instance of a block which is local to an inline
11862 function, so we have to trace all of the way back through the origin chain
11863 to find out what sort of node actually served as the original seed for the
11864 given block. */
11865
11866 tree
11867 block_ultimate_origin (const_tree block)
11868 {
11869 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11870
11871 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11872 we're trying to output the abstract instance of this function. */
11873 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11874 return NULL_TREE;
11875
11876 if (immediate_origin == NULL_TREE)
11877 return NULL_TREE;
11878 else
11879 {
11880 tree ret_val;
11881 tree lookahead = immediate_origin;
11882
11883 do
11884 {
11885 ret_val = lookahead;
11886 lookahead = (TREE_CODE (ret_val) == BLOCK
11887 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11888 }
11889 while (lookahead != NULL && lookahead != ret_val);
11890
11891 /* The block's abstract origin chain may not be the *ultimate* origin of
11892 the block. It could lead to a DECL that has an abstract origin set.
11893 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11894 will give us if it has one). Note that DECL's abstract origins are
11895 supposed to be the most distant ancestor (or so decl_ultimate_origin
11896 claims), so we don't need to loop following the DECL origins. */
11897 if (DECL_P (ret_val))
11898 return DECL_ORIGIN (ret_val);
11899
11900 return ret_val;
11901 }
11902 }
11903
11904 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11905 no instruction. */
11906
11907 bool
11908 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11909 {
11910 /* Use precision rather than machine mode when we can, which gives
11911 the correct answer even for submode (bit-field) types. */
11912 if ((INTEGRAL_TYPE_P (outer_type)
11913 || POINTER_TYPE_P (outer_type)
11914 || TREE_CODE (outer_type) == OFFSET_TYPE)
11915 && (INTEGRAL_TYPE_P (inner_type)
11916 || POINTER_TYPE_P (inner_type)
11917 || TREE_CODE (inner_type) == OFFSET_TYPE))
11918 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11919
11920 /* Otherwise fall back on comparing machine modes (e.g. for
11921 aggregate types, floats). */
11922 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11923 }
11924
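/* Illustrative sketch, not part of the original source: on a typical LP64
   target the predicate above answers as follows.

     (long) (long long) x     true   both have 64-bit precision
     (char *) (int *) p       true   pointer precisions match
     (int) (long) x           false  64-bit and 32-bit precisions differ  */
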
11925 /* Return true iff conversion in EXP generates no instruction. Mark
11926 it inline so that we fully inline into the stripping functions even
11927 though we have two uses of this function. */
11928
11929 static inline bool
11930 tree_nop_conversion (const_tree exp)
11931 {
11932 tree outer_type, inner_type;
11933
11934 if (!CONVERT_EXPR_P (exp)
11935 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11936 return false;
11937 if (TREE_OPERAND (exp, 0) == error_mark_node)
11938 return false;
11939
11940 outer_type = TREE_TYPE (exp);
11941 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11942
11943 if (!inner_type)
11944 return false;
11945
11946 return tree_nop_conversion_p (outer_type, inner_type);
11947 }
11948
11949 /* Return true iff conversion in EXP generates no instruction. Don't
11950 consider conversions changing the signedness. */
11951
11952 static bool
11953 tree_sign_nop_conversion (const_tree exp)
11954 {
11955 tree outer_type, inner_type;
11956
11957 if (!tree_nop_conversion (exp))
11958 return false;
11959
11960 outer_type = TREE_TYPE (exp);
11961 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11962
11963 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11964 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11965 }
11966
11967 /* Strip conversions from EXP according to tree_nop_conversion and
11968 return the resulting expression. */
11969
11970 tree
11971 tree_strip_nop_conversions (tree exp)
11972 {
11973 while (tree_nop_conversion (exp))
11974 exp = TREE_OPERAND (exp, 0);
11975 return exp;
11976 }
11977
11978 /* Strip conversions from EXP according to tree_sign_nop_conversion
11979 and return the resulting expression. */
11980
11981 tree
11982 tree_strip_sign_nop_conversions (tree exp)
11983 {
11984 while (tree_sign_nop_conversion (exp))
11985 exp = TREE_OPERAND (exp, 0);
11986 return exp;
11987 }
11988
11989 /* Avoid any floating point extensions from EXP. */
11990 tree
11991 strip_float_extensions (tree exp)
11992 {
11993 tree sub, expt, subt;
11994
11995 /* For floating point constant look up the narrowest type that can hold
11996 it properly and handle it like (type)(narrowest_type)constant.
11997 This way we can optimize for instance a=a*2.0 where "a" is float
11998 but 2.0 is double constant. */
11999 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12000 {
12001 REAL_VALUE_TYPE orig;
12002 tree type = NULL;
12003
12004 orig = TREE_REAL_CST (exp);
12005 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12006 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12007 type = float_type_node;
12008 else if (TYPE_PRECISION (TREE_TYPE (exp))
12009 > TYPE_PRECISION (double_type_node)
12010 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12011 type = double_type_node;
12012 if (type)
12013 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
12014 }
12015
12016 if (!CONVERT_EXPR_P (exp))
12017 return exp;
12018
12019 sub = TREE_OPERAND (exp, 0);
12020 subt = TREE_TYPE (sub);
12021 expt = TREE_TYPE (exp);
12022
12023 if (!FLOAT_TYPE_P (subt))
12024 return exp;
12025
12026 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12027 return exp;
12028
12029 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12030 return exp;
12031
12032 return strip_float_extensions (sub);
12033 }
12034
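/* Illustrative sketch, not part of the original source: in
   "float a; a = a * 2.0;" the constant 2.0 has type double but truncates
   exactly to float, so

     strip_float_extensions (build_real (double_type_node, dconst2))

   returns a REAL_CST of float_type_node, and stripping the implicit
   (double) extension of A lets the multiplication be done in float.  */
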
12035 /* Strip out all handled components that produce invariant
12036 offsets. */
12037
12038 const_tree
12039 strip_invariant_refs (const_tree op)
12040 {
12041 while (handled_component_p (op))
12042 {
12043 switch (TREE_CODE (op))
12044 {
12045 case ARRAY_REF:
12046 case ARRAY_RANGE_REF:
12047 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12048 || TREE_OPERAND (op, 2) != NULL_TREE
12049 || TREE_OPERAND (op, 3) != NULL_TREE)
12050 return NULL;
12051 break;
12052
12053 case COMPONENT_REF:
12054 if (TREE_OPERAND (op, 2) != NULL_TREE)
12055 return NULL;
12056 break;
12057
12058 default:;
12059 }
12060 op = TREE_OPERAND (op, 0);
12061 }
12062
12063 return op;
12064 }
12065
12066 static GTY(()) tree gcc_eh_personality_decl;
12067
12068 /* Return the GCC personality function decl. */
12069
12070 tree
12071 lhd_gcc_personality (void)
12072 {
12073 if (!gcc_eh_personality_decl)
12074 gcc_eh_personality_decl = build_personality_function ("gcc");
12075 return gcc_eh_personality_decl;
12076 }
12077
12078 /* TARGET is a call target of a GIMPLE call statement
12079 (obtained by gimple_call_fn). Return true if it is an
12080 OBJ_TYPE_REF representing a virtual call of a C++ method.
12081 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12082 through a cast, where the middle-end devirtualization machinery
12083 can't apply.) */
12084
12085 bool
12086 virtual_method_call_p (const_tree target)
12087 {
12088 if (TREE_CODE (target) != OBJ_TYPE_REF)
12089 return false;
12090 tree t = TREE_TYPE (target);
12091 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12092 t = TREE_TYPE (t);
12093 if (TREE_CODE (t) == FUNCTION_TYPE)
12094 return false;
12095 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12096 /* If we do not have BINFO associated, it means that type was built
12097 without devirtualization enabled. Do not consider this a virtual
12098 call. */
12099 if (!TYPE_BINFO (obj_type_ref_class (target)))
12100 return false;
12101 return true;
12102 }
12103
12104 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12105
12106 tree
12107 obj_type_ref_class (const_tree ref)
12108 {
12109 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12110 ref = TREE_TYPE (ref);
12111 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12112 ref = TREE_TYPE (ref);
12113 /* We look for the type THIS points to. ObjC also builds
12114 OBJ_TYPE_REF with non-method calls; their first parameter
12115 ID however also corresponds to the class type. */
12116 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12117 || TREE_CODE (ref) == FUNCTION_TYPE);
12118 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12119 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12120 return TREE_TYPE (ref);
12121 }
12122
12123 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12124
12125 static tree
12126 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12127 {
12128 unsigned int i;
12129 tree base_binfo, b;
12130
12131 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12132 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12133 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12134 return base_binfo;
12135 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12136 return b;
12137 return NULL;
12138 }
12139
12140 /* Try to find a base info of BINFO that would have its field decl at offset
12141 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12142 found, return it; otherwise return NULL_TREE. */
12143
12144 tree
12145 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12146 {
12147 tree type = BINFO_TYPE (binfo);
12148
12149 while (true)
12150 {
12151 HOST_WIDE_INT pos, size;
12152 tree fld;
12153 int i;
12154
12155 if (types_same_for_odr (type, expected_type))
12156 return binfo;
12157 if (offset < 0)
12158 return NULL_TREE;
12159
12160 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12161 {
12162 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12163 continue;
12164
12165 pos = int_bit_position (fld);
12166 size = tree_to_uhwi (DECL_SIZE (fld));
12167 if (pos <= offset && (pos + size) > offset)
12168 break;
12169 }
12170 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12171 return NULL_TREE;
12172
12173 /* Offset 0 indicates the primary base, whose vtable contents are
12174 represented in the binfo for the derived class. */
12175 else if (offset != 0)
12176 {
12177 tree found_binfo = NULL, base_binfo;
12178 /* Offsets in BINFO are in bytes relative to the whole structure
12179 while POS is in bits relative to the containing field. */
12180 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12181 / BITS_PER_UNIT);
12182
12183 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12184 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12185 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12186 {
12187 found_binfo = base_binfo;
12188 break;
12189 }
12190 if (found_binfo)
12191 binfo = found_binfo;
12192 else
12193 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12194 binfo_offset);
12195 }
12196
12197 type = TREE_TYPE (fld);
12198 offset -= pos;
12199 }
12200 }
12201
12202 /* Returns true if X is a typedef decl. */
12203
12204 bool
12205 is_typedef_decl (const_tree x)
12206 {
12207 return (x && TREE_CODE (x) == TYPE_DECL
12208 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12209 }
12210
12211 /* Returns true iff TYPE is a type variant created for a typedef. */
12212
12213 bool
12214 typedef_variant_p (const_tree type)
12215 {
12216 return is_typedef_decl (TYPE_NAME (type));
12217 }
12218
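/* Illustrative sketch, not part of the original source: given the C
   declaration "typedef unsigned int uint;", the TYPE_DECL for "uint" has
   DECL_ORIGINAL_TYPE set, so is_typedef_decl is true for it and
   typedef_variant_p is true for the variant of unsigned int created for
   the typedef, while typedef_variant_p (unsigned_type_node) is false.  */
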
12219 /* Warn about a use of an identifier which was marked deprecated. */
12220 void
12221 warn_deprecated_use (tree node, tree attr)
12222 {
12223 const char *msg;
12224
12225 if (node == 0 || !warn_deprecated_decl)
12226 return;
12227
12228 if (!attr)
12229 {
12230 if (DECL_P (node))
12231 attr = DECL_ATTRIBUTES (node);
12232 else if (TYPE_P (node))
12233 {
12234 tree decl = TYPE_STUB_DECL (node);
12235 if (decl)
12236 attr = lookup_attribute ("deprecated",
12237 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12238 }
12239 }
12240
12241 if (attr)
12242 attr = lookup_attribute ("deprecated", attr);
12243
12244 if (attr)
12245 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12246 else
12247 msg = NULL;
12248
12249 bool w;
12250 if (DECL_P (node))
12251 {
12252 if (msg)
12253 w = warning (OPT_Wdeprecated_declarations,
12254 "%qD is deprecated: %s", node, msg);
12255 else
12256 w = warning (OPT_Wdeprecated_declarations,
12257 "%qD is deprecated", node);
12258 if (w)
12259 inform (DECL_SOURCE_LOCATION (node), "declared here");
12260 }
12261 else if (TYPE_P (node))
12262 {
12263 tree what = NULL_TREE;
12264 tree decl = TYPE_STUB_DECL (node);
12265
12266 if (TYPE_NAME (node))
12267 {
12268 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12269 what = TYPE_NAME (node);
12270 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12271 && DECL_NAME (TYPE_NAME (node)))
12272 what = DECL_NAME (TYPE_NAME (node));
12273 }
12274
12275 if (decl)
12276 {
12277 if (what)
12278 {
12279 if (msg)
12280 w = warning (OPT_Wdeprecated_declarations,
12281 "%qE is deprecated: %s", what, msg);
12282 else
12283 w = warning (OPT_Wdeprecated_declarations,
12284 "%qE is deprecated", what);
12285 }
12286 else
12287 {
12288 if (msg)
12289 w = warning (OPT_Wdeprecated_declarations,
12290 "type is deprecated: %s", msg);
12291 else
12292 w = warning (OPT_Wdeprecated_declarations,
12293 "type is deprecated");
12294 }
12295 if (w)
12296 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12297 }
12298 else
12299 {
12300 if (what)
12301 {
12302 if (msg)
12303 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12304 what, msg);
12305 else
12306 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12307 }
12308 else
12309 {
12310 if (msg)
12311 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12312 msg);
12313 else
12314 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12315 }
12316 }
12317 }
12318 }
12319
12320 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12321 somewhere in it. */
12322
12323 bool
12324 contains_bitfld_component_ref_p (const_tree ref)
12325 {
12326 while (handled_component_p (ref))
12327 {
12328 if (TREE_CODE (ref) == COMPONENT_REF
12329 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12330 return true;
12331 ref = TREE_OPERAND (ref, 0);
12332 }
12333
12334 return false;
12335 }
12336
12337 /* Try to determine whether a TRY_CATCH expression can fall through.
12338 This is a subroutine of block_may_fallthru. */
12339
12340 static bool
12341 try_catch_may_fallthru (const_tree stmt)
12342 {
12343 tree_stmt_iterator i;
12344
12345 /* If the TRY block can fall through, the whole TRY_CATCH can
12346 fall through. */
12347 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12348 return true;
12349
12350 i = tsi_start (TREE_OPERAND (stmt, 1));
12351 switch (TREE_CODE (tsi_stmt (i)))
12352 {
12353 case CATCH_EXPR:
12354 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12355 catch expression and a body. The whole TRY_CATCH may fall
12356 through iff any of the catch bodies falls through. */
12357 for (; !tsi_end_p (i); tsi_next (&i))
12358 {
12359 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12360 return true;
12361 }
12362 return false;
12363
12364 case EH_FILTER_EXPR:
12365 /* The exception filter expression only matters if there is an
12366 exception. If the exception does not match EH_FILTER_TYPES,
12367 we will execute EH_FILTER_FAILURE, and we will fall through
12368 if that falls through. If the exception does match
12369 EH_FILTER_TYPES, the stack unwinder will continue up the
12370 stack, so we will not fall through. We don't know whether we
12371 will throw an exception which matches EH_FILTER_TYPES or not,
12372 so we just ignore EH_FILTER_TYPES and assume that we might
12373 throw an exception which doesn't match. */
12374 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12375
12376 default:
12377 /* This case represents statements to be executed when an
12378 exception occurs. Those statements are implicitly followed
12379 by a RESX statement to resume execution after the exception.
12380 So in this case the TRY_CATCH never falls through. */
12381 return false;
12382 }
12383 }
12384
12385 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12386 need not be 100% accurate; simply be conservative and return true if we
12387 don't know. This is used only to avoid stupidly generating extra code.
12388 If we're wrong, we'll just delete the extra code later. */
12389
12390 bool
12391 block_may_fallthru (const_tree block)
12392 {
12393 /* This CONST_CAST is okay because expr_last returns its argument
12394 unmodified and we assign it to a const_tree. */
12395 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12396
12397 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12398 {
12399 case GOTO_EXPR:
12400 case RETURN_EXPR:
12401 /* Easy cases. If the last statement of the block implies
12402 control transfer, then we can't fall through. */
12403 return false;
12404
12405 case SWITCH_EXPR:
12406 /* If SWITCH_LABELS is set, this is lowered, and represents a
12407 branch to a selected label and hence can not fall through.
12408 Otherwise SWITCH_BODY is set, and the switch can fall
12409 through. */
12410 return SWITCH_LABELS (stmt) == NULL_TREE;
12411
12412 case COND_EXPR:
12413 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12414 return true;
12415 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12416
12417 case BIND_EXPR:
12418 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12419
12420 case TRY_CATCH_EXPR:
12421 return try_catch_may_fallthru (stmt);
12422
12423 case TRY_FINALLY_EXPR:
12424 /* The finally clause is always executed after the try clause,
12425 so if it does not fall through, then the try-finally will not
12426 fall through. Otherwise, if the try clause does not fall
12427 through, then when the finally clause falls through it will
12428 resume execution wherever the try clause was going. So the
12429 whole try-finally will only fall through if both the try
12430 clause and the finally clause fall through. */
12431 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12432 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12433
12434 case MODIFY_EXPR:
12435 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12436 stmt = TREE_OPERAND (stmt, 1);
12437 else
12438 return true;
12439 /* FALLTHRU */
12440
12441 case CALL_EXPR:
12442 /* Functions that do not return do not fall through. */
12443 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12444
12445 case CLEANUP_POINT_EXPR:
12446 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12447
12448 case TARGET_EXPR:
12449 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12450
12451 case ERROR_MARK:
12452 return true;
12453
12454 default:
12455 return lang_hooks.block_may_fallthru (stmt);
12456 }
12457 }
12458
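/* Illustrative sketch, not part of the original source: for these GENERIC
   bodies the predicate above answers as follows.

     { foo (); return x; }       false   RETURN_EXPR transfers control
     { if (c) goto L; bar (); }  true    the final call can complete normally
     { abort (); }               false   abort is declared noreturn, so
                                         ECF_NORETURN is set on the call  */
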
12459 /* True if we are using EH to handle cleanups. */
12460 static bool using_eh_for_cleanups_flag = false;
12461
12462 /* This routine is called from front ends to indicate eh should be used for
12463 cleanups. */
12464 void
12465 using_eh_for_cleanups (void)
12466 {
12467 using_eh_for_cleanups_flag = true;
12468 }
12469
12470 /* Query whether EH is used for cleanups. */
12471 bool
12472 using_eh_for_cleanups_p (void)
12473 {
12474 return using_eh_for_cleanups_flag;
12475 }
12476
12477 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12478 const char *
12479 get_tree_code_name (enum tree_code code)
12480 {
12481 const char *invalid = "<invalid tree code>";
12482
12483 if (code >= MAX_TREE_CODES)
12484 return invalid;
12485
12486 return tree_code_name[code];
12487 }
12488
12489 /* Drops the TREE_OVERFLOW flag from T. */
12490
12491 tree
12492 drop_tree_overflow (tree t)
12493 {
12494 gcc_checking_assert (TREE_OVERFLOW (t));
12495
12496 /* For tree codes with a sharing machinery re-build the result. */
12497 if (TREE_CODE (t) == INTEGER_CST)
12498 return wide_int_to_tree (TREE_TYPE (t), t);
12499
12500 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12501 and drop the flag. */
12502 t = copy_node (t);
12503 TREE_OVERFLOW (t) = 0;
12504 return t;
12505 }
12506
12507 /* Given a memory reference expression T, return its base address.
12508 The base address of a memory reference expression is the main
12509 object being referenced. For instance, the base address for
12510 'array[i].fld[j]' is 'array'. You can think of this as stripping
12511 away the offset part from a memory address.
12512
12513 This function calls handled_component_p to strip away all the inner
12514 parts of the memory reference until it reaches the base object. */
12515
12516 tree
12517 get_base_address (tree t)
12518 {
12519 while (handled_component_p (t))
12520 t = TREE_OPERAND (t, 0);
12521
12522 if ((TREE_CODE (t) == MEM_REF
12523 || TREE_CODE (t) == TARGET_MEM_REF)
12524 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12525 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12526
12527 /* ??? Either the alias oracle or all callers need to properly deal
12528 with WITH_SIZE_EXPRs before we can look through those. */
12529 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12530 return NULL_TREE;
12531
12532 return t;
12533 }
12534
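/* Illustrative sketch, not part of the original source: for the reference
   array[i].fld[j] the loop above strips the ARRAY_REFs and the
   COMPONENT_REF and returns the VAR_DECL for "array"; for a MEM_REF whose
   address operand is &s it looks through the ADDR_EXPR and returns the
   VAR_DECL for "s".

     tree base = get_base_address (ref);
     if (base && DECL_P (base))
       ...   the reference is based on a declaration  */
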
12535 /* Return a tree of sizetype representing the size, in bytes, of the element
12536 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12537
12538 tree
12539 array_ref_element_size (tree exp)
12540 {
12541 tree aligned_size = TREE_OPERAND (exp, 3);
12542 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12543 location_t loc = EXPR_LOCATION (exp);
12544
12545 /* If a size was specified in the ARRAY_REF, it's the size measured
12546 in alignment units of the element type. So multiply by that value. */
12547 if (aligned_size)
12548 {
12549 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12550 sizetype from another type of the same width and signedness. */
12551 if (TREE_TYPE (aligned_size) != sizetype)
12552 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12553 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12554 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12555 }
12556
12557 /* Otherwise, take the size from that of the element type. Substitute
12558 any PLACEHOLDER_EXPR that we have. */
12559 else
12560 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12561 }
12562
12563 /* Return a tree representing the lower bound of the array mentioned in
12564 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12565
12566 tree
12567 array_ref_low_bound (tree exp)
12568 {
12569 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12570
12571 /* If a lower bound is specified in EXP, use it. */
12572 if (TREE_OPERAND (exp, 2))
12573 return TREE_OPERAND (exp, 2);
12574
12575 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12576 substituting for a PLACEHOLDER_EXPR as needed. */
12577 if (domain_type && TYPE_MIN_VALUE (domain_type))
12578 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12579
12580 /* Otherwise, return a zero of the appropriate type. */
12581 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12582 }
12583
12584 /* Return a tree representing the upper bound of the array mentioned in
12585 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12586
12587 tree
12588 array_ref_up_bound (tree exp)
12589 {
12590 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12591
12592 /* If there is a domain type and it has an upper bound, use it, substituting
12593 for a PLACEHOLDER_EXPR as needed. */
12594 if (domain_type && TYPE_MAX_VALUE (domain_type))
12595 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12596
12597 /* Otherwise fail. */
12598 return NULL_TREE;
12599 }
12600
12601 /* Returns true if REF is an array reference to an array at the end of
12602 a structure. If this is the case, the array may be allocated larger
12603 than its upper bound implies. */
12604
12605 bool
12606 array_at_struct_end_p (tree ref)
12607 {
12608 if (TREE_CODE (ref) != ARRAY_REF
12609 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12610 return false;
12611
12612 while (handled_component_p (ref))
12613 {
12614 /* If the reference chain contains a component reference to a
12615 non-union type and another field follows it, the reference
12616 is not at the end of a structure. */
12617 if (TREE_CODE (ref) == COMPONENT_REF
12618 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12619 {
12620 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12621 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12622 nextf = DECL_CHAIN (nextf);
12623 if (nextf)
12624 return false;
12625 }
12626
12627 ref = TREE_OPERAND (ref, 0);
12628 }
12629
12630 /* If the reference is based on a declared entity, the size of the array
12631 is constrained by its given domain. */
12632 if (DECL_P (ref))
12633 return false;
12634
12635 return true;
12636 }
12637
12638 /* Return a tree representing the offset, in bytes, of the field referenced
12639 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12640
12641 tree
12642 component_ref_field_offset (tree exp)
12643 {
12644 tree aligned_offset = TREE_OPERAND (exp, 2);
12645 tree field = TREE_OPERAND (exp, 1);
12646 location_t loc = EXPR_LOCATION (exp);
12647
12648 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12649 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12650 value. */
12651 if (aligned_offset)
12652 {
12653 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12654 sizetype from another type of the same width and signedness. */
12655 if (TREE_TYPE (aligned_offset) != sizetype)
12656 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12657 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12658 size_int (DECL_OFFSET_ALIGN (field)
12659 / BITS_PER_UNIT));
12660 }
12661
12662 /* Otherwise, take the offset from that of the field. Substitute
12663 any PLACEHOLDER_EXPR that we have. */
12664 else
12665 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12666 }
12667
12668 /* Return the machine mode of T. For vectors, returns the mode of the
12669 inner type. The main use case is to feed the result to HONOR_NANS,
12670 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12671
12672 machine_mode
12673 element_mode (const_tree t)
12674 {
12675 if (!TYPE_P (t))
12676 t = TREE_TYPE (t);
12677 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12678 t = TREE_TYPE (t);
12679 return TYPE_MODE (t);
12680 }
12681
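/* Illustrative sketch, not part of the original source: the typical use is

     if (HONOR_NANS (element_mode (type)))
       ...

   which behaves uniformly for scalar, complex and vector types, where
   TYPE_MODE of the whole type could be BLKmode or a vector mode.  */
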
12682
12683 /* Verify that basic properties of T match TV and thus T can be a variant of
12684 TV. TV should be the more specific variant (i.e. the main variant). */
12685
12686 static bool
12687 verify_type_variant (const_tree t, tree tv)
12688 {
12689 /* Type variant can differ by:
12690
12691 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12692 ENCODE_QUAL_ADDR_SPACE.
12693 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
12694 in this case some values may not be set in the variant types
12695 (see TYPE_COMPLETE_P checks).
12696 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12697 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12698 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12699 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12700 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12701 this is necessary to make it possible to merge types from different TUs
12702 - arrays, pointers and references may have TREE_TYPE that is a variant
12703 of TREE_TYPE of their main variants.
12704 - aggregates may have new TYPE_FIELDS list that list variants of
12705 the main variant TYPE_FIELDS.
12706 - vector types may differ by TYPE_VECTOR_OPAQUE
12707 - TYPE_METHODS is always NULL for variant types and maintained for
12708 the main variant only.
12709 */
12710
12711 /* Convenience macro for matching individual fields. */
12712 #define verify_variant_match(flag) \
12713 do { \
12714 if (flag (tv) != flag (t)) \
12715 { \
12716 error ("type variant differs by " #flag "."); \
12717 debug_tree (tv); \
12718 return false; \
12719 } \
12720 } while (false)
12721
12722 /* tree_base checks. */
12723
12724 verify_variant_match (TREE_CODE);
12725 /* FIXME: Ada builds non-artificial variants of artificial types. */
12726 if (TYPE_ARTIFICIAL (tv) && 0)
12727 verify_variant_match (TYPE_ARTIFICIAL);
12728 if (POINTER_TYPE_P (tv))
12729 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12730 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12731 verify_variant_match (TYPE_UNSIGNED);
12732 verify_variant_match (TYPE_ALIGN_OK);
12733 verify_variant_match (TYPE_PACKED);
12734 if (TREE_CODE (t) == REFERENCE_TYPE)
12735 verify_variant_match (TYPE_REF_IS_RVALUE);
12736 verify_variant_match (TYPE_SATURATING);
12737 /* FIXME: This check triggers during the libstdc++ build. */
12738 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12739 verify_variant_match (TYPE_FINAL_P);
12740
12741 /* tree_type_common checks. */
12742
12743 if (COMPLETE_TYPE_P (t))
12744 {
12745 verify_variant_match (TYPE_SIZE);
12746 verify_variant_match (TYPE_MODE);
12747 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12748 /* FIXME: ideally we should compare pointer equality, but the Java FE
12749 produces variants where the size is an INTEGER_CST of a different type
12750 (int wrt size_type) during the libjava build. */
12751 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12752 {
12753 error ("type variant has different TYPE_SIZE_UNIT");
12754 debug_tree (tv);
12755 error ("type variant's TYPE_SIZE_UNIT");
12756 debug_tree (TYPE_SIZE_UNIT (tv));
12757 error ("type's TYPE_SIZE_UNIT");
12758 debug_tree (TYPE_SIZE_UNIT (t));
12759 return false;
12760 }
12761 }
12762 verify_variant_match (TYPE_PRECISION);
12763 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12764 if (RECORD_OR_UNION_TYPE_P (t))
12765 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12766 else if (TREE_CODE (t) == ARRAY_TYPE)
12767 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12768 /* During LTO we merge variant lists from different translation units
12769 that may differ by TYPE_CONTEXT, which in turn may point
12770 to TRANSLATION_UNIT_DECL.
12771 Ada also builds variants of types with different TYPE_CONTEXT. */
12772 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12773 verify_variant_match (TYPE_CONTEXT);
12774 verify_variant_match (TYPE_STRING_FLAG);
12775 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12776 verify_variant_match (TYPE_ALIAS_SET);
12777
12778 /* tree_type_non_common checks. */
12779
12780 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12781 and dangles the pointer from time to time. */
12782 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12783 && (in_lto_p || !TYPE_VFIELD (tv)
12784 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12785 {
12786 error ("type variant has different TYPE_VFIELD");
12787 debug_tree (tv);
12788 return false;
12789 }
12790 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12791 || TREE_CODE (t) == INTEGER_TYPE
12792 || TREE_CODE (t) == BOOLEAN_TYPE
12793 || TREE_CODE (t) == REAL_TYPE
12794 || TREE_CODE (t) == FIXED_POINT_TYPE)
12795 {
12796 verify_variant_match (TYPE_MAX_VALUE);
12797 verify_variant_match (TYPE_MIN_VALUE);
12798 }
12799 if (TREE_CODE (t) == METHOD_TYPE)
12800 verify_variant_match (TYPE_METHOD_BASETYPE);
12801 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12802 {
12803 error ("type variant has TYPE_METHODS");
12804 debug_tree (tv);
12805 return false;
12806 }
12807 if (TREE_CODE (t) == OFFSET_TYPE)
12808 verify_variant_match (TYPE_OFFSET_BASETYPE);
12809 if (TREE_CODE (t) == ARRAY_TYPE)
12810 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12811 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12812 or even in the type's main variant. This is needed to make bootstrap pass
12813 and the bug seems new in GCC 5.
12814 The C++ FE should be updated to make this consistent and we should check
12815 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12816 is a match with the main variant.
12817
12818 Also disable the check for Java for now because of a parser hack that builds
12819 first a dummy BINFO and then sometimes replaces it with a real BINFO in some
12820 of the copies. */
12821 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12822 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12823 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12824 Since there is no cheap way to tell C++ from Java types w/o LTO, do the
12825 checking at LTO time only. */
12826 && (in_lto_p && odr_type_p (t)))
12827 {
12828 error ("type variant has different TYPE_BINFO");
12829 debug_tree (tv);
12830 error ("type variant's TYPE_BINFO");
12831 debug_tree (TYPE_BINFO (tv));
12832 error ("type's TYPE_BINFO");
12833 debug_tree (TYPE_BINFO (t));
12834 return false;
12835 }
12836
12837 /* Check various uses of TYPE_VALUES_RAW. */
12838 if (TREE_CODE (t) == ENUMERAL_TYPE)
12839 verify_variant_match (TYPE_VALUES);
12840 else if (TREE_CODE (t) == ARRAY_TYPE)
12841 verify_variant_match (TYPE_DOMAIN);
12842 /* Permit incomplete variants of a complete type. While FEs may complete
12843 all variants, this does not happen for C++ templates in all cases. */
12844 else if (RECORD_OR_UNION_TYPE_P (t)
12845 && COMPLETE_TYPE_P (t)
12846 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12847 {
12848 tree f1, f2;
12849
12850 /* Fortran builds qualified variants as new records with items of
12851 qualified type. Verify that they look the same. */
12852 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12853 f1 && f2;
12854 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12855 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12856 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12857 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12858 /* FIXME: gfc_nonrestricted_type builds all types as variants
12859 with exception of pointer types. It deeply copies the type
12860 which means that we may end up with a variant type
12861 referring to a non-variant pointer. We may change it to
12862 produce types as variants, too, like
12863 objc_get_protocol_qualified_type does. */
12864 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12865 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12866 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12867 break;
12868 if (f1 || f2)
12869 {
12870 error ("type variant has different TYPE_FIELDS");
12871 debug_tree (tv);
12872 error ("first mismatch is field");
12873 debug_tree (f1);
12874 error ("and field");
12875 debug_tree (f2);
12876 return false;
12877 }
12878 }
12879 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12880 verify_variant_match (TYPE_ARG_TYPES);
12881 /* For C++ the qualified variant of array type is really an array type
12882 of qualified TREE_TYPE.
12883 objc builds variants of pointer where pointer to type is a variant, too
12884 in objc_get_protocol_qualified_type. */
12885 if (TREE_TYPE (t) != TREE_TYPE (tv)
12886 && ((TREE_CODE (t) != ARRAY_TYPE
12887 && !POINTER_TYPE_P (t))
12888 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12889 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12890 {
12891 error ("type variant has different TREE_TYPE");
12892 debug_tree (tv);
12893 error ("type variant's TREE_TYPE");
12894 debug_tree (TREE_TYPE (tv));
12895 error ("type's TREE_TYPE");
12896 debug_tree (TREE_TYPE (t));
12897 return false;
12898 }
12899 if (type_with_alias_set_p (t)
12900 && !gimple_canonical_types_compatible_p (t, tv, false))
12901 {
12902 error ("type is not compatible with its vairant");
12903 debug_tree (tv);
12904 error ("type variant's TREE_TYPE");
12905 debug_tree (TREE_TYPE (tv));
12906 error ("type's TREE_TYPE");
12907 debug_tree (TREE_TYPE (t));
12908 return false;
12909 }
12910 return true;
12911 #undef verify_variant_match
12912 }
12913
12914
12915 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12916 the middle-end types_compatible_p function. It needs to avoid
12917 claiming types are different for types that should be treated
12918 the same with respect to TBAA. Canonical types are also used
12919 for IL consistency checks via the useless_type_conversion_p
12920 predicate which does not handle all type kinds itself but falls
12921 back to pointer-comparison of TYPE_CANONICAL for aggregates
12922 for example. */
12923
12924 /* Return true iff T1 and T2 are structurally identical for what
12925 TBAA is concerned.
12926 This function is used both by lto.c canonical type merging and by the
12927 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
12928 that have TYPE_CANONICAL defined and assume them equivalent. */
12929
12930 bool
12931 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12932 bool trust_type_canonical)
12933 {
12934 /* Type variants should be the same as the main variant. When not doing sanity
12935 checking to verify this fact, go to main variants and save some work. */
12936 if (trust_type_canonical)
12937 {
12938 t1 = TYPE_MAIN_VARIANT (t1);
12939 t2 = TYPE_MAIN_VARIANT (t2);
12940 }
12941
12942 /* Check first for the obvious case of pointer identity. */
12943 if (t1 == t2)
12944 return true;
12945
12946 /* Check that we have two types to compare. */
12947 if (t1 == NULL_TREE || t2 == NULL_TREE)
12948 return false;
12949
12950 /* We consider complete types always compatible with incomplete types.
12951 This does not make sense for canonical type calculation and thus we
12952 need to ensure that we are never called on incomplete types here.
12953
12954 FIXME: For more correctness the function probably should have three modes:
12955 1) a mode assuming that types are complete, matching their structure
12956 2) a mode allowing incomplete types but producing equivalence classes
12957 and thus ignoring all info from complete types
12958 3) a mode allowing incomplete types to match complete ones but checking
12959 compatibility between complete types.
12960
12961 Modes 1 and 2 can be used for canonical type calculation. 3 is the real
12962 definition of type compatibility that can be used e.g. for warnings during
12963 declaration merging. */
12964
12965 gcc_assert (!trust_type_canonical
12966 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
12967 /* If the types have been previously registered and found equal
12968 they still are. */
12969 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
12970 && trust_type_canonical)
12971 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
12972
12973 /* Can't be the same type if the types don't have the same code. */
12974 if (tree_code_for_canonical_type_merging (TREE_CODE (t1))
12975 != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
12976 return false;
12977
12978 /* Qualifiers do not matter for canonical type comparison purposes. */
12979
12980 /* Void types and nullptr types are always the same. */
12981 if (TREE_CODE (t1) == VOID_TYPE
12982 || TREE_CODE (t1) == NULLPTR_TYPE)
12983 return true;
12984
12985 /* Can't be the same type if they have different modes. */
12986 if (TYPE_MODE (t1) != TYPE_MODE (t2))
12987 return false;
12988
12989 /* Non-aggregate types can be handled cheaply. */
12990 if (INTEGRAL_TYPE_P (t1)
12991 || SCALAR_FLOAT_TYPE_P (t1)
12992 || FIXED_POINT_TYPE_P (t1)
12993 || TREE_CODE (t1) == VECTOR_TYPE
12994 || TREE_CODE (t1) == COMPLEX_TYPE
12995 || TREE_CODE (t1) == OFFSET_TYPE
12996 || POINTER_TYPE_P (t1))
12997 {
12998 /* Can't be the same type if they have different sign or precision. */
12999 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
13000 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
13001 return false;
13002
13003 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13004 interoperable with "signed char". Unless all frontends are revisited
13005 to agree on these types, we must ignore the flag completely. */
13006
13007 /* The Fortran standard defines a C_PTR type that is compatible with every
13008 C pointer. For this reason we need to glob all pointers into one class.
13009 Still, pointers in different address spaces are not compatible. */
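/* As a consequence, two pointer types with the same mode compare equal here
   no matter what they point to; e.g. "int *" and "double *" end up in one
   canonical class, and only a difference in the address space of the
   pointed-to type keeps pointers apart.  */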
13010 if (POINTER_TYPE_P (t1))
13011 {
13012 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13013 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13014 return false;
13015 }
13016
13017 /* Tail-recurse to components. */
13018 if (TREE_CODE (t1) == VECTOR_TYPE
13019 || TREE_CODE (t1) == COMPLEX_TYPE)
13020 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13021 TREE_TYPE (t2),
13022 trust_type_canonical);
13023
13024 return true;
13025 }
13026
13027 /* Do type-specific comparisons. */
13028 switch (TREE_CODE (t1))
13029 {
13030 case ARRAY_TYPE:
13031 /* Array types are the same if the element types are the same and
13032 the number of elements is the same. */
13033 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13034 trust_type_canonical)
13035 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13036 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13037 return false;
13038 else
13039 {
13040 tree i1 = TYPE_DOMAIN (t1);
13041 tree i2 = TYPE_DOMAIN (t2);
13042
13043 /* For an incomplete external array, the type domain can be
13044 NULL_TREE. Check this condition also. */
13045 if (i1 == NULL_TREE && i2 == NULL_TREE)
13046 return true;
13047 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13048 return false;
13049 else
13050 {
13051 tree min1 = TYPE_MIN_VALUE (i1);
13052 tree min2 = TYPE_MIN_VALUE (i2);
13053 tree max1 = TYPE_MAX_VALUE (i1);
13054 tree max2 = TYPE_MAX_VALUE (i2);
13055
13056 /* The minimum/maximum values have to be the same. */
13057 if ((min1 == min2
13058 || (min1 && min2
13059 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13060 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13061 || operand_equal_p (min1, min2, 0))))
13062 && (max1 == max2
13063 || (max1 && max2
13064 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13065 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13066 || operand_equal_p (max1, max2, 0)))))
13067 return true;
13068 else
13069 return false;
13070 }
13071 }
13072
13073 case METHOD_TYPE:
13074 case FUNCTION_TYPE:
13075 /* Function types are the same if the return type and argument types
13076 are the same. */
13077 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13078 trust_type_canonical))
13079 return false;
13080
13081 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13082 return true;
13083 else
13084 {
13085 tree parms1, parms2;
13086
13087 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13088 parms1 && parms2;
13089 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13090 {
13091 if (!gimple_canonical_types_compatible_p
13092 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13093 trust_type_canonical))
13094 return false;
13095 }
13096
13097 if (parms1 || parms2)
13098 return false;
13099
13100 return true;
13101 }
13102
13103 case RECORD_TYPE:
13104 case UNION_TYPE:
13105 case QUAL_UNION_TYPE:
13106 {
13107 tree f1, f2;
13108
13109 /* For aggregate types, all the fields must be the same. */
13110 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13111 f1 || f2;
13112 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13113 {
13114 /* Skip non-fields. */
13115 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13116 f1 = TREE_CHAIN (f1);
13117 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13118 f2 = TREE_CHAIN (f2);
13119 if (!f1 || !f2)
13120 break;
13121 /* The fields must have the same addressability, offset and type. */
13122 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13123 || !gimple_compare_field_offset (f1, f2)
13124 || !gimple_canonical_types_compatible_p
13125 (TREE_TYPE (f1), TREE_TYPE (f2),
13126 trust_type_canonical))
13127 return false;
13128 }
13129
13130 /* If one aggregate has more fields than the other, they
13131 are not the same. */
13132 if (f1 || f2)
13133 return false;
13134
13135 return true;
13136 }
13137
13138 default:
13139 /* Consider all types with language-specific trees in them mutually
13140 compatible. This is executed only from verify_type, and false
13141 positives can be tolerated. */
13142 gcc_assert (!in_lto_p);
13143 return true;
13144 }
13145 }
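/* A minimal usage sketch (assuming a type tree t whose TYPE_CANONICAL has
   already been computed), mirroring the structural re-check that verify_type
   performs below:

     if (TYPE_CANONICAL (t)
         && !gimple_canonical_types_compatible_p (t, TYPE_CANONICAL (t), false))
       error ("TYPE_CANONICAL is not compatible");

   Passing false for TRUST_TYPE_CANONICAL forces a full structural walk
   instead of short-circuiting on the TYPE_CANONICAL pointers themselves.  */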
13146
13147 /* Verify type T. */
13148
13149 void
13150 verify_type (const_tree t)
13151 {
13152 bool error_found = false;
13153 tree mv = TYPE_MAIN_VARIANT (t);
13154 if (!mv)
13155 {
13156 error ("Main variant is not defined");
13157 error_found = true;
13158 }
13159 else if (mv != TYPE_MAIN_VARIANT (mv))
13160 {
13161 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13162 debug_tree (mv);
13163 error_found = true;
13164 }
13165 else if (t != mv && !verify_type_variant (t, mv))
13166 error_found = true;
13167
13168 tree ct = TYPE_CANONICAL (t);
13169 if (!ct)
13170 ;
13171 else if (TYPE_CANONICAL (t) != ct)
13172 {
13173 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13174 debug_tree (ct);
13175 error_found = true;
13176 }
13177 /* Method and function types cannot be used to address memory and thus
13178 TYPE_CANONICAL really matters only for determining useless conversions.
13179
13180 FIXME: The C++ FE produces declarations of builtin functions that are not
13181 compatible with their main variants. */
13182 else if (TREE_CODE (t) == FUNCTION_TYPE)
13183 ;
13184 else if (t != ct
13185 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13186 with variably sized arrays because their sizes are possibly
13187 gimplified to different variables. */
13188 && !variably_modified_type_p (ct, NULL)
13189 && !gimple_canonical_types_compatible_p (t, ct, false))
13190 {
13191 error ("TYPE_CANONICAL is not compatible");
13192 debug_tree (ct);
13193 error_found = true;
13194 }
13195
13196
13197 /* Check various uses of TYPE_MINVAL. */
13198 if (RECORD_OR_UNION_TYPE_P (t))
13199 {
13200 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13201 and dangles the pointer from time to time. */
13202 if (TYPE_VFIELD (t)
13203 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13204 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13205 {
13206 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13207 debug_tree (TYPE_VFIELD (t));
13208 error_found = true;
13209 }
13210 }
13211 else if (TREE_CODE (t) == POINTER_TYPE)
13212 {
13213 if (TYPE_NEXT_PTR_TO (t)
13214 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13215 {
13216 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13217 debug_tree (TYPE_NEXT_PTR_TO (t));
13218 error_found = true;
13219 }
13220 }
13221 else if (TREE_CODE (t) == REFERENCE_TYPE)
13222 {
13223 if (TYPE_NEXT_REF_TO (t)
13224 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13225 {
13226 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13227 debug_tree (TYPE_NEXT_REF_TO (t));
13228 error_found = true;
13229 }
13230 }
13231 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13232 || TREE_CODE (t) == FIXED_POINT_TYPE)
13233 {
13234 /* FIXME: The following check should pass:
13235 useless_type_conversion_p (const_cast <tree> (t),
13236 TREE_TYPE (TYPE_MIN_VALUE (t)))
13237 but does not for C sizetypes in LTO. */
13238 }
13239 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13240 else if (TYPE_MINVAL (t)
13241 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13242 || in_lto_p))
13243 {
13244 error ("TYPE_MINVAL non-NULL");
13245 debug_tree (TYPE_MINVAL (t));
13246 error_found = true;
13247 }
13248
13249 /* Check various uses of TYPE_MAXVAL. */
13250 if (RECORD_OR_UNION_TYPE_P (t))
13251 {
13252 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13253 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13254 && TYPE_METHODS (t) != error_mark_node)
13255 {
13256 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13257 debug_tree (TYPE_METHODS (t));
13258 error_found = true;
13259 }
13260 }
13261 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13262 {
13263 if (TYPE_METHOD_BASETYPE (t)
13264 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13265 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13266 {
13267 error ("TYPE_METHOD_BASETYPE is not record nor union");
13268 debug_tree (TYPE_METHOD_BASETYPE (t));
13269 error_found = true;
13270 }
13271 }
13272 else if (TREE_CODE (t) == OFFSET_TYPE)
13273 {
13274 if (TYPE_OFFSET_BASETYPE (t)
13275 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13276 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13277 {
13278 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13279 debug_tree (TYPE_OFFSET_BASETYPE (t));
13280 error_found = true;
13281 }
13282 }
13283 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13284 || TREE_CODE (t) == FIXED_POINT_TYPE)
13285 {
13286 /* FIXME: The following check should pass:
13287 useless_type_conversion_p (const_cast <tree> (t),
13288 TREE_TYPE (TYPE_MAX_VALUE (t)))
13289 but does not for C sizetypes in LTO. */
13290 }
13291 else if (TREE_CODE (t) == ARRAY_TYPE)
13292 {
13293 if (TYPE_ARRAY_MAX_SIZE (t)
13294 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13295 {
13296 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13297 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13298 error_found = true;
13299 }
13300 }
13301 else if (TYPE_MAXVAL (t))
13302 {
13303 error ("TYPE_MAXVAL non-NULL");
13304 debug_tree (TYPE_MAXVAL (t));
13305 error_found = true;
13306 }
13307
13308 /* Check various uses of TYPE_BINFO. */
13309 if (RECORD_OR_UNION_TYPE_P (t))
13310 {
13311 if (!TYPE_BINFO (t))
13312 ;
13313 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13314 {
13315 error ("TYPE_BINFO is not TREE_BINFO");
13316 debug_tree (TYPE_BINFO (t));
13317 error_found = true;
13318 }
13319 /* FIXME: Java builds invalid empty binfos that do not have
13320 TREE_TYPE set. */
13321 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13322 {
13323 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13324 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13325 error_found = true;
13326 }
13327 }
13328 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13329 {
13330 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13331 debug_tree (TYPE_LANG_SLOT_1 (t));
13332 error_found = true;
13333 }
13334
13335 /* Check various uses of TYPE_VALUES_RAW. */
13336 if (TREE_CODE (t) == ENUMERAL_TYPE)
13337 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13338 {
13339 tree value = TREE_VALUE (l);
13340 tree name = TREE_PURPOSE (l);
13341
13342 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13343 a CONST_DECL of ENUMERAL_TYPE; see the example after this loop. */
13344 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13345 {
13346 error ("Enum value is not CONST_DECL or INTEGER_CST");
13347 debug_tree (value);
13348 debug_tree (name);
13349 error_found = true;
13350 }
13351 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13352 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13353 {
13354 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13355 debug_tree (value);
13356 debug_tree (name);
13357 error_found = true;
13358 }
13359 if (TREE_CODE (name) != IDENTIFIER_NODE)
13360 {
13361 error ("Enum value name is not IDENTIFIER_NODE");
13362 debug_tree (value);
13363 debug_tree (name);
13364 error_found = true;
13365 }
13366 }
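/* Example of the two accepted shapes: for "enum e { A = 1 };" the C front
   end records A with an INTEGER_CST value of integer type, while the C++
   front end records a CONST_DECL whose type is the enumeral type itself;
   both forms pass the checks above.  */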
13367 else if (TREE_CODE (t) == ARRAY_TYPE)
13368 {
13369 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13370 {
13371 error ("Array TYPE_DOMAIN is not integer type");
13372 debug_tree (TYPE_DOMAIN (t));
13373 error_found = true;
13374 }
13375 }
13376 else if (RECORD_OR_UNION_TYPE_P (t))
13377 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13378 {
13379 /* TODO: verify properties of decls. */
13380 if (TREE_CODE (fld) == FIELD_DECL)
13381 ;
13382 else if (TREE_CODE (fld) == TYPE_DECL)
13383 ;
13384 else if (TREE_CODE (fld) == CONST_DECL)
13385 ;
13386 else if (TREE_CODE (fld) == VAR_DECL)
13387 ;
13388 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13389 ;
13390 else if (TREE_CODE (fld) == USING_DECL)
13391 ;
13392 else
13393 {
13394 error ("Wrong tree in TYPE_FIELDS list");
13395 debug_tree (fld);
13396 error_found = true;
13397 }
13398 }
13399 else if (TREE_CODE (t) == INTEGER_TYPE
13400 || TREE_CODE (t) == BOOLEAN_TYPE
13401 || TREE_CODE (t) == OFFSET_TYPE
13402 || TREE_CODE (t) == REFERENCE_TYPE
13403 || TREE_CODE (t) == NULLPTR_TYPE
13404 || TREE_CODE (t) == POINTER_TYPE)
13405 {
13406 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13407 {
13408 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13409 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13410 error_found = true;
13411 }
13412 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13413 {
13414 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13415 debug_tree (TYPE_CACHED_VALUES (t));
13416 error_found = true;
13417 }
13418 /* Verify just enough of the cache to ensure that no one copied it to a
13419 new type. All copying should go through copy_node, which should clear it. */
13420 else if (TYPE_CACHED_VALUES_P (t))
13421 {
13422 int i;
13423 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13424 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13425 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13426 {
13427 error ("wrong TYPE_CACHED_VALUES entry");
13428 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13429 error_found = true;
13430 break;
13431 }
13432 }
13433 }
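/* The cached-values vector stores the small INTEGER_CST nodes that are
   handed out for this type by the integer-constant builders earlier in this
   file, which is why every non-NULL entry checked above must have TREE_TYPE
   equal to T; a vector copied onto a new type would hand out constants of
   the wrong type.  */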
13434 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13435 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13436 {
13437 /* The C++ FE uses TREE_PURPOSE to store default argument values. */
13438 if (TREE_PURPOSE (l) && in_lto_p)
13439 {
13440 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13441 debug_tree (l);
13442 error_found = true;
13443 }
13444 if (!TYPE_P (TREE_VALUE (l)))
13445 {
13446 error ("Wrong entry in TYPE_ARG_TYPES list");
13447 debug_tree (l);
13448 error_found = true;
13449 }
13450 }
13451 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13452 {
13453 error ("TYPE_VALUES_RAW field is non-NULL");
13454 debug_tree (TYPE_VALUES_RAW (t));
13455 error_found = true;
13456 }
13457 if (TREE_CODE (t) != INTEGER_TYPE
13458 && TREE_CODE (t) != BOOLEAN_TYPE
13459 && TREE_CODE (t) != OFFSET_TYPE
13460 && TREE_CODE (t) != REFERENCE_TYPE
13461 && TREE_CODE (t) != NULLPTR_TYPE
13462 && TREE_CODE (t) != POINTER_TYPE
13463 && TYPE_CACHED_VALUES_P (t))
13464 {
13465 error ("TYPE_CACHED_VALUES_P is set while it should not");
13466 error_found = true;
13467 }
13468 if (TYPE_STRING_FLAG (t)
13469 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13470 {
13471 error ("TYPE_STRING_FLAG is set on wrong type code");
13472 error_found = true;
13473 }
13474 else if (TYPE_STRING_FLAG (t))
13475 {
13476 const_tree b = t;
13477 if (TREE_CODE (b) == ARRAY_TYPE)
13478 b = TREE_TYPE (t);
13479 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
13480 which is 32 bits wide. */
13481 if (TREE_CODE (b) != INTEGER_TYPE)
13482 {
13483 error ("TYPE_STRING_FLAG is set on type that does not look like "
13484 "char nor array of chars");
13485 error_found = true;
13486 }
13487 }
13488
13489 /* ipa-devirt makes the assumption that TYPE_METHOD_BASETYPE is always a
13490 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13491 of a type. */
13492 if (TREE_CODE (t) == METHOD_TYPE
13493 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13494 {
13495 error ("TYPE_METHOD_BASETYPE is not main variant");
13496 error_found = true;
13497 }
13498
13499 if (error_found)
13500 {
13501 debug_tree (const_cast <tree> (t));
13502 internal_error ("verify_type failed");
13503 }
13504 }
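/* verify_type reports problems through error ()/debug_tree () and aborts
   via internal_error once any inconsistency was found, so it is meant for
   internal consistency checking rather than user diagnostics.  From a
   debugger it can be invoked directly on a suspect node, e.g.

     (gdb) call verify_type (some_type_tree)

   where 'some_type_tree' is a placeholder for whatever type tree is being
   inspected.  */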
13505
13506 #include "gt-tree.h"